Oct 02 10:46:44 localhost kernel: Linux version 5.14.0-620.el9.x86_64 (mockbuild@x86-05.stream.rdu2.redhat.com) (gcc (GCC) 11.5.0 20240719 (Red Hat 11.5.0-11), GNU ld version 2.35.2-67.el9) #1 SMP PREEMPT_DYNAMIC Fri Sep 26 01:13:23 UTC 2025
Oct 02 10:46:44 localhost kernel: The list of certified hardware and cloud instances for Red Hat Enterprise Linux 9 can be viewed at the Red Hat Ecosystem Catalog, https://catalog.redhat.com.
Oct 02 10:46:44 localhost kernel: Command line: BOOT_IMAGE=(hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64 root=UUID=1631a6ad-43b8-436d-ae76-16fa14b94458 ro console=ttyS0,115200n8 no_timer_check net.ifnames=0 crashkernel=1G-2G:192M,2G-64G:256M,64G-:512M
Oct 02 10:46:44 localhost kernel: BIOS-provided physical RAM map:
Oct 02 10:46:44 localhost kernel: BIOS-e820: [mem 0x0000000000000000-0x000000000009fbff] usable
Oct 02 10:46:44 localhost kernel: BIOS-e820: [mem 0x000000000009fc00-0x000000000009ffff] reserved
Oct 02 10:46:44 localhost kernel: BIOS-e820: [mem 0x00000000000f0000-0x00000000000fffff] reserved
Oct 02 10:46:44 localhost kernel: BIOS-e820: [mem 0x0000000000100000-0x00000000bffdafff] usable
Oct 02 10:46:44 localhost kernel: BIOS-e820: [mem 0x00000000bffdb000-0x00000000bfffffff] reserved
Oct 02 10:46:44 localhost kernel: BIOS-e820: [mem 0x00000000feffc000-0x00000000feffffff] reserved
Oct 02 10:46:44 localhost kernel: BIOS-e820: [mem 0x00000000fffc0000-0x00000000ffffffff] reserved
Oct 02 10:46:44 localhost kernel: BIOS-e820: [mem 0x0000000100000000-0x000000023fffffff] usable
Oct 02 10:46:44 localhost kernel: NX (Execute Disable) protection: active
Oct 02 10:46:44 localhost kernel: APIC: Static calls initialized
Oct 02 10:46:44 localhost kernel: SMBIOS 2.8 present.
Oct 02 10:46:44 localhost kernel: DMI: OpenStack Foundation OpenStack Nova, BIOS 1.15.0-1 04/01/2014
Oct 02 10:46:44 localhost kernel: Hypervisor detected: KVM
Oct 02 10:46:44 localhost kernel: kvm-clock: Using msrs 4b564d01 and 4b564d00
Oct 02 10:46:44 localhost kernel: kvm-clock: using sched offset of 6269896997 cycles
Oct 02 10:46:44 localhost kernel: clocksource: kvm-clock: mask: 0xffffffffffffffff max_cycles: 0x1cd42e4dffb, max_idle_ns: 881590591483 ns
Oct 02 10:46:44 localhost kernel: tsc: Detected 2799.998 MHz processor
Oct 02 10:46:44 localhost kernel: e820: update [mem 0x00000000-0x00000fff] usable ==> reserved
Oct 02 10:46:44 localhost kernel: e820: remove [mem 0x000a0000-0x000fffff] usable
Oct 02 10:46:44 localhost kernel: last_pfn = 0x240000 max_arch_pfn = 0x400000000
Oct 02 10:46:44 localhost kernel: MTRR map: 4 entries (3 fixed + 1 variable; max 19), built from 8 variable MTRRs
Oct 02 10:46:44 localhost kernel: x86/PAT: Configuration [0-7]: WB  WC  UC- UC  WB  WP  UC- WT  
Oct 02 10:46:44 localhost kernel: last_pfn = 0xbffdb max_arch_pfn = 0x400000000
Oct 02 10:46:44 localhost kernel: found SMP MP-table at [mem 0x000f5ae0-0x000f5aef]
Oct 02 10:46:44 localhost kernel: Using GB pages for direct mapping
Oct 02 10:46:44 localhost kernel: RAMDISK: [mem 0x2d7c4000-0x32bd9fff]
Oct 02 10:46:44 localhost kernel: ACPI: Early table checksum verification disabled
Oct 02 10:46:44 localhost kernel: ACPI: RSDP 0x00000000000F5AA0 000014 (v00 BOCHS )
Oct 02 10:46:44 localhost kernel: ACPI: RSDT 0x00000000BFFE16BD 000030 (v01 BOCHS  BXPC     00000001 BXPC 00000001)
Oct 02 10:46:44 localhost kernel: ACPI: FACP 0x00000000BFFE1571 000074 (v01 BOCHS  BXPC     00000001 BXPC 00000001)
Oct 02 10:46:44 localhost kernel: ACPI: DSDT 0x00000000BFFDFC80 0018F1 (v01 BOCHS  BXPC     00000001 BXPC 00000001)
Oct 02 10:46:44 localhost kernel: ACPI: FACS 0x00000000BFFDFC40 000040
Oct 02 10:46:44 localhost kernel: ACPI: APIC 0x00000000BFFE15E5 0000B0 (v01 BOCHS  BXPC     00000001 BXPC 00000001)
Oct 02 10:46:44 localhost kernel: ACPI: WAET 0x00000000BFFE1695 000028 (v01 BOCHS  BXPC     00000001 BXPC 00000001)
Oct 02 10:46:44 localhost kernel: ACPI: Reserving FACP table memory at [mem 0xbffe1571-0xbffe15e4]
Oct 02 10:46:44 localhost kernel: ACPI: Reserving DSDT table memory at [mem 0xbffdfc80-0xbffe1570]
Oct 02 10:46:44 localhost kernel: ACPI: Reserving FACS table memory at [mem 0xbffdfc40-0xbffdfc7f]
Oct 02 10:46:44 localhost kernel: ACPI: Reserving APIC table memory at [mem 0xbffe15e5-0xbffe1694]
Oct 02 10:46:44 localhost kernel: ACPI: Reserving WAET table memory at [mem 0xbffe1695-0xbffe16bc]
Oct 02 10:46:44 localhost kernel: No NUMA configuration found
Oct 02 10:46:44 localhost kernel: Faking a node at [mem 0x0000000000000000-0x000000023fffffff]
Oct 02 10:46:44 localhost kernel: NODE_DATA(0) allocated [mem 0x23ffd3000-0x23fffdfff]
Oct 02 10:46:44 localhost kernel: crashkernel reserved: 0x00000000af000000 - 0x00000000bf000000 (256 MB)
Oct 02 10:46:44 localhost kernel: Zone ranges:
Oct 02 10:46:44 localhost kernel:   DMA      [mem 0x0000000000001000-0x0000000000ffffff]
Oct 02 10:46:44 localhost kernel:   DMA32    [mem 0x0000000001000000-0x00000000ffffffff]
Oct 02 10:46:44 localhost kernel:   Normal   [mem 0x0000000100000000-0x000000023fffffff]
Oct 02 10:46:44 localhost kernel:   Device   empty
Oct 02 10:46:44 localhost kernel: Movable zone start for each node
Oct 02 10:46:44 localhost kernel: Early memory node ranges
Oct 02 10:46:44 localhost kernel:   node   0: [mem 0x0000000000001000-0x000000000009efff]
Oct 02 10:46:44 localhost kernel:   node   0: [mem 0x0000000000100000-0x00000000bffdafff]
Oct 02 10:46:44 localhost kernel:   node   0: [mem 0x0000000100000000-0x000000023fffffff]
Oct 02 10:46:44 localhost kernel: Initmem setup node 0 [mem 0x0000000000001000-0x000000023fffffff]
Oct 02 10:46:44 localhost kernel: On node 0, zone DMA: 1 pages in unavailable ranges
Oct 02 10:46:44 localhost kernel: On node 0, zone DMA: 97 pages in unavailable ranges
Oct 02 10:46:44 localhost kernel: On node 0, zone Normal: 37 pages in unavailable ranges
Oct 02 10:46:44 localhost kernel: ACPI: PM-Timer IO Port: 0x608
Oct 02 10:46:44 localhost kernel: ACPI: LAPIC_NMI (acpi_id[0xff] dfl dfl lint[0x1])
Oct 02 10:46:44 localhost kernel: IOAPIC[0]: apic_id 0, version 17, address 0xfec00000, GSI 0-23
Oct 02 10:46:44 localhost kernel: ACPI: INT_SRC_OVR (bus 0 bus_irq 0 global_irq 2 dfl dfl)
Oct 02 10:46:44 localhost kernel: ACPI: INT_SRC_OVR (bus 0 bus_irq 5 global_irq 5 high level)
Oct 02 10:46:44 localhost kernel: ACPI: INT_SRC_OVR (bus 0 bus_irq 9 global_irq 9 high level)
Oct 02 10:46:44 localhost kernel: ACPI: INT_SRC_OVR (bus 0 bus_irq 10 global_irq 10 high level)
Oct 02 10:46:44 localhost kernel: ACPI: INT_SRC_OVR (bus 0 bus_irq 11 global_irq 11 high level)
Oct 02 10:46:44 localhost kernel: ACPI: Using ACPI (MADT) for SMP configuration information
Oct 02 10:46:44 localhost kernel: TSC deadline timer available
Oct 02 10:46:44 localhost kernel: CPU topo: Max. logical packages:   8
Oct 02 10:46:44 localhost kernel: CPU topo: Max. logical dies:       8
Oct 02 10:46:44 localhost kernel: CPU topo: Max. dies per package:   1
Oct 02 10:46:44 localhost kernel: CPU topo: Max. threads per core:   1
Oct 02 10:46:44 localhost kernel: CPU topo: Num. cores per package:     1
Oct 02 10:46:44 localhost kernel: CPU topo: Num. threads per package:   1
Oct 02 10:46:44 localhost kernel: CPU topo: Allowing 8 present CPUs plus 0 hotplug CPUs
Oct 02 10:46:44 localhost kernel: kvm-guest: APIC: eoi() replaced with kvm_guest_apic_eoi_write()
Oct 02 10:46:44 localhost kernel: PM: hibernation: Registered nosave memory: [mem 0x00000000-0x00000fff]
Oct 02 10:46:44 localhost kernel: PM: hibernation: Registered nosave memory: [mem 0x0009f000-0x0009ffff]
Oct 02 10:46:44 localhost kernel: PM: hibernation: Registered nosave memory: [mem 0x000a0000-0x000effff]
Oct 02 10:46:44 localhost kernel: PM: hibernation: Registered nosave memory: [mem 0x000f0000-0x000fffff]
Oct 02 10:46:44 localhost kernel: PM: hibernation: Registered nosave memory: [mem 0xbffdb000-0xbfffffff]
Oct 02 10:46:44 localhost kernel: PM: hibernation: Registered nosave memory: [mem 0xc0000000-0xfeffbfff]
Oct 02 10:46:44 localhost kernel: PM: hibernation: Registered nosave memory: [mem 0xfeffc000-0xfeffffff]
Oct 02 10:46:44 localhost kernel: PM: hibernation: Registered nosave memory: [mem 0xff000000-0xfffbffff]
Oct 02 10:46:44 localhost kernel: PM: hibernation: Registered nosave memory: [mem 0xfffc0000-0xffffffff]
Oct 02 10:46:44 localhost kernel: [mem 0xc0000000-0xfeffbfff] available for PCI devices
Oct 02 10:46:44 localhost kernel: Booting paravirtualized kernel on KVM
Oct 02 10:46:44 localhost kernel: clocksource: refined-jiffies: mask: 0xffffffff max_cycles: 0xffffffff, max_idle_ns: 1910969940391419 ns
Oct 02 10:46:44 localhost kernel: setup_percpu: NR_CPUS:8192 nr_cpumask_bits:8 nr_cpu_ids:8 nr_node_ids:1
Oct 02 10:46:44 localhost kernel: percpu: Embedded 64 pages/cpu s225280 r8192 d28672 u262144
Oct 02 10:46:44 localhost kernel: pcpu-alloc: s225280 r8192 d28672 u262144 alloc=1*2097152
Oct 02 10:46:44 localhost kernel: pcpu-alloc: [0] 0 1 2 3 4 5 6 7 
Oct 02 10:46:44 localhost kernel: kvm-guest: PV spinlocks disabled, no host support
Oct 02 10:46:44 localhost kernel: Kernel command line: BOOT_IMAGE=(hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64 root=UUID=1631a6ad-43b8-436d-ae76-16fa14b94458 ro console=ttyS0,115200n8 no_timer_check net.ifnames=0 crashkernel=1G-2G:192M,2G-64G:256M,64G-:512M
Oct 02 10:46:44 localhost kernel: Unknown kernel command line parameters "BOOT_IMAGE=(hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64", will be passed to user space.
Oct 02 10:46:44 localhost kernel: random: crng init done
Oct 02 10:46:44 localhost kernel: Dentry cache hash table entries: 1048576 (order: 11, 8388608 bytes, linear)
Oct 02 10:46:44 localhost kernel: Inode-cache hash table entries: 524288 (order: 10, 4194304 bytes, linear)
Oct 02 10:46:44 localhost kernel: Fallback order for Node 0: 0 
Oct 02 10:46:44 localhost kernel: Built 1 zonelists, mobility grouping on.  Total pages: 2064091
Oct 02 10:46:44 localhost kernel: Policy zone: Normal
Oct 02 10:46:44 localhost kernel: mem auto-init: stack:off, heap alloc:off, heap free:off
Oct 02 10:46:44 localhost kernel: software IO TLB: area num 8.
Oct 02 10:46:44 localhost kernel: SLUB: HWalign=64, Order=0-3, MinObjects=0, CPUs=8, Nodes=1
Oct 02 10:46:44 localhost kernel: ftrace: allocating 49370 entries in 193 pages
Oct 02 10:46:44 localhost kernel: ftrace: allocated 193 pages with 3 groups
Oct 02 10:46:44 localhost kernel: Dynamic Preempt: voluntary
Oct 02 10:46:44 localhost kernel: rcu: Preemptible hierarchical RCU implementation.
Oct 02 10:46:44 localhost kernel: rcu:         RCU event tracing is enabled.
Oct 02 10:46:44 localhost kernel: rcu:         RCU restricting CPUs from NR_CPUS=8192 to nr_cpu_ids=8.
Oct 02 10:46:44 localhost kernel:         Trampoline variant of Tasks RCU enabled.
Oct 02 10:46:44 localhost kernel:         Rude variant of Tasks RCU enabled.
Oct 02 10:46:44 localhost kernel:         Tracing variant of Tasks RCU enabled.
Oct 02 10:46:44 localhost kernel: rcu: RCU calculated value of scheduler-enlistment delay is 100 jiffies.
Oct 02 10:46:44 localhost kernel: rcu: Adjusting geometry for rcu_fanout_leaf=16, nr_cpu_ids=8
Oct 02 10:46:44 localhost kernel: RCU Tasks: Setting shift to 3 and lim to 1 rcu_task_cb_adjust=1 rcu_task_cpu_ids=8.
Oct 02 10:46:44 localhost kernel: RCU Tasks Rude: Setting shift to 3 and lim to 1 rcu_task_cb_adjust=1 rcu_task_cpu_ids=8.
Oct 02 10:46:44 localhost kernel: RCU Tasks Trace: Setting shift to 3 and lim to 1 rcu_task_cb_adjust=1 rcu_task_cpu_ids=8.
Oct 02 10:46:44 localhost kernel: NR_IRQS: 524544, nr_irqs: 488, preallocated irqs: 16
Oct 02 10:46:44 localhost kernel: rcu: srcu_init: Setting srcu_struct sizes based on contention.
Oct 02 10:46:44 localhost kernel: kfence: initialized - using 2097152 bytes for 255 objects at 0x(____ptrval____)-0x(____ptrval____)
Oct 02 10:46:44 localhost kernel: Console: colour VGA+ 80x25
Oct 02 10:46:44 localhost kernel: printk: console [ttyS0] enabled
Oct 02 10:46:44 localhost kernel: ACPI: Core revision 20230331
Oct 02 10:46:44 localhost kernel: APIC: Switch to symmetric I/O mode setup
Oct 02 10:46:44 localhost kernel: x2apic enabled
Oct 02 10:46:44 localhost kernel: APIC: Switched APIC routing to: physical x2apic
Oct 02 10:46:44 localhost kernel: tsc: Marking TSC unstable due to TSCs unsynchronized
Oct 02 10:46:44 localhost kernel: Calibrating delay loop (skipped) preset value.. 5599.99 BogoMIPS (lpj=2799998)
Oct 02 10:46:44 localhost kernel: x86/cpu: User Mode Instruction Prevention (UMIP) activated
Oct 02 10:46:44 localhost kernel: Last level iTLB entries: 4KB 512, 2MB 255, 4MB 127
Oct 02 10:46:44 localhost kernel: Last level dTLB entries: 4KB 512, 2MB 255, 4MB 127, 1GB 0
Oct 02 10:46:44 localhost kernel: Spectre V1 : Mitigation: usercopy/swapgs barriers and __user pointer sanitization
Oct 02 10:46:44 localhost kernel: Spectre V2 : Mitigation: Retpolines
Oct 02 10:46:44 localhost kernel: Spectre V2 : Spectre v2 / SpectreRSB: Filling RSB on context switch and VMEXIT
Oct 02 10:46:44 localhost kernel: Spectre V2 : Enabling Speculation Barrier for firmware calls
Oct 02 10:46:44 localhost kernel: RETBleed: Mitigation: untrained return thunk
Oct 02 10:46:44 localhost kernel: Spectre V2 : mitigation: Enabling conditional Indirect Branch Prediction Barrier
Oct 02 10:46:44 localhost kernel: Speculative Store Bypass: Mitigation: Speculative Store Bypass disabled via prctl
Oct 02 10:46:44 localhost kernel: Speculative Return Stack Overflow: IBPB-extending microcode not applied!
Oct 02 10:46:44 localhost kernel: Speculative Return Stack Overflow: WARNING: See https://kernel.org/doc/html/latest/admin-guide/hw-vuln/srso.html for mitigation options.
Oct 02 10:46:44 localhost kernel: x86/bugs: return thunk changed
Oct 02 10:46:44 localhost kernel: Speculative Return Stack Overflow: Vulnerable: Safe RET, no microcode
Oct 02 10:46:44 localhost kernel: x86/fpu: Supporting XSAVE feature 0x001: 'x87 floating point registers'
Oct 02 10:46:44 localhost kernel: x86/fpu: Supporting XSAVE feature 0x002: 'SSE registers'
Oct 02 10:46:44 localhost kernel: x86/fpu: Supporting XSAVE feature 0x004: 'AVX registers'
Oct 02 10:46:44 localhost kernel: x86/fpu: xstate_offset[2]:  576, xstate_sizes[2]:  256
Oct 02 10:46:44 localhost kernel: x86/fpu: Enabled xstate features 0x7, context size is 832 bytes, using 'compacted' format.
Oct 02 10:46:44 localhost kernel: Freeing SMP alternatives memory: 40K
Oct 02 10:46:44 localhost kernel: pid_max: default: 32768 minimum: 301
Oct 02 10:46:44 localhost kernel: LSM: initializing lsm=lockdown,capability,landlock,yama,integrity,selinux,bpf
Oct 02 10:46:44 localhost kernel: landlock: Up and running.
Oct 02 10:46:44 localhost kernel: Yama: becoming mindful.
Oct 02 10:46:44 localhost kernel: SELinux:  Initializing.
Oct 02 10:46:44 localhost kernel: LSM support for eBPF active
Oct 02 10:46:44 localhost kernel: Mount-cache hash table entries: 16384 (order: 5, 131072 bytes, linear)
Oct 02 10:46:44 localhost kernel: Mountpoint-cache hash table entries: 16384 (order: 5, 131072 bytes, linear)
Oct 02 10:46:44 localhost kernel: smpboot: CPU0: AMD EPYC-Rome Processor (family: 0x17, model: 0x31, stepping: 0x0)
Oct 02 10:46:44 localhost kernel: Performance Events: Fam17h+ core perfctr, AMD PMU driver.
Oct 02 10:46:44 localhost kernel: ... version:                0
Oct 02 10:46:44 localhost kernel: ... bit width:              48
Oct 02 10:46:44 localhost kernel: ... generic registers:      6
Oct 02 10:46:44 localhost kernel: ... value mask:             0000ffffffffffff
Oct 02 10:46:44 localhost kernel: ... max period:             00007fffffffffff
Oct 02 10:46:44 localhost kernel: ... fixed-purpose events:   0
Oct 02 10:46:44 localhost kernel: ... event mask:             000000000000003f
Oct 02 10:46:44 localhost kernel: signal: max sigframe size: 1776
Oct 02 10:46:44 localhost kernel: rcu: Hierarchical SRCU implementation.
Oct 02 10:46:44 localhost kernel: rcu:         Max phase no-delay instances is 400.
Oct 02 10:46:44 localhost kernel: smp: Bringing up secondary CPUs ...
Oct 02 10:46:44 localhost kernel: smpboot: x86: Booting SMP configuration:
Oct 02 10:46:44 localhost kernel: .... node  #0, CPUs:      #1 #2 #3 #4 #5 #6 #7
Oct 02 10:46:44 localhost kernel: smp: Brought up 1 node, 8 CPUs
Oct 02 10:46:44 localhost kernel: smpboot: Total of 8 processors activated (44799.96 BogoMIPS)
Oct 02 10:46:44 localhost kernel: node 0 deferred pages initialised in 17ms
Oct 02 10:46:44 localhost kernel: Memory: 7765548K/8388068K available (16384K kernel code, 5784K rwdata, 13996K rodata, 4068K init, 7304K bss, 616516K reserved, 0K cma-reserved)
Oct 02 10:46:44 localhost kernel: devtmpfs: initialized
Oct 02 10:46:44 localhost kernel: x86/mm: Memory block size: 128MB
Oct 02 10:46:44 localhost kernel: clocksource: jiffies: mask: 0xffffffff max_cycles: 0xffffffff, max_idle_ns: 1911260446275000 ns
Oct 02 10:46:44 localhost kernel: futex hash table entries: 2048 (order: 5, 131072 bytes, linear)
Oct 02 10:46:44 localhost kernel: pinctrl core: initialized pinctrl subsystem
Oct 02 10:46:44 localhost kernel: NET: Registered PF_NETLINK/PF_ROUTE protocol family
Oct 02 10:46:44 localhost kernel: DMA: preallocated 1024 KiB GFP_KERNEL pool for atomic allocations
Oct 02 10:46:44 localhost kernel: DMA: preallocated 1024 KiB GFP_KERNEL|GFP_DMA pool for atomic allocations
Oct 02 10:46:44 localhost kernel: DMA: preallocated 1024 KiB GFP_KERNEL|GFP_DMA32 pool for atomic allocations
Oct 02 10:46:44 localhost kernel: audit: initializing netlink subsys (disabled)
Oct 02 10:46:44 localhost kernel: audit: type=2000 audit(1759402002.923:1): state=initialized audit_enabled=0 res=1
Oct 02 10:46:44 localhost kernel: thermal_sys: Registered thermal governor 'fair_share'
Oct 02 10:46:44 localhost kernel: thermal_sys: Registered thermal governor 'step_wise'
Oct 02 10:46:44 localhost kernel: thermal_sys: Registered thermal governor 'user_space'
Oct 02 10:46:44 localhost kernel: cpuidle: using governor menu
Oct 02 10:46:44 localhost kernel: acpiphp: ACPI Hot Plug PCI Controller Driver version: 0.5
Oct 02 10:46:44 localhost kernel: PCI: Using configuration type 1 for base access
Oct 02 10:46:44 localhost kernel: PCI: Using configuration type 1 for extended access
Oct 02 10:46:44 localhost kernel: kprobes: kprobe jump-optimization is enabled. All kprobes are optimized if possible.
Oct 02 10:46:44 localhost kernel: HugeTLB: registered 1.00 GiB page size, pre-allocated 0 pages
Oct 02 10:46:44 localhost kernel: HugeTLB: 16380 KiB vmemmap can be freed for a 1.00 GiB page
Oct 02 10:46:44 localhost kernel: HugeTLB: registered 2.00 MiB page size, pre-allocated 0 pages
Oct 02 10:46:44 localhost kernel: HugeTLB: 28 KiB vmemmap can be freed for a 2.00 MiB page
Oct 02 10:46:44 localhost kernel: Demotion targets for Node 0: null
Oct 02 10:46:44 localhost kernel: cryptd: max_cpu_qlen set to 1000
Oct 02 10:46:44 localhost kernel: ACPI: Added _OSI(Module Device)
Oct 02 10:46:44 localhost kernel: ACPI: Added _OSI(Processor Device)
Oct 02 10:46:44 localhost kernel: ACPI: Added _OSI(3.0 _SCP Extensions)
Oct 02 10:46:44 localhost kernel: ACPI: Added _OSI(Processor Aggregator Device)
Oct 02 10:46:44 localhost kernel: ACPI: 1 ACPI AML tables successfully acquired and loaded
Oct 02 10:46:44 localhost kernel: ACPI: _OSC evaluation for CPUs failed, trying _PDC
Oct 02 10:46:44 localhost kernel: ACPI: Interpreter enabled
Oct 02 10:46:44 localhost kernel: ACPI: PM: (supports S0 S3 S4 S5)
Oct 02 10:46:44 localhost kernel: ACPI: Using IOAPIC for interrupt routing
Oct 02 10:46:44 localhost kernel: PCI: Using host bridge windows from ACPI; if necessary, use "pci=nocrs" and report a bug
Oct 02 10:46:44 localhost kernel: PCI: Using E820 reservations for host bridge windows
Oct 02 10:46:44 localhost kernel: ACPI: Enabled 2 GPEs in block 00 to 0F
Oct 02 10:46:44 localhost kernel: ACPI: PCI Root Bridge [PCI0] (domain 0000 [bus 00-ff])
Oct 02 10:46:44 localhost kernel: acpi PNP0A03:00: _OSC: OS supports [ExtendedConfig ASPM ClockPM Segments MSI EDR HPX-Type3]
Oct 02 10:46:44 localhost kernel: acpiphp: Slot [3] registered
Oct 02 10:46:44 localhost kernel: acpiphp: Slot [4] registered
Oct 02 10:46:44 localhost kernel: acpiphp: Slot [5] registered
Oct 02 10:46:44 localhost kernel: acpiphp: Slot [6] registered
Oct 02 10:46:44 localhost kernel: acpiphp: Slot [7] registered
Oct 02 10:46:44 localhost kernel: acpiphp: Slot [8] registered
Oct 02 10:46:44 localhost kernel: acpiphp: Slot [9] registered
Oct 02 10:46:44 localhost kernel: acpiphp: Slot [10] registered
Oct 02 10:46:44 localhost kernel: acpiphp: Slot [11] registered
Oct 02 10:46:44 localhost kernel: acpiphp: Slot [12] registered
Oct 02 10:46:44 localhost kernel: acpiphp: Slot [13] registered
Oct 02 10:46:44 localhost kernel: acpiphp: Slot [14] registered
Oct 02 10:46:44 localhost kernel: acpiphp: Slot [15] registered
Oct 02 10:46:44 localhost kernel: acpiphp: Slot [16] registered
Oct 02 10:46:44 localhost kernel: acpiphp: Slot [17] registered
Oct 02 10:46:44 localhost kernel: acpiphp: Slot [18] registered
Oct 02 10:46:44 localhost kernel: acpiphp: Slot [19] registered
Oct 02 10:46:44 localhost kernel: acpiphp: Slot [20] registered
Oct 02 10:46:44 localhost kernel: acpiphp: Slot [21] registered
Oct 02 10:46:44 localhost kernel: acpiphp: Slot [22] registered
Oct 02 10:46:44 localhost kernel: acpiphp: Slot [23] registered
Oct 02 10:46:44 localhost kernel: acpiphp: Slot [24] registered
Oct 02 10:46:44 localhost kernel: acpiphp: Slot [25] registered
Oct 02 10:46:44 localhost kernel: acpiphp: Slot [26] registered
Oct 02 10:46:44 localhost kernel: acpiphp: Slot [27] registered
Oct 02 10:46:44 localhost kernel: acpiphp: Slot [28] registered
Oct 02 10:46:44 localhost kernel: acpiphp: Slot [29] registered
Oct 02 10:46:44 localhost kernel: acpiphp: Slot [30] registered
Oct 02 10:46:44 localhost kernel: acpiphp: Slot [31] registered
Oct 02 10:46:44 localhost kernel: PCI host bridge to bus 0000:00
Oct 02 10:46:44 localhost kernel: pci_bus 0000:00: root bus resource [io  0x0000-0x0cf7 window]
Oct 02 10:46:44 localhost kernel: pci_bus 0000:00: root bus resource [io  0x0d00-0xffff window]
Oct 02 10:46:44 localhost kernel: pci_bus 0000:00: root bus resource [mem 0x000a0000-0x000bffff window]
Oct 02 10:46:44 localhost kernel: pci_bus 0000:00: root bus resource [mem 0xc0000000-0xfebfffff window]
Oct 02 10:46:44 localhost kernel: pci_bus 0000:00: root bus resource [mem 0x240000000-0x2bfffffff window]
Oct 02 10:46:44 localhost kernel: pci_bus 0000:00: root bus resource [bus 00-ff]
Oct 02 10:46:44 localhost kernel: pci 0000:00:00.0: [8086:1237] type 00 class 0x060000 conventional PCI endpoint
Oct 02 10:46:44 localhost kernel: pci 0000:00:01.0: [8086:7000] type 00 class 0x060100 conventional PCI endpoint
Oct 02 10:46:44 localhost kernel: pci 0000:00:01.1: [8086:7010] type 00 class 0x010180 conventional PCI endpoint
Oct 02 10:46:44 localhost kernel: pci 0000:00:01.1: BAR 4 [io  0xc140-0xc14f]
Oct 02 10:46:44 localhost kernel: pci 0000:00:01.1: BAR 0 [io  0x01f0-0x01f7]: legacy IDE quirk
Oct 02 10:46:44 localhost kernel: pci 0000:00:01.1: BAR 1 [io  0x03f6]: legacy IDE quirk
Oct 02 10:46:44 localhost kernel: pci 0000:00:01.1: BAR 2 [io  0x0170-0x0177]: legacy IDE quirk
Oct 02 10:46:44 localhost kernel: pci 0000:00:01.1: BAR 3 [io  0x0376]: legacy IDE quirk
Oct 02 10:46:44 localhost kernel: pci 0000:00:01.2: [8086:7020] type 00 class 0x0c0300 conventional PCI endpoint
Oct 02 10:46:44 localhost kernel: pci 0000:00:01.2: BAR 4 [io  0xc100-0xc11f]
Oct 02 10:46:44 localhost kernel: pci 0000:00:01.3: [8086:7113] type 00 class 0x068000 conventional PCI endpoint
Oct 02 10:46:44 localhost kernel: pci 0000:00:01.3: quirk: [io  0x0600-0x063f] claimed by PIIX4 ACPI
Oct 02 10:46:44 localhost kernel: pci 0000:00:01.3: quirk: [io  0x0700-0x070f] claimed by PIIX4 SMB
Oct 02 10:46:44 localhost kernel: pci 0000:00:02.0: [1af4:1050] type 00 class 0x030000 conventional PCI endpoint
Oct 02 10:46:44 localhost kernel: pci 0000:00:02.0: BAR 0 [mem 0xfe000000-0xfe7fffff pref]
Oct 02 10:46:44 localhost kernel: pci 0000:00:02.0: BAR 2 [mem 0xfe800000-0xfe803fff 64bit pref]
Oct 02 10:46:44 localhost kernel: pci 0000:00:02.0: BAR 4 [mem 0xfeb90000-0xfeb90fff]
Oct 02 10:46:44 localhost kernel: pci 0000:00:02.0: ROM [mem 0xfeb80000-0xfeb8ffff pref]
Oct 02 10:46:44 localhost kernel: pci 0000:00:02.0: Video device with shadowed ROM at [mem 0x000c0000-0x000dffff]
Oct 02 10:46:44 localhost kernel: pci 0000:00:03.0: [1af4:1000] type 00 class 0x020000 conventional PCI endpoint
Oct 02 10:46:44 localhost kernel: pci 0000:00:03.0: BAR 0 [io  0xc080-0xc0bf]
Oct 02 10:46:44 localhost kernel: pci 0000:00:03.0: BAR 1 [mem 0xfeb91000-0xfeb91fff]
Oct 02 10:46:44 localhost kernel: pci 0000:00:03.0: BAR 4 [mem 0xfe804000-0xfe807fff 64bit pref]
Oct 02 10:46:44 localhost kernel: pci 0000:00:03.0: ROM [mem 0xfeb00000-0xfeb7ffff pref]
Oct 02 10:46:44 localhost kernel: pci 0000:00:04.0: [1af4:1001] type 00 class 0x010000 conventional PCI endpoint
Oct 02 10:46:44 localhost kernel: pci 0000:00:04.0: BAR 0 [io  0xc000-0xc07f]
Oct 02 10:46:44 localhost kernel: pci 0000:00:04.0: BAR 1 [mem 0xfeb92000-0xfeb92fff]
Oct 02 10:46:44 localhost kernel: pci 0000:00:04.0: BAR 4 [mem 0xfe808000-0xfe80bfff 64bit pref]
Oct 02 10:46:44 localhost kernel: pci 0000:00:05.0: [1af4:1002] type 00 class 0x00ff00 conventional PCI endpoint
Oct 02 10:46:44 localhost kernel: pci 0000:00:05.0: BAR 0 [io  0xc0c0-0xc0ff]
Oct 02 10:46:44 localhost kernel: pci 0000:00:05.0: BAR 4 [mem 0xfe80c000-0xfe80ffff 64bit pref]
Oct 02 10:46:44 localhost kernel: pci 0000:00:06.0: [1af4:1005] type 00 class 0x00ff00 conventional PCI endpoint
Oct 02 10:46:44 localhost kernel: pci 0000:00:06.0: BAR 0 [io  0xc120-0xc13f]
Oct 02 10:46:44 localhost kernel: pci 0000:00:06.0: BAR 4 [mem 0xfe810000-0xfe813fff 64bit pref]
Oct 02 10:46:44 localhost kernel: ACPI: PCI: Interrupt link LNKA configured for IRQ 10
Oct 02 10:46:44 localhost kernel: ACPI: PCI: Interrupt link LNKB configured for IRQ 10
Oct 02 10:46:44 localhost kernel: ACPI: PCI: Interrupt link LNKC configured for IRQ 11
Oct 02 10:46:44 localhost kernel: ACPI: PCI: Interrupt link LNKD configured for IRQ 11
Oct 02 10:46:44 localhost kernel: ACPI: PCI: Interrupt link LNKS configured for IRQ 9
Oct 02 10:46:44 localhost kernel: iommu: Default domain type: Translated
Oct 02 10:46:44 localhost kernel: iommu: DMA domain TLB invalidation policy: lazy mode
Oct 02 10:46:44 localhost kernel: SCSI subsystem initialized
Oct 02 10:46:44 localhost kernel: ACPI: bus type USB registered
Oct 02 10:46:44 localhost kernel: usbcore: registered new interface driver usbfs
Oct 02 10:46:44 localhost kernel: usbcore: registered new interface driver hub
Oct 02 10:46:44 localhost kernel: usbcore: registered new device driver usb
Oct 02 10:46:44 localhost kernel: pps_core: LinuxPPS API ver. 1 registered
Oct 02 10:46:44 localhost kernel: pps_core: Software ver. 5.3.6 - Copyright 2005-2007 Rodolfo Giometti <giometti@linux.it>
Oct 02 10:46:44 localhost kernel: PTP clock support registered
Oct 02 10:46:44 localhost kernel: EDAC MC: Ver: 3.0.0
Oct 02 10:46:44 localhost kernel: NetLabel: Initializing
Oct 02 10:46:44 localhost kernel: NetLabel:  domain hash size = 128
Oct 02 10:46:44 localhost kernel: NetLabel:  protocols = UNLABELED CIPSOv4 CALIPSO
Oct 02 10:46:44 localhost kernel: NetLabel:  unlabeled traffic allowed by default
Oct 02 10:46:44 localhost kernel: PCI: Using ACPI for IRQ routing
Oct 02 10:46:44 localhost kernel: PCI: pci_cache_line_size set to 64 bytes
Oct 02 10:46:44 localhost kernel: e820: reserve RAM buffer [mem 0x0009fc00-0x0009ffff]
Oct 02 10:46:44 localhost kernel: e820: reserve RAM buffer [mem 0xbffdb000-0xbfffffff]
Oct 02 10:46:44 localhost kernel: pci 0000:00:02.0: vgaarb: setting as boot VGA device
Oct 02 10:46:44 localhost kernel: pci 0000:00:02.0: vgaarb: bridge control possible
Oct 02 10:46:44 localhost kernel: pci 0000:00:02.0: vgaarb: VGA device added: decodes=io+mem,owns=io+mem,locks=none
Oct 02 10:46:44 localhost kernel: vgaarb: loaded
Oct 02 10:46:44 localhost kernel: clocksource: Switched to clocksource kvm-clock
Oct 02 10:46:44 localhost kernel: VFS: Disk quotas dquot_6.6.0
Oct 02 10:46:44 localhost kernel: VFS: Dquot-cache hash table entries: 512 (order 0, 4096 bytes)
Oct 02 10:46:44 localhost kernel: pnp: PnP ACPI init
Oct 02 10:46:44 localhost kernel: pnp 00:03: [dma 2]
Oct 02 10:46:44 localhost kernel: pnp: PnP ACPI: found 5 devices
Oct 02 10:46:44 localhost kernel: clocksource: acpi_pm: mask: 0xffffff max_cycles: 0xffffff, max_idle_ns: 2085701024 ns
Oct 02 10:46:44 localhost kernel: NET: Registered PF_INET protocol family
Oct 02 10:46:44 localhost kernel: IP idents hash table entries: 131072 (order: 8, 1048576 bytes, linear)
Oct 02 10:46:44 localhost kernel: tcp_listen_portaddr_hash hash table entries: 4096 (order: 4, 65536 bytes, linear)
Oct 02 10:46:44 localhost kernel: Table-perturb hash table entries: 65536 (order: 6, 262144 bytes, linear)
Oct 02 10:46:44 localhost kernel: TCP established hash table entries: 65536 (order: 7, 524288 bytes, linear)
Oct 02 10:46:44 localhost kernel: TCP bind hash table entries: 65536 (order: 8, 1048576 bytes, linear)
Oct 02 10:46:44 localhost kernel: TCP: Hash tables configured (established 65536 bind 65536)
Oct 02 10:46:44 localhost kernel: MPTCP token hash table entries: 8192 (order: 5, 196608 bytes, linear)
Oct 02 10:46:44 localhost kernel: UDP hash table entries: 4096 (order: 5, 131072 bytes, linear)
Oct 02 10:46:44 localhost kernel: UDP-Lite hash table entries: 4096 (order: 5, 131072 bytes, linear)
Oct 02 10:46:44 localhost kernel: NET: Registered PF_UNIX/PF_LOCAL protocol family
Oct 02 10:46:44 localhost kernel: NET: Registered PF_XDP protocol family
Oct 02 10:46:44 localhost kernel: pci_bus 0000:00: resource 4 [io  0x0000-0x0cf7 window]
Oct 02 10:46:44 localhost kernel: pci_bus 0000:00: resource 5 [io  0x0d00-0xffff window]
Oct 02 10:46:44 localhost kernel: pci_bus 0000:00: resource 6 [mem 0x000a0000-0x000bffff window]
Oct 02 10:46:44 localhost kernel: pci_bus 0000:00: resource 7 [mem 0xc0000000-0xfebfffff window]
Oct 02 10:46:44 localhost kernel: pci_bus 0000:00: resource 8 [mem 0x240000000-0x2bfffffff window]
Oct 02 10:46:44 localhost kernel: pci 0000:00:01.0: PIIX3: Enabling Passive Release
Oct 02 10:46:44 localhost kernel: pci 0000:00:00.0: Limiting direct PCI/PCI transfers
Oct 02 10:46:44 localhost kernel: ACPI: \_SB_.LNKD: Enabled at IRQ 11
Oct 02 10:46:44 localhost kernel: pci 0000:00:01.2: quirk_usb_early_handoff+0x0/0x140 took 81860 usecs
Oct 02 10:46:44 localhost kernel: PCI: CLS 0 bytes, default 64
Oct 02 10:46:44 localhost kernel: PCI-DMA: Using software bounce buffering for IO (SWIOTLB)
Oct 02 10:46:44 localhost kernel: software IO TLB: mapped [mem 0x00000000ab000000-0x00000000af000000] (64MB)
Oct 02 10:46:44 localhost kernel: Trying to unpack rootfs image as initramfs...
Oct 02 10:46:44 localhost kernel: ACPI: bus type thunderbolt registered
Oct 02 10:46:44 localhost kernel: Initialise system trusted keyrings
Oct 02 10:46:44 localhost kernel: Key type blacklist registered
Oct 02 10:46:44 localhost kernel: workingset: timestamp_bits=36 max_order=21 bucket_order=0
Oct 02 10:46:44 localhost kernel: zbud: loaded
Oct 02 10:46:44 localhost kernel: integrity: Platform Keyring initialized
Oct 02 10:46:44 localhost kernel: integrity: Machine keyring initialized
Oct 02 10:46:44 localhost kernel: Freeing initrd memory: 86104K
Oct 02 10:46:44 localhost kernel: NET: Registered PF_ALG protocol family
Oct 02 10:46:44 localhost kernel: xor: automatically using best checksumming function   avx       
Oct 02 10:46:44 localhost kernel: Key type asymmetric registered
Oct 02 10:46:44 localhost kernel: Asymmetric key parser 'x509' registered
Oct 02 10:46:44 localhost kernel: Block layer SCSI generic (bsg) driver version 0.4 loaded (major 246)
Oct 02 10:46:44 localhost kernel: io scheduler mq-deadline registered
Oct 02 10:46:44 localhost kernel: io scheduler kyber registered
Oct 02 10:46:44 localhost kernel: io scheduler bfq registered
Oct 02 10:46:44 localhost kernel: atomic64_test: passed for x86-64 platform with CX8 and with SSE
Oct 02 10:46:44 localhost kernel: shpchp: Standard Hot Plug PCI Controller Driver version: 0.4
Oct 02 10:46:44 localhost kernel: input: Power Button as /devices/LNXSYSTM:00/LNXPWRBN:00/input/input0
Oct 02 10:46:44 localhost kernel: ACPI: button: Power Button [PWRF]
Oct 02 10:46:44 localhost kernel: ACPI: \_SB_.LNKB: Enabled at IRQ 10
Oct 02 10:46:44 localhost kernel: ACPI: \_SB_.LNKC: Enabled at IRQ 11
Oct 02 10:46:44 localhost kernel: ACPI: \_SB_.LNKA: Enabled at IRQ 10
Oct 02 10:46:44 localhost kernel: Serial: 8250/16550 driver, 4 ports, IRQ sharing enabled
Oct 02 10:46:44 localhost kernel: 00:00: ttyS0 at I/O 0x3f8 (irq = 4, base_baud = 115200) is a 16550A
Oct 02 10:46:44 localhost kernel: Non-volatile memory driver v1.3
Oct 02 10:46:44 localhost kernel: rdac: device handler registered
Oct 02 10:46:44 localhost kernel: hp_sw: device handler registered
Oct 02 10:46:44 localhost kernel: emc: device handler registered
Oct 02 10:46:44 localhost kernel: alua: device handler registered
Oct 02 10:46:44 localhost kernel: uhci_hcd 0000:00:01.2: UHCI Host Controller
Oct 02 10:46:44 localhost kernel: uhci_hcd 0000:00:01.2: new USB bus registered, assigned bus number 1
Oct 02 10:46:44 localhost kernel: uhci_hcd 0000:00:01.2: detected 2 ports
Oct 02 10:46:44 localhost kernel: uhci_hcd 0000:00:01.2: irq 11, io port 0x0000c100
Oct 02 10:46:44 localhost kernel: usb usb1: New USB device found, idVendor=1d6b, idProduct=0001, bcdDevice= 5.14
Oct 02 10:46:44 localhost kernel: usb usb1: New USB device strings: Mfr=3, Product=2, SerialNumber=1
Oct 02 10:46:44 localhost kernel: usb usb1: Product: UHCI Host Controller
Oct 02 10:46:44 localhost kernel: usb usb1: Manufacturer: Linux 5.14.0-620.el9.x86_64 uhci_hcd
Oct 02 10:46:44 localhost kernel: usb usb1: SerialNumber: 0000:00:01.2
Oct 02 10:46:44 localhost kernel: hub 1-0:1.0: USB hub found
Oct 02 10:46:44 localhost kernel: hub 1-0:1.0: 2 ports detected
Oct 02 10:46:44 localhost kernel: usbcore: registered new interface driver usbserial_generic
Oct 02 10:46:44 localhost kernel: usbserial: USB Serial support registered for generic
Oct 02 10:46:44 localhost kernel: i8042: PNP: PS/2 Controller [PNP0303:KBD,PNP0f13:MOU] at 0x60,0x64 irq 1,12
Oct 02 10:46:44 localhost kernel: serio: i8042 KBD port at 0x60,0x64 irq 1
Oct 02 10:46:44 localhost kernel: serio: i8042 AUX port at 0x60,0x64 irq 12
Oct 02 10:46:44 localhost kernel: mousedev: PS/2 mouse device common for all mice
Oct 02 10:46:44 localhost kernel: rtc_cmos 00:04: RTC can wake from S4
Oct 02 10:46:44 localhost kernel: input: AT Translated Set 2 keyboard as /devices/platform/i8042/serio0/input/input1
Oct 02 10:46:44 localhost kernel: rtc_cmos 00:04: registered as rtc0
Oct 02 10:46:44 localhost kernel: rtc_cmos 00:04: setting system clock to 2025-10-02T10:46:43 UTC (1759402003)
Oct 02 10:46:44 localhost kernel: rtc_cmos 00:04: alarms up to one day, y3k, 242 bytes nvram
Oct 02 10:46:44 localhost kernel: amd_pstate: the _CPC object is not present in SBIOS or ACPI disabled
Oct 02 10:46:44 localhost kernel: input: VirtualPS/2 VMware VMMouse as /devices/platform/i8042/serio1/input/input4
Oct 02 10:46:44 localhost kernel: hid: raw HID events driver (C) Jiri Kosina
Oct 02 10:46:44 localhost kernel: input: VirtualPS/2 VMware VMMouse as /devices/platform/i8042/serio1/input/input3
Oct 02 10:46:44 localhost kernel: usbcore: registered new interface driver usbhid
Oct 02 10:46:44 localhost kernel: usbhid: USB HID core driver
Oct 02 10:46:44 localhost kernel: drop_monitor: Initializing network drop monitor service
Oct 02 10:46:44 localhost kernel: Initializing XFRM netlink socket
Oct 02 10:46:44 localhost kernel: NET: Registered PF_INET6 protocol family
Oct 02 10:46:44 localhost kernel: Segment Routing with IPv6
Oct 02 10:46:44 localhost kernel: NET: Registered PF_PACKET protocol family
Oct 02 10:46:44 localhost kernel: mpls_gso: MPLS GSO support
Oct 02 10:46:44 localhost kernel: IPI shorthand broadcast: enabled
Oct 02 10:46:44 localhost kernel: AVX2 version of gcm_enc/dec engaged.
Oct 02 10:46:44 localhost kernel: AES CTR mode by8 optimization enabled
Oct 02 10:46:44 localhost kernel: sched_clock: Marking stable (1231001372, 153520244)->(1476489997, -91968381)
Oct 02 10:46:44 localhost kernel: registered taskstats version 1
Oct 02 10:46:44 localhost kernel: Loading compiled-in X.509 certificates
Oct 02 10:46:44 localhost kernel: Loaded X.509 cert 'The CentOS Project: CentOS Stream kernel signing key: 4ff821c4997fbb659836adb05f5bc400c914e148'
Oct 02 10:46:44 localhost kernel: Loaded X.509 cert 'Red Hat Enterprise Linux Driver Update Program (key 3): bf57f3e87362bc7229d9f465321773dfd1f77a80'
Oct 02 10:46:44 localhost kernel: Loaded X.509 cert 'Red Hat Enterprise Linux kpatch signing key: 4d38fd864ebe18c5f0b72e3852e2014c3a676fc8'
Oct 02 10:46:44 localhost kernel: Loaded X.509 cert 'RH-IMA-CA: Red Hat IMA CA: fb31825dd0e073685b264e3038963673f753959a'
Oct 02 10:46:44 localhost kernel: Loaded X.509 cert 'Nvidia GPU OOT signing 001: 55e1cef88193e60419f0b0ec379c49f77545acf0'
Oct 02 10:46:44 localhost kernel: Demotion targets for Node 0: null
Oct 02 10:46:44 localhost kernel: page_owner is disabled
Oct 02 10:46:44 localhost kernel: Key type .fscrypt registered
Oct 02 10:46:44 localhost kernel: Key type fscrypt-provisioning registered
Oct 02 10:46:44 localhost kernel: Key type big_key registered
Oct 02 10:46:44 localhost kernel: Key type encrypted registered
Oct 02 10:46:44 localhost kernel: ima: No TPM chip found, activating TPM-bypass!
Oct 02 10:46:44 localhost kernel: Loading compiled-in module X.509 certificates
Oct 02 10:46:44 localhost kernel: Loaded X.509 cert 'The CentOS Project: CentOS Stream kernel signing key: 4ff821c4997fbb659836adb05f5bc400c914e148'
Oct 02 10:46:44 localhost kernel: ima: Allocated hash algorithm: sha256
Oct 02 10:46:44 localhost kernel: ima: No architecture policies found
Oct 02 10:46:44 localhost kernel: evm: Initialising EVM extended attributes:
Oct 02 10:46:44 localhost kernel: evm: security.selinux
Oct 02 10:46:44 localhost kernel: evm: security.SMACK64 (disabled)
Oct 02 10:46:44 localhost kernel: evm: security.SMACK64EXEC (disabled)
Oct 02 10:46:44 localhost kernel: evm: security.SMACK64TRANSMUTE (disabled)
Oct 02 10:46:44 localhost kernel: evm: security.SMACK64MMAP (disabled)
Oct 02 10:46:44 localhost kernel: evm: security.apparmor (disabled)
Oct 02 10:46:44 localhost kernel: evm: security.ima
Oct 02 10:46:44 localhost kernel: evm: security.capability
Oct 02 10:46:44 localhost kernel: evm: HMAC attrs: 0x1
Oct 02 10:46:44 localhost kernel: usb 1-1: new full-speed USB device number 2 using uhci_hcd
Oct 02 10:46:44 localhost kernel: Running certificate verification RSA selftest
Oct 02 10:46:44 localhost kernel: Loaded X.509 cert 'Certificate verification self-testing key: f58703bb33ce1b73ee02eccdee5b8817518fe3db'
Oct 02 10:46:44 localhost kernel: Running certificate verification ECDSA selftest
Oct 02 10:46:44 localhost kernel: Loaded X.509 cert 'Certificate verification ECDSA self-testing key: 2900bcea1deb7bc8479a84a23d758efdfdd2b2d3'
Oct 02 10:46:44 localhost kernel: clk: Disabling unused clocks
Oct 02 10:46:44 localhost kernel: Freeing unused decrypted memory: 2028K
Oct 02 10:46:44 localhost kernel: Freeing unused kernel image (initmem) memory: 4068K
Oct 02 10:46:44 localhost kernel: Write protecting the kernel read-only data: 30720k
Oct 02 10:46:44 localhost kernel: Freeing unused kernel image (rodata/data gap) memory: 340K
Oct 02 10:46:44 localhost kernel: usb 1-1: New USB device found, idVendor=0627, idProduct=0001, bcdDevice= 0.00
Oct 02 10:46:44 localhost kernel: usb 1-1: New USB device strings: Mfr=1, Product=3, SerialNumber=10
Oct 02 10:46:44 localhost kernel: usb 1-1: Product: QEMU USB Tablet
Oct 02 10:46:44 localhost kernel: usb 1-1: Manufacturer: QEMU
Oct 02 10:46:44 localhost kernel: usb 1-1: SerialNumber: 28754-0000:00:01.2-1
Oct 02 10:46:44 localhost kernel: input: QEMU QEMU USB Tablet as /devices/pci0000:00/0000:00:01.2/usb1/1-1/1-1:1.0/0003:0627:0001.0001/input/input5
Oct 02 10:46:44 localhost kernel: hid-generic 0003:0627:0001.0001: input,hidraw0: USB HID v0.01 Mouse [QEMU QEMU USB Tablet] on usb-0000:00:01.2-1/input0
Oct 02 10:46:44 localhost kernel: x86/mm: Checked W+X mappings: passed, no W+X pages found.
Oct 02 10:46:44 localhost kernel: Run /init as init process
Oct 02 10:46:44 localhost kernel:   with arguments:
Oct 02 10:46:44 localhost kernel:     /init
Oct 02 10:46:44 localhost kernel:   with environment:
Oct 02 10:46:44 localhost kernel:     HOME=/
Oct 02 10:46:44 localhost kernel:     TERM=linux
Oct 02 10:46:44 localhost kernel:     BOOT_IMAGE=(hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64
Oct 02 10:46:44 localhost systemd[1]: systemd 252-55.el9 running in system mode (+PAM +AUDIT +SELINUX -APPARMOR +IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN -IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK +PCRE2 -PWQUALITY +P11KIT -QRENCODE +TPM2 +BZIP2 +LZ4 +XZ +ZLIB +ZSTD -BPF_FRAMEWORK +XKBCOMMON +UTMP +SYSVINIT default-hierarchy=unified)
Oct 02 10:46:44 localhost systemd[1]: Detected virtualization kvm.
Oct 02 10:46:44 localhost systemd[1]: Detected architecture x86-64.
Oct 02 10:46:44 localhost systemd[1]: Running in initrd.
Oct 02 10:46:44 localhost systemd[1]: No hostname configured, using default hostname.
Oct 02 10:46:44 localhost systemd[1]: Hostname set to <localhost>.
Oct 02 10:46:44 localhost systemd[1]: Initializing machine ID from VM UUID.
Oct 02 10:46:44 localhost systemd[1]: Queued start job for default target Initrd Default Target.
Oct 02 10:46:44 localhost systemd[1]: Started Dispatch Password Requests to Console Directory Watch.
Oct 02 10:46:44 localhost systemd[1]: Reached target Local Encrypted Volumes.
Oct 02 10:46:44 localhost systemd[1]: Reached target Initrd /usr File System.
Oct 02 10:46:44 localhost systemd[1]: Reached target Local File Systems.
Oct 02 10:46:44 localhost systemd[1]: Reached target Path Units.
Oct 02 10:46:44 localhost systemd[1]: Reached target Slice Units.
Oct 02 10:46:44 localhost systemd[1]: Reached target Swaps.
Oct 02 10:46:44 localhost systemd[1]: Reached target Timer Units.
Oct 02 10:46:44 localhost systemd[1]: Listening on D-Bus System Message Bus Socket.
Oct 02 10:46:44 localhost systemd[1]: Listening on Journal Socket (/dev/log).
Oct 02 10:46:44 localhost systemd[1]: Listening on Journal Socket.
Oct 02 10:46:44 localhost systemd[1]: Listening on udev Control Socket.
Oct 02 10:46:44 localhost systemd[1]: Listening on udev Kernel Socket.
Oct 02 10:46:44 localhost systemd[1]: Reached target Socket Units.
Oct 02 10:46:44 localhost systemd[1]: Starting Create List of Static Device Nodes...
Oct 02 10:46:44 localhost systemd[1]: Starting Journal Service...
Oct 02 10:46:44 localhost systemd[1]: Load Kernel Modules was skipped because no trigger condition checks were met.
Oct 02 10:46:44 localhost systemd[1]: Starting Apply Kernel Variables...
Oct 02 10:46:44 localhost systemd[1]: Starting Create System Users...
Oct 02 10:46:44 localhost systemd[1]: Starting Setup Virtual Console...
Oct 02 10:46:44 localhost systemd[1]: Finished Create List of Static Device Nodes.
Oct 02 10:46:44 localhost systemd[1]: Finished Apply Kernel Variables.
Oct 02 10:46:44 localhost systemd[1]: Finished Create System Users.
Oct 02 10:46:44 localhost systemd-journald[311]: Journal started
Oct 02 10:46:44 localhost systemd-journald[311]: Runtime Journal (/run/log/journal/a6ea5ec0bd37473594f0b41eba3dd400) is 8.0M, max 153.5M, 145.5M free.
Oct 02 10:46:44 localhost systemd-sysusers[315]: Creating group 'users' with GID 100.
Oct 02 10:46:44 localhost systemd-sysusers[315]: Creating group 'dbus' with GID 81.
Oct 02 10:46:44 localhost systemd-sysusers[315]: Creating user 'dbus' (System Message Bus) with UID 81 and GID 81.
Oct 02 10:46:44 localhost systemd[1]: Started Journal Service.
Oct 02 10:46:44 localhost systemd[1]: Starting Create Static Device Nodes in /dev...
Oct 02 10:46:44 localhost systemd[1]: Starting Create Volatile Files and Directories...
Oct 02 10:46:44 localhost systemd[1]: Finished Create Static Device Nodes in /dev.
Oct 02 10:46:44 localhost systemd[1]: Finished Create Volatile Files and Directories.
Oct 02 10:46:44 localhost systemd[1]: Finished Setup Virtual Console.
Oct 02 10:46:44 localhost systemd[1]: dracut ask for additional cmdline parameters was skipped because no trigger condition checks were met.
Oct 02 10:46:44 localhost systemd[1]: Starting dracut cmdline hook...
Oct 02 10:46:44 localhost dracut-cmdline[331]: dracut-9 dracut-057-102.git20250818.el9
Oct 02 10:46:44 localhost dracut-cmdline[331]: Using kernel command line parameters:    BOOT_IMAGE=(hd0,msdos1)/boot/vmlinuz-5.14.0-620.el9.x86_64 root=UUID=1631a6ad-43b8-436d-ae76-16fa14b94458 ro console=ttyS0,115200n8 no_timer_check net.ifnames=0 crashkernel=1G-2G:192M,2G-64G:256M,64G-:512M
Oct 02 10:46:44 localhost systemd[1]: Finished dracut cmdline hook.
Oct 02 10:46:44 localhost systemd[1]: Starting dracut pre-udev hook...
Oct 02 10:46:44 localhost kernel: device-mapper: core: CONFIG_IMA_DISABLE_HTABLE is disabled. Duplicate IMA measurements will not be recorded in the IMA log.
Oct 02 10:46:44 localhost kernel: device-mapper: uevent: version 1.0.3
Oct 02 10:46:44 localhost kernel: device-mapper: ioctl: 4.50.0-ioctl (2025-04-28) initialised: dm-devel@lists.linux.dev
Oct 02 10:46:44 localhost kernel: RPC: Registered named UNIX socket transport module.
Oct 02 10:46:44 localhost kernel: RPC: Registered udp transport module.
Oct 02 10:46:44 localhost kernel: RPC: Registered tcp transport module.
Oct 02 10:46:44 localhost kernel: RPC: Registered tcp-with-tls transport module.
Oct 02 10:46:44 localhost kernel: RPC: Registered tcp NFSv4.1 backchannel transport module.
Oct 02 10:46:44 localhost rpc.statd[447]: Version 2.5.4 starting
Oct 02 10:46:44 localhost rpc.statd[447]: Initializing NSM state
Oct 02 10:46:44 localhost rpc.idmapd[452]: Setting log level to 0
Oct 02 10:46:44 localhost systemd[1]: Finished dracut pre-udev hook.
Oct 02 10:46:44 localhost systemd[1]: Starting Rule-based Manager for Device Events and Files...
Oct 02 10:46:44 localhost systemd-udevd[465]: Using default interface naming scheme 'rhel-9.0'.
Oct 02 10:46:44 localhost systemd[1]: Started Rule-based Manager for Device Events and Files.
Oct 02 10:46:44 localhost systemd[1]: Starting dracut pre-trigger hook...
Oct 02 10:46:44 localhost systemd[1]: Finished dracut pre-trigger hook.
Oct 02 10:46:44 localhost systemd[1]: Starting Coldplug All udev Devices...
Oct 02 10:46:44 localhost systemd[1]: Created slice Slice /system/modprobe.
Oct 02 10:46:44 localhost systemd[1]: Starting Load Kernel Module configfs...
Oct 02 10:46:44 localhost systemd[1]: Finished Coldplug All udev Devices.
Oct 02 10:46:44 localhost systemd[1]: modprobe@configfs.service: Deactivated successfully.
Oct 02 10:46:44 localhost systemd[1]: Finished Load Kernel Module configfs.
Oct 02 10:46:44 localhost systemd[1]: nm-initrd.service was skipped because of an unmet condition check (ConditionPathExists=/run/NetworkManager/initrd/neednet).
Oct 02 10:46:44 localhost systemd[1]: Reached target Network.
Oct 02 10:46:44 localhost systemd[1]: nm-wait-online-initrd.service was skipped because of an unmet condition check (ConditionPathExists=/run/NetworkManager/initrd/neednet).
Oct 02 10:46:45 localhost systemd[1]: Starting dracut initqueue hook...
Oct 02 10:46:45 localhost kernel: virtio_blk virtio2: 8/0/0 default/read/poll queues
Oct 02 10:46:45 localhost kernel: virtio_blk virtio2: [vda] 167772160 512-byte logical blocks (85.9 GB/80.0 GiB)
Oct 02 10:46:45 localhost kernel:  vda: vda1
Oct 02 10:46:45 localhost kernel: libata version 3.00 loaded.
Oct 02 10:46:45 localhost kernel: ata_piix 0000:00:01.1: version 2.13
Oct 02 10:46:45 localhost kernel: scsi host0: ata_piix
Oct 02 10:46:45 localhost kernel: scsi host1: ata_piix
Oct 02 10:46:45 localhost kernel: ata1: PATA max MWDMA2 cmd 0x1f0 ctl 0x3f6 bmdma 0xc140 irq 14 lpm-pol 0
Oct 02 10:46:45 localhost kernel: ata2: PATA max MWDMA2 cmd 0x170 ctl 0x376 bmdma 0xc148 irq 15 lpm-pol 0
Oct 02 10:46:45 localhost systemd-udevd[480]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 10:46:45 localhost systemd[1]: Found device /dev/disk/by-uuid/1631a6ad-43b8-436d-ae76-16fa14b94458.
Oct 02 10:46:45 localhost systemd[1]: Reached target Initrd Root Device.
Oct 02 10:46:45 localhost systemd[1]: Mounting Kernel Configuration File System...
Oct 02 10:46:45 localhost kernel: ata1: found unknown device (class 0)
Oct 02 10:46:45 localhost kernel: ata1.00: ATAPI: QEMU DVD-ROM, 2.5+, max UDMA/100
Oct 02 10:46:45 localhost kernel: scsi 0:0:0:0: CD-ROM            QEMU     QEMU DVD-ROM     2.5+ PQ: 0 ANSI: 5
Oct 02 10:46:45 localhost systemd[1]: Mounted Kernel Configuration File System.
Oct 02 10:46:45 localhost systemd[1]: Reached target System Initialization.
Oct 02 10:46:45 localhost systemd[1]: Reached target Basic System.
Oct 02 10:46:45 localhost kernel: scsi 0:0:0:0: Attached scsi generic sg0 type 5
Oct 02 10:46:45 localhost kernel: sr 0:0:0:0: [sr0] scsi3-mmc drive: 4x/4x cd/rw xa/form2 tray
Oct 02 10:46:45 localhost kernel: cdrom: Uniform CD-ROM driver Revision: 3.20
Oct 02 10:46:45 localhost kernel: sr 0:0:0:0: Attached scsi CD-ROM sr0
Oct 02 10:46:45 localhost systemd[1]: Finished dracut initqueue hook.
Oct 02 10:46:45 localhost systemd[1]: Reached target Preparation for Remote File Systems.
Oct 02 10:46:45 localhost systemd[1]: Reached target Remote Encrypted Volumes.
Oct 02 10:46:45 localhost systemd[1]: Reached target Remote File Systems.
Oct 02 10:46:45 localhost systemd[1]: Starting dracut pre-mount hook...
Oct 02 10:46:45 localhost systemd[1]: Finished dracut pre-mount hook.
Oct 02 10:46:45 localhost systemd[1]: Starting File System Check on /dev/disk/by-uuid/1631a6ad-43b8-436d-ae76-16fa14b94458...
Oct 02 10:46:45 localhost systemd-fsck[559]: /usr/sbin/fsck.xfs: XFS file system.
Oct 02 10:46:45 localhost systemd[1]: Finished File System Check on /dev/disk/by-uuid/1631a6ad-43b8-436d-ae76-16fa14b94458.
Oct 02 10:46:45 localhost systemd[1]: Mounting /sysroot...
Oct 02 10:46:46 localhost kernel: SGI XFS with ACLs, security attributes, scrub, quota, no debug enabled
Oct 02 10:46:46 localhost kernel: XFS (vda1): Mounting V5 Filesystem 1631a6ad-43b8-436d-ae76-16fa14b94458
Oct 02 10:46:46 localhost kernel: XFS (vda1): Ending clean mount
Oct 02 10:46:46 localhost systemd[1]: Mounted /sysroot.
Oct 02 10:46:46 localhost systemd[1]: Reached target Initrd Root File System.
Oct 02 10:46:46 localhost systemd[1]: Starting Mountpoints Configured in the Real Root...
Oct 02 10:46:46 localhost systemd[1]: initrd-parse-etc.service: Deactivated successfully.
Oct 02 10:46:46 localhost systemd[1]: Finished Mountpoints Configured in the Real Root.
Oct 02 10:46:46 localhost systemd[1]: Reached target Initrd File Systems.
Oct 02 10:46:46 localhost systemd[1]: Reached target Initrd Default Target.
Oct 02 10:46:46 localhost systemd[1]: Starting dracut mount hook...
Oct 02 10:46:46 localhost systemd[1]: Finished dracut mount hook.
Oct 02 10:46:46 localhost systemd[1]: Starting dracut pre-pivot and cleanup hook...
Oct 02 10:46:46 localhost rpc.idmapd[452]: exiting on signal 15
Oct 02 10:46:46 localhost systemd[1]: var-lib-nfs-rpc_pipefs.mount: Deactivated successfully.
Oct 02 10:46:46 localhost systemd[1]: Finished dracut pre-pivot and cleanup hook.
Oct 02 10:46:46 localhost systemd[1]: Starting Cleaning Up and Shutting Down Daemons...
Oct 02 10:46:46 localhost systemd[1]: Stopped target Network.
Oct 02 10:46:46 localhost systemd[1]: Stopped target Remote Encrypted Volumes.
Oct 02 10:46:46 localhost systemd[1]: Stopped target Timer Units.
Oct 02 10:46:46 localhost systemd[1]: dbus.socket: Deactivated successfully.
Oct 02 10:46:46 localhost systemd[1]: Closed D-Bus System Message Bus Socket.
Oct 02 10:46:46 localhost systemd[1]: dracut-pre-pivot.service: Deactivated successfully.
Oct 02 10:46:46 localhost systemd[1]: Stopped dracut pre-pivot and cleanup hook.
Oct 02 10:46:46 localhost systemd[1]: Stopped target Initrd Default Target.
Oct 02 10:46:46 localhost systemd[1]: Stopped target Basic System.
Oct 02 10:46:46 localhost systemd[1]: Stopped target Initrd Root Device.
Oct 02 10:46:46 localhost systemd[1]: Stopped target Initrd /usr File System.
Oct 02 10:46:46 localhost systemd[1]: Stopped target Path Units.
Oct 02 10:46:46 localhost systemd[1]: Stopped target Remote File Systems.
Oct 02 10:46:46 localhost systemd[1]: Stopped target Preparation for Remote File Systems.
Oct 02 10:46:46 localhost systemd[1]: Stopped target Slice Units.
Oct 02 10:46:46 localhost systemd[1]: Stopped target Socket Units.
Oct 02 10:46:46 localhost systemd[1]: Stopped target System Initialization.
Oct 02 10:46:46 localhost systemd[1]: Stopped target Local File Systems.
Oct 02 10:46:46 localhost systemd[1]: Stopped target Swaps.
Oct 02 10:46:46 localhost systemd[1]: dracut-mount.service: Deactivated successfully.
Oct 02 10:46:46 localhost systemd[1]: Stopped dracut mount hook.
Oct 02 10:46:46 localhost systemd[1]: dracut-pre-mount.service: Deactivated successfully.
Oct 02 10:46:46 localhost systemd[1]: Stopped dracut pre-mount hook.
Oct 02 10:46:46 localhost systemd[1]: Stopped target Local Encrypted Volumes.
Oct 02 10:46:46 localhost systemd[1]: systemd-ask-password-console.path: Deactivated successfully.
Oct 02 10:46:46 localhost systemd[1]: Stopped Dispatch Password Requests to Console Directory Watch.
Oct 02 10:46:46 localhost systemd[1]: dracut-initqueue.service: Deactivated successfully.
Oct 02 10:46:46 localhost systemd[1]: Stopped dracut initqueue hook.
Oct 02 10:46:46 localhost systemd[1]: systemd-sysctl.service: Deactivated successfully.
Oct 02 10:46:46 localhost systemd[1]: Stopped Apply Kernel Variables.
Oct 02 10:46:46 localhost systemd[1]: systemd-tmpfiles-setup.service: Deactivated successfully.
Oct 02 10:46:46 localhost systemd[1]: Stopped Create Volatile Files and Directories.
Oct 02 10:46:46 localhost systemd[1]: systemd-udev-trigger.service: Deactivated successfully.
Oct 02 10:46:46 localhost systemd[1]: Stopped Coldplug All udev Devices.
Oct 02 10:46:46 localhost systemd[1]: dracut-pre-trigger.service: Deactivated successfully.
Oct 02 10:46:46 localhost systemd[1]: Stopped dracut pre-trigger hook.
Oct 02 10:46:46 localhost systemd[1]: Stopping Rule-based Manager for Device Events and Files...
Oct 02 10:46:46 localhost systemd[1]: systemd-vconsole-setup.service: Deactivated successfully.
Oct 02 10:46:46 localhost systemd[1]: Stopped Setup Virtual Console.
Oct 02 10:46:46 localhost systemd[1]: run-credentials-systemd\x2dtmpfiles\x2dsetup.service.mount: Deactivated successfully.
Oct 02 10:46:46 localhost systemd[1]: run-credentials-systemd\x2dsysctl.service.mount: Deactivated successfully.
Oct 02 10:46:46 localhost systemd[1]: initrd-cleanup.service: Deactivated successfully.
Oct 02 10:46:46 localhost systemd[1]: Finished Cleaning Up and Shutting Down Daemons.
Oct 02 10:46:46 localhost systemd[1]: systemd-udevd.service: Deactivated successfully.
Oct 02 10:46:46 localhost systemd[1]: Stopped Rule-based Manager for Device Events and Files.
Oct 02 10:46:46 localhost systemd[1]: systemd-udevd-control.socket: Deactivated successfully.
Oct 02 10:46:46 localhost systemd[1]: Closed udev Control Socket.
Oct 02 10:46:46 localhost systemd[1]: systemd-udevd-kernel.socket: Deactivated successfully.
Oct 02 10:46:46 localhost systemd[1]: Closed udev Kernel Socket.
Oct 02 10:46:46 localhost systemd[1]: dracut-pre-udev.service: Deactivated successfully.
Oct 02 10:46:46 localhost systemd[1]: Stopped dracut pre-udev hook.
Oct 02 10:46:46 localhost systemd[1]: dracut-cmdline.service: Deactivated successfully.
Oct 02 10:46:46 localhost systemd[1]: Stopped dracut cmdline hook.
Oct 02 10:46:46 localhost systemd[1]: Starting Cleanup udev Database...
Oct 02 10:46:46 localhost systemd[1]: systemd-tmpfiles-setup-dev.service: Deactivated successfully.
Oct 02 10:46:46 localhost systemd[1]: Stopped Create Static Device Nodes in /dev.
Oct 02 10:46:46 localhost systemd[1]: kmod-static-nodes.service: Deactivated successfully.
Oct 02 10:46:46 localhost systemd[1]: Stopped Create List of Static Device Nodes.
Oct 02 10:46:46 localhost systemd[1]: systemd-sysusers.service: Deactivated successfully.
Oct 02 10:46:46 localhost systemd[1]: Stopped Create System Users.
Oct 02 10:46:46 localhost systemd[1]: run-credentials-systemd\x2dtmpfiles\x2dsetup\x2ddev.service.mount: Deactivated successfully.
Oct 02 10:46:46 localhost systemd[1]: run-credentials-systemd\x2dsysusers.service.mount: Deactivated successfully.
Oct 02 10:46:46 localhost systemd[1]: initrd-udevadm-cleanup-db.service: Deactivated successfully.
Oct 02 10:46:46 localhost systemd[1]: Finished Cleanup udev Database.
Oct 02 10:46:46 localhost systemd[1]: Reached target Switch Root.
Oct 02 10:46:46 localhost systemd[1]: Starting Switch Root...
Oct 02 10:46:46 localhost systemd[1]: Switching root.
Oct 02 10:46:46 localhost systemd-journald[311]: Journal stopped
Oct 02 10:46:49 localhost systemd-journald[311]: Received SIGTERM from PID 1 (systemd).
Oct 02 10:46:49 localhost kernel: audit: type=1404 audit(1759402007.296:2): enforcing=1 old_enforcing=0 auid=4294967295 ses=4294967295 enabled=1 old-enabled=1 lsm=selinux res=1
Oct 02 10:46:49 localhost kernel: SELinux:  policy capability network_peer_controls=1
Oct 02 10:46:49 localhost kernel: SELinux:  policy capability open_perms=1
Oct 02 10:46:49 localhost kernel: SELinux:  policy capability extended_socket_class=1
Oct 02 10:46:49 localhost kernel: SELinux:  policy capability always_check_network=0
Oct 02 10:46:49 localhost kernel: SELinux:  policy capability cgroup_seclabel=1
Oct 02 10:46:49 localhost kernel: SELinux:  policy capability nnp_nosuid_transition=1
Oct 02 10:46:49 localhost kernel: SELinux:  policy capability genfs_seclabel_symlinks=1
Oct 02 10:46:49 localhost kernel: audit: type=1403 audit(1759402007.511:3): auid=4294967295 ses=4294967295 lsm=selinux res=1
Oct 02 10:46:49 localhost systemd[1]: Successfully loaded SELinux policy in 219.532ms.
Oct 02 10:46:49 localhost systemd[1]: Relabelled /dev, /dev/shm, /run, /sys/fs/cgroup in 64.966ms.
Oct 02 10:46:49 localhost systemd[1]: systemd 252-55.el9 running in system mode (+PAM +AUDIT +SELINUX -APPARMOR +IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN -IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK +PCRE2 -PWQUALITY +P11KIT -QRENCODE +TPM2 +BZIP2 +LZ4 +XZ +ZLIB +ZSTD -BPF_FRAMEWORK +XKBCOMMON +UTMP +SYSVINIT default-hierarchy=unified)
Oct 02 10:46:49 localhost systemd[1]: Detected virtualization kvm.
Oct 02 10:46:49 localhost systemd[1]: Detected architecture x86-64.
Oct 02 10:46:49 localhost systemd-rc-local-generator[641]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 10:46:49 localhost systemd[1]: initrd-switch-root.service: Deactivated successfully.
Oct 02 10:46:49 localhost systemd[1]: Stopped Switch Root.
Oct 02 10:46:49 localhost systemd[1]: systemd-journald.service: Scheduled restart job, restart counter is at 1.
Oct 02 10:46:49 localhost systemd[1]: Created slice Slice /system/getty.
Oct 02 10:46:49 localhost systemd[1]: Created slice Slice /system/serial-getty.
Oct 02 10:46:49 localhost systemd[1]: Created slice Slice /system/sshd-keygen.
Oct 02 10:46:49 localhost systemd[1]: Created slice User and Session Slice.
Oct 02 10:46:49 localhost systemd[1]: Started Dispatch Password Requests to Console Directory Watch.
Oct 02 10:46:49 localhost systemd[1]: Started Forward Password Requests to Wall Directory Watch.
Oct 02 10:46:49 localhost systemd[1]: Set up automount Arbitrary Executable File Formats File System Automount Point.
Oct 02 10:46:49 localhost systemd[1]: Reached target Local Encrypted Volumes.
Oct 02 10:46:49 localhost systemd[1]: Stopped target Switch Root.
Oct 02 10:46:49 localhost systemd[1]: Stopped target Initrd File Systems.
Oct 02 10:46:49 localhost systemd[1]: Stopped target Initrd Root File System.
Oct 02 10:46:49 localhost systemd[1]: Reached target Local Integrity Protected Volumes.
Oct 02 10:46:49 localhost systemd[1]: Reached target Path Units.
Oct 02 10:46:49 localhost systemd[1]: Reached target rpc_pipefs.target.
Oct 02 10:46:49 localhost systemd[1]: Reached target Slice Units.
Oct 02 10:46:49 localhost systemd[1]: Reached target Swaps.
Oct 02 10:46:49 localhost systemd[1]: Reached target Local Verity Protected Volumes.
Oct 02 10:46:49 localhost systemd[1]: Listening on RPCbind Server Activation Socket.
Oct 02 10:46:49 localhost systemd[1]: Reached target RPC Port Mapper.
Oct 02 10:46:49 localhost systemd[1]: Listening on Process Core Dump Socket.
Oct 02 10:46:49 localhost systemd[1]: Listening on initctl Compatibility Named Pipe.
Oct 02 10:46:49 localhost systemd[1]: Listening on udev Control Socket.
Oct 02 10:46:49 localhost systemd[1]: Listening on udev Kernel Socket.
Oct 02 10:46:49 localhost systemd[1]: Mounting Huge Pages File System...
Oct 02 10:46:49 localhost systemd[1]: Mounting POSIX Message Queue File System...
Oct 02 10:46:49 localhost systemd[1]: Mounting Kernel Debug File System...
Oct 02 10:46:49 localhost systemd[1]: Mounting Kernel Trace File System...
Oct 02 10:46:49 localhost systemd[1]: Kernel Module supporting RPCSEC_GSS was skipped because of an unmet condition check (ConditionPathExists=/etc/krb5.keytab).
Oct 02 10:46:49 localhost systemd[1]: Starting Create List of Static Device Nodes...
Oct 02 10:46:49 localhost systemd[1]: Starting Load Kernel Module configfs...
Oct 02 10:46:49 localhost systemd[1]: Starting Load Kernel Module drm...
Oct 02 10:46:49 localhost systemd[1]: Starting Load Kernel Module efi_pstore...
Oct 02 10:46:49 localhost systemd[1]: Starting Load Kernel Module fuse...
Oct 02 10:46:49 localhost systemd[1]: Starting Read and set NIS domainname from /etc/sysconfig/network...
Oct 02 10:46:49 localhost systemd[1]: systemd-fsck-root.service: Deactivated successfully.
Oct 02 10:46:49 localhost systemd[1]: Stopped File System Check on Root Device.
Oct 02 10:46:49 localhost systemd[1]: Stopped Journal Service.
Oct 02 10:46:49 localhost systemd[1]: Starting Journal Service...
Oct 02 10:46:49 localhost systemd[1]: Load Kernel Modules was skipped because no trigger condition checks were met.
Oct 02 10:46:49 localhost systemd[1]: Starting Generate network units from Kernel command line...
Oct 02 10:46:49 localhost systemd[1]: TPM2 PCR Machine ID Measurement was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/StubPcrKernelImage-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f).
Oct 02 10:46:49 localhost systemd[1]: Starting Remount Root and Kernel File Systems...
Oct 02 10:46:49 localhost systemd[1]: Repartition Root Disk was skipped because no trigger condition checks were met.
Oct 02 10:46:49 localhost systemd[1]: Starting Apply Kernel Variables...
Oct 02 10:46:49 localhost kernel: fuse: init (API version 7.37)
Oct 02 10:46:49 localhost systemd[1]: Starting Coldplug All udev Devices...
Oct 02 10:46:49 localhost systemd[1]: Mounted Huge Pages File System.
Oct 02 10:46:49 localhost systemd[1]: Mounted POSIX Message Queue File System.
Oct 02 10:46:49 localhost systemd[1]: Mounted Kernel Debug File System.
Oct 02 10:46:49 localhost systemd[1]: Mounted Kernel Trace File System.
Oct 02 10:46:49 localhost systemd[1]: Finished Create List of Static Device Nodes.
Oct 02 10:46:49 localhost systemd[1]: modprobe@configfs.service: Deactivated successfully.
Oct 02 10:46:49 localhost systemd[1]: Finished Load Kernel Module configfs.
Oct 02 10:46:49 localhost systemd[1]: modprobe@efi_pstore.service: Deactivated successfully.
Oct 02 10:46:49 localhost systemd[1]: Finished Load Kernel Module efi_pstore.
Oct 02 10:46:49 localhost kernel: xfs filesystem being remounted at / supports timestamps until 2038 (0x7fffffff)
Oct 02 10:46:49 localhost systemd[1]: modprobe@fuse.service: Deactivated successfully.
Oct 02 10:46:49 localhost systemd[1]: Finished Load Kernel Module fuse.
Oct 02 10:46:49 localhost systemd[1]: Finished Read and set NIS domainname from /etc/sysconfig/network.
Oct 02 10:46:49 localhost systemd-journald[682]: Journal started
Oct 02 10:46:49 localhost systemd-journald[682]: Runtime Journal (/run/log/journal/42833e1b511a402df82cb9cb2fc36491) is 8.0M, max 153.5M, 145.5M free.
Oct 02 10:46:49 localhost systemd[1]: Queued start job for default target Multi-User System.
Oct 02 10:46:49 localhost systemd[1]: systemd-journald.service: Deactivated successfully.
Oct 02 10:46:49 localhost systemd[1]: Started Journal Service.
Oct 02 10:46:49 localhost systemd[1]: Finished Generate network units from Kernel command line.
Oct 02 10:46:49 localhost kernel: ACPI: bus type drm_connector registered
Oct 02 10:46:49 localhost systemd[1]: Finished Remount Root and Kernel File Systems.
Oct 02 10:46:49 localhost systemd[1]: modprobe@drm.service: Deactivated successfully.
Oct 02 10:46:49 localhost systemd[1]: Finished Load Kernel Module drm.
Oct 02 10:46:49 localhost systemd[1]: Finished Apply Kernel Variables.
Oct 02 10:46:49 localhost systemd[1]: Mounting FUSE Control File System...
Oct 02 10:46:49 localhost systemd[1]: First Boot Wizard was skipped because of an unmet condition check (ConditionFirstBoot=yes).
Oct 02 10:46:49 localhost systemd[1]: Starting Rebuild Hardware Database...
Oct 02 10:46:49 localhost systemd[1]: Starting Flush Journal to Persistent Storage...
Oct 02 10:46:49 localhost systemd[1]: Platform Persistent Storage Archival was skipped because of an unmet condition check (ConditionDirectoryNotEmpty=/sys/fs/pstore).
Oct 02 10:46:49 localhost systemd[1]: Starting Load/Save OS Random Seed...
Oct 02 10:46:49 localhost systemd[1]: Starting Create System Users...
Oct 02 10:46:49 localhost systemd[1]: Mounted FUSE Control File System.
Oct 02 10:46:49 localhost systemd[1]: Finished Coldplug All udev Devices.
Oct 02 10:46:49 localhost systemd-journald[682]: Runtime Journal (/run/log/journal/42833e1b511a402df82cb9cb2fc36491) is 8.0M, max 153.5M, 145.5M free.
Oct 02 10:46:49 localhost systemd-journald[682]: Received client request to flush runtime journal.
Oct 02 10:46:49 localhost systemd[1]: Finished Flush Journal to Persistent Storage.
Oct 02 10:46:50 localhost systemd[1]: Finished Create System Users.
Oct 02 10:46:50 localhost systemd[1]: Starting Create Static Device Nodes in /dev...
Oct 02 10:46:50 localhost systemd[1]: Finished Load/Save OS Random Seed.
Oct 02 10:46:50 localhost systemd[1]: First Boot Complete was skipped because of an unmet condition check (ConditionFirstBoot=yes).
Oct 02 10:46:50 localhost systemd[1]: Finished Create Static Device Nodes in /dev.
Oct 02 10:46:50 localhost systemd[1]: Reached target Preparation for Local File Systems.
Oct 02 10:46:50 localhost systemd[1]: Reached target Local File Systems.
Oct 02 10:46:50 localhost systemd[1]: Starting Rebuild Dynamic Linker Cache...
Oct 02 10:46:50 localhost systemd[1]: Mark the need to relabel after reboot was skipped because of an unmet condition check (ConditionSecurity=!selinux).
Oct 02 10:46:50 localhost systemd[1]: Set Up Additional Binary Formats was skipped because no trigger condition checks were met.
Oct 02 10:46:50 localhost systemd[1]: Update Boot Loader Random Seed was skipped because no trigger condition checks were met.
Oct 02 10:46:50 localhost systemd[1]: Starting Automatic Boot Loader Update...
Oct 02 10:46:50 localhost systemd[1]: Commit a transient machine-id on disk was skipped because of an unmet condition check (ConditionPathIsMountPoint=/etc/machine-id).
Oct 02 10:46:50 localhost systemd[1]: Starting Create Volatile Files and Directories...
Oct 02 10:46:50 localhost bootctl[703]: Couldn't find EFI system partition, skipping.
Oct 02 10:46:50 localhost systemd[1]: Finished Automatic Boot Loader Update.
Oct 02 10:46:50 localhost systemd[1]: Finished Create Volatile Files and Directories.
Oct 02 10:46:50 localhost systemd[1]: Starting Security Auditing Service...
Oct 02 10:46:50 localhost systemd[1]: Starting RPC Bind...
Oct 02 10:46:50 localhost systemd[1]: Starting Rebuild Journal Catalog...
Oct 02 10:46:50 localhost auditd[709]: audit dispatcher initialized with q_depth=2000 and 1 active plugins
Oct 02 10:46:50 localhost auditd[709]: Init complete, auditd 3.1.5 listening for events (startup state enable)
Oct 02 10:46:50 localhost systemd[1]: Finished Rebuild Journal Catalog.
Oct 02 10:46:50 localhost systemd[1]: Started RPC Bind.
Oct 02 10:46:50 localhost augenrules[714]: /sbin/augenrules: No change
Oct 02 10:46:50 localhost augenrules[729]: No rules
Oct 02 10:46:50 localhost augenrules[729]: enabled 1
Oct 02 10:46:50 localhost augenrules[729]: failure 1
Oct 02 10:46:50 localhost augenrules[729]: pid 709
Oct 02 10:46:50 localhost augenrules[729]: rate_limit 0
Oct 02 10:46:50 localhost augenrules[729]: backlog_limit 8192
Oct 02 10:46:50 localhost augenrules[729]: lost 0
Oct 02 10:46:50 localhost augenrules[729]: backlog 0
Oct 02 10:46:50 localhost augenrules[729]: backlog_wait_time 60000
Oct 02 10:46:50 localhost augenrules[729]: backlog_wait_time_actual 0
Oct 02 10:46:50 localhost augenrules[729]: enabled 1
Oct 02 10:46:50 localhost augenrules[729]: failure 1
Oct 02 10:46:50 localhost augenrules[729]: pid 709
Oct 02 10:46:50 localhost augenrules[729]: rate_limit 0
Oct 02 10:46:50 localhost augenrules[729]: backlog_limit 8192
Oct 02 10:46:50 localhost augenrules[729]: lost 0
Oct 02 10:46:50 localhost augenrules[729]: backlog 0
Oct 02 10:46:50 localhost augenrules[729]: backlog_wait_time 60000
Oct 02 10:46:50 localhost augenrules[729]: backlog_wait_time_actual 0
Oct 02 10:46:50 localhost augenrules[729]: enabled 1
Oct 02 10:46:50 localhost augenrules[729]: failure 1
Oct 02 10:46:50 localhost augenrules[729]: pid 709
Oct 02 10:46:50 localhost augenrules[729]: rate_limit 0
Oct 02 10:46:50 localhost augenrules[729]: backlog_limit 8192
Oct 02 10:46:50 localhost augenrules[729]: lost 0
Oct 02 10:46:50 localhost augenrules[729]: backlog 0
Oct 02 10:46:50 localhost augenrules[729]: backlog_wait_time 60000
Oct 02 10:46:50 localhost augenrules[729]: backlog_wait_time_actual 0
Oct 02 10:46:50 localhost systemd[1]: Started Security Auditing Service.
Oct 02 10:46:50 localhost systemd[1]: Starting Record System Boot/Shutdown in UTMP...
Oct 02 10:46:50 localhost systemd[1]: Finished Record System Boot/Shutdown in UTMP.
Oct 02 10:46:51 localhost systemd[1]: Finished Rebuild Hardware Database.
Oct 02 10:46:51 localhost systemd[1]: Starting Rule-based Manager for Device Events and Files...
Oct 02 10:46:51 localhost systemd-udevd[737]: Using default interface naming scheme 'rhel-9.0'.
Oct 02 10:46:51 localhost systemd[1]: Started Rule-based Manager for Device Events and Files.
Oct 02 10:46:51 localhost systemd[1]: Starting Load Kernel Module configfs...
Oct 02 10:46:51 localhost systemd[1]: modprobe@configfs.service: Deactivated successfully.
Oct 02 10:46:51 localhost systemd[1]: Finished Load Kernel Module configfs.
Oct 02 10:46:51 localhost systemd[1]: Condition check resulted in /dev/ttyS0 being skipped.
Oct 02 10:46:51 localhost kernel: input: PC Speaker as /devices/platform/pcspkr/input/input6
Oct 02 10:46:51 localhost kernel: piix4_smbus 0000:00:01.3: SMBus Host Controller at 0x700, revision 0
Oct 02 10:46:51 localhost kernel: i2c i2c-0: 1/1 memory slots populated (from DMI)
Oct 02 10:46:51 localhost kernel: i2c i2c-0: Memory type 0x07 not supported yet, not instantiating SPD
Oct 02 10:46:51 localhost systemd-udevd[751]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 10:46:51 localhost kernel: [drm] pci: virtio-vga detected at 0000:00:02.0
Oct 02 10:46:51 localhost kernel: virtio-pci 0000:00:02.0: vgaarb: deactivate vga console
Oct 02 10:46:51 localhost kernel: Console: switching to colour dummy device 80x25
Oct 02 10:46:51 localhost kernel: [drm] features: -virgl +edid -resource_blob -host_visible
Oct 02 10:46:51 localhost kernel: [drm] features: -context_init
Oct 02 10:46:51 localhost kernel: [drm] number of scanouts: 1
Oct 02 10:46:51 localhost kernel: [drm] number of cap sets: 0
Oct 02 10:46:51 localhost kernel: [drm] Initialized virtio_gpu 0.1.0 for 0000:00:02.0 on minor 0
Oct 02 10:46:51 localhost kernel: fbcon: virtio_gpudrmfb (fb0) is primary device
Oct 02 10:46:51 localhost kernel: Console: switching to colour frame buffer device 128x48
Oct 02 10:46:51 localhost kernel: virtio-pci 0000:00:02.0: [drm] fb0: virtio_gpudrmfb frame buffer device
Oct 02 10:46:51 localhost kernel: kvm_amd: TSC scaling supported
Oct 02 10:46:51 localhost kernel: kvm_amd: Nested Virtualization enabled
Oct 02 10:46:51 localhost kernel: kvm_amd: Nested Paging enabled
Oct 02 10:46:51 localhost kernel: kvm_amd: LBR virtualization supported
Oct 02 10:46:51 localhost systemd[1]: Finished Rebuild Dynamic Linker Cache.
Oct 02 10:46:51 localhost systemd[1]: Starting Update is Completed...
Oct 02 10:46:51 localhost systemd[1]: Finished Update is Completed.
Oct 02 10:46:51 localhost systemd[1]: Reached target System Initialization.
Oct 02 10:46:51 localhost systemd[1]: Started dnf makecache --timer.
Oct 02 10:46:51 localhost systemd[1]: Started Daily rotation of log files.
Oct 02 10:46:51 localhost systemd[1]: Started Daily Cleanup of Temporary Directories.
Oct 02 10:46:51 localhost systemd[1]: Reached target Timer Units.
Oct 02 10:46:51 localhost systemd[1]: Listening on D-Bus System Message Bus Socket.
Oct 02 10:46:51 localhost systemd[1]: Listening on SSSD Kerberos Cache Manager responder socket.
Oct 02 10:46:51 localhost systemd[1]: Reached target Socket Units.
Oct 02 10:46:51 localhost systemd[1]: Starting D-Bus System Message Bus...
Oct 02 10:46:51 localhost systemd[1]: TPM2 PCR Barrier (Initialization) was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/StubPcrKernelImage-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f).
Oct 02 10:46:51 localhost systemd[1]: Started D-Bus System Message Bus.
Oct 02 10:46:51 localhost systemd[1]: Reached target Basic System.
Oct 02 10:46:51 localhost dbus-broker-lau[817]: Ready
Oct 02 10:46:51 localhost systemd[1]: Starting NTP client/server...
Oct 02 10:46:51 localhost systemd[1]: Starting Cloud-init: Local Stage (pre-network)...
Oct 02 10:46:51 localhost systemd[1]: Starting Restore /run/initramfs on shutdown...
Oct 02 10:46:51 localhost systemd[1]: Starting IPv4 firewall with iptables...
Oct 02 10:46:51 localhost systemd[1]: Started irqbalance daemon.
Oct 02 10:46:51 localhost systemd[1]: Load CPU microcode update was skipped because of an unmet condition check (ConditionPathExists=/sys/devices/system/cpu/microcode/reload).
Oct 02 10:46:51 localhost systemd[1]: OpenSSH ecdsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
Oct 02 10:46:51 localhost systemd[1]: OpenSSH ed25519 Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
Oct 02 10:46:51 localhost systemd[1]: OpenSSH rsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
Oct 02 10:46:51 localhost systemd[1]: Reached target sshd-keygen.target.
Oct 02 10:46:51 localhost systemd[1]: System Security Services Daemon was skipped because no trigger condition checks were met.
Oct 02 10:46:51 localhost systemd[1]: Reached target User and Group Name Lookups.
Oct 02 10:46:51 localhost systemd[1]: Starting User Login Management...
Oct 02 10:46:51 localhost systemd[1]: Finished Restore /run/initramfs on shutdown.
Oct 02 10:46:51 localhost systemd-logind[827]: New seat seat0.
Oct 02 10:46:51 localhost systemd-logind[827]: Watching system buttons on /dev/input/event0 (Power Button)
Oct 02 10:46:51 localhost systemd-logind[827]: Watching system buttons on /dev/input/event1 (AT Translated Set 2 keyboard)
Oct 02 10:46:51 localhost systemd[1]: Started User Login Management.
Oct 02 10:46:51 localhost kernel: Warning: Deprecated Driver is detected: nft_compat will not be maintained in a future major release and may be disabled
Oct 02 10:46:52 localhost kernel: Warning: Deprecated Driver is detected: nft_compat_module_init will not be maintained in a future major release and may be disabled
Oct 02 10:46:52 localhost chronyd[837]: chronyd version 4.6.1 starting (+CMDMON +NTP +REFCLOCK +RTC +PRIVDROP +SCFILTER +SIGND +ASYNCDNS +NTS +SECHASH +IPV6 +DEBUG)
Oct 02 10:46:52 localhost chronyd[837]: Loaded 0 symmetric keys
Oct 02 10:46:52 localhost chronyd[837]: Using right/UTC timezone to obtain leap second data
Oct 02 10:46:52 localhost chronyd[837]: Loaded seccomp filter (level 2)
Oct 02 10:46:52 localhost systemd[1]: Started NTP client/server.
Oct 02 10:46:52 localhost iptables.init[822]: iptables: Applying firewall rules: [  OK  ]
Oct 02 10:46:52 localhost systemd[1]: Finished IPv4 firewall with iptables.
Oct 02 10:46:53 localhost cloud-init[845]: Cloud-init v. 24.4-7.el9 running 'init-local' at Thu, 02 Oct 2025 10:46:53 +0000. Up 11.51 seconds.
Oct 02 10:46:54 localhost kernel: ISO 9660 Extensions: Microsoft Joliet Level 3
Oct 02 10:46:54 localhost kernel: ISO 9660 Extensions: RRIP_1991A
Oct 02 10:46:54 localhost systemd[1]: run-cloud\x2dinit-tmp-tmpm5dpjphy.mount: Deactivated successfully.
Oct 02 10:46:54 localhost systemd[1]: Starting Hostname Service...
Oct 02 10:46:54 localhost systemd[1]: Started Hostname Service.
Oct 02 10:46:54 np0005466011.novalocal systemd-hostnamed[859]: Hostname set to <np0005466011.novalocal> (static)
Oct 02 10:46:54 np0005466011.novalocal systemd[1]: Finished Cloud-init: Local Stage (pre-network).
Oct 02 10:46:54 np0005466011.novalocal systemd[1]: Reached target Preparation for Network.
Oct 02 10:46:54 np0005466011.novalocal systemd[1]: Starting Network Manager...
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.8011] NetworkManager (version 1.54.1-1.el9) is starting... (boot:1e8e4eaa-6890-46e6-baf5-d7fee48b6edb)
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.8017] Read config: /etc/NetworkManager/NetworkManager.conf, /run/NetworkManager/conf.d/15-carrier-timeout.conf
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.8476] manager[0x558b0289a080]: monitoring kernel firmware directory '/lib/firmware'.
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.8519] hostname: hostname: using hostnamed
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.8519] hostname: static hostname changed from (none) to "np0005466011.novalocal"
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.8523] dns-mgr: init: dns=default,systemd-resolved rc-manager=symlink (auto)
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.8658] manager[0x558b0289a080]: rfkill: Wi-Fi hardware radio set enabled
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.8659] manager[0x558b0289a080]: rfkill: WWAN hardware radio set enabled
Oct 02 10:46:54 np0005466011.novalocal systemd[1]: Listening on Load/Save RF Kill Switch Status /dev/rfkill Watch.
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9361] Loaded device plugin: NMTeamFactory (/usr/lib64/NetworkManager/1.54.1-1.el9/libnm-device-plugin-team.so)
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9361] manager: rfkill: Wi-Fi enabled by radio killswitch; enabled by state file
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9362] manager: rfkill: WWAN enabled by radio killswitch; enabled by state file
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9362] manager: Networking is enabled by state file
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9364] settings: Loaded settings plugin: keyfile (internal)
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9420] settings: Loaded settings plugin: ifcfg-rh ("/usr/lib64/NetworkManager/1.54.1-1.el9/libnm-settings-plugin-ifcfg-rh.so")
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9451] Warning: the ifcfg-rh plugin is deprecated, please migrate connections to the keyfile format using "nmcli connection migrate"
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9498] dhcp: init: Using DHCP client 'internal'
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9503] manager: (lo): new Loopback device (/org/freedesktop/NetworkManager/Devices/1)
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9519] device (lo): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9540] device (lo): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external')
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9548] device (lo): Activation: starting connection 'lo' (9c045e93-5256-40fb-a074-0144ed71625c)
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9557] manager: (eth0): new Ethernet device (/org/freedesktop/NetworkManager/Devices/2)
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9561] device (eth0): state change: unmanaged -> unavailable (reason 'managed', managed-type: 'external')
Oct 02 10:46:54 np0005466011.novalocal systemd[1]: Starting Network Manager Script Dispatcher Service...
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9593] bus-manager: acquired D-Bus service "org.freedesktop.NetworkManager"
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9595] device (lo): state change: disconnected -> prepare (reason 'none', managed-type: 'external')
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9597] device (lo): state change: prepare -> config (reason 'none', managed-type: 'external')
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9598] device (lo): state change: config -> ip-config (reason 'none', managed-type: 'external')
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9601] device (eth0): carrier: link connected
Oct 02 10:46:54 np0005466011.novalocal systemd[1]: Started Network Manager.
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9606] device (lo): state change: ip-config -> ip-check (reason 'none', managed-type: 'external')
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9610] device (eth0): state change: unavailable -> disconnected (reason 'carrier-changed', managed-type: 'full')
Oct 02 10:46:54 np0005466011.novalocal systemd[1]: Reached target Network.
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9626] policy: auto-activating connection 'System eth0' (5fb06bd0-0bb0-7ffb-45f1-d6edd65f3e03)
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9631] device (eth0): Activation: starting connection 'System eth0' (5fb06bd0-0bb0-7ffb-45f1-d6edd65f3e03)
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9632] device (eth0): state change: disconnected -> prepare (reason 'none', managed-type: 'full')
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9635] manager: NetworkManager state is now CONNECTING
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9640] device (eth0): state change: prepare -> config (reason 'none', managed-type: 'full')
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9648] device (eth0): state change: config -> ip-config (reason 'none', managed-type: 'full')
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9651] dhcp4 (eth0): activation: beginning transaction (timeout in 45 seconds)
Oct 02 10:46:54 np0005466011.novalocal systemd[1]: Starting Network Manager Wait Online...
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9686] dhcp4 (eth0): state changed new lease, address=38.129.56.69
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9693] policy: set 'System eth0' (eth0) as default for IPv4 routing and DNS
Oct 02 10:46:54 np0005466011.novalocal systemd[1]: Starting GSSAPI Proxy Daemon...
Oct 02 10:46:54 np0005466011.novalocal NetworkManager[864]: <info>  [1759402014.9716] device (eth0): state change: ip-config -> ip-check (reason 'none', managed-type: 'full')
Oct 02 10:46:55 np0005466011.novalocal systemd[1]: Started Network Manager Script Dispatcher Service.
Oct 02 10:46:55 np0005466011.novalocal NetworkManager[864]: <info>  [1759402015.0100] device (lo): state change: ip-check -> secondaries (reason 'none', managed-type: 'external')
Oct 02 10:46:55 np0005466011.novalocal NetworkManager[864]: <info>  [1759402015.0104] device (eth0): state change: ip-check -> secondaries (reason 'none', managed-type: 'full')
Oct 02 10:46:55 np0005466011.novalocal NetworkManager[864]: <info>  [1759402015.0105] device (lo): state change: secondaries -> activated (reason 'none', managed-type: 'external')
Oct 02 10:46:55 np0005466011.novalocal NetworkManager[864]: <info>  [1759402015.0111] device (lo): Activation: successful, device activated.
Oct 02 10:46:55 np0005466011.novalocal NetworkManager[864]: <info>  [1759402015.0117] device (eth0): state change: secondaries -> activated (reason 'none', managed-type: 'full')
Oct 02 10:46:55 np0005466011.novalocal NetworkManager[864]: <info>  [1759402015.0121] manager: NetworkManager state is now CONNECTED_SITE
Oct 02 10:46:55 np0005466011.novalocal NetworkManager[864]: <info>  [1759402015.0125] device (eth0): Activation: successful, device activated.
Oct 02 10:46:55 np0005466011.novalocal NetworkManager[864]: <info>  [1759402015.0131] manager: NetworkManager state is now CONNECTED_GLOBAL
Oct 02 10:46:55 np0005466011.novalocal NetworkManager[864]: <info>  [1759402015.0142] manager: startup complete
Oct 02 10:46:55 np0005466011.novalocal systemd[1]: Started GSSAPI Proxy Daemon.
Oct 02 10:46:55 np0005466011.novalocal systemd[1]: RPC security service for NFS client and server was skipped because of an unmet condition check (ConditionPathExists=/etc/krb5.keytab).
Oct 02 10:46:55 np0005466011.novalocal systemd[1]: Reached target NFS client services.
Oct 02 10:46:55 np0005466011.novalocal systemd[1]: Reached target Preparation for Remote File Systems.
Oct 02 10:46:55 np0005466011.novalocal systemd[1]: Reached target Remote File Systems.
Oct 02 10:46:55 np0005466011.novalocal systemd[1]: TPM2 PCR Barrier (User) was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/StubPcrKernelImage-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f).
Oct 02 10:46:55 np0005466011.novalocal systemd[1]: Finished Network Manager Wait Online.
Oct 02 10:46:55 np0005466011.novalocal systemd[1]: Starting Cloud-init: Network Stage...
Oct 02 10:46:55 np0005466011.novalocal cloud-init[930]: Cloud-init v. 24.4-7.el9 running 'init' at Thu, 02 Oct 2025 10:46:55 +0000. Up 13.00 seconds.
Oct 02 10:46:55 np0005466011.novalocal cloud-init[930]: ci-info: +++++++++++++++++++++++++++++++++++++++Net device info+++++++++++++++++++++++++++++++++++++++
Oct 02 10:46:55 np0005466011.novalocal cloud-init[930]: ci-info: +--------+------+------------------------------+---------------+--------+-------------------+
Oct 02 10:46:55 np0005466011.novalocal cloud-init[930]: ci-info: | Device |  Up  |           Address            |      Mask     | Scope  |     Hw-Address    |
Oct 02 10:46:55 np0005466011.novalocal cloud-init[930]: ci-info: +--------+------+------------------------------+---------------+--------+-------------------+
Oct 02 10:46:55 np0005466011.novalocal cloud-init[930]: ci-info: |  eth0  | True |         38.129.56.69         | 255.255.255.0 | global | fa:16:3e:fc:5e:78 |
Oct 02 10:46:55 np0005466011.novalocal cloud-init[930]: ci-info: |  eth0  | True | fe80::f816:3eff:fefc:5e78/64 |       .       |  link  | fa:16:3e:fc:5e:78 |
Oct 02 10:46:55 np0005466011.novalocal cloud-init[930]: ci-info: |   lo   | True |          127.0.0.1           |   255.0.0.0   |  host  |         .         |
Oct 02 10:46:55 np0005466011.novalocal cloud-init[930]: ci-info: |   lo   | True |           ::1/128            |       .       |  host  |         .         |
Oct 02 10:46:55 np0005466011.novalocal cloud-init[930]: ci-info: +--------+------+------------------------------+---------------+--------+-------------------+
Oct 02 10:46:55 np0005466011.novalocal cloud-init[930]: ci-info: ++++++++++++++++++++++++++++++++Route IPv4 info++++++++++++++++++++++++++++++++
Oct 02 10:46:55 np0005466011.novalocal cloud-init[930]: ci-info: +-------+-----------------+-------------+-----------------+-----------+-------+
Oct 02 10:46:55 np0005466011.novalocal cloud-init[930]: ci-info: | Route |   Destination   |   Gateway   |     Genmask     | Interface | Flags |
Oct 02 10:46:55 np0005466011.novalocal cloud-init[930]: ci-info: +-------+-----------------+-------------+-----------------+-----------+-------+
Oct 02 10:46:55 np0005466011.novalocal cloud-init[930]: ci-info: |   0   |     0.0.0.0     | 38.129.56.1 |     0.0.0.0     |    eth0   |   UG  |
Oct 02 10:46:55 np0005466011.novalocal cloud-init[930]: ci-info: |   1   |   38.129.56.0   |   0.0.0.0   |  255.255.255.0  |    eth0   |   U   |
Oct 02 10:46:55 np0005466011.novalocal cloud-init[930]: ci-info: |   2   | 169.254.169.254 | 38.129.56.5 | 255.255.255.255 |    eth0   |  UGH  |
Oct 02 10:46:55 np0005466011.novalocal cloud-init[930]: ci-info: +-------+-----------------+-------------+-----------------+-----------+-------+
Oct 02 10:46:55 np0005466011.novalocal cloud-init[930]: ci-info: +++++++++++++++++++Route IPv6 info+++++++++++++++++++
Oct 02 10:46:55 np0005466011.novalocal cloud-init[930]: ci-info: +-------+-------------+---------+-----------+-------+
Oct 02 10:46:55 np0005466011.novalocal cloud-init[930]: ci-info: | Route | Destination | Gateway | Interface | Flags |
Oct 02 10:46:55 np0005466011.novalocal cloud-init[930]: ci-info: +-------+-------------+---------+-----------+-------+
Oct 02 10:46:55 np0005466011.novalocal cloud-init[930]: ci-info: |   1   |  fe80::/64  |    ::   |    eth0   |   U   |
Oct 02 10:46:55 np0005466011.novalocal cloud-init[930]: ci-info: |   3   |  multicast  |    ::   |    eth0   |   U   |
Oct 02 10:46:55 np0005466011.novalocal cloud-init[930]: ci-info: +-------+-------------+---------+-----------+-------+
Oct 02 10:46:57 np0005466011.novalocal useradd[997]: new group: name=cloud-user, GID=1001
Oct 02 10:46:57 np0005466011.novalocal useradd[997]: new user: name=cloud-user, UID=1001, GID=1001, home=/home/cloud-user, shell=/bin/bash, from=none
Oct 02 10:46:57 np0005466011.novalocal useradd[997]: add 'cloud-user' to group 'adm'
Oct 02 10:46:57 np0005466011.novalocal useradd[997]: add 'cloud-user' to group 'systemd-journal'
Oct 02 10:46:57 np0005466011.novalocal useradd[997]: add 'cloud-user' to shadow group 'adm'
Oct 02 10:46:57 np0005466011.novalocal useradd[997]: add 'cloud-user' to shadow group 'systemd-journal'
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: Generating public/private rsa key pair.
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: Your identification has been saved in /etc/ssh/ssh_host_rsa_key
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: Your public key has been saved in /etc/ssh/ssh_host_rsa_key.pub
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: The key fingerprint is:
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: SHA256:ZEfXvSKcoTcTzB8VJ9E70UYlqO2tvRiDIT7qqc7GNkk root@np0005466011.novalocal
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: The key's randomart image is:
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: +---[RSA 3072]----+
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: |          + .o=BB|
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: |         . *...+*|
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: |        o +o= ..+|
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: |       o o.B.o + |
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: |        S o.+.. .|
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: |    E  . . o. .  |
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: |   o .  o . oo   |
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: |   .*  o .  .+.  |
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: |   ++++     . .. |
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: +----[SHA256]-----+
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: Generating public/private ecdsa key pair.
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: Your identification has been saved in /etc/ssh/ssh_host_ecdsa_key
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: Your public key has been saved in /etc/ssh/ssh_host_ecdsa_key.pub
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: The key fingerprint is:
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: SHA256:LgXm1cLX7hmpAb9pRk+/RFe4v7KSt1sj531oJDP4ro4 root@np0005466011.novalocal
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: The key's randomart image is:
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: +---[ECDSA 256]---+
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: |                 |
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: |       . . .   . |
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: |      o = o . . .|
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: |     o o = . . ..|
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: |      . S +.= o .|
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: |       o ..O+=.o |
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: |      . . *.=*o=.|
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: |       . + o.+B.=|
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: |        E.oo+=*oo|
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: +----[SHA256]-----+
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: Generating public/private ed25519 key pair.
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: Your identification has been saved in /etc/ssh/ssh_host_ed25519_key
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: Your public key has been saved in /etc/ssh/ssh_host_ed25519_key.pub
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: The key fingerprint is:
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: SHA256:65Gq8tB2cQu7muh29KA78uTeDwNkFN54HvnlwG0fXxQ root@np0005466011.novalocal
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: The key's randomart image is:
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: +--[ED25519 256]--+
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: |  o.          E. |
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: | o o o .     .   |
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: |  = = o + .   .  |
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: | o o o = . o .   |
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: |  . . + S . .    |
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: |   oo  = +       |
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: |  oo=oo =        |
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: |.o==.=.+ .       |
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: | *B**++ .        |
Oct 02 10:46:58 np0005466011.novalocal cloud-init[930]: +----[SHA256]-----+
Oct 02 10:46:58 np0005466011.novalocal sm-notify[1012]: Version 2.5.4 starting
Oct 02 10:46:58 np0005466011.novalocal systemd[1]: Finished Cloud-init: Network Stage.
Oct 02 10:46:58 np0005466011.novalocal systemd[1]: Reached target Cloud-config availability.
Oct 02 10:46:58 np0005466011.novalocal systemd[1]: Reached target Network is Online.
Oct 02 10:46:58 np0005466011.novalocal systemd[1]: Starting Cloud-init: Config Stage...
Oct 02 10:46:58 np0005466011.novalocal systemd[1]: Starting Notify NFS peers of a restart...
Oct 02 10:46:58 np0005466011.novalocal systemd[1]: Starting System Logging Service...
Oct 02 10:46:58 np0005466011.novalocal systemd[1]: Starting OpenSSH server daemon...
Oct 02 10:46:58 np0005466011.novalocal systemd[1]: Starting Permit User Sessions...
Oct 02 10:46:58 np0005466011.novalocal systemd[1]: Started Notify NFS peers of a restart.
Oct 02 10:46:58 np0005466011.novalocal sshd[1014]: Server listening on 0.0.0.0 port 22.
Oct 02 10:46:58 np0005466011.novalocal sshd[1014]: Server listening on :: port 22.
Oct 02 10:46:58 np0005466011.novalocal systemd[1]: Started OpenSSH server daemon.
Oct 02 10:46:58 np0005466011.novalocal systemd[1]: Finished Permit User Sessions.
Oct 02 10:46:58 np0005466011.novalocal systemd[1]: Started Command Scheduler.
Oct 02 10:46:58 np0005466011.novalocal systemd[1]: Started Getty on tty1.
Oct 02 10:46:58 np0005466011.novalocal crond[1016]: (CRON) STARTUP (1.5.7)
Oct 02 10:46:58 np0005466011.novalocal crond[1016]: (CRON) INFO (Syslog will be used instead of sendmail.)
Oct 02 10:46:58 np0005466011.novalocal crond[1016]: (CRON) INFO (RANDOM_DELAY will be scaled with factor 68% if used.)
Oct 02 10:46:58 np0005466011.novalocal crond[1016]: (CRON) INFO (running with inotify support)
Oct 02 10:46:58 np0005466011.novalocal systemd[1]: Started Serial Getty on ttyS0.
Oct 02 10:46:58 np0005466011.novalocal systemd[1]: Reached target Login Prompts.
Oct 02 10:46:58 np0005466011.novalocal rsyslogd[1013]: [origin software="rsyslogd" swVersion="8.2506.0-2.el9" x-pid="1013" x-info="https://www.rsyslog.com"] start
Oct 02 10:46:58 np0005466011.novalocal systemd[1]: Started System Logging Service.
Oct 02 10:46:58 np0005466011.novalocal rsyslogd[1013]: imjournal: No statefile exists, /var/lib/rsyslog/imjournal.state will be created (ignore if this is first run): No such file or directory [v8.2506.0-2.el9 try https://www.rsyslog.com/e/2040 ]
Oct 02 10:46:58 np0005466011.novalocal systemd[1]: Reached target Multi-User System.
Oct 02 10:46:58 np0005466011.novalocal systemd[1]: Starting Record Runlevel Change in UTMP...
Oct 02 10:46:58 np0005466011.novalocal systemd[1]: systemd-update-utmp-runlevel.service: Deactivated successfully.
Oct 02 10:46:58 np0005466011.novalocal systemd[1]: Finished Record Runlevel Change in UTMP.
Oct 02 10:46:58 np0005466011.novalocal rsyslogd[1013]: imjournal: journal files changed, reloading...  [v8.2506.0-2.el9 try https://www.rsyslog.com/e/0 ]
Oct 02 10:46:58 np0005466011.novalocal cloud-init[1026]: Cloud-init v. 24.4-7.el9 running 'modules:config' at Thu, 02 Oct 2025 10:46:58 +0000. Up 16.19 seconds.
Oct 02 10:46:58 np0005466011.novalocal systemd[1]: Finished Cloud-init: Config Stage.
Oct 02 10:46:58 np0005466011.novalocal systemd[1]: Starting Cloud-init: Final Stage...
Oct 02 10:46:58 np0005466011.novalocal cloud-init[1030]: Cloud-init v. 24.4-7.el9 running 'modules:final' at Thu, 02 Oct 2025 10:46:58 +0000. Up 16.62 seconds.
Oct 02 10:46:59 np0005466011.novalocal cloud-init[1032]: #############################################################
Oct 02 10:46:59 np0005466011.novalocal cloud-init[1033]: -----BEGIN SSH HOST KEY FINGERPRINTS-----
Oct 02 10:46:59 np0005466011.novalocal cloud-init[1035]: 256 SHA256:LgXm1cLX7hmpAb9pRk+/RFe4v7KSt1sj531oJDP4ro4 root@np0005466011.novalocal (ECDSA)
Oct 02 10:46:59 np0005466011.novalocal cloud-init[1037]: 256 SHA256:65Gq8tB2cQu7muh29KA78uTeDwNkFN54HvnlwG0fXxQ root@np0005466011.novalocal (ED25519)
Oct 02 10:46:59 np0005466011.novalocal cloud-init[1039]: 3072 SHA256:ZEfXvSKcoTcTzB8VJ9E70UYlqO2tvRiDIT7qqc7GNkk root@np0005466011.novalocal (RSA)
Oct 02 10:46:59 np0005466011.novalocal cloud-init[1040]: -----END SSH HOST KEY FINGERPRINTS-----
Oct 02 10:46:59 np0005466011.novalocal cloud-init[1041]: #############################################################
Oct 02 10:46:59 np0005466011.novalocal cloud-init[1030]: Cloud-init v. 24.4-7.el9 finished at Thu, 02 Oct 2025 10:46:59 +0000. Datasource DataSourceConfigDrive [net,ver=2][source=/dev/sr0].  Up 16.80 seconds
Oct 02 10:46:59 np0005466011.novalocal systemd[1]: Finished Cloud-init: Final Stage.
Oct 02 10:46:59 np0005466011.novalocal systemd[1]: Reached target Cloud-init target.
Oct 02 10:46:59 np0005466011.novalocal systemd[1]: Startup finished in 1.616s (kernel) + 3.305s (initrd) + 11.968s (userspace) = 16.891s.
Oct 02 10:46:59 np0005466011.novalocal chronyd[837]: Selected source 23.133.168.244 (2.centos.pool.ntp.org)
Oct 02 10:46:59 np0005466011.novalocal chronyd[837]: System clock TAI offset set to 37 seconds
Oct 02 10:46:59 np0005466011.novalocal chronyd[837]: Selected source 149.56.19.163 (2.centos.pool.ntp.org)
Oct 02 10:47:02 np0005466011.novalocal irqbalance[823]: Cannot change IRQ 25 affinity: Operation not permitted
Oct 02 10:47:02 np0005466011.novalocal irqbalance[823]: IRQ 25 affinity is now unmanaged
Oct 02 10:47:02 np0005466011.novalocal irqbalance[823]: Cannot change IRQ 31 affinity: Operation not permitted
Oct 02 10:47:02 np0005466011.novalocal irqbalance[823]: IRQ 31 affinity is now unmanaged
Oct 02 10:47:02 np0005466011.novalocal irqbalance[823]: Cannot change IRQ 28 affinity: Operation not permitted
Oct 02 10:47:02 np0005466011.novalocal irqbalance[823]: IRQ 28 affinity is now unmanaged
Oct 02 10:47:02 np0005466011.novalocal irqbalance[823]: Cannot change IRQ 32 affinity: Operation not permitted
Oct 02 10:47:02 np0005466011.novalocal irqbalance[823]: IRQ 32 affinity is now unmanaged
Oct 02 10:47:02 np0005466011.novalocal irqbalance[823]: Cannot change IRQ 30 affinity: Operation not permitted
Oct 02 10:47:02 np0005466011.novalocal irqbalance[823]: IRQ 30 affinity is now unmanaged
Oct 02 10:47:02 np0005466011.novalocal irqbalance[823]: Cannot change IRQ 29 affinity: Operation not permitted
Oct 02 10:47:02 np0005466011.novalocal irqbalance[823]: IRQ 29 affinity is now unmanaged
Oct 02 10:47:05 np0005466011.novalocal systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
Oct 02 10:47:07 np0005466011.novalocal sshd-session[1046]: Connection closed by 38.102.83.114 port 42626 [preauth]
Oct 02 10:47:07 np0005466011.novalocal sshd-session[1048]: Unable to negotiate with 38.102.83.114 port 42628: no matching host key type found. Their offer: ssh-ed25519,ssh-ed25519-cert-v01@openssh.com [preauth]
Oct 02 10:47:07 np0005466011.novalocal sshd-session[1052]: Unable to negotiate with 38.102.83.114 port 42638: no matching host key type found. Their offer: ecdsa-sha2-nistp384,ecdsa-sha2-nistp384-cert-v01@openssh.com [preauth]
Oct 02 10:47:07 np0005466011.novalocal sshd-session[1054]: Unable to negotiate with 38.102.83.114 port 42646: no matching host key type found. Their offer: ecdsa-sha2-nistp521,ecdsa-sha2-nistp521-cert-v01@openssh.com [preauth]
Oct 02 10:47:07 np0005466011.novalocal sshd-session[1050]: Connection closed by 38.102.83.114 port 42636 [preauth]
Oct 02 10:47:07 np0005466011.novalocal sshd-session[1060]: Unable to negotiate with 38.102.83.114 port 42674: no matching host key type found. Their offer: ssh-rsa,ssh-rsa-cert-v01@openssh.com [preauth]
Oct 02 10:47:07 np0005466011.novalocal sshd-session[1062]: Unable to negotiate with 38.102.83.114 port 42682: no matching host key type found. Their offer: ssh-dss,ssh-dss-cert-v01@openssh.com [preauth]
Oct 02 10:47:07 np0005466011.novalocal sshd-session[1056]: Connection closed by 38.102.83.114 port 42656 [preauth]
Oct 02 10:47:07 np0005466011.novalocal sshd-session[1058]: Connection closed by 38.102.83.114 port 42664 [preauth]
Oct 02 10:47:24 np0005466011.novalocal systemd[1]: systemd-hostnamed.service: Deactivated successfully.
Oct 02 10:54:42 np0005466011.novalocal sshd-session[1070]: Connection reset by 198.235.24.224 port 64912 [preauth]
Oct 02 10:59:13 np0005466011.novalocal systemd[1]: Starting dnf makecache...
Oct 02 10:59:13 np0005466011.novalocal dnf[1074]: Failed determining last makecache time.
Oct 02 10:59:13 np0005466011.novalocal dnf[1074]: CentOS Stream 9 - BaseOS                         45 kB/s | 6.7 kB     00:00
Oct 02 10:59:13 np0005466011.novalocal dnf[1074]: CentOS Stream 9 - AppStream                      62 kB/s | 6.8 kB     00:00
Oct 02 10:59:14 np0005466011.novalocal dnf[1074]: CentOS Stream 9 - CRB                            68 kB/s | 6.6 kB     00:00
Oct 02 10:59:14 np0005466011.novalocal dnf[1074]: CentOS Stream 9 - Extras packages                30 kB/s | 8.0 kB     00:00
Oct 02 10:59:14 np0005466011.novalocal dnf[1074]: Metadata cache created.
Oct 02 10:59:14 np0005466011.novalocal systemd[1]: dnf-makecache.service: Deactivated successfully.
Oct 02 10:59:14 np0005466011.novalocal systemd[1]: Finished dnf makecache.
Oct 02 11:01:01 np0005466011.novalocal CROND[1083]: (root) CMD (run-parts /etc/cron.hourly)
Oct 02 11:01:01 np0005466011.novalocal run-parts[1086]: (/etc/cron.hourly) starting 0anacron
Oct 02 11:01:01 np0005466011.novalocal anacron[1094]: Anacron started on 2025-10-02
Oct 02 11:01:01 np0005466011.novalocal anacron[1094]: Will run job `cron.daily' in 40 min.
Oct 02 11:01:01 np0005466011.novalocal anacron[1094]: Will run job `cron.weekly' in 60 min.
Oct 02 11:01:01 np0005466011.novalocal anacron[1094]: Will run job `cron.monthly' in 80 min.
Oct 02 11:01:01 np0005466011.novalocal anacron[1094]: Jobs will be executed sequentially
Oct 02 11:01:01 np0005466011.novalocal run-parts[1096]: (/etc/cron.hourly) finished 0anacron
Oct 02 11:01:01 np0005466011.novalocal CROND[1082]: (root) CMDEND (run-parts /etc/cron.hourly)
Oct 02 11:01:41 np0005466011.novalocal sshd-session[1097]: Accepted publickey for zuul from 38.102.83.114 port 50926 ssh2: RSA SHA256:zhs3MiW0JhxzckYcMHQES8SMYHj1iGcomnyzmbiwor8
Oct 02 11:01:41 np0005466011.novalocal systemd[1]: Created slice User Slice of UID 1000.
Oct 02 11:01:41 np0005466011.novalocal systemd[1]: Starting User Runtime Directory /run/user/1000...
Oct 02 11:01:41 np0005466011.novalocal systemd-logind[827]: New session 1 of user zuul.
Oct 02 11:01:41 np0005466011.novalocal systemd[1]: Finished User Runtime Directory /run/user/1000.
Oct 02 11:01:41 np0005466011.novalocal systemd[1]: Starting User Manager for UID 1000...
Oct 02 11:01:41 np0005466011.novalocal systemd[1101]: pam_unix(systemd-user:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Oct 02 11:01:41 np0005466011.novalocal systemd[1101]: Queued start job for default target Main User Target.
Oct 02 11:01:41 np0005466011.novalocal systemd[1101]: Created slice User Application Slice.
Oct 02 11:01:41 np0005466011.novalocal systemd[1101]: Started Mark boot as successful after the user session has run 2 minutes.
Oct 02 11:01:41 np0005466011.novalocal systemd[1101]: Started Daily Cleanup of User's Temporary Directories.
Oct 02 11:01:41 np0005466011.novalocal systemd[1101]: Reached target Paths.
Oct 02 11:01:41 np0005466011.novalocal systemd[1101]: Reached target Timers.
Oct 02 11:01:41 np0005466011.novalocal systemd[1101]: Starting D-Bus User Message Bus Socket...
Oct 02 11:01:41 np0005466011.novalocal systemd[1101]: Starting Create User's Volatile Files and Directories...
Oct 02 11:01:41 np0005466011.novalocal systemd[1101]: Listening on D-Bus User Message Bus Socket.
Oct 02 11:01:41 np0005466011.novalocal systemd[1101]: Reached target Sockets.
Oct 02 11:01:41 np0005466011.novalocal systemd[1101]: Finished Create User's Volatile Files and Directories.
Oct 02 11:01:41 np0005466011.novalocal systemd[1101]: Reached target Basic System.
Oct 02 11:01:41 np0005466011.novalocal systemd[1101]: Reached target Main User Target.
Oct 02 11:01:41 np0005466011.novalocal systemd[1101]: Startup finished in 160ms.
Oct 02 11:01:41 np0005466011.novalocal systemd[1]: Started User Manager for UID 1000.
Oct 02 11:01:41 np0005466011.novalocal systemd[1]: Started Session 1 of User zuul.
Oct 02 11:01:41 np0005466011.novalocal sshd-session[1097]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Oct 02 11:01:42 np0005466011.novalocal python3[1184]: ansible-setup Invoked with gather_subset=['!all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:01:42 np0005466011.novalocal systemd[1]: Starting Cleanup of Temporary Directories...
Oct 02 11:01:42 np0005466011.novalocal systemd[1]: systemd-tmpfiles-clean.service: Deactivated successfully.
Oct 02 11:01:42 np0005466011.novalocal systemd[1]: Finished Cleanup of Temporary Directories.
Oct 02 11:01:42 np0005466011.novalocal systemd[1]: run-credentials-systemd\x2dtmpfiles\x2dclean.service.mount: Deactivated successfully.
Oct 02 11:01:45 np0005466011.novalocal python3[1214]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:01:54 np0005466011.novalocal python3[1272]: ansible-setup Invoked with gather_subset=['network'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:01:55 np0005466011.novalocal python3[1312]: ansible-zuul_console Invoked with path=/tmp/console-{log_uuid}.log port=19885 state=present
Oct 02 11:01:57 np0005466011.novalocal python3[1338]: ansible-authorized_key Invoked with user=zuul state=present key=ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDQCZ3vFv33zuU9QR4Erz5ZRISFa/oPvha0xwrBdyzVa18ydYUaCm/1GZP9yUXeHFz7iqX2LFQYNsjkqZJz1Uu67Idku6xgJC7Fx6g9BMv0MT1Zlak1CqYHg2DEyLxPerFs9LKBlOaZV+zN8b4kdG8Ww5E2kG2A7Ui3Cuzht/VP01bi+s4UjtwKH6CZ6X56ylQhY7z0Z+hPDBDFz1Oy2SYkyvdrztTs4eWaoebh/cWCdWX0V2djhSx6cc/r+wVBz3Aibc6gZzEn+Gpq8ffdM/6w/oD9Iqy6ijpCtmVA92FGjAJvr33J1xKd5XxDh4pvKaqFm7hjEeL+KJ1Z1ABjWrwV0uQNNHxit/J8k2+UdRsH+ZYoO3rrg4X8rEHQr981ffbmUPm16g5UJE1TZx20ZMh8oTkA5hXg5ydzjiktL9jGvgn+fSI1iCi1fdR/jUZ3xfQN6Q23wnG7lApoHjP4JXM75nxGNc0elGo9oGrDGWSVEwTOqp4qQPIuFtq+hNm1uTU= zuul-build-sshkey manage_dir=True exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:01:57 np0005466011.novalocal python3[1362]: ansible-file Invoked with state=directory path=/home/zuul/.ssh mode=448 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:01:58 np0005466011.novalocal python3[1461]: ansible-ansible.legacy.stat Invoked with path=/home/zuul/.ssh/id_rsa follow=False get_checksum=False checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Oct 02 11:01:58 np0005466011.novalocal python3[1532]: ansible-ansible.legacy.copy Invoked with src=/home/zuul/.ansible/tmp/ansible-tmp-1759402918.0105083-251-141301171627894/source dest=/home/zuul/.ssh/id_rsa mode=384 force=False _original_basename=e98c7d2ea8ba4729beeb0aae1d087b01_id_rsa follow=False checksum=84e221810e16da2c918261cb937e6458833c76e7 backup=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:01:59 np0005466011.novalocal python3[1655]: ansible-ansible.legacy.stat Invoked with path=/home/zuul/.ssh/id_rsa.pub follow=False get_checksum=False checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Oct 02 11:01:59 np0005466011.novalocal python3[1726]: ansible-ansible.legacy.copy Invoked with src=/home/zuul/.ansible/tmp/ansible-tmp-1759402919.035403-306-280936097776375/source dest=/home/zuul/.ssh/id_rsa.pub mode=420 force=False _original_basename=e98c7d2ea8ba4729beeb0aae1d087b01_id_rsa.pub follow=False checksum=4dfd599c92ceccedc02682f946e69efec3324503 backup=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:02:01 np0005466011.novalocal python3[1774]: ansible-ping Invoked with data=pong
Oct 02 11:02:02 np0005466011.novalocal python3[1798]: ansible-setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:02:04 np0005466011.novalocal python3[1856]: ansible-zuul_debug_info Invoked with ipv4_route_required=False ipv6_route_required=False image_manifest_files=['/etc/dib-builddate.txt', '/etc/image-hostname.txt'] image_manifest=None traceroute_host=None
Oct 02 11:02:05 np0005466011.novalocal python3[1888]: ansible-file Invoked with path=/home/zuul/zuul-output/logs state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:02:05 np0005466011.novalocal python3[1912]: ansible-file Invoked with path=/home/zuul/zuul-output/artifacts state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:02:06 np0005466011.novalocal python3[1936]: ansible-file Invoked with path=/home/zuul/zuul-output/docs state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:02:06 np0005466011.novalocal python3[1960]: ansible-file Invoked with path=/home/zuul/zuul-output/logs state=directory mode=493 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:02:06 np0005466011.novalocal python3[1984]: ansible-file Invoked with path=/home/zuul/zuul-output/artifacts state=directory mode=493 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:02:06 np0005466011.novalocal python3[2008]: ansible-file Invoked with path=/home/zuul/zuul-output/docs state=directory mode=493 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:02:08 np0005466011.novalocal sudo[2032]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-achxakprhxrbllokurojbqsrzbclwsas ; /usr/bin/python3'
Oct 02 11:02:08 np0005466011.novalocal sudo[2032]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:02:08 np0005466011.novalocal python3[2034]: ansible-file Invoked with path=/etc/ci state=directory owner=root group=root mode=493 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:02:08 np0005466011.novalocal sudo[2032]: pam_unix(sudo:session): session closed for user root
Oct 02 11:02:09 np0005466011.novalocal sudo[2110]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-btbsorswbfghevszzanfttdfnfunivij ; /usr/bin/python3'
Oct 02 11:02:09 np0005466011.novalocal sudo[2110]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:02:09 np0005466011.novalocal python3[2112]: ansible-ansible.legacy.stat Invoked with path=/etc/ci/mirror_info.sh follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Oct 02 11:02:09 np0005466011.novalocal sudo[2110]: pam_unix(sudo:session): session closed for user root
Oct 02 11:02:09 np0005466011.novalocal sudo[2183]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fusmptorcflsouxuvwjfqepjwfbwxmqh ; /usr/bin/python3'
Oct 02 11:02:09 np0005466011.novalocal sudo[2183]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:02:09 np0005466011.novalocal python3[2185]: ansible-ansible.legacy.copy Invoked with dest=/etc/ci/mirror_info.sh owner=root group=root mode=420 src=/home/zuul/.ansible/tmp/ansible-tmp-1759402929.0361786-31-236387549859310/source follow=False _original_basename=mirror_info.sh.j2 checksum=92d92a03afdddee82732741071f662c729080c35 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:02:09 np0005466011.novalocal sudo[2183]: pam_unix(sudo:session): session closed for user root
Oct 02 11:02:10 np0005466011.novalocal python3[2233]: ansible-authorized_key Invoked with user=zuul state=present key=ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA4Z/c9osaGGtU6X8fgELwfj/yayRurfcKA0HMFfdpPxev2dbwljysMuzoVp4OZmW1gvGtyYPSNRvnzgsaabPNKNo2ym5NToCP6UM+KSe93aln4BcM/24mXChYAbXJQ5Bqq/pIzsGs/pKetQN+vwvMxLOwTvpcsCJBXaa981RKML6xj9l/UZ7IIq1HSEKMvPLxZMWdu0Ut8DkCd5F4nOw9Wgml2uYpDCj5LLCrQQ9ChdOMz8hz6SighhNlRpPkvPaet3OXxr/ytFMu7j7vv06CaEnuMMiY2aTWN1Imin9eHAylIqFHta/3gFfQSWt9jXM7owkBLKL7ATzhaAn+fjNupw== arxcruz@redhat.com manage_dir=True exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:02:10 np0005466011.novalocal python3[2257]: ansible-authorized_key Invoked with user=zuul state=present key=ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDS4Fn6k4deCnIlOtLWqZJyksbepjQt04j8Ed8CGx9EKkj0fKiAxiI4TadXQYPuNHMixZy4Nevjb6aDhL5Z906TfvNHKUrjrG7G26a0k8vdc61NEQ7FmcGMWRLwwc6ReDO7lFpzYKBMk4YqfWgBuGU/K6WLKiVW2cVvwIuGIaYrE1OiiX0iVUUk7KApXlDJMXn7qjSYynfO4mF629NIp8FJal38+Kv+HA+0QkE5Y2xXnzD4Lar5+keymiCHRntPppXHeLIRzbt0gxC7v3L72hpQ3BTBEzwHpeS8KY+SX1y5lRMN45thCHfJqGmARJREDjBvWG8JXOPmVIKQtZmVcD5b mandreou@redhat.com manage_dir=True exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:02:11 np0005466011.novalocal python3[2281]: ansible-authorized_key Invoked with user=zuul state=present key=ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC9MiLfy30deHA7xPOAlew5qUq3UP2gmRMYJi8PtkjFB20/DKeWwWNnkZPqP9AayruRoo51SIiVg870gbZE2jYl+Ncx/FYDe56JeC3ySZsXoAVkC9bP7gkOGqOmJjirvAgPMI7bogVz8i+66Q4Ar7OKTp3762G4IuWPPEg4ce4Y7lx9qWocZapHYq4cYKMxrOZ7SEbFSATBbe2bPZAPKTw8do/Eny+Hq/LkHFhIeyra6cqTFQYShr+zPln0Cr+ro/pDX3bB+1ubFgTpjpkkkQsLhDfR6cCdCWM2lgnS3BTtYj5Ct9/JRPR5YOphqZz+uB+OEu2IL68hmU9vNTth1KeX rlandy@redhat.com manage_dir=True exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:02:11 np0005466011.novalocal python3[2305]: ansible-authorized_key Invoked with user=zuul state=present key=ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFCbgz8gdERiJlk2IKOtkjQxEXejrio6ZYMJAVJYpOIp raukadah@gmail.com manage_dir=True exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:02:11 np0005466011.novalocal python3[2329]: ansible-authorized_key Invoked with user=zuul state=present key=ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIBqb3Q/9uDf4LmihQ7xeJ9gA/STIQUFPSfyyV0m8AoQi bshewale@redhat.com manage_dir=True exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:02:12 np0005466011.novalocal python3[2353]: ansible-authorized_key Invoked with user=zuul state=present key=ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC0I8QqQx0Az2ysJt2JuffucLijhBqnsXKEIx5GyHwxVULROa8VtNFXUDH6ZKZavhiMcmfHB2+TBTda+lDP4FldYj06dGmzCY+IYGa+uDRdxHNGYjvCfLFcmLlzRK6fNbTcui+KlUFUdKe0fb9CRoGKyhlJD5GRkM1Dv+Yb6Bj+RNnmm1fVGYxzmrD2utvffYEb0SZGWxq2R9gefx1q/3wCGjeqvufEV+AskPhVGc5T7t9eyZ4qmslkLh1/nMuaIBFcr9AUACRajsvk6mXrAN1g3HlBf2gQlhi1UEyfbqIQvzzFtsbLDlSum/KmKjy818GzvWjERfQ0VkGzCd9bSLVL dviroel@redhat.com manage_dir=True exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:02:12 np0005466011.novalocal python3[2377]: ansible-authorized_key Invoked with user=zuul state=present key=ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDLOQd4ZLtkZXQGY6UwAr/06ppWQK4fDO3HaqxPk98csyOCBXsliSKK39Bso828+5srIXiW7aI6aC9P5mwi4mUZlGPfJlQbfrcGvY+b/SocuvaGK+1RrHLoJCT52LBhwgrzlXio2jeksZeein8iaTrhsPrOAs7KggIL/rB9hEiB3NaOPWhhoCP4vlW6MEMExGcqB/1FVxXFBPnLkEyW0Lk7ycVflZl2ocRxbfjZi0+tI1Wlinp8PvSQSc/WVrAcDgKjc/mB4ODPOyYy3G8FHgfMsrXSDEyjBKgLKMsdCrAUcqJQWjkqXleXSYOV4q3pzL+9umK+q/e3P/bIoSFQzmJKTU1eDfuvPXmow9F5H54fii/Da7ezlMJ+wPGHJrRAkmzvMbALy7xwswLhZMkOGNtRcPqaKYRmIBKpw3o6bCTtcNUHOtOQnzwY8JzrM2eBWJBXAANYw+9/ho80JIiwhg29CFNpVBuHbql2YxJQNrnl90guN65rYNpDxdIluweyUf8= anbanerj@kaermorhen manage_dir=True exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:02:12 np0005466011.novalocal python3[2401]: ansible-authorized_key Invoked with user=zuul state=present key=ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC3VwV8Im9kRm49lt3tM36hj4Zv27FxGo4C1Q/0jqhzFmHY7RHbmeRr8ObhwWoHjXSozKWg8FL5ER0z3hTwL0W6lez3sL7hUaCmSuZmG5Hnl3x4vTSxDI9JZ/Y65rtYiiWQo2fC5xJhU/4+0e5e/pseCm8cKRSu+SaxhO+sd6FDojA2x1BzOzKiQRDy/1zWGp/cZkxcEuB1wHI5LMzN03c67vmbu+fhZRAUO4dQkvcnj2LrhQtpa+ytvnSjr8icMDosf1OsbSffwZFyHB/hfWGAfe0eIeSA2XPraxiPknXxiPKx2MJsaUTYbsZcm3EjFdHBBMumw5rBI74zLrMRvCO9GwBEmGT4rFng1nP+yw5DB8sn2zqpOsPg1LYRwCPOUveC13P6pgsZZPh812e8v5EKnETct+5XI3dVpdw6CnNiLwAyVAF15DJvBGT/u1k0Myg/bQn+Gv9k2MSj6LvQmf6WbZu2Wgjm30z3FyCneBqTL7mLF19YXzeC0ufHz5pnO1E= dasm@fedora manage_dir=True exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:02:12 np0005466011.novalocal python3[2425]: ansible-authorized_key Invoked with user=zuul state=present key=ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIHUnwjB20UKmsSed9X73eGNV5AOEFccQ3NYrRW776pEk cjeanner manage_dir=True exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:02:13 np0005466011.novalocal python3[2449]: ansible-authorized_key Invoked with user=zuul state=present key=ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDercCMGn8rW1C4P67tHgtflPdTeXlpyUJYH+6XDd2lR jgilaber@redhat.com manage_dir=True exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:02:13 np0005466011.novalocal python3[2473]: ansible-authorized_key Invoked with user=zuul state=present key=ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIAMI6kkg9Wg0sG7jIJmyZemEBwUn1yzNpQQd3gnulOmZ adrianfuscoarnejo@gmail.com manage_dir=True exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:02:13 np0005466011.novalocal python3[2497]: ansible-authorized_key Invoked with user=zuul state=present key=ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBPijwpQu/3jhhhBZInXNOLEH57DrknPc3PLbsRvYyJIFzwYjX+WD4a7+nGnMYS42MuZk6TJcVqgnqofVx4isoD4= ramishra@redhat.com manage_dir=True exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:02:13 np0005466011.novalocal python3[2521]: ansible-authorized_key Invoked with user=zuul state=present key=ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIGpU/BepK3qX0NRf5Np+dOBDqzQEefhNrw2DCZaH3uWW rebtoor@monolith manage_dir=True exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:02:14 np0005466011.novalocal python3[2545]: ansible-authorized_key Invoked with user=zuul state=present key=ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDK0iKdi8jQTpQrDdLVH/AAgLVYyTXF7AQ1gjc/5uT3t ykarel@yatinkarel manage_dir=True exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:02:14 np0005466011.novalocal python3[2569]: ansible-authorized_key Invoked with user=zuul state=present key=ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIF/V/cLotA6LZeO32VL45Hd78skuA2lJA425Sm2LlQeZ fmount@horcrux manage_dir=True exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:02:14 np0005466011.novalocal python3[2593]: ansible-authorized_key Invoked with user=zuul state=present key=ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDa7QCjuDMVmRPo1rREbGwzYeBCYVN+Ou/3WKXZEC6Sr manage_dir=True exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:02:15 np0005466011.novalocal python3[2617]: ansible-authorized_key Invoked with user=zuul state=present key=ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCfNtF7NvKl915TGsGGoseUb06Hj8L/S4toWf0hExeY+F00woL6NvBlJD0nDct+P5a22I4EhvoQCRQ8reaPCm1lybR3uiRIJsj+8zkVvLwby9LXzfZorlNG9ofjd00FEmB09uW/YvTl6Q9XwwwX6tInzIOv3TMqTHHGOL74ibbj8J/FJR0cFEyj0z4WQRvtkh32xAHl83gbuINryMt0sqRI+clj2381NKL55DRLQrVw0gsfqqxiHAnXg21qWmc4J+b9e9kiuAFQjcjwTVkwJCcg3xbPwC/qokYRby/Y5S40UUd7/jEARGXT7RZgpzTuDd1oZiCVrnrqJNPaMNdVv5MLeFdf1B7iIe5aa/fGouX7AO4SdKhZUdnJmCFAGvjC6S3JMZ2wAcUl+OHnssfmdj7XL50cLo27vjuzMtLAgSqi6N99m92WCF2s8J9aVzszX7Xz9OKZCeGsiVJp3/NdABKzSEAyM9xBD/5Vho894Sav+otpySHe3p6RUTgbB5Zu8VyZRZ/UtB3ueXxyo764yrc6qWIDqrehm84Xm9g+/jpIBzGPl07NUNJpdt/6Sgf9RIKXw/7XypO5yZfUcuFNGTxLfqjTNrtgLZNcjfav6sSdVXVcMPL//XNuRdKmVFaO76eV/oGMQGr1fGcCD+N+CpI7+Q+fCNB6VFWG4nZFuI/Iuw== averdagu@redhat.com manage_dir=True exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:02:15 np0005466011.novalocal python3[2641]: ansible-authorized_key Invoked with user=zuul state=present key=ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDq8l27xI+QlQVdS4djp9ogSoyrNE2+Ox6vKPdhSNL1J3PE5w+WCSvMz9A5gnNuH810zwbekEApbxTze/gLQJwBHA52CChfURpXrFaxY7ePXRElwKAL3mJfzBWY/c5jnNL9TCVmFJTGZkFZP3Nh+BMgZvL6xBkt3WKm6Uq18qzd9XeKcZusrA+O+uLv1fVeQnadY9RIqOCyeFYCzLWrUfTyE8x/XG0hAWIM7qpnF2cALQS2h9n4hW5ybiUN790H08wf9hFwEf5nxY9Z9dVkPFQiTSGKNBzmnCXU9skxS/xhpFjJ5duGSZdtAHe9O+nGZm9c67hxgtf8e5PDuqAdXEv2cf6e3VBAt+Bz8EKI3yosTj0oZHfwr42Yzb1l/SKy14Rggsrc9KAQlrGXan6+u2jcQqqx7l+SWmnpFiWTV9u5cWj2IgOhApOitmRBPYqk9rE2usfO0hLn/Pj/R/Nau4803e1/EikdLE7Ps95s9mX5jRDjAoUa2JwFF5RsVFyL910= ashigupt@ashigupt.remote.csb manage_dir=True exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:02:15 np0005466011.novalocal python3[2665]: ansible-authorized_key Invoked with user=zuul state=present key=ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIOKLl0NYKwoZ/JY5KeZU8VwRAggeOxqQJeoqp3dsAaY9 manage_dir=True exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:02:15 np0005466011.novalocal python3[2689]: ansible-authorized_key Invoked with user=zuul state=present key=ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIASASQOH2BcOyLKuuDOdWZlPi2orcjcA8q4400T73DLH evallesp@fedora manage_dir=True exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:02:16 np0005466011.novalocal python3[2713]: ansible-authorized_key Invoked with user=zuul state=present key=ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAILeBWlamUph+jRKV2qrx1PGU7vWuGIt5+z9k96I8WehW amsinha@amsinha-mac manage_dir=True exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:02:16 np0005466011.novalocal python3[2737]: ansible-authorized_key Invoked with user=zuul state=present key=ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIANvVgvJBlK3gb1yz5uef/JqIGq4HLEmY2dYA8e37swb morenod@redhat-laptop manage_dir=True exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:02:16 np0005466011.novalocal python3[2761]: ansible-authorized_key Invoked with user=zuul state=present key=ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDZdI7t1cxYx65heVI24HTV4F7oQLW1zyfxHreL2TIJKxjyrUUKIFEUmTutcBlJRLNT2Eoix6x1sOw9YrchloCLcn//SGfTElr9mSc5jbjb7QXEU+zJMhtxyEJ1Po3CUGnj7ckiIXw7wcawZtrEOAQ9pH3ExYCJcEMiyNjRQZCxT3tPK+S4B95EWh5Fsrz9CkwpjNRPPH7LigCeQTM3Wc7r97utAslBUUvYceDSLA7rMgkitJE38b7rZBeYzsGQ8YYUBjTCtehqQXxCRjizbHWaaZkBU+N3zkKB6n/iCNGIO690NK7A/qb6msTijiz1PeuM8ThOsi9qXnbX5v0PoTpcFSojV7NHAQ71f0XXuS43FhZctT+Dcx44dT8Fb5vJu2cJGrk+qF8ZgJYNpRS7gPg0EG2EqjK7JMf9ULdjSu0r+KlqIAyLvtzT4eOnQipoKlb/WG5D/0ohKv7OMQ352ggfkBFIQsRXyyTCT98Ft9juqPuahi3CAQmP4H9dyE+7+Kz437PEtsxLmfm6naNmWi7Ee1DqWPwS8rEajsm4sNM4wW9gdBboJQtc0uZw0DfLj1I9r3Mc8Ol0jYtz0yNQDSzVLrGCaJlC311trU70tZ+ZkAVV6Mn8lOhSbj1cK0lvSr6ZK4dgqGl3I1eTZJJhbLNdg7UOVaiRx9543+C/p/As7w== brjackma@redhat.com manage_dir=True exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:02:16 np0005466011.novalocal python3[2785]: ansible-authorized_key Invoked with user=zuul state=present key=ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKwedoZ0TWPJX/z/4TAbO/kKcDZOQVgRH0hAqrL5UCI1 vcastell@redhat.com manage_dir=True exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:02:17 np0005466011.novalocal python3[2809]: ansible-authorized_key Invoked with user=zuul state=present key=ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIEmv8sE8GCk6ZTPIqF0FQrttBdL3mq7rCm/IJy0xDFh7 michburk@redhat.com manage_dir=True exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:02:17 np0005466011.novalocal python3[2833]: ansible-authorized_key Invoked with user=zuul state=present key=ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAICy6GpGEtwevXEEn4mmLR5lmSLe23dGgAvzkB9DMNbkf rsafrono@rsafrono manage_dir=True exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:02:20 np0005466011.novalocal sudo[2857]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lwnvkimvmxzietasnifirhoemjauwfxk ; /usr/bin/python3'
Oct 02 11:02:20 np0005466011.novalocal sudo[2857]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:02:20 np0005466011.novalocal python3[2859]: ansible-community.general.timezone Invoked with name=UTC hwclock=None
Oct 02 11:02:20 np0005466011.novalocal systemd[1]: Starting Time & Date Service...
Oct 02 11:02:20 np0005466011.novalocal systemd[1]: Started Time & Date Service.
Oct 02 11:02:20 np0005466011.novalocal systemd-timedated[2861]: Changed time zone to 'UTC' (UTC).
Oct 02 11:02:20 np0005466011.novalocal sudo[2857]: pam_unix(sudo:session): session closed for user root
Oct 02 11:02:22 np0005466011.novalocal sudo[2888]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yqstkjonqwxnrsbxvmetsvzqhstszgur ; /usr/bin/python3'
Oct 02 11:02:22 np0005466011.novalocal sudo[2888]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:02:22 np0005466011.novalocal python3[2890]: ansible-file Invoked with path=/etc/nodepool state=directory mode=511 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:02:22 np0005466011.novalocal sudo[2888]: pam_unix(sudo:session): session closed for user root
Oct 02 11:02:22 np0005466011.novalocal python3[2966]: ansible-ansible.legacy.stat Invoked with path=/etc/nodepool/sub_nodes follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Oct 02 11:02:22 np0005466011.novalocal python3[3037]: ansible-ansible.legacy.copy Invoked with dest=/etc/nodepool/sub_nodes src=/home/zuul/.ansible/tmp/ansible-tmp-1759402942.4767723-251-59976458191845/source _original_basename=tmpaol7hgza follow=False checksum=da39a3ee5e6b4b0d3255bfef95601890afd80709 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:02:23 np0005466011.novalocal python3[3137]: ansible-ansible.legacy.stat Invoked with path=/etc/nodepool/sub_nodes_private follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Oct 02 11:02:23 np0005466011.novalocal python3[3208]: ansible-ansible.legacy.copy Invoked with dest=/etc/nodepool/sub_nodes_private src=/home/zuul/.ansible/tmp/ansible-tmp-1759402943.3673015-301-233984192821549/source _original_basename=tmpk7jtmjwx follow=False checksum=da39a3ee5e6b4b0d3255bfef95601890afd80709 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:02:24 np0005466011.novalocal sudo[3308]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-diztceghwegfunsgymbppgaxkthwzccd ; /usr/bin/python3'
Oct 02 11:02:24 np0005466011.novalocal sudo[3308]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:02:24 np0005466011.novalocal python3[3310]: ansible-ansible.legacy.stat Invoked with path=/etc/nodepool/node_private follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Oct 02 11:02:24 np0005466011.novalocal sudo[3308]: pam_unix(sudo:session): session closed for user root
Oct 02 11:02:24 np0005466011.novalocal sudo[3381]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yglfnlbguzeslaipcjflhlukwiamrgqk ; /usr/bin/python3'
Oct 02 11:02:24 np0005466011.novalocal sudo[3381]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:02:25 np0005466011.novalocal python3[3383]: ansible-ansible.legacy.copy Invoked with dest=/etc/nodepool/node_private src=/home/zuul/.ansible/tmp/ansible-tmp-1759402944.5515854-381-248695536101285/source _original_basename=tmpkqzcksux follow=False checksum=74b5a1367b6fb99fab59122c5e0a08fc8bf78644 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:02:25 np0005466011.novalocal sudo[3381]: pam_unix(sudo:session): session closed for user root
Oct 02 11:02:25 np0005466011.novalocal python3[3431]: ansible-ansible.legacy.command Invoked with _raw_params=cp .ssh/id_rsa /etc/nodepool/id_rsa zuul_log_id=in-loop-ignore zuul_ansible_split_streams=False _uses_shell=False warn=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:02:26 np0005466011.novalocal python3[3457]: ansible-ansible.legacy.command Invoked with _raw_params=cp .ssh/id_rsa.pub /etc/nodepool/id_rsa.pub zuul_log_id=in-loop-ignore zuul_ansible_split_streams=False _uses_shell=False warn=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:02:26 np0005466011.novalocal sudo[3535]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-chghsqprcmmnndyxzbrjkkivwghxyluk ; /usr/bin/python3'
Oct 02 11:02:26 np0005466011.novalocal sudo[3535]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:02:26 np0005466011.novalocal python3[3537]: ansible-ansible.legacy.stat Invoked with path=/etc/sudoers.d/zuul-sudo-grep follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Oct 02 11:02:26 np0005466011.novalocal sudo[3535]: pam_unix(sudo:session): session closed for user root
Oct 02 11:02:26 np0005466011.novalocal sudo[3608]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sssismxtifmpctskcgotutgdkxmmjicv ; /usr/bin/python3'
Oct 02 11:02:26 np0005466011.novalocal sudo[3608]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:02:26 np0005466011.novalocal python3[3610]: ansible-ansible.legacy.copy Invoked with dest=/etc/sudoers.d/zuul-sudo-grep mode=288 src=/home/zuul/.ansible/tmp/ansible-tmp-1759402946.264036-451-193431691871619/source _original_basename=tmpgvax31qq follow=False checksum=bdca1a77493d00fb51567671791f4aa30f66c2f0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:02:26 np0005466011.novalocal sudo[3608]: pam_unix(sudo:session): session closed for user root
Oct 02 11:02:27 np0005466011.novalocal sudo[3659]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xmkqdeucggsebstjmomjnomuptbaosnf ; /usr/bin/python3'
Oct 02 11:02:27 np0005466011.novalocal sudo[3659]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:02:27 np0005466011.novalocal python3[3661]: ansible-ansible.legacy.command Invoked with _raw_params=/usr/sbin/visudo -c zuul_log_id=fa163ef9-e89a-893c-5f89-00000000001f-1-compute0 zuul_ansible_split_streams=False _uses_shell=False warn=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:02:27 np0005466011.novalocal sudo[3659]: pam_unix(sudo:session): session closed for user root
Oct 02 11:02:28 np0005466011.novalocal python3[3689]: ansible-ansible.legacy.command Invoked with executable=/bin/bash _raw_params=env
                                                       _uses_shell=True zuul_log_id=fa163ef9-e89a-893c-5f89-000000000020-1-compute0 zuul_ansible_split_streams=False warn=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None creates=None removes=None stdin=None
Oct 02 11:02:29 np0005466011.novalocal python3[3717]: ansible-file Invoked with path=/home/zuul/workspace state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:02:47 np0005466011.novalocal sudo[3741]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sonmrwthcluwnaniskgtorwmywdtzczh ; /usr/bin/python3'
Oct 02 11:02:47 np0005466011.novalocal sudo[3741]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:02:47 np0005466011.novalocal python3[3743]: ansible-ansible.builtin.file Invoked with path=/etc/ci/env state=directory mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:02:47 np0005466011.novalocal sudo[3741]: pam_unix(sudo:session): session closed for user root
Oct 02 11:02:50 np0005466011.novalocal systemd[1]: systemd-timedated.service: Deactivated successfully.
Oct 02 11:03:32 np0005466011.novalocal kernel: pci 0000:00:07.0: [1af4:1000] type 00 class 0x020000 conventional PCI endpoint
Oct 02 11:03:32 np0005466011.novalocal kernel: pci 0000:00:07.0: BAR 0 [io  0x0000-0x003f]
Oct 02 11:03:32 np0005466011.novalocal kernel: pci 0000:00:07.0: BAR 1 [mem 0x00000000-0x00000fff]
Oct 02 11:03:32 np0005466011.novalocal kernel: pci 0000:00:07.0: BAR 4 [mem 0x00000000-0x00003fff 64bit pref]
Oct 02 11:03:32 np0005466011.novalocal kernel: pci 0000:00:07.0: ROM [mem 0x00000000-0x0007ffff pref]
Oct 02 11:03:32 np0005466011.novalocal kernel: pci 0000:00:07.0: ROM [mem 0xc0000000-0xc007ffff pref]: assigned
Oct 02 11:03:32 np0005466011.novalocal kernel: pci 0000:00:07.0: BAR 4 [mem 0x240000000-0x240003fff 64bit pref]: assigned
Oct 02 11:03:32 np0005466011.novalocal kernel: pci 0000:00:07.0: BAR 1 [mem 0xc0080000-0xc0080fff]: assigned
Oct 02 11:03:32 np0005466011.novalocal kernel: pci 0000:00:07.0: BAR 0 [io  0x1000-0x103f]: assigned
Oct 02 11:03:32 np0005466011.novalocal kernel: virtio-pci 0000:00:07.0: enabling device (0000 -> 0003)
Oct 02 11:03:32 np0005466011.novalocal NetworkManager[864]: <info>  [1759403012.8531] manager: (eth1): new Ethernet device (/org/freedesktop/NetworkManager/Devices/3)
Oct 02 11:03:32 np0005466011.novalocal systemd-udevd[3747]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 11:03:32 np0005466011.novalocal NetworkManager[864]: <info>  [1759403012.8673] device (eth1): state change: unmanaged -> unavailable (reason 'managed', managed-type: 'external')
Oct 02 11:03:32 np0005466011.novalocal NetworkManager[864]: <info>  [1759403012.8697] settings: (eth1): created default wired connection 'Wired connection 1'
Oct 02 11:03:32 np0005466011.novalocal NetworkManager[864]: <info>  [1759403012.8701] device (eth1): carrier: link connected
Oct 02 11:03:32 np0005466011.novalocal NetworkManager[864]: <info>  [1759403012.8703] device (eth1): state change: unavailable -> disconnected (reason 'carrier-changed', managed-type: 'full')
Oct 02 11:03:32 np0005466011.novalocal NetworkManager[864]: <info>  [1759403012.8708] policy: auto-activating connection 'Wired connection 1' (80d64d2d-b88f-378c-97fd-b46295de63bc)
Oct 02 11:03:32 np0005466011.novalocal NetworkManager[864]: <info>  [1759403012.8712] device (eth1): Activation: starting connection 'Wired connection 1' (80d64d2d-b88f-378c-97fd-b46295de63bc)
Oct 02 11:03:32 np0005466011.novalocal NetworkManager[864]: <info>  [1759403012.8713] device (eth1): state change: disconnected -> prepare (reason 'none', managed-type: 'full')
Oct 02 11:03:32 np0005466011.novalocal NetworkManager[864]: <info>  [1759403012.8716] device (eth1): state change: prepare -> config (reason 'none', managed-type: 'full')
Oct 02 11:03:32 np0005466011.novalocal NetworkManager[864]: <info>  [1759403012.8720] device (eth1): state change: config -> ip-config (reason 'none', managed-type: 'full')
Oct 02 11:03:32 np0005466011.novalocal NetworkManager[864]: <info>  [1759403012.8725] dhcp4 (eth1): activation: beginning transaction (timeout in 45 seconds)
Oct 02 11:03:33 np0005466011.novalocal python3[3773]: ansible-ansible.legacy.command Invoked with _raw_params=ip -j link zuul_log_id=fa163ef9-e89a-d220-2459-000000000128-0-controller zuul_ansible_split_streams=False _uses_shell=False warn=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:03:40 np0005466011.novalocal sudo[3851]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vttawmwvbojkvloyxjdeghzgmcniheiv ; OS_CLOUD=vexxhost /usr/bin/python3'
Oct 02 11:03:40 np0005466011.novalocal sudo[3851]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:03:40 np0005466011.novalocal python3[3853]: ansible-ansible.legacy.stat Invoked with path=/etc/NetworkManager/system-connections/ci-private-network.nmconnection follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Oct 02 11:03:40 np0005466011.novalocal sudo[3851]: pam_unix(sudo:session): session closed for user root
Oct 02 11:03:40 np0005466011.novalocal sudo[3924]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qsyjkkvommdxurawjqkpgzxmqiekuhpv ; OS_CLOUD=vexxhost /usr/bin/python3'
Oct 02 11:03:40 np0005466011.novalocal sudo[3924]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:03:40 np0005466011.novalocal python3[3926]: ansible-ansible.legacy.copy Invoked with src=/home/zuul/.ansible/tmp/ansible-tmp-1759403020.2623556-104-241110549197532/source dest=/etc/NetworkManager/system-connections/ci-private-network.nmconnection mode=0600 owner=root group=root follow=False _original_basename=bootstrap-ci-network-nm-connection.nmconnection.j2 checksum=3994d261fef902cd800726cff99eb241fa8945bc backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:03:40 np0005466011.novalocal sudo[3924]: pam_unix(sudo:session): session closed for user root
Oct 02 11:03:41 np0005466011.novalocal sudo[3974]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bmzzozshfbawdinmdvbdymbijtlngztk ; OS_CLOUD=vexxhost /usr/bin/python3'
Oct 02 11:03:41 np0005466011.novalocal sudo[3974]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:03:41 np0005466011.novalocal python3[3976]: ansible-ansible.builtin.systemd Invoked with name=NetworkManager state=restarted daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Oct 02 11:03:41 np0005466011.novalocal systemd[1]: NetworkManager-wait-online.service: Deactivated successfully.
Oct 02 11:03:41 np0005466011.novalocal systemd[1]: Stopped Network Manager Wait Online.
Oct 02 11:03:41 np0005466011.novalocal systemd[1]: Stopping Network Manager Wait Online...
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[864]: <info>  [1759403021.7037] caught SIGTERM, shutting down normally.
Oct 02 11:03:41 np0005466011.novalocal systemd[1]: Stopping Network Manager...
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[864]: <info>  [1759403021.7046] dhcp4 (eth0): canceled DHCP transaction
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[864]: <info>  [1759403021.7046] dhcp4 (eth0): activation: beginning transaction (timeout in 45 seconds)
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[864]: <info>  [1759403021.7047] dhcp4 (eth0): state changed no lease
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[864]: <info>  [1759403021.7049] manager: NetworkManager state is now CONNECTING
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[864]: <info>  [1759403021.7200] dhcp4 (eth1): canceled DHCP transaction
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[864]: <info>  [1759403021.7200] dhcp4 (eth1): state changed no lease
Oct 02 11:03:41 np0005466011.novalocal systemd[1]: Starting Network Manager Script Dispatcher Service...
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[864]: <info>  [1759403021.7250] exiting (success)
Oct 02 11:03:41 np0005466011.novalocal systemd[1]: Started Network Manager Script Dispatcher Service.
Oct 02 11:03:41 np0005466011.novalocal systemd[1]: NetworkManager.service: Deactivated successfully.
Oct 02 11:03:41 np0005466011.novalocal systemd[1]: Stopped Network Manager.
Oct 02 11:03:41 np0005466011.novalocal systemd[1]: NetworkManager.service: Consumed 5.276s CPU time, 10.0M memory peak.
Oct 02 11:03:41 np0005466011.novalocal systemd[1]: Starting Network Manager...
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.7905] NetworkManager (version 1.54.1-1.el9) is starting... (after a restart, boot:1e8e4eaa-6890-46e6-baf5-d7fee48b6edb)
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.7907] Read config: /etc/NetworkManager/NetworkManager.conf, /run/NetworkManager/conf.d/15-carrier-timeout.conf
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.7968] manager[0x556c894aa070]: monitoring kernel firmware directory '/lib/firmware'.
Oct 02 11:03:41 np0005466011.novalocal systemd[1]: Starting Hostname Service...
Oct 02 11:03:41 np0005466011.novalocal systemd[1]: Started Hostname Service.
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8657] hostname: hostname: using hostnamed
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8658] hostname: static hostname changed from (none) to "np0005466011.novalocal"
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8664] dns-mgr: init: dns=default,systemd-resolved rc-manager=symlink (auto)
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8668] manager[0x556c894aa070]: rfkill: Wi-Fi hardware radio set enabled
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8668] manager[0x556c894aa070]: rfkill: WWAN hardware radio set enabled
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8693] Loaded device plugin: NMTeamFactory (/usr/lib64/NetworkManager/1.54.1-1.el9/libnm-device-plugin-team.so)
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8694] manager: rfkill: Wi-Fi enabled by radio killswitch; enabled by state file
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8694] manager: rfkill: WWAN enabled by radio killswitch; enabled by state file
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8694] manager: Networking is enabled by state file
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8696] settings: Loaded settings plugin: keyfile (internal)
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8700] settings: Loaded settings plugin: ifcfg-rh ("/usr/lib64/NetworkManager/1.54.1-1.el9/libnm-settings-plugin-ifcfg-rh.so")
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8718] Warning: the ifcfg-rh plugin is deprecated, please migrate connections to the keyfile format using "nmcli connection migrate"
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8727] dhcp: init: Using DHCP client 'internal'
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8730] manager: (lo): new Loopback device (/org/freedesktop/NetworkManager/Devices/1)
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8735] device (lo): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8738] device (lo): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external')
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8744] device (lo): Activation: starting connection 'lo' (9c045e93-5256-40fb-a074-0144ed71625c)
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8748] device (eth0): carrier: link connected
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8752] manager: (eth0): new Ethernet device (/org/freedesktop/NetworkManager/Devices/2)
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8755] manager: (eth0): assume: will attempt to assume matching connection 'System eth0' (5fb06bd0-0bb0-7ffb-45f1-d6edd65f3e03) (indicated)
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8755] device (eth0): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'assume')
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8759] device (eth0): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'assume')
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8763] device (eth0): Activation: starting connection 'System eth0' (5fb06bd0-0bb0-7ffb-45f1-d6edd65f3e03)
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8767] device (eth1): carrier: link connected
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8770] manager: (eth1): new Ethernet device (/org/freedesktop/NetworkManager/Devices/3)
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8773] manager: (eth1): assume: will attempt to assume matching connection 'Wired connection 1' (80d64d2d-b88f-378c-97fd-b46295de63bc) (indicated)
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8774] device (eth1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'assume')
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8777] device (eth1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'assume')
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8781] device (eth1): Activation: starting connection 'Wired connection 1' (80d64d2d-b88f-378c-97fd-b46295de63bc)
Oct 02 11:03:41 np0005466011.novalocal systemd[1]: Started Network Manager.
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8791] bus-manager: acquired D-Bus service "org.freedesktop.NetworkManager"
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8794] device (lo): state change: disconnected -> prepare (reason 'none', managed-type: 'external')
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8795] device (lo): state change: prepare -> config (reason 'none', managed-type: 'external')
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8796] device (lo): state change: config -> ip-config (reason 'none', managed-type: 'external')
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8797] device (eth0): state change: disconnected -> prepare (reason 'none', managed-type: 'assume')
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8799] device (eth0): state change: prepare -> config (reason 'none', managed-type: 'assume')
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8801] device (eth1): state change: disconnected -> prepare (reason 'none', managed-type: 'assume')
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8803] device (eth1): state change: prepare -> config (reason 'none', managed-type: 'assume')
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8805] device (lo): state change: ip-config -> ip-check (reason 'none', managed-type: 'external')
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8811] device (eth0): state change: config -> ip-config (reason 'none', managed-type: 'assume')
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8814] dhcp4 (eth0): activation: beginning transaction (timeout in 45 seconds)
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8821] device (eth1): state change: config -> ip-config (reason 'none', managed-type: 'assume')
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8823] dhcp4 (eth1): activation: beginning transaction (timeout in 45 seconds)
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8841] device (lo): state change: ip-check -> secondaries (reason 'none', managed-type: 'external')
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8843] device (lo): state change: secondaries -> activated (reason 'none', managed-type: 'external')
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8849] device (lo): Activation: successful, device activated.
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8857] dhcp4 (eth0): state changed new lease, address=38.129.56.69
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.8865] policy: set 'System eth0' (eth0) as default for IPv4 routing and DNS
Oct 02 11:03:41 np0005466011.novalocal systemd[1]: Starting Network Manager Wait Online...
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.9034] device (eth0): state change: ip-config -> ip-check (reason 'none', managed-type: 'assume')
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.9055] device (eth0): state change: ip-check -> secondaries (reason 'none', managed-type: 'assume')
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.9057] device (eth0): state change: secondaries -> activated (reason 'none', managed-type: 'assume')
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.9063] manager: NetworkManager state is now CONNECTED_SITE
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.9075] device (eth0): Activation: successful, device activated.
Oct 02 11:03:41 np0005466011.novalocal sudo[3974]: pam_unix(sudo:session): session closed for user root
Oct 02 11:03:41 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403021.9079] manager: NetworkManager state is now CONNECTED_GLOBAL
Oct 02 11:03:42 np0005466011.novalocal python3[4061]: ansible-ansible.legacy.command Invoked with _raw_params=ip route zuul_log_id=fa163ef9-e89a-d220-2459-0000000000bd-0-controller zuul_ansible_split_streams=False _uses_shell=False warn=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:03:51 np0005466011.novalocal systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
Oct 02 11:04:11 np0005466011.novalocal systemd[1]: systemd-hostnamed.service: Deactivated successfully.
Oct 02 11:04:13 np0005466011.novalocal systemd[1101]: Starting Mark boot as successful...
Oct 02 11:04:13 np0005466011.novalocal systemd[1101]: Finished Mark boot as successful.
Oct 02 11:04:27 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403067.3132] device (eth1): state change: ip-config -> ip-check (reason 'none', managed-type: 'assume')
Oct 02 11:04:27 np0005466011.novalocal systemd[1]: Starting Network Manager Script Dispatcher Service...
Oct 02 11:04:27 np0005466011.novalocal systemd[1]: Started Network Manager Script Dispatcher Service.
Oct 02 11:04:27 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403067.3401] device (eth1): state change: ip-check -> secondaries (reason 'none', managed-type: 'assume')
Oct 02 11:04:27 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403067.3404] device (eth1): state change: secondaries -> activated (reason 'none', managed-type: 'assume')
Oct 02 11:04:27 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403067.3412] device (eth1): Activation: successful, device activated.
Oct 02 11:04:27 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403067.3419] manager: startup complete
Oct 02 11:04:27 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403067.3421] device (eth1): state change: activated -> failed (reason 'ip-config-unavailable', managed-type: 'full')
Oct 02 11:04:27 np0005466011.novalocal NetworkManager[3990]: <warn>  [1759403067.3428] device (eth1): Activation: failed for connection 'Wired connection 1'
Oct 02 11:04:27 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403067.3436] device (eth1): state change: failed -> disconnected (reason 'none', managed-type: 'full')
Oct 02 11:04:27 np0005466011.novalocal systemd[1]: Finished Network Manager Wait Online.
Oct 02 11:04:27 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403067.3540] dhcp4 (eth1): canceled DHCP transaction
Oct 02 11:04:27 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403067.3541] dhcp4 (eth1): activation: beginning transaction (timeout in 45 seconds)
Oct 02 11:04:27 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403067.3541] dhcp4 (eth1): state changed no lease
Oct 02 11:04:27 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403067.3556] policy: auto-activating connection 'ci-private-network' (7a49c97b-caea-555e-9162-65c1fd602491)
Oct 02 11:04:27 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403067.3560] device (eth1): Activation: starting connection 'ci-private-network' (7a49c97b-caea-555e-9162-65c1fd602491)
Oct 02 11:04:27 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403067.3561] device (eth1): state change: disconnected -> prepare (reason 'none', managed-type: 'full')
Oct 02 11:04:27 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403067.3565] device (eth1): state change: prepare -> config (reason 'none', managed-type: 'full')
Oct 02 11:04:27 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403067.3571] device (eth1): state change: config -> ip-config (reason 'none', managed-type: 'full')
Oct 02 11:04:27 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403067.3580] device (eth1): state change: ip-config -> ip-check (reason 'none', managed-type: 'full')
Oct 02 11:04:27 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403067.3613] device (eth1): state change: ip-check -> secondaries (reason 'none', managed-type: 'full')
Oct 02 11:04:27 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403067.3616] device (eth1): state change: secondaries -> activated (reason 'none', managed-type: 'full')
Oct 02 11:04:27 np0005466011.novalocal NetworkManager[3990]: <info>  [1759403067.3623] device (eth1): Activation: successful, device activated.
Oct 02 11:04:37 np0005466011.novalocal systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
Oct 02 11:04:42 np0005466011.novalocal sshd-session[1111]: Received disconnect from 38.102.83.114 port 50926:11: disconnected by user
Oct 02 11:04:42 np0005466011.novalocal sshd-session[1111]: Disconnected from user zuul 38.102.83.114 port 50926
Oct 02 11:04:42 np0005466011.novalocal sshd-session[1097]: pam_unix(sshd:session): session closed for user zuul
Oct 02 11:04:42 np0005466011.novalocal systemd-logind[827]: Session 1 logged out. Waiting for processes to exit.
Oct 02 11:05:33 np0005466011.novalocal sshd-session[4093]: Accepted publickey for zuul from 38.102.83.114 port 53822 ssh2: RSA SHA256:cGVaibQZU+1xXpl3EOpnEmu1huhmLGviN6SuAxWWn+4
Oct 02 11:05:33 np0005466011.novalocal systemd-logind[827]: New session 3 of user zuul.
Oct 02 11:05:33 np0005466011.novalocal systemd[1]: Started Session 3 of User zuul.
Oct 02 11:05:33 np0005466011.novalocal sshd-session[4093]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Oct 02 11:05:33 np0005466011.novalocal sudo[4172]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-catmunhcbqnwhqlqjnzoawjhcoghjzyt ; OS_CLOUD=vexxhost /usr/bin/python3'
Oct 02 11:05:33 np0005466011.novalocal sudo[4172]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:05:33 np0005466011.novalocal python3[4174]: ansible-ansible.legacy.stat Invoked with path=/etc/ci/env/networking-info.yml follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Oct 02 11:05:33 np0005466011.novalocal sudo[4172]: pam_unix(sudo:session): session closed for user root
Oct 02 11:05:33 np0005466011.novalocal sudo[4245]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cctmolwrnozgpdiblyisubooljqpvayt ; OS_CLOUD=vexxhost /usr/bin/python3'
Oct 02 11:05:33 np0005466011.novalocal sudo[4245]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:05:33 np0005466011.novalocal python3[4247]: ansible-ansible.legacy.copy Invoked with dest=/etc/ci/env/networking-info.yml owner=root group=root mode=0644 src=/home/zuul/.ansible/tmp/ansible-tmp-1759403133.293403-365-84299321442392/source _original_basename=tmp8dq0p1uf follow=False checksum=7ead7cbef44571b5903e56d225b6c0c65e6bdcb6 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:05:33 np0005466011.novalocal sudo[4245]: pam_unix(sudo:session): session closed for user root
Oct 02 11:05:38 np0005466011.novalocal sshd-session[4096]: Connection closed by 38.102.83.114 port 53822
Oct 02 11:05:38 np0005466011.novalocal sshd-session[4093]: pam_unix(sshd:session): session closed for user zuul
Oct 02 11:05:38 np0005466011.novalocal systemd[1]: session-3.scope: Deactivated successfully.
Oct 02 11:05:38 np0005466011.novalocal systemd-logind[827]: Session 3 logged out. Waiting for processes to exit.
Oct 02 11:05:38 np0005466011.novalocal systemd-logind[827]: Removed session 3.
Oct 02 11:05:42 np0005466011.novalocal irqbalance[823]: Cannot change IRQ 26 affinity: Operation not permitted
Oct 02 11:05:42 np0005466011.novalocal irqbalance[823]: IRQ 26 affinity is now unmanaged
Oct 02 11:07:13 np0005466011.novalocal systemd[1101]: Created slice User Background Tasks Slice.
Oct 02 11:07:13 np0005466011.novalocal systemd[1101]: Starting Cleanup of User's Temporary Files and Directories...
Oct 02 11:07:13 np0005466011.novalocal systemd[1101]: Finished Cleanup of User's Temporary Files and Directories.
Oct 02 11:11:24 np0005466011.novalocal sshd-session[4276]: Accepted publickey for zuul from 38.102.83.114 port 42070 ssh2: RSA SHA256:cGVaibQZU+1xXpl3EOpnEmu1huhmLGviN6SuAxWWn+4
Oct 02 11:11:24 np0005466011.novalocal systemd-logind[827]: New session 4 of user zuul.
Oct 02 11:11:24 np0005466011.novalocal systemd[1]: Started Session 4 of User zuul.
Oct 02 11:11:24 np0005466011.novalocal sshd-session[4276]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Oct 02 11:11:24 np0005466011.novalocal sudo[4303]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-whaotzsyuudybyoicjpuxietrptknkgm ; /usr/bin/python3'
Oct 02 11:11:24 np0005466011.novalocal sudo[4303]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:11:24 np0005466011.novalocal python3[4305]: ansible-ansible.legacy.command Invoked with _raw_params=lsblk -nd -o MAJ:MIN /dev/vda
                                                       _uses_shell=True zuul_log_id=fa163ef9-e89a-67bc-3be9-000000000ca4-1-compute0 zuul_ansible_split_streams=False warn=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:11:24 np0005466011.novalocal sudo[4303]: pam_unix(sudo:session): session closed for user root
Oct 02 11:11:25 np0005466011.novalocal sudo[4332]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ortcxinxbckusngmllwonkkbrggogmce ; /usr/bin/python3'
Oct 02 11:11:25 np0005466011.novalocal sudo[4332]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:11:25 np0005466011.novalocal python3[4334]: ansible-ansible.builtin.file Invoked with path=/sys/fs/cgroup/init.scope state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:11:25 np0005466011.novalocal sudo[4332]: pam_unix(sudo:session): session closed for user root
Oct 02 11:11:25 np0005466011.novalocal sudo[4358]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xhzwxhuyctooafoufpqztufwcxrilqoe ; /usr/bin/python3'
Oct 02 11:11:25 np0005466011.novalocal sudo[4358]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:11:25 np0005466011.novalocal python3[4360]: ansible-ansible.builtin.file Invoked with path=/sys/fs/cgroup/machine.slice state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:11:25 np0005466011.novalocal sudo[4358]: pam_unix(sudo:session): session closed for user root
Oct 02 11:11:25 np0005466011.novalocal sudo[4384]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jutgpuqwcvkenlrjtgfkwujvpmwrrqku ; /usr/bin/python3'
Oct 02 11:11:25 np0005466011.novalocal sudo[4384]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:11:25 np0005466011.novalocal python3[4386]: ansible-ansible.builtin.file Invoked with path=/sys/fs/cgroup/system.slice state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:11:25 np0005466011.novalocal sudo[4384]: pam_unix(sudo:session): session closed for user root
Oct 02 11:11:25 np0005466011.novalocal sudo[4410]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xqkxxxexgpgxsbdxxmtnhizwqlbreduh ; /usr/bin/python3'
Oct 02 11:11:25 np0005466011.novalocal sudo[4410]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:11:25 np0005466011.novalocal python3[4412]: ansible-ansible.builtin.file Invoked with path=/sys/fs/cgroup/user.slice state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:11:25 np0005466011.novalocal sudo[4410]: pam_unix(sudo:session): session closed for user root
Oct 02 11:11:26 np0005466011.novalocal sudo[4436]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qlchikszevqnhuufrosmlzokcpzegpft ; /usr/bin/python3'
Oct 02 11:11:26 np0005466011.novalocal sudo[4436]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:11:26 np0005466011.novalocal python3[4438]: ansible-ansible.builtin.lineinfile Invoked with path=/etc/systemd/system.conf regexp=^#DefaultIOAccounting=no line=DefaultIOAccounting=yes state=present backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:11:26 np0005466011.novalocal python3[4438]: ansible-ansible.builtin.lineinfile [WARNING] Module remote_tmp /root/.ansible/tmp did not exist and was created with a mode of 0700, this may cause issues when running as another user. To avoid this, create the remote_tmp dir with the correct permissions manually
Oct 02 11:11:26 np0005466011.novalocal sudo[4436]: pam_unix(sudo:session): session closed for user root
Oct 02 11:11:27 np0005466011.novalocal sudo[4462]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pqqgtkpbcmanywdqtmzhsaypeuyhuwfo ; /usr/bin/python3'
Oct 02 11:11:27 np0005466011.novalocal sudo[4462]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:11:27 np0005466011.novalocal python3[4464]: ansible-ansible.builtin.systemd_service Invoked with daemon_reload=True daemon_reexec=False scope=system no_block=False name=None state=None enabled=None force=None masked=None
Oct 02 11:11:27 np0005466011.novalocal systemd[1]: Reloading.
Oct 02 11:11:27 np0005466011.novalocal systemd-rc-local-generator[4486]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:11:27 np0005466011.novalocal sudo[4462]: pam_unix(sudo:session): session closed for user root
Oct 02 11:11:29 np0005466011.novalocal sudo[4518]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vwpleysinpmbfrqcepuxdbwqieutzeod ; /usr/bin/python3'
Oct 02 11:11:29 np0005466011.novalocal sudo[4518]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:11:29 np0005466011.novalocal python3[4520]: ansible-ansible.builtin.wait_for Invoked with path=/sys/fs/cgroup/system.slice/io.max state=present timeout=30 host=127.0.0.1 connect_timeout=5 delay=0 active_connection_states=['ESTABLISHED', 'FIN_WAIT1', 'FIN_WAIT2', 'SYN_RECV', 'SYN_SENT', 'TIME_WAIT'] sleep=1 port=None search_regex=None exclude_hosts=None msg=None
Oct 02 11:11:29 np0005466011.novalocal sudo[4518]: pam_unix(sudo:session): session closed for user root
Oct 02 11:11:29 np0005466011.novalocal sudo[4544]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kpfjozogujkszzfoqvmrqcgtehtyptet ; /usr/bin/python3'
Oct 02 11:11:29 np0005466011.novalocal sudo[4544]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:11:29 np0005466011.novalocal python3[4546]: ansible-ansible.legacy.command Invoked with _raw_params=echo "252:0   riops=18000 wiops=18000 rbps=262144000 wbps=262144000" > /sys/fs/cgroup/init.scope/io.max
                                                       _uses_shell=True zuul_log_id=in-loop-ignore zuul_ansible_split_streams=False warn=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:11:29 np0005466011.novalocal sudo[4544]: pam_unix(sudo:session): session closed for user root
Oct 02 11:11:29 np0005466011.novalocal sudo[4572]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jhmnaqnpndshtjfnbduhgnjtmvqgykss ; /usr/bin/python3'
Oct 02 11:11:29 np0005466011.novalocal sudo[4572]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:11:29 np0005466011.novalocal python3[4574]: ansible-ansible.legacy.command Invoked with _raw_params=echo "252:0   riops=18000 wiops=18000 rbps=262144000 wbps=262144000" > /sys/fs/cgroup/machine.slice/io.max
                                                       _uses_shell=True zuul_log_id=in-loop-ignore zuul_ansible_split_streams=False warn=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:11:29 np0005466011.novalocal sudo[4572]: pam_unix(sudo:session): session closed for user root
Oct 02 11:11:30 np0005466011.novalocal sudo[4600]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gdcobfojbuwgfaitpmadxnkgtbxhisqu ; /usr/bin/python3'
Oct 02 11:11:30 np0005466011.novalocal sudo[4600]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:11:30 np0005466011.novalocal python3[4602]: ansible-ansible.legacy.command Invoked with _raw_params=echo "252:0   riops=18000 wiops=18000 rbps=262144000 wbps=262144000" > /sys/fs/cgroup/system.slice/io.max
                                                       _uses_shell=True zuul_log_id=in-loop-ignore zuul_ansible_split_streams=False warn=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:11:30 np0005466011.novalocal sudo[4600]: pam_unix(sudo:session): session closed for user root
Oct 02 11:11:30 np0005466011.novalocal sudo[4628]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jqbaydzaqwitbrdunwbpkhaniidyfuaz ; /usr/bin/python3'
Oct 02 11:11:30 np0005466011.novalocal sudo[4628]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:11:30 np0005466011.novalocal python3[4630]: ansible-ansible.legacy.command Invoked with _raw_params=echo "252:0   riops=18000 wiops=18000 rbps=262144000 wbps=262144000" > /sys/fs/cgroup/user.slice/io.max
                                                       _uses_shell=True zuul_log_id=in-loop-ignore zuul_ansible_split_streams=False warn=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:11:30 np0005466011.novalocal sudo[4628]: pam_unix(sudo:session): session closed for user root
Oct 02 11:11:31 np0005466011.novalocal python3[4657]: ansible-ansible.legacy.command Invoked with _raw_params=echo "init";    cat /sys/fs/cgroup/init.scope/io.max; echo "machine"; cat /sys/fs/cgroup/machine.slice/io.max; echo "system";  cat /sys/fs/cgroup/system.slice/io.max; echo "user";    cat /sys/fs/cgroup/user.slice/io.max;
                                                       _uses_shell=True zuul_log_id=fa163ef9-e89a-67bc-3be9-000000000caa-1-compute0 zuul_ansible_split_streams=False warn=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:11:31 np0005466011.novalocal python3[4687]: ansible-ansible.builtin.stat Invoked with path=/sys/fs/cgroup/kubepods.slice/io.max follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:11:34 np0005466011.novalocal sshd-session[4279]: Connection closed by 38.102.83.114 port 42070
Oct 02 11:11:34 np0005466011.novalocal sshd-session[4276]: pam_unix(sshd:session): session closed for user zuul
Oct 02 11:11:34 np0005466011.novalocal systemd[1]: session-4.scope: Deactivated successfully.
Oct 02 11:11:34 np0005466011.novalocal systemd[1]: session-4.scope: Consumed 3.141s CPU time.
Oct 02 11:11:34 np0005466011.novalocal systemd-logind[827]: Session 4 logged out. Waiting for processes to exit.
Oct 02 11:11:34 np0005466011.novalocal systemd-logind[827]: Removed session 4.
Oct 02 11:11:36 np0005466011.novalocal sshd-session[4693]: Accepted publickey for zuul from 38.102.83.114 port 33904 ssh2: RSA SHA256:cGVaibQZU+1xXpl3EOpnEmu1huhmLGviN6SuAxWWn+4
Oct 02 11:11:36 np0005466011.novalocal systemd-logind[827]: New session 5 of user zuul.
Oct 02 11:11:36 np0005466011.novalocal systemd[1]: Started Session 5 of User zuul.
Oct 02 11:11:36 np0005466011.novalocal sshd-session[4693]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Oct 02 11:11:36 np0005466011.novalocal sudo[4720]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vdmeushoxwjxueckyjobzqvdzihikyul ; /usr/bin/python3'
Oct 02 11:11:36 np0005466011.novalocal sudo[4720]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:11:36 np0005466011.novalocal python3[4722]: ansible-ansible.legacy.dnf Invoked with name=['podman', 'buildah'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None
Oct 02 11:11:56 np0005466011.novalocal kernel: SELinux:  Converting 365 SID table entries...
Oct 02 11:11:56 np0005466011.novalocal kernel: SELinux:  policy capability network_peer_controls=1
Oct 02 11:11:56 np0005466011.novalocal kernel: SELinux:  policy capability open_perms=1
Oct 02 11:11:56 np0005466011.novalocal kernel: SELinux:  policy capability extended_socket_class=1
Oct 02 11:11:56 np0005466011.novalocal kernel: SELinux:  policy capability always_check_network=0
Oct 02 11:11:56 np0005466011.novalocal kernel: SELinux:  policy capability cgroup_seclabel=1
Oct 02 11:11:56 np0005466011.novalocal kernel: SELinux:  policy capability nnp_nosuid_transition=1
Oct 02 11:11:56 np0005466011.novalocal kernel: SELinux:  policy capability genfs_seclabel_symlinks=1
Oct 02 11:12:07 np0005466011.novalocal kernel: SELinux:  Converting 365 SID table entries...
Oct 02 11:12:07 np0005466011.novalocal kernel: SELinux:  policy capability network_peer_controls=1
Oct 02 11:12:07 np0005466011.novalocal kernel: SELinux:  policy capability open_perms=1
Oct 02 11:12:07 np0005466011.novalocal kernel: SELinux:  policy capability extended_socket_class=1
Oct 02 11:12:07 np0005466011.novalocal kernel: SELinux:  policy capability always_check_network=0
Oct 02 11:12:07 np0005466011.novalocal kernel: SELinux:  policy capability cgroup_seclabel=1
Oct 02 11:12:07 np0005466011.novalocal kernel: SELinux:  policy capability nnp_nosuid_transition=1
Oct 02 11:12:07 np0005466011.novalocal kernel: SELinux:  policy capability genfs_seclabel_symlinks=1
Oct 02 11:12:16 np0005466011.novalocal kernel: SELinux:  Converting 365 SID table entries...
Oct 02 11:12:17 np0005466011.novalocal kernel: SELinux:  policy capability network_peer_controls=1
Oct 02 11:12:17 np0005466011.novalocal kernel: SELinux:  policy capability open_perms=1
Oct 02 11:12:17 np0005466011.novalocal kernel: SELinux:  policy capability extended_socket_class=1
Oct 02 11:12:17 np0005466011.novalocal kernel: SELinux:  policy capability always_check_network=0
Oct 02 11:12:17 np0005466011.novalocal kernel: SELinux:  policy capability cgroup_seclabel=1
Oct 02 11:12:17 np0005466011.novalocal kernel: SELinux:  policy capability nnp_nosuid_transition=1
Oct 02 11:12:17 np0005466011.novalocal kernel: SELinux:  policy capability genfs_seclabel_symlinks=1
Oct 02 11:12:18 np0005466011.novalocal setsebool[4784]: The virt_use_nfs policy boolean was changed to 1 by root
Oct 02 11:12:18 np0005466011.novalocal setsebool[4784]: The virt_sandbox_use_all_caps policy boolean was changed to 1 by root
Oct 02 11:12:29 np0005466011.novalocal kernel: SELinux:  Converting 368 SID table entries...
Oct 02 11:12:29 np0005466011.novalocal kernel: SELinux:  policy capability network_peer_controls=1
Oct 02 11:12:29 np0005466011.novalocal kernel: SELinux:  policy capability open_perms=1
Oct 02 11:12:29 np0005466011.novalocal kernel: SELinux:  policy capability extended_socket_class=1
Oct 02 11:12:29 np0005466011.novalocal kernel: SELinux:  policy capability always_check_network=0
Oct 02 11:12:29 np0005466011.novalocal kernel: SELinux:  policy capability cgroup_seclabel=1
Oct 02 11:12:29 np0005466011.novalocal kernel: SELinux:  policy capability nnp_nosuid_transition=1
Oct 02 11:12:29 np0005466011.novalocal kernel: SELinux:  policy capability genfs_seclabel_symlinks=1
Oct 02 11:12:48 np0005466011.novalocal dbus-broker-launch[818]: avc:  op=load_policy lsm=selinux seqno=6 res=1
Oct 02 11:12:48 np0005466011.novalocal systemd[1]: Started /usr/bin/systemctl start man-db-cache-update.
Oct 02 11:12:48 np0005466011.novalocal systemd[1]: Starting man-db-cache-update.service...
Oct 02 11:12:48 np0005466011.novalocal systemd[1]: Reloading.
Oct 02 11:12:48 np0005466011.novalocal systemd-rc-local-generator[5535]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:12:48 np0005466011.novalocal systemd[1]: Queuing reload/restart jobs for marked units…
Oct 02 11:12:49 np0005466011.novalocal systemd[1]: Starting PackageKit Daemon...
Oct 02 11:12:49 np0005466011.novalocal PackageKit[6388]: daemon start
Oct 02 11:12:49 np0005466011.novalocal systemd[1]: Starting Authorization Manager...
Oct 02 11:12:49 np0005466011.novalocal polkitd[6487]: Started polkitd version 0.117
Oct 02 11:12:49 np0005466011.novalocal polkitd[6487]: Loading rules from directory /etc/polkit-1/rules.d
Oct 02 11:12:49 np0005466011.novalocal polkitd[6487]: Loading rules from directory /usr/share/polkit-1/rules.d
Oct 02 11:12:49 np0005466011.novalocal polkitd[6487]: Finished loading, compiling and executing 3 rules
Oct 02 11:12:49 np0005466011.novalocal polkitd[6487]: Acquired the name org.freedesktop.PolicyKit1 on the system bus
Oct 02 11:12:49 np0005466011.novalocal systemd[1]: Started Authorization Manager.
Oct 02 11:12:49 np0005466011.novalocal systemd[1]: Started PackageKit Daemon.
Oct 02 11:12:50 np0005466011.novalocal sudo[4720]: pam_unix(sudo:session): session closed for user root
Oct 02 11:12:52 np0005466011.novalocal irqbalance[823]: Cannot change IRQ 27 affinity: Operation not permitted
Oct 02 11:12:52 np0005466011.novalocal irqbalance[823]: IRQ 27 affinity is now unmanaged
Oct 02 11:13:14 np0005466011.novalocal python3[18348]: ansible-ansible.legacy.command Invoked with _raw_params=echo "openstack-k8s-operators+cirobot"
                                                        _uses_shell=True zuul_log_id=fa163ef9-e89a-ae2a-8cff-00000000000c-1-compute0 zuul_ansible_split_streams=False warn=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:13:15 np0005466011.novalocal kernel: evm: overlay not supported
Oct 02 11:13:15 np0005466011.novalocal systemd[1101]: Starting D-Bus User Message Bus...
Oct 02 11:13:15 np0005466011.novalocal dbus-broker-launch[18868]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored
Oct 02 11:13:15 np0005466011.novalocal dbus-broker-launch[18868]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored
Oct 02 11:13:15 np0005466011.novalocal systemd[1101]: Started D-Bus User Message Bus.
Oct 02 11:13:15 np0005466011.novalocal dbus-broker-lau[18868]: Ready
Oct 02 11:13:15 np0005466011.novalocal systemd[1101]: selinux: avc:  op=load_policy lsm=selinux seqno=6 res=1
Oct 02 11:13:15 np0005466011.novalocal systemd[1101]: Created slice Slice /user.
Oct 02 11:13:15 np0005466011.novalocal systemd[1101]: podman-18813.scope: unit configures an IP firewall, but not running as root.
Oct 02 11:13:15 np0005466011.novalocal systemd[1101]: (This warning is only shown for the first unit using IP firewalling.)
Oct 02 11:13:15 np0005466011.novalocal systemd[1101]: Started podman-18813.scope.
Oct 02 11:13:15 np0005466011.novalocal systemd[1101]: Started podman-pause-ddcc20a0.scope.
Oct 02 11:13:18 np0005466011.novalocal sudo[20069]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vjczsblrfiejnpqoslpeuygiztogdwux ; /usr/bin/python3'
Oct 02 11:13:18 np0005466011.novalocal sudo[20069]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:13:18 np0005466011.novalocal python3[20077]: ansible-ansible.builtin.blockinfile Invoked with state=present insertafter=EOF dest=/etc/containers/registries.conf content=[[registry]]
                                                       location = "38.102.83.80:5001"
                                                       insecure = true path=/etc/containers/registries.conf block=[[registry]]
                                                       location = "38.102.83.80:5001"
                                                       insecure = true marker=# {mark} ANSIBLE MANAGED BLOCK create=False backup=False marker_begin=BEGIN marker_end=END unsafe_writes=False insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:13:18 np0005466011.novalocal sudo[20069]: pam_unix(sudo:session): session closed for user root
Oct 02 11:13:18 np0005466011.novalocal sshd-session[4696]: Connection closed by 38.102.83.114 port 33904
Oct 02 11:13:18 np0005466011.novalocal sshd-session[4693]: pam_unix(sshd:session): session closed for user zuul
Oct 02 11:13:18 np0005466011.novalocal systemd[1]: session-5.scope: Deactivated successfully.
Oct 02 11:13:18 np0005466011.novalocal systemd[1]: session-5.scope: Consumed 1min 1.322s CPU time.
Oct 02 11:13:18 np0005466011.novalocal systemd-logind[827]: Session 5 logged out. Waiting for processes to exit.
Oct 02 11:13:18 np0005466011.novalocal systemd-logind[827]: Removed session 5.
Oct 02 11:13:37 np0005466011.novalocal systemd[1]: man-db-cache-update.service: Deactivated successfully.
Oct 02 11:13:37 np0005466011.novalocal systemd[1]: Finished man-db-cache-update.service.
Oct 02 11:13:37 np0005466011.novalocal systemd[1]: man-db-cache-update.service: Consumed 52.136s CPU time.
Oct 02 11:13:37 np0005466011.novalocal systemd[1]: run-r6c61022d84ba4007a598e5616928a78f.service: Deactivated successfully.
Oct 02 11:13:44 np0005466011.novalocal sshd-session[26263]: Unable to negotiate with 38.129.56.185 port 54828: no matching host key type found. Their offer: sk-ecdsa-sha2-nistp256@openssh.com [preauth]
Oct 02 11:13:44 np0005466011.novalocal sshd-session[26266]: Unable to negotiate with 38.129.56.185 port 54844: no matching host key type found. Their offer: sk-ssh-ed25519@openssh.com [preauth]
Oct 02 11:13:44 np0005466011.novalocal sshd-session[26265]: Connection closed by 38.129.56.185 port 54796 [preauth]
Oct 02 11:13:44 np0005466011.novalocal sshd-session[26267]: Connection closed by 38.129.56.185 port 54810 [preauth]
Oct 02 11:13:44 np0005466011.novalocal sshd-session[26264]: Unable to negotiate with 38.129.56.185 port 54826: no matching host key type found. Their offer: ssh-ed25519 [preauth]
Oct 02 11:13:50 np0005466011.novalocal sshd-session[26273]: Accepted publickey for zuul from 38.102.83.114 port 42812 ssh2: RSA SHA256:cGVaibQZU+1xXpl3EOpnEmu1huhmLGviN6SuAxWWn+4
Oct 02 11:13:50 np0005466011.novalocal systemd-logind[827]: New session 6 of user zuul.
Oct 02 11:13:50 np0005466011.novalocal systemd[1]: Started Session 6 of User zuul.
Oct 02 11:13:50 np0005466011.novalocal sshd-session[26273]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Oct 02 11:13:51 np0005466011.novalocal python3[26300]: ansible-ansible.posix.authorized_key Invoked with user=zuul key=ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBG5wXEu1JOQA5KJoTkupC8GEbQNIbg6S2Q6Mp50kFLAjQIUiHO0Vf9azsWL1hcnqZwbQOjTwG/mdjPHjLP6jQ28= zuul@np0005466010.novalocal
                                                        manage_dir=True state=present exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:13:51 np0005466011.novalocal sudo[26324]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ovyjtwarqxafaeuiywwvrwefmwxgoxgu ; /usr/bin/python3'
Oct 02 11:13:51 np0005466011.novalocal sudo[26324]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:13:51 np0005466011.novalocal python3[26326]: ansible-ansible.posix.authorized_key Invoked with user=root key=ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBG5wXEu1JOQA5KJoTkupC8GEbQNIbg6S2Q6Mp50kFLAjQIUiHO0Vf9azsWL1hcnqZwbQOjTwG/mdjPHjLP6jQ28= zuul@np0005466010.novalocal
                                                        manage_dir=True state=present exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:13:51 np0005466011.novalocal sudo[26324]: pam_unix(sudo:session): session closed for user root
Oct 02 11:13:52 np0005466011.novalocal sudo[26350]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gkgbuszbyuzumfgcgwfapdkhhlaxljya ; /usr/bin/python3'
Oct 02 11:13:52 np0005466011.novalocal sudo[26350]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:13:52 np0005466011.novalocal python3[26352]: ansible-ansible.builtin.user Invoked with name=cloud-admin shell=/bin/bash state=present non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on np0005466011.novalocal update_password=always uid=None group=None groups=None comment=None home=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None
Oct 02 11:13:52 np0005466011.novalocal useradd[26354]: new group: name=cloud-admin, GID=1002
Oct 02 11:13:52 np0005466011.novalocal useradd[26354]: new user: name=cloud-admin, UID=1002, GID=1002, home=/home/cloud-admin, shell=/bin/bash, from=none
Oct 02 11:13:53 np0005466011.novalocal sudo[26350]: pam_unix(sudo:session): session closed for user root
Oct 02 11:13:53 np0005466011.novalocal sudo[26384]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qyyuyydawyqgpbstgwxwhtmjcrvrwncn ; /usr/bin/python3'
Oct 02 11:13:53 np0005466011.novalocal sudo[26384]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:13:53 np0005466011.novalocal python3[26386]: ansible-ansible.posix.authorized_key Invoked with user=cloud-admin key=ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBG5wXEu1JOQA5KJoTkupC8GEbQNIbg6S2Q6Mp50kFLAjQIUiHO0Vf9azsWL1hcnqZwbQOjTwG/mdjPHjLP6jQ28= zuul@np0005466010.novalocal
                                                        manage_dir=True state=present exclusive=False validate_certs=True follow=False path=None key_options=None comment=None
Oct 02 11:13:53 np0005466011.novalocal sudo[26384]: pam_unix(sudo:session): session closed for user root
Oct 02 11:13:53 np0005466011.novalocal sudo[26462]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pcjawoxaqsdxfwqittscoopudccxvqjg ; /usr/bin/python3'
Oct 02 11:13:53 np0005466011.novalocal sudo[26462]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:13:53 np0005466011.novalocal python3[26464]: ansible-ansible.legacy.stat Invoked with path=/etc/sudoers.d/cloud-admin follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Oct 02 11:13:53 np0005466011.novalocal sudo[26462]: pam_unix(sudo:session): session closed for user root
Oct 02 11:13:54 np0005466011.novalocal sudo[26535]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bfyyxmyhfshajbfmigjecmtistyfzzbk ; /usr/bin/python3'
Oct 02 11:13:54 np0005466011.novalocal sudo[26535]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:13:54 np0005466011.novalocal python3[26537]: ansible-ansible.legacy.copy Invoked with dest=/etc/sudoers.d/cloud-admin mode=0640 src=/home/zuul/.ansible/tmp/ansible-tmp-1759403633.5326014-167-145396306371648/source _original_basename=tmpu_amhg06 follow=False checksum=e7614e5ad3ab06eaae55b8efaa2ed81b63ea5634 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:13:54 np0005466011.novalocal sudo[26535]: pam_unix(sudo:session): session closed for user root
Oct 02 11:13:54 np0005466011.novalocal sudo[26585]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-srjlibwngijcglghradjukyukvlmjlgd ; /usr/bin/python3'
Oct 02 11:13:54 np0005466011.novalocal sudo[26585]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:13:55 np0005466011.novalocal python3[26587]: ansible-ansible.builtin.hostname Invoked with name=compute-0 use=systemd
Oct 02 11:13:55 np0005466011.novalocal systemd[1]: Starting Hostname Service...
Oct 02 11:13:55 np0005466011.novalocal systemd[1]: Started Hostname Service.
Oct 02 11:13:55 np0005466011.novalocal systemd-hostnamed[26591]: Changed pretty hostname to 'compute-0'
Oct 02 11:13:55 compute-0 systemd-hostnamed[26591]: Hostname set to <compute-0> (static)
Oct 02 11:13:55 compute-0 NetworkManager[3990]: <info>  [1759403635.3676] hostname: static hostname changed from "np0005466011.novalocal" to "compute-0"
Oct 02 11:13:55 compute-0 systemd[1]: Starting Network Manager Script Dispatcher Service...
Oct 02 11:13:55 compute-0 systemd[1]: Started Network Manager Script Dispatcher Service.
Oct 02 11:13:55 compute-0 sudo[26585]: pam_unix(sudo:session): session closed for user root
Oct 02 11:13:56 compute-0 sshd-session[26276]: Connection closed by 38.102.83.114 port 42812
Oct 02 11:13:56 compute-0 sshd-session[26273]: pam_unix(sshd:session): session closed for user zuul
Oct 02 11:13:56 compute-0 systemd-logind[827]: Session 6 logged out. Waiting for processes to exit.
Oct 02 11:13:56 compute-0 systemd[1]: session-6.scope: Deactivated successfully.
Oct 02 11:13:56 compute-0 systemd[1]: session-6.scope: Consumed 2.230s CPU time.
Oct 02 11:13:56 compute-0 systemd-logind[827]: Removed session 6.
Oct 02 11:14:05 compute-0 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
Oct 02 11:14:25 compute-0 systemd[1]: systemd-hostnamed.service: Deactivated successfully.
Oct 02 11:17:55 compute-0 PackageKit[6388]: daemon quit
Oct 02 11:17:55 compute-0 systemd[1]: packagekit.service: Deactivated successfully.
Oct 02 11:18:19 compute-0 sshd-session[26613]: Accepted publickey for zuul from 38.129.56.185 port 47462 ssh2: RSA SHA256:cGVaibQZU+1xXpl3EOpnEmu1huhmLGviN6SuAxWWn+4
Oct 02 11:18:19 compute-0 systemd-logind[827]: New session 7 of user zuul.
Oct 02 11:18:19 compute-0 systemd[1]: Started Session 7 of User zuul.
Oct 02 11:18:19 compute-0 sshd-session[26613]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Oct 02 11:18:20 compute-0 python3[26689]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:18:21 compute-0 sudo[26803]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vichvfyphjcaaomtlcgqwevfexaaucqf ; /usr/bin/python3'
Oct 02 11:18:21 compute-0 sudo[26803]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:18:21 compute-0 python3[26805]: ansible-ansible.legacy.stat Invoked with path=/etc/yum.repos.d/delorean.repo follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Oct 02 11:18:21 compute-0 sudo[26803]: pam_unix(sudo:session): session closed for user root
Oct 02 11:18:22 compute-0 sudo[26876]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xhnpzasiryysfzjwfhisfdckyfndaxyl ; /usr/bin/python3'
Oct 02 11:18:22 compute-0 sudo[26876]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:18:22 compute-0 python3[26878]: ansible-ansible.legacy.copy Invoked with dest=/etc/yum.repos.d/ src=/home/zuul/.ansible/tmp/ansible-tmp-1759403901.6002035-30693-255689855849291/source mode=0755 _original_basename=delorean.repo follow=False checksum=bb4c2ff9dad546f135d54d9729ea11b84117755d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:18:22 compute-0 sudo[26876]: pam_unix(sudo:session): session closed for user root
Oct 02 11:18:22 compute-0 sudo[26902]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lgucxzhaqiqvwdxqfyuunqtdozsowfyx ; /usr/bin/python3'
Oct 02 11:18:22 compute-0 sudo[26902]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:18:22 compute-0 python3[26904]: ansible-ansible.legacy.stat Invoked with path=/etc/yum.repos.d/delorean-antelope-testing.repo follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Oct 02 11:18:22 compute-0 sudo[26902]: pam_unix(sudo:session): session closed for user root
Oct 02 11:18:22 compute-0 sudo[26975]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ptjejzdndlutfsgquxmkgmxfgdgqpvav ; /usr/bin/python3'
Oct 02 11:18:22 compute-0 sudo[26975]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:18:23 compute-0 python3[26977]: ansible-ansible.legacy.copy Invoked with dest=/etc/yum.repos.d/ src=/home/zuul/.ansible/tmp/ansible-tmp-1759403901.6002035-30693-255689855849291/source mode=0755 _original_basename=delorean-antelope-testing.repo follow=False checksum=0bdbb813b840548359ae77c28d76ca272ccaf31b backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:18:23 compute-0 sudo[26975]: pam_unix(sudo:session): session closed for user root
Oct 02 11:18:23 compute-0 sudo[27001]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vvxxuqxbqsaibxxnyjxbfzrqotrxhrth ; /usr/bin/python3'
Oct 02 11:18:23 compute-0 sudo[27001]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:18:23 compute-0 python3[27003]: ansible-ansible.legacy.stat Invoked with path=/etc/yum.repos.d/repo-setup-centos-highavailability.repo follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Oct 02 11:18:23 compute-0 sudo[27001]: pam_unix(sudo:session): session closed for user root
Oct 02 11:18:23 compute-0 sudo[27074]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-oylncofmvvvmnmvdvkyvrxlftgvappio ; /usr/bin/python3'
Oct 02 11:18:23 compute-0 sudo[27074]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:18:23 compute-0 python3[27076]: ansible-ansible.legacy.copy Invoked with dest=/etc/yum.repos.d/ src=/home/zuul/.ansible/tmp/ansible-tmp-1759403901.6002035-30693-255689855849291/source mode=0755 _original_basename=repo-setup-centos-highavailability.repo follow=False checksum=55d0f695fd0d8f47cbc3044ce0dcf5f88862490f backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:18:23 compute-0 sudo[27074]: pam_unix(sudo:session): session closed for user root
Oct 02 11:18:23 compute-0 sudo[27100]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wupvregwzcyhvwgikjodbhrmqrqrhzoe ; /usr/bin/python3'
Oct 02 11:18:23 compute-0 sudo[27100]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:18:23 compute-0 python3[27102]: ansible-ansible.legacy.stat Invoked with path=/etc/yum.repos.d/repo-setup-centos-powertools.repo follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Oct 02 11:18:23 compute-0 sudo[27100]: pam_unix(sudo:session): session closed for user root
Oct 02 11:18:23 compute-0 sudo[27173]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ctvxnxvvctqadagryppfgogkedjofofh ; /usr/bin/python3'
Oct 02 11:18:23 compute-0 sudo[27173]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:18:24 compute-0 python3[27175]: ansible-ansible.legacy.copy Invoked with dest=/etc/yum.repos.d/ src=/home/zuul/.ansible/tmp/ansible-tmp-1759403901.6002035-30693-255689855849291/source mode=0755 _original_basename=repo-setup-centos-powertools.repo follow=False checksum=4b0cf99aa89c5c5be0151545863a7a7568f67568 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:18:24 compute-0 sudo[27173]: pam_unix(sudo:session): session closed for user root
Oct 02 11:18:24 compute-0 sudo[27199]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lqrkffpygfurzyzjqfobxbovmnkrzgjl ; /usr/bin/python3'
Oct 02 11:18:24 compute-0 sudo[27199]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:18:24 compute-0 python3[27201]: ansible-ansible.legacy.stat Invoked with path=/etc/yum.repos.d/repo-setup-centos-appstream.repo follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Oct 02 11:18:24 compute-0 sudo[27199]: pam_unix(sudo:session): session closed for user root
Oct 02 11:18:24 compute-0 sudo[27272]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zzoqpzqjrthnvadacqfkajikqspjzlvu ; /usr/bin/python3'
Oct 02 11:18:24 compute-0 sudo[27272]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:18:24 compute-0 python3[27274]: ansible-ansible.legacy.copy Invoked with dest=/etc/yum.repos.d/ src=/home/zuul/.ansible/tmp/ansible-tmp-1759403901.6002035-30693-255689855849291/source mode=0755 _original_basename=repo-setup-centos-appstream.repo follow=False checksum=e89244d2503b2996429dda1857290c1e91e393a1 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:18:24 compute-0 sudo[27272]: pam_unix(sudo:session): session closed for user root
Oct 02 11:18:24 compute-0 sudo[27298]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zpbghazakotcsgbuclhnymnwqvqwpiqp ; /usr/bin/python3'
Oct 02 11:18:24 compute-0 sudo[27298]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:18:24 compute-0 python3[27300]: ansible-ansible.legacy.stat Invoked with path=/etc/yum.repos.d/repo-setup-centos-baseos.repo follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Oct 02 11:18:24 compute-0 sudo[27298]: pam_unix(sudo:session): session closed for user root
Oct 02 11:18:25 compute-0 sudo[27371]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hhcdbejsqxdsszzdyyrbimamnocpcnlw ; /usr/bin/python3'
Oct 02 11:18:25 compute-0 sudo[27371]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:18:25 compute-0 python3[27373]: ansible-ansible.legacy.copy Invoked with dest=/etc/yum.repos.d/ src=/home/zuul/.ansible/tmp/ansible-tmp-1759403901.6002035-30693-255689855849291/source mode=0755 _original_basename=repo-setup-centos-baseos.repo follow=False checksum=36d926db23a40dbfa5c84b5e4d43eac6fa2301d6 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:18:25 compute-0 sudo[27371]: pam_unix(sudo:session): session closed for user root
Oct 02 11:18:25 compute-0 sudo[27397]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-maamgyhwzwdhaqtuvhogddoofpluritw ; /usr/bin/python3'
Oct 02 11:18:25 compute-0 sudo[27397]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:18:25 compute-0 python3[27399]: ansible-ansible.legacy.stat Invoked with path=/etc/yum.repos.d/delorean.repo.md5 follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True
Oct 02 11:18:25 compute-0 sudo[27397]: pam_unix(sudo:session): session closed for user root
Oct 02 11:18:25 compute-0 sudo[27470]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-plpowhbeylblqyjulisvmxlgbgcrohom ; /usr/bin/python3'
Oct 02 11:18:25 compute-0 sudo[27470]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:18:25 compute-0 python3[27472]: ansible-ansible.legacy.copy Invoked with dest=/etc/yum.repos.d/ src=/home/zuul/.ansible/tmp/ansible-tmp-1759403901.6002035-30693-255689855849291/source mode=0755 _original_basename=delorean.repo.md5 follow=False checksum=d911291791b114a72daf18f370e91cb1ae300933 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:18:25 compute-0 sudo[27470]: pam_unix(sudo:session): session closed for user root
Oct 02 11:18:28 compute-0 sshd-session[27497]: Connection closed by 192.168.122.11 port 59580 [preauth]
Oct 02 11:18:28 compute-0 sshd-session[27499]: Unable to negotiate with 192.168.122.11 port 59590: no matching host key type found. Their offer: ssh-ed25519 [preauth]
Oct 02 11:18:28 compute-0 sshd-session[27498]: Unable to negotiate with 192.168.122.11 port 59602: no matching host key type found. Their offer: sk-ecdsa-sha2-nistp256@openssh.com [preauth]
Oct 02 11:18:28 compute-0 sshd-session[27500]: Unable to negotiate with 192.168.122.11 port 59610: no matching host key type found. Their offer: sk-ssh-ed25519@openssh.com [preauth]
Oct 02 11:18:28 compute-0 sshd-session[27501]: Connection closed by 192.168.122.11 port 59570 [preauth]
Oct 02 11:18:34 compute-0 python3[27530]: ansible-ansible.legacy.command Invoked with _raw_params=hostname _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:23:34 compute-0 sshd-session[26616]: Received disconnect from 38.129.56.185 port 47462:11: disconnected by user
Oct 02 11:23:34 compute-0 sshd-session[26616]: Disconnected from user zuul 38.129.56.185 port 47462
Oct 02 11:23:34 compute-0 sshd-session[26613]: pam_unix(sshd:session): session closed for user zuul
Oct 02 11:23:34 compute-0 systemd[1]: session-7.scope: Deactivated successfully.
Oct 02 11:23:34 compute-0 systemd[1]: session-7.scope: Consumed 4.429s CPU time.
Oct 02 11:23:34 compute-0 systemd-logind[827]: Session 7 logged out. Waiting for processes to exit.
Oct 02 11:23:34 compute-0 systemd-logind[827]: Removed session 7.
Oct 02 11:32:51 compute-0 sshd-session[27537]: Accepted publickey for zuul from 192.168.122.30 port 50894 ssh2: ECDSA SHA256:tAEsFSop2MjuMQQ/UqKU2uCkxqWXGfsM5crdhd6tlI4
Oct 02 11:32:51 compute-0 systemd-logind[827]: New session 8 of user zuul.
Oct 02 11:32:51 compute-0 systemd[1]: Started Session 8 of User zuul.
Oct 02 11:32:51 compute-0 sshd-session[27537]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Oct 02 11:32:53 compute-0 python3.9[27690]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:32:54 compute-0 sudo[27869]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-eyinkjhbqjrlsmkxfmpkpqkvchwjnswu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404773.8227575-61-264363102064342/AnsiballZ_command.py'
Oct 02 11:32:54 compute-0 sudo[27869]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:32:54 compute-0 python3.9[27871]: ansible-ansible.legacy.command Invoked with _raw_params=set -euxo pipefail
                                            pushd /var/tmp
                                            curl -sL https://github.com/openstack-k8s-operators/repo-setup/archive/refs/heads/main.tar.gz | tar -xz
                                            pushd repo-setup-main
                                            python3 -m venv ./venv
                                            PBR_VERSION=0.0.0 ./venv/bin/pip install ./
                                            ./venv/bin/repo-setup current-podified -b antelope
                                            popd
                                            rm -rf repo-setup-main
                                             _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:33:01 compute-0 sudo[27869]: pam_unix(sudo:session): session closed for user root
Oct 02 11:33:03 compute-0 sshd-session[27540]: Connection closed by 192.168.122.30 port 50894
Oct 02 11:33:03 compute-0 sshd-session[27537]: pam_unix(sshd:session): session closed for user zuul
Oct 02 11:33:03 compute-0 systemd[1]: session-8.scope: Deactivated successfully.
Oct 02 11:33:03 compute-0 systemd[1]: session-8.scope: Consumed 7.517s CPU time.
Oct 02 11:33:03 compute-0 systemd-logind[827]: Session 8 logged out. Waiting for processes to exit.
Oct 02 11:33:03 compute-0 systemd-logind[827]: Removed session 8.
Oct 02 11:33:19 compute-0 sshd-session[27929]: Accepted publickey for zuul from 192.168.122.30 port 52606 ssh2: ECDSA SHA256:tAEsFSop2MjuMQQ/UqKU2uCkxqWXGfsM5crdhd6tlI4
Oct 02 11:33:19 compute-0 systemd-logind[827]: New session 9 of user zuul.
Oct 02 11:33:19 compute-0 systemd[1]: Started Session 9 of User zuul.
Oct 02 11:33:19 compute-0 sshd-session[27929]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Oct 02 11:33:20 compute-0 python3.9[28082]: ansible-ansible.legacy.ping Invoked with data=pong
Oct 02 11:33:21 compute-0 python3.9[28256]: ansible-ansible.builtin.setup Invoked with gather_subset=['!all', '!min', 'local'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:33:22 compute-0 sudo[28406]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xhwnuqaxrurgsogfvisvtiujlabjjdvt ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404801.9810793-98-22361322677224/AnsiballZ_command.py'
Oct 02 11:33:22 compute-0 sudo[28406]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:33:22 compute-0 python3.9[28408]: ansible-ansible.legacy.command Invoked with _raw_params=PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin which growvols
                                             _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:33:22 compute-0 sudo[28406]: pam_unix(sudo:session): session closed for user root
Oct 02 11:33:23 compute-0 sudo[28559]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zkenszfizfkfpuqnmysyqboqxsuzqxuj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404803.063431-134-74103008200310/AnsiballZ_stat.py'
Oct 02 11:33:23 compute-0 sudo[28559]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:33:23 compute-0 python3.9[28561]: ansible-ansible.builtin.stat Invoked with path=/etc/ansible/facts.d/bootc.fact follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:33:23 compute-0 sudo[28559]: pam_unix(sudo:session): session closed for user root
Oct 02 11:33:24 compute-0 sudo[28711]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jkochiyzhdxdoxlvrzhfvhmbizahhsgq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404803.9210916-158-15762183505367/AnsiballZ_file.py'
Oct 02 11:33:24 compute-0 sudo[28711]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:33:24 compute-0 python3.9[28713]: ansible-ansible.builtin.file Invoked with mode=755 path=/etc/ansible/facts.d state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:33:24 compute-0 sudo[28711]: pam_unix(sudo:session): session closed for user root
Oct 02 11:33:25 compute-0 sudo[28864]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-njqblhbpldivwyjxsokhpontlwvkwpnf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404804.7675307-182-200515547788691/AnsiballZ_stat.py'
Oct 02 11:33:25 compute-0 sudo[28864]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:33:25 compute-0 python3.9[28866]: ansible-ansible.legacy.stat Invoked with path=/etc/ansible/facts.d/bootc.fact follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:33:25 compute-0 sudo[28864]: pam_unix(sudo:session): session closed for user root
Oct 02 11:33:25 compute-0 sudo[28987]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nupsipzepzbjpxbzbeyholijqunlcvll ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404804.7675307-182-200515547788691/AnsiballZ_copy.py'
Oct 02 11:33:25 compute-0 sudo[28987]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:33:25 compute-0 python3.9[28989]: ansible-ansible.legacy.copy Invoked with dest=/etc/ansible/facts.d/bootc.fact mode=755 src=/home/zuul/.ansible/tmp/ansible-tmp-1759404804.7675307-182-200515547788691/.source.fact _original_basename=bootc.fact follow=False checksum=eb4122ce7fc50a38407beb511c4ff8c178005b12 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:33:25 compute-0 sudo[28987]: pam_unix(sudo:session): session closed for user root
Oct 02 11:33:26 compute-0 sudo[29139]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ykppmymastwhqmpbygtdmjbnekhhvcsm ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404806.250249-227-75927633335558/AnsiballZ_setup.py'
Oct 02 11:33:26 compute-0 sudo[29139]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:33:26 compute-0 python3.9[29141]: ansible-ansible.builtin.setup Invoked with gather_subset=['!all', '!min', 'local'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:33:27 compute-0 sudo[29139]: pam_unix(sudo:session): session closed for user root
Oct 02 11:33:27 compute-0 sudo[29295]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ghgsscyyeqkbgnddqpanrkxgpprrobmx ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404807.3226016-251-101840381274305/AnsiballZ_file.py'
Oct 02 11:33:27 compute-0 sudo[29295]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:33:27 compute-0 python3.9[29297]: ansible-ansible.builtin.file Invoked with group=root mode=0750 owner=root path=/var/log/journal setype=var_log_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:33:27 compute-0 sudo[29295]: pam_unix(sudo:session): session closed for user root
Oct 02 11:33:28 compute-0 python3.9[29447]: ansible-ansible.builtin.service_facts Invoked
Oct 02 11:33:33 compute-0 python3.9[29702]: ansible-ansible.builtin.lineinfile Invoked with line=cloud-init=disabled path=/proc/cmdline state=present backrefs=False create=False backup=False firstmatch=False unsafe_writes=False regexp=None search_string=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:33:34 compute-0 python3.9[29852]: ansible-ansible.builtin.setup Invoked with gather_subset=['!all', '!min', 'local'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:33:35 compute-0 python3.9[30006]: ansible-ansible.builtin.setup Invoked with gather_subset=['!all', '!min', 'local', 'distribution'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:33:36 compute-0 sudo[30162]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qivcvnzlteeuxhrecqpwedmncycrhhnq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404816.3733597-395-263865643842311/AnsiballZ_setup.py'
Oct 02 11:33:36 compute-0 sudo[30162]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:33:36 compute-0 python3.9[30164]: ansible-ansible.legacy.setup Invoked with filter=['ansible_pkg_mgr'] gather_subset=['!all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Oct 02 11:33:37 compute-0 sudo[30162]: pam_unix(sudo:session): session closed for user root
Oct 02 11:33:37 compute-0 sudo[30246]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mkmarcswqskybhnrnwmifbzovpenksnn ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404816.3733597-395-263865643842311/AnsiballZ_dnf.py'
Oct 02 11:33:37 compute-0 sudo[30246]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:33:37 compute-0 python3.9[30248]: ansible-ansible.legacy.dnf Invoked with name=['driverctl', 'lvm2', 'crudini', 'jq', 'nftables', 'NetworkManager', 'openstack-selinux', 'python3-libselinux', 'python3-pyyaml', 'rsync', 'tmpwatch', 'sysstat', 'iproute-tc', 'ksmtuned', 'systemd-container', 'crypto-policies-scripts', 'grubby', 'sos'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 02 11:34:21 compute-0 systemd[1]: Reloading.
Oct 02 11:34:21 compute-0 systemd-rc-local-generator[30449]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:34:21 compute-0 systemd[1]: Listening on Device-mapper event daemon FIFOs.
Oct 02 11:34:21 compute-0 systemd[1]: Reloading.
Oct 02 11:34:21 compute-0 systemd-rc-local-generator[30489]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:34:22 compute-0 systemd[1]: Starting Monitoring of LVM2 mirrors, snapshots etc. using dmeventd or progress polling...
Oct 02 11:34:22 compute-0 systemd[1]: Finished Monitoring of LVM2 mirrors, snapshots etc. using dmeventd or progress polling.
Oct 02 11:34:22 compute-0 systemd[1]: Reloading.
Oct 02 11:34:22 compute-0 systemd-rc-local-generator[30525]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:34:22 compute-0 systemd[1]: Listening on LVM2 poll daemon socket.
Oct 02 11:34:22 compute-0 dbus-broker-launch[817]: Noticed file-system modification, trigger reload.
Oct 02 11:34:22 compute-0 dbus-broker-launch[817]: Noticed file-system modification, trigger reload.
Oct 02 11:34:22 compute-0 dbus-broker-launch[817]: Noticed file-system modification, trigger reload.
Oct 02 11:35:26 compute-0 kernel: SELinux:  Converting 2713 SID table entries...
Oct 02 11:35:26 compute-0 kernel: SELinux:  policy capability network_peer_controls=1
Oct 02 11:35:26 compute-0 kernel: SELinux:  policy capability open_perms=1
Oct 02 11:35:26 compute-0 kernel: SELinux:  policy capability extended_socket_class=1
Oct 02 11:35:26 compute-0 kernel: SELinux:  policy capability always_check_network=0
Oct 02 11:35:26 compute-0 kernel: SELinux:  policy capability cgroup_seclabel=1
Oct 02 11:35:26 compute-0 kernel: SELinux:  policy capability nnp_nosuid_transition=1
Oct 02 11:35:26 compute-0 kernel: SELinux:  policy capability genfs_seclabel_symlinks=1
Oct 02 11:35:26 compute-0 dbus-broker-launch[818]: avc:  op=load_policy lsm=selinux seqno=8 res=1
Oct 02 11:35:26 compute-0 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update.
Oct 02 11:35:26 compute-0 systemd[1]: Starting man-db-cache-update.service...
Oct 02 11:35:26 compute-0 systemd[1]: Reloading.
Oct 02 11:35:26 compute-0 systemd-rc-local-generator[30865]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:35:26 compute-0 systemd[1]: Queuing reload/restart jobs for marked units…
Oct 02 11:35:26 compute-0 systemd[1]: Starting PackageKit Daemon...
Oct 02 11:35:26 compute-0 PackageKit[31115]: daemon start
Oct 02 11:35:26 compute-0 systemd[1]: Started PackageKit Daemon.
Oct 02 11:35:27 compute-0 sudo[30246]: pam_unix(sudo:session): session closed for user root
Oct 02 11:35:27 compute-0 systemd[1]: man-db-cache-update.service: Deactivated successfully.
Oct 02 11:35:27 compute-0 systemd[1]: Finished man-db-cache-update.service.
Oct 02 11:35:27 compute-0 systemd[1]: man-db-cache-update.service: Consumed 1.119s CPU time.
Oct 02 11:35:27 compute-0 systemd[1]: run-r4d3eaeeba2f947df87d04c577aae516c.service: Deactivated successfully.
Oct 02 11:35:39 compute-0 sudo[31783]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-caasfhkyqagqyveltensmrciuiphqvhb ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404939.5714576-431-169952830216477/AnsiballZ_command.py'
Oct 02 11:35:39 compute-0 sudo[31783]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:35:39 compute-0 python3.9[31785]: ansible-ansible.legacy.command Invoked with _raw_params=rpm -V driverctl lvm2 crudini jq nftables NetworkManager openstack-selinux python3-libselinux python3-pyyaml rsync tmpwatch sysstat iproute-tc ksmtuned systemd-container crypto-policies-scripts grubby sos _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:35:41 compute-0 sudo[31783]: pam_unix(sudo:session): session closed for user root
Oct 02 11:35:42 compute-0 sudo[32064]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ecxakxvjtmxznpjvyocomhcostckbrjr ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404942.0936108-455-196695653822512/AnsiballZ_selinux.py'
Oct 02 11:35:42 compute-0 sudo[32064]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:35:43 compute-0 python3.9[32066]: ansible-ansible.posix.selinux Invoked with policy=targeted state=enforcing configfile=/etc/selinux/config update_kernel_param=False
Oct 02 11:35:43 compute-0 sudo[32064]: pam_unix(sudo:session): session closed for user root
Oct 02 11:35:43 compute-0 sudo[32216]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tmtsefjzoiyuvagiatlmxgfxhhsoxqqz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404943.5687509-488-194706204620215/AnsiballZ_command.py'
Oct 02 11:35:43 compute-0 sudo[32216]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:35:44 compute-0 python3.9[32218]: ansible-ansible.legacy.command Invoked with cmd=dd if=/dev/zero of=/swap count=1024 bs=1M creates=/swap _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True _raw_params=None argv=None chdir=None executable=None removes=None stdin=None
Oct 02 11:35:45 compute-0 sudo[32216]: pam_unix(sudo:session): session closed for user root
Oct 02 11:35:45 compute-0 sudo[32369]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zlqhthirxozfufpnpyikdlnuosffaibf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404945.1949353-512-79731490337337/AnsiballZ_file.py'
Oct 02 11:35:45 compute-0 sudo[32369]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:35:47 compute-0 python3.9[32371]: ansible-ansible.builtin.file Invoked with group=root mode=0600 owner=root path=/swap recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False state=None _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:35:47 compute-0 sudo[32369]: pam_unix(sudo:session): session closed for user root
Oct 02 11:35:48 compute-0 sudo[32522]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kqefeccmzsuiecddwbrnqoxzfhfuduhs ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404947.9008288-536-15871078392697/AnsiballZ_mount.py'
Oct 02 11:35:48 compute-0 sudo[32522]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:35:55 compute-0 python3.9[32524]: ansible-ansible.posix.mount Invoked with dump=0 fstype=swap name=none opts=sw passno=0 src=/swap state=present path=none boot=True opts_no_log=False backup=False fstab=None
Oct 02 11:35:55 compute-0 sudo[32522]: pam_unix(sudo:session): session closed for user root
Oct 02 11:35:56 compute-0 sudo[32674]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ejrateezoxkmpqalecpdnedfsjzfxpui ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404956.4847624-620-204402909584091/AnsiballZ_file.py'
Oct 02 11:35:56 compute-0 sudo[32674]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:35:56 compute-0 python3.9[32676]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/etc/pki/ca-trust/source/anchors setype=cert_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:35:56 compute-0 sudo[32674]: pam_unix(sudo:session): session closed for user root
Oct 02 11:35:57 compute-0 sudo[32826]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wemxerlhbvhazdrcyqktgqzncpzrypjj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404957.234483-644-198434095377054/AnsiballZ_stat.py'
Oct 02 11:35:57 compute-0 sudo[32826]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:35:57 compute-0 python3.9[32828]: ansible-ansible.legacy.stat Invoked with path=/etc/pki/ca-trust/source/anchors/tls-ca-bundle.pem follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:35:57 compute-0 sudo[32826]: pam_unix(sudo:session): session closed for user root
Oct 02 11:35:58 compute-0 sudo[32949]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cweznaloqvvhlxqtfmcpimlfbmtmwhww ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404957.234483-644-198434095377054/AnsiballZ_copy.py'
Oct 02 11:35:58 compute-0 sudo[32949]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:35:58 compute-0 python3.9[32951]: ansible-ansible.legacy.copy Invoked with dest=/etc/pki/ca-trust/source/anchors/tls-ca-bundle.pem group=root mode=0644 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759404957.234483-644-198434095377054/.source.pem _original_basename=tls-ca-bundle.pem follow=False checksum=74de1ba89bc28b0be0e3b8a77822f232ede7d253 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:35:58 compute-0 sudo[32949]: pam_unix(sudo:session): session closed for user root
Oct 02 11:35:59 compute-0 sudo[33101]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bqogrfvhljegpbuczwieupqlfikxujol ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404959.3341022-725-151586878845756/AnsiballZ_getent.py'
Oct 02 11:35:59 compute-0 sudo[33101]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:00 compute-0 python3.9[33103]: ansible-ansible.builtin.getent Invoked with database=passwd key=qemu fail_key=True service=None split=None
Oct 02 11:36:00 compute-0 sudo[33101]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:00 compute-0 sudo[33254]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-auilghxfxrnkuyvhbrfycmnyghrsyxvr ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404960.36636-749-180848112028810/AnsiballZ_group.py'
Oct 02 11:36:00 compute-0 sudo[33254]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:01 compute-0 python3.9[33256]: ansible-ansible.builtin.group Invoked with gid=107 name=qemu state=present force=False system=False local=False non_unique=False gid_min=None gid_max=None
Oct 02 11:36:01 compute-0 groupadd[33257]: group added to /etc/group: name=qemu, GID=107
Oct 02 11:36:01 compute-0 rsyslogd[1013]: imjournal: journal files changed, reloading...  [v8.2506.0-2.el9 try https://www.rsyslog.com/e/0 ]
Oct 02 11:36:01 compute-0 groupadd[33257]: group added to /etc/gshadow: name=qemu
Oct 02 11:36:01 compute-0 groupadd[33257]: new group: name=qemu, GID=107
Oct 02 11:36:01 compute-0 sudo[33254]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:01 compute-0 sudo[33413]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xgxowouivaltpicsofkeyjyxgmsykmmk ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404961.5020676-773-173804246531569/AnsiballZ_user.py'
Oct 02 11:36:01 compute-0 sudo[33413]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:02 compute-0 python3.9[33415]: ansible-ansible.builtin.user Invoked with comment=qemu user group=qemu groups=[''] name=qemu shell=/sbin/nologin state=present uid=107 non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on compute-0 update_password=always home=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None password_expire_account_disable=None uid_min=None uid_max=None
Oct 02 11:36:02 compute-0 useradd[33417]: new user: name=qemu, UID=107, GID=107, home=/home/qemu, shell=/sbin/nologin, from=/dev/pts/0
Oct 02 11:36:02 compute-0 sudo[33413]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:03 compute-0 sudo[33573]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-crvyabaprenejztjbaitzrnlanpcdtqf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404962.8811505-797-52848191743838/AnsiballZ_getent.py'
Oct 02 11:36:03 compute-0 sudo[33573]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:03 compute-0 python3.9[33575]: ansible-ansible.builtin.getent Invoked with database=passwd key=hugetlbfs fail_key=True service=None split=None
Oct 02 11:36:03 compute-0 sudo[33573]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:03 compute-0 sudo[33726]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tqnoygzolumbrvlbygmfiyyeygcpjydj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404963.5651827-821-254064397417197/AnsiballZ_group.py'
Oct 02 11:36:03 compute-0 sudo[33726]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:04 compute-0 python3.9[33728]: ansible-ansible.builtin.group Invoked with gid=42477 name=hugetlbfs state=present force=False system=False local=False non_unique=False gid_min=None gid_max=None
Oct 02 11:36:04 compute-0 groupadd[33729]: group added to /etc/group: name=hugetlbfs, GID=42477
Oct 02 11:36:04 compute-0 groupadd[33729]: group added to /etc/gshadow: name=hugetlbfs
Oct 02 11:36:04 compute-0 groupadd[33729]: new group: name=hugetlbfs, GID=42477
Oct 02 11:36:04 compute-0 sudo[33726]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:04 compute-0 sudo[33884]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wxrslpehtrruwsulzbmkoamskbefpxes ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404964.4219217-848-268044221205634/AnsiballZ_file.py'
Oct 02 11:36:04 compute-0 sudo[33884]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:05 compute-0 python3.9[33886]: ansible-ansible.builtin.file Invoked with group=qemu mode=0755 owner=qemu path=/var/lib/vhost_sockets setype=virt_cache_t seuser=system_u state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None serole=None selevel=None attributes=None
Oct 02 11:36:05 compute-0 sudo[33884]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:05 compute-0 sudo[34036]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-owtjvecbyhqexzexbmgibrdbirksyima ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404965.4780464-881-56324655475687/AnsiballZ_dnf.py'
Oct 02 11:36:05 compute-0 sudo[34036]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:05 compute-0 python3.9[34038]: ansible-ansible.legacy.dnf Invoked with name=['dracut-config-generic'] state=absent allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 02 11:36:07 compute-0 sudo[34036]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:08 compute-0 sudo[34189]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kaogypykojpevdgncawltqwjpvyiynax ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404967.844675-905-39731170441759/AnsiballZ_file.py'
Oct 02 11:36:08 compute-0 sudo[34189]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:08 compute-0 python3.9[34191]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/etc/modules-load.d setype=etc_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:36:08 compute-0 sudo[34189]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:08 compute-0 sudo[34341]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hlndsmltfehdxiwkxhfjamfstjzzaeqe ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404968.6094558-929-229278000048559/AnsiballZ_stat.py'
Oct 02 11:36:08 compute-0 sudo[34341]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:09 compute-0 python3.9[34343]: ansible-ansible.legacy.stat Invoked with path=/etc/modules-load.d/99-edpm.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:36:09 compute-0 sudo[34341]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:09 compute-0 sudo[34464]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sqdzhsmgumftsyozmhsueldrtranuojc ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404968.6094558-929-229278000048559/AnsiballZ_copy.py'
Oct 02 11:36:09 compute-0 sudo[34464]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:09 compute-0 python3.9[34466]: ansible-ansible.legacy.copy Invoked with dest=/etc/modules-load.d/99-edpm.conf group=root mode=0644 owner=root setype=etc_t src=/home/zuul/.ansible/tmp/ansible-tmp-1759404968.6094558-929-229278000048559/.source.conf follow=False _original_basename=edpm-modprobe.conf.j2 checksum=8021efe01721d8fa8cab46b95c00ec1be6dbb9d0 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:36:09 compute-0 sudo[34464]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:10 compute-0 sudo[34616]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tfnntnertrtpiahrvuqoxxlfogxnhxgk ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404970.0005271-974-10543659907765/AnsiballZ_systemd.py'
Oct 02 11:36:10 compute-0 sudo[34616]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:10 compute-0 python3.9[34618]: ansible-ansible.builtin.systemd Invoked with name=systemd-modules-load.service state=restarted daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Oct 02 11:36:10 compute-0 systemd[1]: Starting Load Kernel Modules...
Oct 02 11:36:10 compute-0 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this.
Oct 02 11:36:10 compute-0 kernel: Bridge firewalling registered
Oct 02 11:36:10 compute-0 systemd-modules-load[34622]: Inserted module 'br_netfilter'
Oct 02 11:36:10 compute-0 systemd[1]: Finished Load Kernel Modules.
Oct 02 11:36:10 compute-0 sudo[34616]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:11 compute-0 sudo[34775]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-msltljfvnpyiefdhchejxufqziijtekz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404971.2269154-998-36526104127207/AnsiballZ_stat.py'
Oct 02 11:36:11 compute-0 sudo[34775]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:11 compute-0 python3.9[34777]: ansible-ansible.legacy.stat Invoked with path=/etc/sysctl.d/99-edpm.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:36:11 compute-0 sudo[34775]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:11 compute-0 sudo[34898]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jbkmdrzdwlscincqmobjzxkgzxmombfs ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404971.2269154-998-36526104127207/AnsiballZ_copy.py'
Oct 02 11:36:11 compute-0 sudo[34898]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:12 compute-0 python3.9[34900]: ansible-ansible.legacy.copy Invoked with dest=/etc/sysctl.d/99-edpm.conf group=root mode=0644 owner=root setype=etc_t src=/home/zuul/.ansible/tmp/ansible-tmp-1759404971.2269154-998-36526104127207/.source.conf follow=False _original_basename=edpm-sysctl.conf.j2 checksum=2a366439721b855adcfe4d7f152babb68596a007 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:36:12 compute-0 sudo[34898]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:12 compute-0 sudo[35050]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-aftedhvxzlgjtddqmtgggbyhkdkpiivd ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404972.6234896-1052-139915829840175/AnsiballZ_dnf.py'
Oct 02 11:36:12 compute-0 sudo[35050]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:13 compute-0 python3.9[35052]: ansible-ansible.legacy.dnf Invoked with name=['tuned', 'tuned-profiles-cpu-partitioning'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 02 11:36:15 compute-0 dbus-broker-launch[817]: Noticed file-system modification, trigger reload.
Oct 02 11:36:15 compute-0 dbus-broker-launch[817]: Noticed file-system modification, trigger reload.
Oct 02 11:36:16 compute-0 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update.
Oct 02 11:36:16 compute-0 systemd[1]: Starting man-db-cache-update.service...
Oct 02 11:36:16 compute-0 systemd[1]: Reloading.
Oct 02 11:36:16 compute-0 systemd-rc-local-generator[35111]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:36:16 compute-0 systemd[1]: Queuing reload/restart jobs for marked units…
Oct 02 11:36:16 compute-0 sudo[35050]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:19 compute-0 python3.9[38090]: ansible-ansible.builtin.stat Invoked with path=/etc/tuned/active_profile follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:36:19 compute-0 systemd[1]: man-db-cache-update.service: Deactivated successfully.
Oct 02 11:36:19 compute-0 systemd[1]: Finished man-db-cache-update.service.
Oct 02 11:36:19 compute-0 systemd[1]: man-db-cache-update.service: Consumed 4.234s CPU time.
Oct 02 11:36:19 compute-0 systemd[1]: run-r6c3508f167314b5f9f7b3a9ccedb15ca.service: Deactivated successfully.
Oct 02 11:36:19 compute-0 python3.9[38913]: ansible-ansible.builtin.slurp Invoked with src=/etc/tuned/active_profile
Oct 02 11:36:20 compute-0 python3.9[39063]: ansible-ansible.builtin.stat Invoked with path=/etc/tuned/throughput-performance-variables.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:36:21 compute-0 sudo[39213]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mwyryubatkcbyxcfbfgbotylkwrhplii ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404980.8279831-1169-234431620864556/AnsiballZ_command.py'
Oct 02 11:36:21 compute-0 sudo[39213]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:21 compute-0 python3.9[39215]: ansible-ansible.legacy.command Invoked with _raw_params=/usr/sbin/tuned-adm profile throughput-performance _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:36:21 compute-0 systemd[1]: Starting Dynamic System Tuning Daemon...
Oct 02 11:36:21 compute-0 systemd[1]: Started Dynamic System Tuning Daemon.
Oct 02 11:36:21 compute-0 sudo[39213]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:22 compute-0 sudo[39586]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hmbdzglebnlwdeofwqlxhglqrqegheid ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404982.27518-1196-110138911508001/AnsiballZ_systemd.py'
Oct 02 11:36:22 compute-0 sudo[39586]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:22 compute-0 python3.9[39588]: ansible-ansible.builtin.systemd Invoked with enabled=True name=tuned state=restarted daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:36:22 compute-0 systemd[1]: Stopping Dynamic System Tuning Daemon...
Oct 02 11:36:22 compute-0 systemd[1]: tuned.service: Deactivated successfully.
Oct 02 11:36:22 compute-0 systemd[1]: Stopped Dynamic System Tuning Daemon.
Oct 02 11:36:22 compute-0 systemd[1]: Starting Dynamic System Tuning Daemon...
Oct 02 11:36:23 compute-0 systemd[1]: Started Dynamic System Tuning Daemon.
Oct 02 11:36:23 compute-0 sudo[39586]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:23 compute-0 python3.9[39749]: ansible-ansible.builtin.slurp Invoked with src=/proc/cmdline
Oct 02 11:36:26 compute-0 sudo[39899]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jqetfzpyhzqxglyjwynwwhtkcprzrmzq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404986.4896781-1367-84229548504255/AnsiballZ_systemd.py'
Oct 02 11:36:26 compute-0 sudo[39899]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:27 compute-0 python3.9[39901]: ansible-ansible.builtin.systemd Invoked with enabled=False name=ksm.service state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:36:27 compute-0 systemd[1]: Reloading.
Oct 02 11:36:27 compute-0 systemd-rc-local-generator[39932]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:36:27 compute-0 sudo[39899]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:27 compute-0 sudo[40089]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bhfxohesfhbdhmickknwrfxfkmhurasc ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404987.4732106-1367-224824280744269/AnsiballZ_systemd.py'
Oct 02 11:36:27 compute-0 sudo[40089]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:28 compute-0 python3.9[40091]: ansible-ansible.builtin.systemd Invoked with enabled=False name=ksmtuned.service state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:36:28 compute-0 systemd[1]: Reloading.
Oct 02 11:36:28 compute-0 systemd-rc-local-generator[40121]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:36:28 compute-0 sudo[40089]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:28 compute-0 sudo[40278]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zzeiikwcvgxtvjgmjznzphobgcseihop ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404988.6760886-1415-116880141994346/AnsiballZ_command.py'
Oct 02 11:36:28 compute-0 sudo[40278]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:29 compute-0 python3.9[40280]: ansible-ansible.legacy.command Invoked with _raw_params=mkswap "/swap" _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:36:29 compute-0 sudo[40278]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:29 compute-0 sudo[40431]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-toyqmlrnlxyhasiqpuowbxqdbunarhlk ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404989.479569-1439-231903971161646/AnsiballZ_command.py'
Oct 02 11:36:29 compute-0 sudo[40431]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:29 compute-0 python3.9[40433]: ansible-ansible.legacy.command Invoked with _raw_params=swapon "/swap" _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:36:29 compute-0 kernel: Adding 1048572k swap on /swap.  Priority:-2 extents:1 across:1048572k 
Oct 02 11:36:29 compute-0 sudo[40431]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:30 compute-0 sudo[40584]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dtyxlaovdklqiffectvfbyvvsinmbadp ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404990.2396967-1463-255445289671027/AnsiballZ_command.py'
Oct 02 11:36:30 compute-0 sudo[40584]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:30 compute-0 python3.9[40586]: ansible-ansible.legacy.command Invoked with _raw_params=/usr/bin/update-ca-trust _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:36:31 compute-0 sudo[40584]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:32 compute-0 sudo[40746]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hjneoxkgblxlvvclumfzruppyjmhpfth ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404992.6913702-1487-21721237430431/AnsiballZ_command.py'
Oct 02 11:36:32 compute-0 sudo[40746]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:33 compute-0 python3.9[40748]: ansible-ansible.legacy.command Invoked with _raw_params=echo 2 >/sys/kernel/mm/ksm/run _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:36:33 compute-0 sudo[40746]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:33 compute-0 sudo[40899]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wdbbzxmihsgrskalplvtenspucsxxpsy ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759404993.4571803-1511-208944627904568/AnsiballZ_systemd.py'
Oct 02 11:36:33 compute-0 sudo[40899]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:34 compute-0 python3.9[40901]: ansible-ansible.builtin.systemd Invoked with name=systemd-sysctl.service state=restarted daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Oct 02 11:36:34 compute-0 systemd[1]: systemd-sysctl.service: Deactivated successfully.
Oct 02 11:36:34 compute-0 systemd[1]: Stopped Apply Kernel Variables.
Oct 02 11:36:34 compute-0 systemd[1]: Stopping Apply Kernel Variables...
Oct 02 11:36:34 compute-0 systemd[1]: Starting Apply Kernel Variables...
Oct 02 11:36:34 compute-0 systemd[1]: run-credentials-systemd\x2dsysctl.service.mount: Deactivated successfully.
Oct 02 11:36:34 compute-0 systemd[1]: Finished Apply Kernel Variables.
Oct 02 11:36:34 compute-0 sudo[40899]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:34 compute-0 sshd-session[27932]: Connection closed by 192.168.122.30 port 52606
Oct 02 11:36:34 compute-0 sshd-session[27929]: pam_unix(sshd:session): session closed for user zuul
Oct 02 11:36:34 compute-0 systemd[1]: session-9.scope: Deactivated successfully.
Oct 02 11:36:34 compute-0 systemd[1]: session-9.scope: Consumed 2min 5.391s CPU time.
Oct 02 11:36:34 compute-0 systemd-logind[827]: Session 9 logged out. Waiting for processes to exit.
Oct 02 11:36:34 compute-0 systemd-logind[827]: Removed session 9.
Oct 02 11:36:40 compute-0 sshd-session[40931]: Accepted publickey for zuul from 192.168.122.30 port 40154 ssh2: ECDSA SHA256:tAEsFSop2MjuMQQ/UqKU2uCkxqWXGfsM5crdhd6tlI4
Oct 02 11:36:40 compute-0 systemd-logind[827]: New session 10 of user zuul.
Oct 02 11:36:40 compute-0 systemd[1]: Started Session 10 of User zuul.
Oct 02 11:36:40 compute-0 sshd-session[40931]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Oct 02 11:36:41 compute-0 python3.9[41084]: ansible-ansible.builtin.setup Invoked with gather_subset=['!all', '!min', 'local'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:36:42 compute-0 python3.9[41238]: ansible-ansible.builtin.setup Invoked with gather_subset=['!all', '!min', 'local'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:36:43 compute-0 sudo[41392]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-umemgohjflrzfqhtdoaalgoshppdvumf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405003.613069-115-29736836595617/AnsiballZ_command.py'
Oct 02 11:36:43 compute-0 sudo[41392]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:44 compute-0 python3.9[41394]: ansible-ansible.legacy.command Invoked with _raw_params=PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin which growvols
                                             _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:36:44 compute-0 sudo[41392]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:45 compute-0 python3.9[41545]: ansible-ansible.builtin.setup Invoked with gather_subset=['!all', '!min', 'local'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:36:45 compute-0 sudo[41699]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ligbapcxegzphcsmamjndfeilllbswlk ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405005.5972512-175-273615908819367/AnsiballZ_setup.py'
Oct 02 11:36:45 compute-0 sudo[41699]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:46 compute-0 python3.9[41701]: ansible-ansible.legacy.setup Invoked with filter=['ansible_pkg_mgr'] gather_subset=['!all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Oct 02 11:36:46 compute-0 sudo[41699]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:46 compute-0 sudo[41783]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jrjaddukbtyvnjclfmcqdtjtloroqxob ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405005.5972512-175-273615908819367/AnsiballZ_dnf.py'
Oct 02 11:36:46 compute-0 sudo[41783]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:47 compute-0 python3.9[41785]: ansible-ansible.legacy.dnf Invoked with name=['podman'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 02 11:36:48 compute-0 sudo[41783]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:48 compute-0 sudo[41936]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fbzaosmjjpepalghefeiejgfhwupftqi ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405008.4930198-211-212175205956132/AnsiballZ_setup.py'
Oct 02 11:36:48 compute-0 sudo[41936]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:49 compute-0 python3.9[41938]: ansible-ansible.builtin.setup Invoked with filter=['ansible_interfaces'] gather_subset=['!all', '!min', 'network'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Oct 02 11:36:49 compute-0 sudo[41936]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:50 compute-0 sudo[42107]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mherhhcqihmeknzakmexbphhtvebcfbp ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405009.5746443-244-91285024770742/AnsiballZ_file.py'
Oct 02 11:36:50 compute-0 sudo[42107]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:50 compute-0 python3.9[42109]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/etc/containers/networks recurse=True state=directory force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:36:50 compute-0 sudo[42107]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:50 compute-0 sudo[42259]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-oevyrsnmgmhvnjjgmbmrpesgbhzznqax ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405010.5393815-268-249567503601336/AnsiballZ_command.py'
Oct 02 11:36:50 compute-0 sudo[42259]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:51 compute-0 python3.9[42261]: ansible-ansible.legacy.command Invoked with _raw_params=podman network inspect podman
                                             _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:36:51 compute-0 systemd[1]: var-lib-containers-storage-overlay-metacopy\x2dcheck1118462274-merged.mount: Deactivated successfully.
Oct 02 11:36:51 compute-0 podman[42262]: 2025-10-02 11:36:51.134249391 +0000 UTC m=+0.057696410 system refresh
Oct 02 11:36:51 compute-0 sudo[42259]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:51 compute-0 sudo[42423]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mfdvkayltfbcbdpwixfmqovmaaxnjslx ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405011.3875635-292-218834289547297/AnsiballZ_stat.py'
Oct 02 11:36:51 compute-0 sudo[42423]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:51 compute-0 python3.9[42425]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/networks/podman.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:36:52 compute-0 sudo[42423]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:52 compute-0 systemd[1]: var-lib-containers-storage-overlay-opaque\x2dbug\x2dcheck804993951-merged.mount: Deactivated successfully.
Oct 02 11:36:52 compute-0 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Oct 02 11:36:52 compute-0 sudo[42546]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-obynjnhfbzusbnbfcjiwkyvpznaihwck ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405011.3875635-292-218834289547297/AnsiballZ_copy.py'
Oct 02 11:36:52 compute-0 sudo[42546]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:52 compute-0 python3.9[42548]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/networks/podman.json group=root mode=0644 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405011.3875635-292-218834289547297/.source.json follow=False _original_basename=podman_network_config.j2 checksum=bfb09eeb0e512bd68126c9970884bb98c19d5a1c backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:36:52 compute-0 sudo[42546]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:53 compute-0 sudo[42698]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bpmkjvdazzffehvyftrfceytrpxxwnma ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405012.8897676-337-238475953224030/AnsiballZ_stat.py'
Oct 02 11:36:53 compute-0 sudo[42698]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:53 compute-0 python3.9[42700]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/20-edpm-podman-registries.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:36:53 compute-0 sudo[42698]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:53 compute-0 sudo[42821]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-abdjjoaupgvenccsobvcuwzkhcjlddjo ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405012.8897676-337-238475953224030/AnsiballZ_copy.py'
Oct 02 11:36:53 compute-0 sudo[42821]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:53 compute-0 python3.9[42823]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/registries.conf.d/20-edpm-podman-registries.conf group=root mode=0644 owner=root setype=etc_t src=/home/zuul/.ansible/tmp/ansible-tmp-1759405012.8897676-337-238475953224030/.source.conf follow=False _original_basename=registries.conf.j2 checksum=a4fd3ca7d18166099562a65af8d6da655db34efc backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:36:53 compute-0 sudo[42821]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:54 compute-0 sudo[42973]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bnwhoqbxsukqijqjdvcviwhbwztejeqm ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405014.2147317-385-79481515003516/AnsiballZ_ini_file.py'
Oct 02 11:36:54 compute-0 sudo[42973]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:54 compute-0 python3.9[42975]: ansible-community.general.ini_file Invoked with create=True group=root mode=0644 option=pids_limit owner=root path=/etc/containers/containers.conf section=containers setype=etc_t value=4096 backup=False state=present exclusive=True no_extra_spaces=False ignore_spaces=False allow_no_value=False modify_inactive_option=True follow=False unsafe_writes=False section_has_values=None values=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:36:54 compute-0 sudo[42973]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:55 compute-0 sudo[43125]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xjcdlhhsahlzqwcfsespjglsieflbfhn ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405014.9063177-385-125716797303850/AnsiballZ_ini_file.py'
Oct 02 11:36:55 compute-0 sudo[43125]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:55 compute-0 python3.9[43127]: ansible-community.general.ini_file Invoked with create=True group=root mode=0644 option=events_logger owner=root path=/etc/containers/containers.conf section=engine setype=etc_t value="journald" backup=False state=present exclusive=True no_extra_spaces=False ignore_spaces=False allow_no_value=False modify_inactive_option=True follow=False unsafe_writes=False section_has_values=None values=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:36:55 compute-0 sudo[43125]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:55 compute-0 sudo[43277]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wjimoaccpgjkbhrpagpkbrsulmzlurlg ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405015.4927514-385-218168295081313/AnsiballZ_ini_file.py'
Oct 02 11:36:55 compute-0 sudo[43277]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:55 compute-0 python3.9[43279]: ansible-community.general.ini_file Invoked with create=True group=root mode=0644 option=runtime owner=root path=/etc/containers/containers.conf section=engine setype=etc_t value="crun" backup=False state=present exclusive=True no_extra_spaces=False ignore_spaces=False allow_no_value=False modify_inactive_option=True follow=False unsafe_writes=False section_has_values=None values=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:36:55 compute-0 sudo[43277]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:56 compute-0 sudo[43429]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bohvrqeshswidldghculazfyzhbfuetb ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405016.0648687-385-112607250992104/AnsiballZ_ini_file.py'
Oct 02 11:36:56 compute-0 sudo[43429]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:56 compute-0 python3.9[43431]: ansible-community.general.ini_file Invoked with create=True group=root mode=0644 option=network_backend owner=root path=/etc/containers/containers.conf section=network setype=etc_t value="netavark" backup=False state=present exclusive=True no_extra_spaces=False ignore_spaces=False allow_no_value=False modify_inactive_option=True follow=False unsafe_writes=False section_has_values=None values=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:36:56 compute-0 sudo[43429]: pam_unix(sudo:session): session closed for user root
Oct 02 11:36:57 compute-0 python3.9[43581]: ansible-ansible.builtin.setup Invoked with gather_subset=['!all', '!min', 'distribution'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:36:58 compute-0 sudo[43733]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-abpcscxvhcjetcukavjkqrizlomnlhdi ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405017.9572651-505-231532751423326/AnsiballZ_dnf.py'
Oct 02 11:36:58 compute-0 sudo[43733]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:36:58 compute-0 python3.9[43735]: ansible-ansible.legacy.dnf Invoked with download_only=True name=['driverctl', 'lvm2', 'crudini', 'jq', 'nftables', 'NetworkManager', 'openstack-selinux', 'python3-libselinux', 'python3-pyyaml', 'rsync', 'tmpwatch', 'sysstat', 'iproute-tc', 'ksmtuned', 'systemd-container', 'crypto-policies-scripts', 'grubby', 'sos'] allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None state=None
Oct 02 11:36:59 compute-0 sudo[43733]: pam_unix(sudo:session): session closed for user root
Oct 02 11:37:00 compute-0 sudo[43886]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-guyfxyhbtddnxxeprxgububiwxgrjyns ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405020.0201106-529-246349066423328/AnsiballZ_dnf.py'
Oct 02 11:37:00 compute-0 sudo[43886]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:37:00 compute-0 python3.9[43888]: ansible-ansible.legacy.dnf Invoked with download_only=True name=['openstack-network-scripts'] allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None state=None
Oct 02 11:37:02 compute-0 sudo[43886]: pam_unix(sudo:session): session closed for user root
Oct 02 11:37:02 compute-0 sudo[44046]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tfaciiznkxviffhyldjfronidswazvbv ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405022.7298796-559-157623585069795/AnsiballZ_dnf.py'
Oct 02 11:37:02 compute-0 sudo[44046]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:37:03 compute-0 python3.9[44048]: ansible-ansible.legacy.dnf Invoked with download_only=True name=['podman', 'buildah'] allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None state=None
Oct 02 11:37:04 compute-0 sudo[44046]: pam_unix(sudo:session): session closed for user root
Oct 02 11:37:05 compute-0 sudo[44199]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jfayoqenzzaykdbfdkhlsfrdmlayvkaq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405025.0650668-586-9673716659029/AnsiballZ_dnf.py'
Oct 02 11:37:05 compute-0 sudo[44199]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:37:05 compute-0 python3.9[44201]: ansible-ansible.legacy.dnf Invoked with download_only=True name=['tuned', 'tuned-profiles-cpu-partitioning'] allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None state=None
Oct 02 11:37:06 compute-0 sudo[44199]: pam_unix(sudo:session): session closed for user root
Oct 02 11:37:07 compute-0 sudo[44352]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-imxvypinookmgdwbiwwaqfownfkthegw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405027.3906214-619-211740283011932/AnsiballZ_dnf.py'
Oct 02 11:37:07 compute-0 sudo[44352]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:37:07 compute-0 python3.9[44354]: ansible-ansible.legacy.dnf Invoked with download_only=True name=['NetworkManager-ovs'] allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None state=None
Oct 02 11:37:09 compute-0 sudo[44352]: pam_unix(sudo:session): session closed for user root
Oct 02 11:37:10 compute-0 sudo[44508]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-oxgazebfmjdubtizblgixtnvdlpnphcr ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405029.7935328-643-217464786692913/AnsiballZ_dnf.py'
Oct 02 11:37:10 compute-0 sudo[44508]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:37:10 compute-0 python3.9[44510]: ansible-ansible.legacy.dnf Invoked with download_only=True name=['os-net-config'] allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None state=None
Oct 02 11:37:12 compute-0 sudo[44508]: pam_unix(sudo:session): session closed for user root
Oct 02 11:37:13 compute-0 sudo[44677]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zakbfrrtivljdhhghxcxycsnjlxhawkt ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405033.5765426-670-184249061923344/AnsiballZ_dnf.py'
Oct 02 11:37:13 compute-0 sudo[44677]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:37:14 compute-0 python3.9[44679]: ansible-ansible.legacy.dnf Invoked with download_only=True name=['openssh-server'] allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None state=None
Oct 02 11:37:15 compute-0 sudo[44677]: pam_unix(sudo:session): session closed for user root
Oct 02 11:37:15 compute-0 sudo[44830]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-efnhouvzanghonmchelltlfhmdebkjcv ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405035.6585631-697-233214159674601/AnsiballZ_dnf.py'
Oct 02 11:37:15 compute-0 sudo[44830]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:37:16 compute-0 python3.9[44832]: ansible-ansible.legacy.dnf Invoked with download_only=True name=['libvirt ', 'libvirt-admin ', 'libvirt-client ', 'libvirt-daemon ', 'qemu-kvm', 'qemu-img', 'libguestfs', 'libseccomp', 'swtpm', 'swtpm-tools', 'edk2-ovmf', 'ceph-common', 'cyrus-sasl-scram'] allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None state=None
Oct 02 11:37:27 compute-0 sudo[44830]: pam_unix(sudo:session): session closed for user root
Oct 02 11:37:29 compute-0 sudo[45166]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bdxktasumtfzimseutqdvoqmlewbnljg ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405049.1005268-730-98186117581083/AnsiballZ_file.py'
Oct 02 11:37:29 compute-0 sudo[45166]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:37:29 compute-0 python3.9[45168]: ansible-ansible.builtin.file Invoked with group=zuul mode=0770 owner=zuul path=/root/.config/containers recurse=True state=directory force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:37:29 compute-0 sudo[45166]: pam_unix(sudo:session): session closed for user root
Oct 02 11:37:30 compute-0 sudo[45341]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pdjhqqaphcwzdqkiemgfaxbrfntwfpoq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405049.7726886-754-61278494598278/AnsiballZ_stat.py'
Oct 02 11:37:30 compute-0 sudo[45341]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:37:30 compute-0 python3.9[45343]: ansible-ansible.legacy.stat Invoked with path=/root/.config/containers/auth.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:37:30 compute-0 sudo[45341]: pam_unix(sudo:session): session closed for user root
Oct 02 11:37:30 compute-0 sudo[45464]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gvzwbgbkhqgqvawjefsjlcxxmwdahfcp ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405049.7726886-754-61278494598278/AnsiballZ_copy.py'
Oct 02 11:37:30 compute-0 sudo[45464]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:37:30 compute-0 python3.9[45466]: ansible-ansible.legacy.copy Invoked with dest=/root/.config/containers/auth.json group=zuul mode=0660 owner=zuul src=/home/zuul/.ansible/tmp/ansible-tmp-1759405049.7726886-754-61278494598278/.source.json _original_basename=.7mrg1mmb follow=False checksum=bf21a9e8fbc5a3846fb05b4fa0859e0917b2202f backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:37:30 compute-0 sudo[45464]: pam_unix(sudo:session): session closed for user root
Oct 02 11:37:31 compute-0 sudo[45616]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cbmpuuhrqqtpbqvgzafmywzpqfunxyhz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405051.1631005-808-199237454772756/AnsiballZ_podman_image.py'
Oct 02 11:37:31 compute-0 sudo[45616]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:37:31 compute-0 python3.9[45618]: ansible-containers.podman.podman_image Invoked with auth_file=/root/.config/containers/auth.json name=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified tag=latest pull=True push=False force=False state=present executable=podman build={'force_rm': False, 'format': 'oci', 'cache': True, 'rm': True, 'annotation': None, 'file': None, 'container_file': None, 'volume': None, 'extra_args': None, 'target': None} push_args={'ssh': None, 'compress': None, 'format': None, 'remove_signatures': None, 'sign_by': None, 'dest': None, 'extra_args': None, 'transport': None} arch=None pull_extra_args=None path=None validate_certs=None username=None password=NOT_LOGGING_PARAMETER ca_cert_dir=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Oct 02 11:37:31 compute-0 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Oct 02 11:37:33 compute-0 systemd[1]: var-lib-containers-storage-overlay-compat494096246-lower\x2dmapped.mount: Deactivated successfully.
Oct 02 11:37:37 compute-0 podman[45628]: 2025-10-02 11:37:37.857431746 +0000 UTC m=+5.960694466 image pull 1b3fd7f2436e5c6f2e28c01b83721476c7b295789c77b3d63e30f49404389ea1 quay.io/podified-antelope-centos9/openstack-iscsid:current-podified
Oct 02 11:37:37 compute-0 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Oct 02 11:37:37 compute-0 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Oct 02 11:37:37 compute-0 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Oct 02 11:37:38 compute-0 sudo[45616]: pam_unix(sudo:session): session closed for user root
Oct 02 11:37:39 compute-0 sudo[45924]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cpwupsioljqqzeseksqhwbgsxfqdfspx ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405059.2754443-835-6884977729930/AnsiballZ_podman_image.py'
Oct 02 11:37:39 compute-0 sudo[45924]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:37:39 compute-0 python3.9[45926]: ansible-containers.podman.podman_image Invoked with auth_file=/root/.config/containers/auth.json name=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified tag=latest pull=True push=False force=False state=present executable=podman build={'force_rm': False, 'format': 'oci', 'cache': True, 'rm': True, 'annotation': None, 'file': None, 'container_file': None, 'volume': None, 'extra_args': None, 'target': None} push_args={'ssh': None, 'compress': None, 'format': None, 'remove_signatures': None, 'sign_by': None, 'dest': None, 'extra_args': None, 'transport': None} arch=None pull_extra_args=None path=None validate_certs=None username=None password=NOT_LOGGING_PARAMETER ca_cert_dir=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Oct 02 11:37:39 compute-0 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Oct 02 11:37:41 compute-0 podman[45939]: 2025-10-02 11:37:41.937367105 +0000 UTC m=+2.132007574 image pull ae232aa720979600656d94fc26ba957f1cdf5bca825fe9b57990f60c6534611f quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified
Oct 02 11:37:41 compute-0 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Oct 02 11:37:41 compute-0 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Oct 02 11:37:42 compute-0 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Oct 02 11:37:42 compute-0 sudo[45924]: pam_unix(sudo:session): session closed for user root
Oct 02 11:37:42 compute-0 sudo[46193]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-brpvxcjgjflssyseuththegkxmeycqrt ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405062.641846-874-273483177711049/AnsiballZ_podman_image.py'
Oct 02 11:37:42 compute-0 sudo[46193]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:37:43 compute-0 python3.9[46195]: ansible-containers.podman.podman_image Invoked with auth_file=/root/.config/containers/auth.json name=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified tag=latest pull=True push=False force=False state=present executable=podman build={'force_rm': False, 'format': 'oci', 'cache': True, 'rm': True, 'annotation': None, 'file': None, 'container_file': None, 'volume': None, 'extra_args': None, 'target': None} push_args={'ssh': None, 'compress': None, 'format': None, 'remove_signatures': None, 'sign_by': None, 'dest': None, 'extra_args': None, 'transport': None} arch=None pull_extra_args=None path=None validate_certs=None username=None password=NOT_LOGGING_PARAMETER ca_cert_dir=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Oct 02 11:37:43 compute-0 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Oct 02 11:37:44 compute-0 podman[46206]: 2025-10-02 11:37:44.166980526 +0000 UTC m=+1.045610032 image pull d8d739f82a6fecf9df690e49539b589e74665b54e36448657b874630717d5bd1 quay.io/podified-antelope-centos9/openstack-multipathd:current-podified
Oct 02 11:37:44 compute-0 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Oct 02 11:37:44 compute-0 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Oct 02 11:37:44 compute-0 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Oct 02 11:37:44 compute-0 sudo[46193]: pam_unix(sudo:session): session closed for user root
Oct 02 11:37:44 compute-0 sudo[46441]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jpxzdjfhorknnvtsyaeebwtgyiikgbxt ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405064.6578896-901-113271848350459/AnsiballZ_podman_image.py'
Oct 02 11:37:44 compute-0 sudo[46441]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:37:45 compute-0 python3.9[46443]: ansible-containers.podman.podman_image Invoked with auth_file=/root/.config/containers/auth.json name=quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified tag=latest pull=True push=False force=False state=present executable=podman build={'force_rm': False, 'format': 'oci', 'cache': True, 'rm': True, 'annotation': None, 'file': None, 'container_file': None, 'volume': None, 'extra_args': None, 'target': None} push_args={'ssh': None, 'compress': None, 'format': None, 'remove_signatures': None, 'sign_by': None, 'dest': None, 'extra_args': None, 'transport': None} arch=None pull_extra_args=None path=None validate_certs=None username=None password=NOT_LOGGING_PARAMETER ca_cert_dir=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Oct 02 11:38:01 compute-0 podman[46456]: 2025-10-02 11:38:01.678657669 +0000 UTC m=+16.550846198 image pull e36f31143f26011980def9337d375f895bea59b742a3a2b372b996aa8ad58eba quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified
Oct 02 11:38:01 compute-0 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Oct 02 11:38:01 compute-0 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Oct 02 11:38:01 compute-0 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Oct 02 11:38:01 compute-0 sudo[46441]: pam_unix(sudo:session): session closed for user root
Oct 02 11:38:02 compute-0 sudo[46735]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-banuiuogxbyoobsgaztrpwbbuutfwqmc ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405082.2512715-934-15621877373690/AnsiballZ_podman_image.py'
Oct 02 11:38:02 compute-0 sudo[46735]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:38:02 compute-0 python3.9[46737]: ansible-containers.podman.podman_image Invoked with auth_file=/root/.config/containers/auth.json name=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified tag=latest pull=True push=False force=False state=present executable=podman build={'force_rm': False, 'format': 'oci', 'cache': True, 'rm': True, 'annotation': None, 'file': None, 'container_file': None, 'volume': None, 'extra_args': None, 'target': None} push_args={'ssh': None, 'compress': None, 'format': None, 'remove_signatures': None, 'sign_by': None, 'dest': None, 'extra_args': None, 'transport': None} arch=None pull_extra_args=None path=None validate_certs=None username=None password=NOT_LOGGING_PARAMETER ca_cert_dir=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Oct 02 11:38:02 compute-0 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Oct 02 11:38:06 compute-0 podman[46749]: 2025-10-02 11:38:06.625121097 +0000 UTC m=+3.869595978 image pull 5f0622bc7c13827171d93b3baf72157e23d24d44579ad79fe3a89ad88180a4bb quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified
Oct 02 11:38:06 compute-0 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Oct 02 11:38:06 compute-0 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Oct 02 11:38:06 compute-0 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Oct 02 11:38:06 compute-0 sudo[46735]: pam_unix(sudo:session): session closed for user root
Oct 02 11:38:07 compute-0 sudo[47003]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-obatfhrtlffnuamyeyideotwmpnjtvrw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405086.956652-934-102218760167523/AnsiballZ_podman_image.py'
Oct 02 11:38:07 compute-0 sudo[47003]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:38:07 compute-0 python3.9[47005]: ansible-containers.podman.podman_image Invoked with auth_file=/root/.config/containers/auth.json name=quay.io/prometheus/node-exporter:v1.5.0 tag=latest pull=True push=False force=False state=present executable=podman build={'force_rm': False, 'format': 'oci', 'cache': True, 'rm': True, 'annotation': None, 'file': None, 'container_file': None, 'volume': None, 'extra_args': None, 'target': None} push_args={'ssh': None, 'compress': None, 'format': None, 'remove_signatures': None, 'sign_by': None, 'dest': None, 'extra_args': None, 'transport': None} arch=None pull_extra_args=None path=None validate_certs=None username=None password=NOT_LOGGING_PARAMETER ca_cert_dir=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None
Oct 02 11:38:08 compute-0 podman[47017]: 2025-10-02 11:38:08.847464319 +0000 UTC m=+1.367354391 image pull 0da6a335fe1356545476b749c68f022c897de3a2139e8f0054f6937349ee2b83 quay.io/prometheus/node-exporter:v1.5.0
Oct 02 11:38:08 compute-0 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Oct 02 11:38:08 compute-0 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Oct 02 11:38:08 compute-0 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Oct 02 11:38:09 compute-0 sudo[47003]: pam_unix(sudo:session): session closed for user root
Oct 02 11:38:09 compute-0 sshd-session[40934]: Connection closed by 192.168.122.30 port 40154
Oct 02 11:38:09 compute-0 sshd-session[40931]: pam_unix(sshd:session): session closed for user zuul
Oct 02 11:38:09 compute-0 systemd[1]: session-10.scope: Deactivated successfully.
Oct 02 11:38:09 compute-0 systemd[1]: session-10.scope: Consumed 1min 25.272s CPU time.
Oct 02 11:38:09 compute-0 systemd-logind[827]: Session 10 logged out. Waiting for processes to exit.
Oct 02 11:38:09 compute-0 systemd-logind[827]: Removed session 10.
Oct 02 11:38:14 compute-0 sshd-session[47163]: Accepted publickey for zuul from 192.168.122.30 port 57256 ssh2: ECDSA SHA256:tAEsFSop2MjuMQQ/UqKU2uCkxqWXGfsM5crdhd6tlI4
Oct 02 11:38:14 compute-0 systemd-logind[827]: New session 11 of user zuul.
Oct 02 11:38:14 compute-0 systemd[1]: Started Session 11 of User zuul.
Oct 02 11:38:14 compute-0 sshd-session[47163]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Oct 02 11:38:15 compute-0 python3.9[47316]: ansible-ansible.builtin.setup Invoked with gather_subset=['!all', '!min', 'local'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:38:16 compute-0 sudo[47470]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mmgwwvlqozhvvwozalmcouwudhlwvfas ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405096.5245621-72-32661241947714/AnsiballZ_getent.py'
Oct 02 11:38:16 compute-0 sudo[47470]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:38:17 compute-0 python3.9[47472]: ansible-ansible.builtin.getent Invoked with database=passwd key=openvswitch fail_key=True service=None split=None
Oct 02 11:38:17 compute-0 sudo[47470]: pam_unix(sudo:session): session closed for user root
Oct 02 11:38:18 compute-0 sudo[47623]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rzotjhiztcstagrfdgihdpekvuohtxzo ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405097.6845126-96-143547132721922/AnsiballZ_group.py'
Oct 02 11:38:18 compute-0 sudo[47623]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:38:18 compute-0 python3.9[47625]: ansible-ansible.builtin.group Invoked with gid=42476 name=openvswitch state=present force=False system=False local=False non_unique=False gid_min=None gid_max=None
Oct 02 11:38:18 compute-0 groupadd[47626]: group added to /etc/group: name=openvswitch, GID=42476
Oct 02 11:38:18 compute-0 groupadd[47626]: group added to /etc/gshadow: name=openvswitch
Oct 02 11:38:18 compute-0 groupadd[47626]: new group: name=openvswitch, GID=42476
Oct 02 11:38:18 compute-0 sudo[47623]: pam_unix(sudo:session): session closed for user root
Oct 02 11:38:19 compute-0 sudo[47781]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sssjsrmepdkyqpecmydtzezbgwxytboq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405098.565445-120-164008556862680/AnsiballZ_user.py'
Oct 02 11:38:19 compute-0 sudo[47781]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:38:19 compute-0 python3.9[47783]: ansible-ansible.builtin.user Invoked with comment=openvswitch user group=openvswitch groups=['hugetlbfs'] name=openvswitch shell=/sbin/nologin state=present uid=42476 non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on compute-0 update_password=always home=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None password_expire_account_disable=None uid_min=None uid_max=None
Oct 02 11:38:19 compute-0 useradd[47785]: new user: name=openvswitch, UID=42476, GID=42476, home=/home/openvswitch, shell=/sbin/nologin, from=/dev/pts/0
Oct 02 11:38:19 compute-0 useradd[47785]: add 'openvswitch' to group 'hugetlbfs'
Oct 02 11:38:19 compute-0 useradd[47785]: add 'openvswitch' to shadow group 'hugetlbfs'
Oct 02 11:38:19 compute-0 sudo[47781]: pam_unix(sudo:session): session closed for user root
Oct 02 11:38:19 compute-0 sudo[47941]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-geslfpwdurgrdnvxuqjwstzgzsrcsnjn ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405099.738098-150-107272925943857/AnsiballZ_setup.py'
Oct 02 11:38:19 compute-0 sudo[47941]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:38:20 compute-0 python3.9[47943]: ansible-ansible.legacy.setup Invoked with filter=['ansible_pkg_mgr'] gather_subset=['!all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Oct 02 11:38:20 compute-0 sudo[47941]: pam_unix(sudo:session): session closed for user root
Oct 02 11:38:21 compute-0 sudo[48025]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ayyxdjrdthvgnrvpskdhtehsrfzgbldz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405099.738098-150-107272925943857/AnsiballZ_dnf.py'
Oct 02 11:38:21 compute-0 sudo[48025]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:38:21 compute-0 python3.9[48027]: ansible-ansible.legacy.dnf Invoked with download_only=True name=['openvswitch'] allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None state=None
Oct 02 11:38:22 compute-0 sudo[48025]: pam_unix(sudo:session): session closed for user root
Oct 02 11:38:23 compute-0 sudo[48186]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kraiugrhokikqkpilqnudiadbpspuoxe ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405103.5271916-192-98199976323558/AnsiballZ_dnf.py'
Oct 02 11:38:23 compute-0 sudo[48186]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:38:23 compute-0 python3.9[48188]: ansible-ansible.legacy.dnf Invoked with name=['openvswitch'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 02 11:38:36 compute-0 kernel: SELinux:  Converting 2724 SID table entries...
Oct 02 11:38:36 compute-0 kernel: SELinux:  policy capability network_peer_controls=1
Oct 02 11:38:36 compute-0 kernel: SELinux:  policy capability open_perms=1
Oct 02 11:38:36 compute-0 kernel: SELinux:  policy capability extended_socket_class=1
Oct 02 11:38:36 compute-0 kernel: SELinux:  policy capability always_check_network=0
Oct 02 11:38:36 compute-0 kernel: SELinux:  policy capability cgroup_seclabel=1
Oct 02 11:38:36 compute-0 kernel: SELinux:  policy capability nnp_nosuid_transition=1
Oct 02 11:38:36 compute-0 kernel: SELinux:  policy capability genfs_seclabel_symlinks=1
Oct 02 11:38:36 compute-0 groupadd[48211]: group added to /etc/group: name=unbound, GID=993
Oct 02 11:38:36 compute-0 groupadd[48211]: group added to /etc/gshadow: name=unbound
Oct 02 11:38:36 compute-0 groupadd[48211]: new group: name=unbound, GID=993
Oct 02 11:38:36 compute-0 useradd[48218]: new user: name=unbound, UID=993, GID=993, home=/var/lib/unbound, shell=/sbin/nologin, from=none
Oct 02 11:38:36 compute-0 dbus-broker-launch[818]: avc:  op=load_policy lsm=selinux seqno=9 res=1
Oct 02 11:38:36 compute-0 systemd[1]: Started daily update of the root trust anchor for DNSSEC.
Oct 02 11:38:37 compute-0 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update.
Oct 02 11:38:37 compute-0 systemd[1]: Starting man-db-cache-update.service...
Oct 02 11:38:38 compute-0 systemd[1]: Reloading.
Oct 02 11:38:38 compute-0 systemd-sysv-generator[48720]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:38:38 compute-0 systemd-rc-local-generator[48716]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:38:38 compute-0 systemd[1]: Queuing reload/restart jobs for marked units…
Oct 02 11:38:38 compute-0 systemd[1]: man-db-cache-update.service: Deactivated successfully.
Oct 02 11:38:38 compute-0 systemd[1]: Finished man-db-cache-update.service.
Oct 02 11:38:38 compute-0 systemd[1]: run-rb315e32e799749fe855d4e2832157c7d.service: Deactivated successfully.
Oct 02 11:38:38 compute-0 sudo[48186]: pam_unix(sudo:session): session closed for user root
Oct 02 11:38:42 compute-0 sudo[49287]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qgxqljfxdmegkylcrhqvznvanxgqooli ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405121.814145-216-225442466016334/AnsiballZ_systemd.py'
Oct 02 11:38:42 compute-0 sudo[49287]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:38:42 compute-0 python3.9[49289]: ansible-ansible.builtin.systemd Invoked with enabled=True masked=False name=openvswitch.service state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None
Oct 02 11:38:42 compute-0 systemd[1]: Reloading.
Oct 02 11:38:42 compute-0 systemd-rc-local-generator[49318]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:38:42 compute-0 systemd-sysv-generator[49321]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:38:43 compute-0 systemd[1]: Starting Open vSwitch Database Unit...
Oct 02 11:38:43 compute-0 chown[49330]: /usr/bin/chown: cannot access '/run/openvswitch': No such file or directory
Oct 02 11:38:43 compute-0 ovs-ctl[49335]: /etc/openvswitch/conf.db does not exist ... (warning).
Oct 02 11:38:43 compute-0 ovs-ctl[49335]: Creating empty database /etc/openvswitch/conf.db [  OK  ]
Oct 02 11:38:43 compute-0 ovs-ctl[49335]: Starting ovsdb-server [  OK  ]
Oct 02 11:38:43 compute-0 ovs-vsctl[49384]: ovs|00001|vsctl|INFO|Called as ovs-vsctl --no-wait -- init -- set Open_vSwitch . db-version=8.5.1
Oct 02 11:38:43 compute-0 ovs-vsctl[49401]: ovs|00001|vsctl|INFO|Called as ovs-vsctl --no-wait set Open_vSwitch . ovs-version=3.3.5-115.el9s "external-ids:system-id=\"c9f3d658-5c7a-4803-9bbb-01adfb7e88ca\"" "external-ids:rundir=\"/var/run/openvswitch\"" "system-type=\"centos\"" "system-version=\"9\""
Oct 02 11:38:43 compute-0 ovs-ctl[49335]: Configuring Open vSwitch system IDs [  OK  ]
Oct 02 11:38:43 compute-0 ovs-vsctl[49410]: ovs|00001|vsctl|INFO|Called as ovs-vsctl --no-wait add Open_vSwitch . external-ids hostname=compute-0
Oct 02 11:38:43 compute-0 ovs-ctl[49335]: Enabling remote OVSDB managers [  OK  ]
Oct 02 11:38:43 compute-0 systemd[1]: Started Open vSwitch Database Unit.
Oct 02 11:38:43 compute-0 systemd[1]: Starting Open vSwitch Delete Transient Ports...
Oct 02 11:38:43 compute-0 systemd[1]: Finished Open vSwitch Delete Transient Ports.
Oct 02 11:38:43 compute-0 systemd[1]: Starting Open vSwitch Forwarding Unit...
Oct 02 11:38:43 compute-0 kernel: openvswitch: Open vSwitch switching datapath
Oct 02 11:38:43 compute-0 ovs-ctl[49454]: Inserting openvswitch module [  OK  ]
Oct 02 11:38:43 compute-0 ovs-ctl[49423]: Starting ovs-vswitchd [  OK  ]
Oct 02 11:38:43 compute-0 ovs-vsctl[49472]: ovs|00001|vsctl|INFO|Called as ovs-vsctl --no-wait add Open_vSwitch . external-ids hostname=compute-0
Oct 02 11:38:43 compute-0 ovs-ctl[49423]: Enabling remote OVSDB managers [  OK  ]
Oct 02 11:38:43 compute-0 systemd[1]: Started Open vSwitch Forwarding Unit.
Oct 02 11:38:43 compute-0 systemd[1]: Starting Open vSwitch...
Oct 02 11:38:43 compute-0 systemd[1]: Finished Open vSwitch.
Oct 02 11:38:43 compute-0 sudo[49287]: pam_unix(sudo:session): session closed for user root
Oct 02 11:38:44 compute-0 python3.9[49623]: ansible-ansible.builtin.setup Invoked with gather_subset=['!all', '!min', 'selinux'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:38:45 compute-0 sudo[49773]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gmlzhaajvytmhlomwezmfdeqwpasomzw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405124.8594553-270-76069727094949/AnsiballZ_sefcontext.py'
Oct 02 11:38:45 compute-0 sudo[49773]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:38:45 compute-0 python3.9[49775]: ansible-community.general.sefcontext Invoked with selevel=s0 setype=container_file_t state=present target=/var/lib/edpm-config(/.*)? ignore_selinux_state=False ftype=a reload=True substitute=None seuser=None
Oct 02 11:38:47 compute-0 kernel: SELinux:  Converting 2738 SID table entries...
Oct 02 11:38:47 compute-0 kernel: SELinux:  policy capability network_peer_controls=1
Oct 02 11:38:47 compute-0 kernel: SELinux:  policy capability open_perms=1
Oct 02 11:38:47 compute-0 kernel: SELinux:  policy capability extended_socket_class=1
Oct 02 11:38:47 compute-0 kernel: SELinux:  policy capability always_check_network=0
Oct 02 11:38:47 compute-0 kernel: SELinux:  policy capability cgroup_seclabel=1
Oct 02 11:38:47 compute-0 kernel: SELinux:  policy capability nnp_nosuid_transition=1
Oct 02 11:38:47 compute-0 kernel: SELinux:  policy capability genfs_seclabel_symlinks=1
Oct 02 11:38:47 compute-0 sudo[49773]: pam_unix(sudo:session): session closed for user root
Oct 02 11:38:48 compute-0 python3.9[49930]: ansible-ansible.builtin.setup Invoked with gather_subset=['!all', '!min', 'local', 'distribution'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:38:48 compute-0 sudo[50086]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rylpjwuxmxxmlduuaadhrzpfixxuoswv ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405128.5706124-324-76163333600579/AnsiballZ_dnf.py'
Oct 02 11:38:48 compute-0 dbus-broker-launch[818]: avc:  op=load_policy lsm=selinux seqno=10 res=1
Oct 02 11:38:48 compute-0 sudo[50086]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:38:49 compute-0 python3.9[50088]: ansible-ansible.legacy.dnf Invoked with name=['driverctl', 'lvm2', 'crudini', 'jq', 'nftables', 'NetworkManager', 'openstack-selinux', 'python3-libselinux', 'python3-pyyaml', 'rsync', 'tmpwatch', 'sysstat', 'iproute-tc', 'ksmtuned', 'systemd-container', 'crypto-policies-scripts', 'grubby', 'sos'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 02 11:38:50 compute-0 sudo[50086]: pam_unix(sudo:session): session closed for user root
Oct 02 11:38:50 compute-0 sudo[50239]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-uupuwfqrmcnnqednwqhajhyzbeztffki ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405130.5052307-348-169412555114547/AnsiballZ_command.py'
Oct 02 11:38:50 compute-0 sudo[50239]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:38:51 compute-0 python3.9[50241]: ansible-ansible.legacy.command Invoked with _raw_params=rpm -V driverctl lvm2 crudini jq nftables NetworkManager openstack-selinux python3-libselinux python3-pyyaml rsync tmpwatch sysstat iproute-tc ksmtuned systemd-container crypto-policies-scripts grubby sos _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:38:51 compute-0 sudo[50239]: pam_unix(sudo:session): session closed for user root
Oct 02 11:38:52 compute-0 sudo[50526]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wmdgqyaijtqkkxblghmjbgewfounvtnq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405131.954936-372-206085336330488/AnsiballZ_file.py'
Oct 02 11:38:52 compute-0 sudo[50526]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:38:52 compute-0 python3.9[50528]: ansible-ansible.builtin.file Invoked with mode=0750 path=/var/lib/edpm-config selevel=s0 setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None attributes=None
Oct 02 11:38:52 compute-0 sudo[50526]: pam_unix(sudo:session): session closed for user root
Oct 02 11:38:53 compute-0 python3.9[50678]: ansible-ansible.builtin.stat Invoked with path=/etc/cloud/cloud.cfg.d follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:38:53 compute-0 sudo[50830]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jgeyfyimluoflxjpvngtxmwkfyxuorej ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405133.505365-420-232486844610546/AnsiballZ_dnf.py'
Oct 02 11:38:53 compute-0 sudo[50830]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:38:53 compute-0 python3.9[50832]: ansible-ansible.legacy.dnf Invoked with name=['NetworkManager-ovs'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 02 11:38:55 compute-0 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update.
Oct 02 11:38:55 compute-0 systemd[1]: Starting man-db-cache-update.service...
Oct 02 11:38:55 compute-0 systemd[1]: Reloading.
Oct 02 11:38:55 compute-0 systemd-rc-local-generator[50871]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:38:55 compute-0 systemd-sysv-generator[50874]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:38:55 compute-0 systemd[1]: Queuing reload/restart jobs for marked units…
Oct 02 11:38:56 compute-0 systemd[1]: man-db-cache-update.service: Deactivated successfully.
Oct 02 11:38:56 compute-0 systemd[1]: Finished man-db-cache-update.service.
Oct 02 11:38:56 compute-0 systemd[1]: run-r07c3491c4a5e40b4a87c725661d450d6.service: Deactivated successfully.
Oct 02 11:38:56 compute-0 sudo[50830]: pam_unix(sudo:session): session closed for user root
Oct 02 11:38:57 compute-0 sudo[51148]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wqrpixwcjvvkehfgkgzmyruikcoiuqya ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405137.0845535-444-29618648158078/AnsiballZ_systemd.py'
Oct 02 11:38:57 compute-0 sudo[51148]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:38:57 compute-0 python3.9[51150]: ansible-ansible.builtin.systemd Invoked with name=NetworkManager state=restarted daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Oct 02 11:38:57 compute-0 systemd[1]: NetworkManager-wait-online.service: Deactivated successfully.
Oct 02 11:38:57 compute-0 systemd[1]: Stopped Network Manager Wait Online.
Oct 02 11:38:57 compute-0 systemd[1]: Stopping Network Manager Wait Online...
Oct 02 11:38:57 compute-0 systemd[1]: Stopping Network Manager...
Oct 02 11:38:57 compute-0 NetworkManager[3990]: <info>  [1759405137.7014] caught SIGTERM, shutting down normally.
Oct 02 11:38:57 compute-0 NetworkManager[3990]: <info>  [1759405137.7028] dhcp4 (eth0): canceled DHCP transaction
Oct 02 11:38:57 compute-0 NetworkManager[3990]: <info>  [1759405137.7028] dhcp4 (eth0): activation: beginning transaction (timeout in 45 seconds)
Oct 02 11:38:57 compute-0 NetworkManager[3990]: <info>  [1759405137.7028] dhcp4 (eth0): state changed no lease
Oct 02 11:38:57 compute-0 NetworkManager[3990]: <info>  [1759405137.7031] manager: NetworkManager state is now CONNECTED_SITE
Oct 02 11:38:57 compute-0 NetworkManager[3990]: <info>  [1759405137.7103] exiting (success)
Oct 02 11:38:57 compute-0 systemd[1]: Starting Network Manager Script Dispatcher Service...
Oct 02 11:38:57 compute-0 systemd[1]: Started Network Manager Script Dispatcher Service.
Oct 02 11:38:57 compute-0 systemd[1]: NetworkManager.service: Deactivated successfully.
Oct 02 11:38:57 compute-0 systemd[1]: Stopped Network Manager.
Oct 02 11:38:57 compute-0 systemd[1]: NetworkManager.service: Consumed 11.482s CPU time, 4.1M memory peak, read 0B from disk, written 32.5K to disk.
Oct 02 11:38:57 compute-0 systemd[1]: Starting Network Manager...
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.7721] NetworkManager (version 1.54.1-1.el9) is starting... (after a restart, boot:1e8e4eaa-6890-46e6-baf5-d7fee48b6edb)
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.7722] Read config: /etc/NetworkManager/NetworkManager.conf, /run/NetworkManager/conf.d/15-carrier-timeout.conf
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.7780] manager[0x55d0b9c51090]: monitoring kernel firmware directory '/lib/firmware'.
Oct 02 11:38:57 compute-0 systemd[1]: Starting Hostname Service...
Oct 02 11:38:57 compute-0 systemd[1]: Started Hostname Service.
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8602] hostname: hostname: using hostnamed
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8602] hostname: static hostname changed from (none) to "compute-0"
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8608] dns-mgr: init: dns=default,systemd-resolved rc-manager=symlink (auto)
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8612] manager[0x55d0b9c51090]: rfkill: Wi-Fi hardware radio set enabled
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8612] manager[0x55d0b9c51090]: rfkill: WWAN hardware radio set enabled
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8629] Loaded device plugin: NMOvsFactory (/usr/lib64/NetworkManager/1.54.1-1.el9/libnm-device-plugin-ovs.so)
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8636] Loaded device plugin: NMTeamFactory (/usr/lib64/NetworkManager/1.54.1-1.el9/libnm-device-plugin-team.so)
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8637] manager: rfkill: Wi-Fi enabled by radio killswitch; enabled by state file
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8637] manager: rfkill: WWAN enabled by radio killswitch; enabled by state file
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8638] manager: Networking is enabled by state file
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8640] settings: Loaded settings plugin: keyfile (internal)
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8643] settings: Loaded settings plugin: ifcfg-rh ("/usr/lib64/NetworkManager/1.54.1-1.el9/libnm-settings-plugin-ifcfg-rh.so")
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8671] Warning: the ifcfg-rh plugin is deprecated, please migrate connections to the keyfile format using "nmcli connection migrate"
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8680] dhcp: init: Using DHCP client 'internal'
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8682] manager: (lo): new Loopback device (/org/freedesktop/NetworkManager/Devices/1)
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8688] device (lo): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8694] device (lo): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external')
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8703] device (lo): Activation: starting connection 'lo' (9c045e93-5256-40fb-a074-0144ed71625c)
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8709] device (eth0): carrier: link connected
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8713] manager: (eth0): new Ethernet device (/org/freedesktop/NetworkManager/Devices/2)
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8717] manager: (eth0): assume: will attempt to assume matching connection 'System eth0' (5fb06bd0-0bb0-7ffb-45f1-d6edd65f3e03) (indicated)
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8717] device (eth0): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'assume')
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8722] device (eth0): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'assume')
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8728] device (eth0): Activation: starting connection 'System eth0' (5fb06bd0-0bb0-7ffb-45f1-d6edd65f3e03)
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8733] device (eth1): carrier: link connected
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8736] manager: (eth1): new Ethernet device (/org/freedesktop/NetworkManager/Devices/3)
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8739] manager: (eth1): assume: will attempt to assume matching connection 'ci-private-network' (7a49c97b-caea-555e-9162-65c1fd602491) (indicated)
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8740] device (eth1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'assume')
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8743] device (eth1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'assume')
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8748] device (eth1): Activation: starting connection 'ci-private-network' (7a49c97b-caea-555e-9162-65c1fd602491)
Oct 02 11:38:57 compute-0 systemd[1]: Started Network Manager.
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8754] bus-manager: acquired D-Bus service "org.freedesktop.NetworkManager"
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8759] device (lo): state change: disconnected -> prepare (reason 'none', managed-type: 'external')
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8761] device (lo): state change: prepare -> config (reason 'none', managed-type: 'external')
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8762] device (lo): state change: config -> ip-config (reason 'none', managed-type: 'external')
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8763] device (eth0): state change: disconnected -> prepare (reason 'none', managed-type: 'assume')
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8766] device (eth0): state change: prepare -> config (reason 'none', managed-type: 'assume')
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8768] device (eth1): state change: disconnected -> prepare (reason 'none', managed-type: 'assume')
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8771] device (eth1): state change: prepare -> config (reason 'none', managed-type: 'assume')
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8775] device (lo): state change: ip-config -> ip-check (reason 'none', managed-type: 'external')
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8781] device (eth0): state change: config -> ip-config (reason 'none', managed-type: 'assume')
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8783] dhcp4 (eth0): activation: beginning transaction (timeout in 45 seconds)
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8796] device (eth1): state change: config -> ip-config (reason 'none', managed-type: 'assume')
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8809] device (eth1): state change: ip-config -> ip-check (reason 'none', managed-type: 'assume')
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8820] device (lo): state change: ip-check -> secondaries (reason 'none', managed-type: 'external')
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8823] device (lo): state change: secondaries -> activated (reason 'none', managed-type: 'external')
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8831] device (lo): Activation: successful, device activated.
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8839] dhcp4 (eth0): state changed new lease, address=38.129.56.69
Oct 02 11:38:57 compute-0 systemd[1]: Starting Network Manager Wait Online...
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8851] policy: set 'System eth0' (eth0) as default for IPv4 routing and DNS
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8921] device (eth1): state change: ip-check -> secondaries (reason 'none', managed-type: 'assume')
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8931] device (eth0): state change: ip-config -> ip-check (reason 'none', managed-type: 'assume')
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8939] device (eth1): state change: secondaries -> activated (reason 'none', managed-type: 'assume')
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8943] manager: NetworkManager state is now CONNECTED_LOCAL
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8948] device (eth1): Activation: successful, device activated.
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8979] device (eth0): state change: ip-check -> secondaries (reason 'none', managed-type: 'assume')
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8982] device (eth0): state change: secondaries -> activated (reason 'none', managed-type: 'assume')
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8987] manager: NetworkManager state is now CONNECTED_SITE
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.8995] device (eth0): Activation: successful, device activated.
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.9004] manager: NetworkManager state is now CONNECTED_GLOBAL
Oct 02 11:38:57 compute-0 NetworkManager[51160]: <info>  [1759405137.9009] manager: startup complete
Oct 02 11:38:57 compute-0 sudo[51148]: pam_unix(sudo:session): session closed for user root
Oct 02 11:38:57 compute-0 systemd[1]: Finished Network Manager Wait Online.
Oct 02 11:38:58 compute-0 sudo[51375]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bvbabevcsztqexedkykbeubldpgwavwu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405138.2754323-468-198170366426287/AnsiballZ_dnf.py'
Oct 02 11:38:58 compute-0 sudo[51375]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:38:58 compute-0 python3.9[51377]: ansible-ansible.legacy.dnf Invoked with name=['os-net-config'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 02 11:39:03 compute-0 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update.
Oct 02 11:39:03 compute-0 systemd[1]: Starting man-db-cache-update.service...
Oct 02 11:39:03 compute-0 systemd[1]: Reloading.
Oct 02 11:39:03 compute-0 systemd-sysv-generator[51434]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:39:03 compute-0 systemd-rc-local-generator[51429]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:39:03 compute-0 systemd[1]: Queuing reload/restart jobs for marked units…
Oct 02 11:39:04 compute-0 systemd[1]: man-db-cache-update.service: Deactivated successfully.
Oct 02 11:39:04 compute-0 systemd[1]: Finished man-db-cache-update.service.
Oct 02 11:39:04 compute-0 systemd[1]: run-r1dea8b16f6f54351a77d211cba8e4e8c.service: Deactivated successfully.
Oct 02 11:39:04 compute-0 sudo[51375]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:08 compute-0 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
Oct 02 11:39:08 compute-0 sudo[51841]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ymzwjmynjqkoesamhholaddzfndubqyr ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405148.2847307-504-167197193600682/AnsiballZ_stat.py'
Oct 02 11:39:08 compute-0 sudo[51841]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:08 compute-0 python3.9[51843]: ansible-ansible.builtin.stat Invoked with path=/var/lib/edpm-config/os-net-config.returncode follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:39:08 compute-0 sudo[51841]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:09 compute-0 sudo[51993]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pfcxdpyzgrrgzcbmythfeatlwweehyuj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405149.007041-531-64957982043520/AnsiballZ_ini_file.py'
Oct 02 11:39:09 compute-0 sudo[51993]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:09 compute-0 python3.9[51995]: ansible-community.general.ini_file Invoked with backup=True mode=0644 no_extra_spaces=True option=no-auto-default path=/etc/NetworkManager/NetworkManager.conf section=main state=present value=* exclusive=True ignore_spaces=False allow_no_value=False modify_inactive_option=True create=True follow=False unsafe_writes=False section_has_values=None values=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:39:09 compute-0 sudo[51993]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:10 compute-0 sudo[52147]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rggshtdwgcphnjfhanwadcbhpouqgmeb ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405149.9881525-561-142325704889577/AnsiballZ_ini_file.py'
Oct 02 11:39:10 compute-0 sudo[52147]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:10 compute-0 python3.9[52149]: ansible-community.general.ini_file Invoked with backup=True mode=0644 no_extra_spaces=True option=dns path=/etc/NetworkManager/NetworkManager.conf section=main state=absent value=none exclusive=True ignore_spaces=False allow_no_value=False modify_inactive_option=True create=True follow=False unsafe_writes=False section_has_values=None values=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:39:10 compute-0 sudo[52147]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:10 compute-0 sudo[52299]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dmbzqtxljkxhlkwtalfjjncexjfonbon ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405150.5764048-561-272174332085932/AnsiballZ_ini_file.py'
Oct 02 11:39:10 compute-0 sudo[52299]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:10 compute-0 python3.9[52301]: ansible-community.general.ini_file Invoked with backup=True mode=0644 no_extra_spaces=True option=dns path=/etc/NetworkManager/conf.d/99-cloud-init.conf section=main state=absent value=none exclusive=True ignore_spaces=False allow_no_value=False modify_inactive_option=True create=True follow=False unsafe_writes=False section_has_values=None values=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:39:11 compute-0 sudo[52299]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:11 compute-0 sudo[52451]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-miivifdnvmvkakdhfnuhkszfnfsyjhwq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405151.2335312-606-32060673731537/AnsiballZ_ini_file.py'
Oct 02 11:39:11 compute-0 sudo[52451]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:11 compute-0 python3.9[52453]: ansible-community.general.ini_file Invoked with backup=True mode=0644 no_extra_spaces=True option=rc-manager path=/etc/NetworkManager/NetworkManager.conf section=main state=absent value=unmanaged exclusive=True ignore_spaces=False allow_no_value=False modify_inactive_option=True create=True follow=False unsafe_writes=False section_has_values=None values=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:39:11 compute-0 sudo[52451]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:12 compute-0 sudo[52603]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lacvtnhkoejbofcnaqyzfzrmbnptapwi ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405151.8512285-606-182742635097116/AnsiballZ_ini_file.py'
Oct 02 11:39:12 compute-0 sudo[52603]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:12 compute-0 python3.9[52605]: ansible-community.general.ini_file Invoked with backup=True mode=0644 no_extra_spaces=True option=rc-manager path=/etc/NetworkManager/conf.d/99-cloud-init.conf section=main state=absent value=unmanaged exclusive=True ignore_spaces=False allow_no_value=False modify_inactive_option=True create=True follow=False unsafe_writes=False section_has_values=None values=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:39:12 compute-0 sudo[52603]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:12 compute-0 sudo[52755]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-etcnbkwrqpzsyluhlojofjcjqdhtramu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405152.4655292-651-159106515891074/AnsiballZ_stat.py'
Oct 02 11:39:12 compute-0 sudo[52755]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:12 compute-0 python3.9[52757]: ansible-ansible.legacy.stat Invoked with path=/etc/dhcp/dhclient-enter-hooks follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:39:12 compute-0 sudo[52755]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:13 compute-0 sudo[52878]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-diiuemuyyugedepfbkxyjntixkidbfbh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405152.4655292-651-159106515891074/AnsiballZ_copy.py'
Oct 02 11:39:13 compute-0 sudo[52878]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:13 compute-0 python3.9[52880]: ansible-ansible.legacy.copy Invoked with dest=/etc/dhcp/dhclient-enter-hooks mode=0755 src=/home/zuul/.ansible/tmp/ansible-tmp-1759405152.4655292-651-159106515891074/.source _original_basename=.2dthhslb follow=False checksum=f6278a40de79a9841f6ed1fc584538225566990c backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:39:13 compute-0 sudo[52878]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:13 compute-0 sudo[53030]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-diynedkofeuojpiheoixyvsogpjhcqtp ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405153.7034535-696-248554361311463/AnsiballZ_file.py'
Oct 02 11:39:13 compute-0 sudo[53030]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:14 compute-0 python3.9[53032]: ansible-ansible.builtin.file Invoked with mode=0755 path=/etc/os-net-config state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:39:14 compute-0 sudo[53030]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:14 compute-0 sudo[53182]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ozdfijaxncikdubrovrieofdufpttokj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405154.329477-720-222559727782985/AnsiballZ_edpm_os_net_config_mappings.py'
Oct 02 11:39:14 compute-0 sudo[53182]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:14 compute-0 python3.9[53184]: ansible-edpm_os_net_config_mappings Invoked with net_config_data_lookup={}
Oct 02 11:39:14 compute-0 sudo[53182]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:15 compute-0 sudo[53334]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tngrjmaalybewomdequutodmaokiokxn ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405155.1998885-747-258875918248749/AnsiballZ_file.py'
Oct 02 11:39:15 compute-0 sudo[53334]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:15 compute-0 python3.9[53336]: ansible-ansible.builtin.file Invoked with path=/var/lib/edpm-config/scripts state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:39:15 compute-0 sudo[53334]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:16 compute-0 sudo[53486]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xkqxibvbruemdnlttqkpuwtcxpdgyipr ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405155.9926438-777-5716194608716/AnsiballZ_stat.py'
Oct 02 11:39:16 compute-0 sudo[53486]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:16 compute-0 sudo[53486]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:16 compute-0 sudo[53609]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jxxnkqwhoagwllgrilnhjycaoyqgfmlh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405155.9926438-777-5716194608716/AnsiballZ_copy.py'
Oct 02 11:39:16 compute-0 sudo[53609]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:16 compute-0 sudo[53609]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:17 compute-0 sudo[53761]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cptbvjdoowonaapsdwyyfplwyaahgwcq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405157.2395914-822-184714657476406/AnsiballZ_slurp.py'
Oct 02 11:39:17 compute-0 sudo[53761]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:17 compute-0 python3.9[53763]: ansible-ansible.builtin.slurp Invoked with path=/etc/os-net-config/config.yaml src=/etc/os-net-config/config.yaml
Oct 02 11:39:17 compute-0 sudo[53761]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:18 compute-0 sudo[53936]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-utxusnxpcphbochvxudgyjzsilkjarnt ; ANSIBLE_ASYNC_DIR=\'~/.ansible_async\' /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405158.1187723-849-260352101558997/async_wrapper.py j17321634878 300 /home/zuul/.ansible/tmp/ansible-tmp-1759405158.1187723-849-260352101558997/AnsiballZ_edpm_os_net_config.py _'
Oct 02 11:39:18 compute-0 sudo[53936]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:18 compute-0 ansible-async_wrapper.py[53938]: Invoked with j17321634878 300 /home/zuul/.ansible/tmp/ansible-tmp-1759405158.1187723-849-260352101558997/AnsiballZ_edpm_os_net_config.py _
Oct 02 11:39:18 compute-0 ansible-async_wrapper.py[53941]: Starting module and watcher
Oct 02 11:39:18 compute-0 ansible-async_wrapper.py[53941]: Start watching 53942 (300)
Oct 02 11:39:18 compute-0 ansible-async_wrapper.py[53942]: Start module (53942)
Oct 02 11:39:18 compute-0 ansible-async_wrapper.py[53938]: Return async_wrapper task started.
Oct 02 11:39:18 compute-0 sudo[53936]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:19 compute-0 python3.9[53943]: ansible-edpm_os_net_config Invoked with cleanup=True config_file=/etc/os-net-config/config.yaml debug=True detailed_exit_codes=True safe_defaults=False use_nmstate=True
Oct 02 11:39:19 compute-0 kernel: cfg80211: Loading compiled-in X.509 certificates for regulatory database
Oct 02 11:39:19 compute-0 kernel: Loaded X.509 cert 'sforshee: 00b28ddf47aef9cea7'
Oct 02 11:39:19 compute-0 kernel: Loaded X.509 cert 'wens: 61c038651aabdcf94bd0ac7ff06c7248db18c600'
Oct 02 11:39:19 compute-0 kernel: platform regulatory.0: Direct firmware load for regulatory.db failed with error -2
Oct 02 11:39:19 compute-0 kernel: cfg80211: failed to load regulatory.db
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.7243] audit: op="checkpoint-create" arg="/org/freedesktop/NetworkManager/Checkpoint/1" pid=53944 uid=0 result="success"
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.7263] audit: op="checkpoint-adjust-rollback-timeout" arg="/org/freedesktop/NetworkManager/Checkpoint/1" pid=53944 uid=0 result="success"
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.7819] manager: (br-ex): new Open vSwitch Bridge device (/org/freedesktop/NetworkManager/Devices/4)
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.7821] audit: op="connection-add" uuid="b539370a-f4aa-47f0-9ed6-294ccf807f6e" name="br-ex-br" pid=53944 uid=0 result="success"
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.7838] manager: (br-ex): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/5)
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.7840] audit: op="connection-add" uuid="fd80d2f3-e648-480c-ad22-0327d512696c" name="br-ex-port" pid=53944 uid=0 result="success"
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.7856] manager: (eth1): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/6)
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.7857] audit: op="connection-add" uuid="0867d214-fb8d-4391-86ed-a04fe3836a7b" name="eth1-port" pid=53944 uid=0 result="success"
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.7868] manager: (vlan20): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/7)
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.7869] audit: op="connection-add" uuid="654f8f11-37a5-4ef4-9c75-4ab908ddb4bd" name="vlan20-port" pid=53944 uid=0 result="success"
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.7881] manager: (vlan21): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/8)
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.7882] audit: op="connection-add" uuid="b41ca4a8-302e-4a3f-a387-b1450bea72fa" name="vlan21-port" pid=53944 uid=0 result="success"
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.7895] manager: (vlan22): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/9)
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.7899] audit: op="connection-add" uuid="81269ace-5339-47ab-9a01-3f536fc994a9" name="vlan22-port" pid=53944 uid=0 result="success"
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.7917] audit: op="connection-update" uuid="5fb06bd0-0bb0-7ffb-45f1-d6edd65f3e03" name="System eth0" args="connection.autoconnect-priority,connection.timestamp,ipv6.addr-gen-mode,ipv6.dhcp-timeout,ipv6.method,802-3-ethernet.mtu,ipv4.dhcp-client-id,ipv4.dhcp-timeout" pid=53944 uid=0 result="success"
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.7931] manager: (br-ex): new Open vSwitch Interface device (/org/freedesktop/NetworkManager/Devices/10)
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.7933] audit: op="connection-add" uuid="ad0f82c0-abb2-4bcd-af24-f51d47ea9286" name="br-ex-if" pid=53944 uid=0 result="success"
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.7981] audit: op="connection-update" uuid="7a49c97b-caea-555e-9162-65c1fd602491" name="ci-private-network" args="connection.port-type,connection.slave-type,connection.controller,connection.master,connection.timestamp,ipv6.routing-rules,ipv6.routes,ipv6.addr-gen-mode,ipv6.addresses,ipv6.dns,ipv6.method,ovs-interface.type,ovs-external-ids.data,ipv4.routing-rules,ipv4.routes,ipv4.never-default,ipv4.addresses,ipv4.dns,ipv4.method" pid=53944 uid=0 result="success"
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.7994] manager: (vlan20): new Open vSwitch Interface device (/org/freedesktop/NetworkManager/Devices/11)
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.7995] audit: op="connection-add" uuid="7b9d0101-a928-475a-8701-ed525cb3957e" name="vlan20-if" pid=53944 uid=0 result="success"
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8008] manager: (vlan21): new Open vSwitch Interface device (/org/freedesktop/NetworkManager/Devices/12)
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8010] audit: op="connection-add" uuid="0acf3622-6537-4ad6-915a-dc1bc90ab27d" name="vlan21-if" pid=53944 uid=0 result="success"
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8023] manager: (vlan22): new Open vSwitch Interface device (/org/freedesktop/NetworkManager/Devices/13)
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8024] audit: op="connection-add" uuid="4345813d-9939-4be1-a4ee-556e82bbe925" name="vlan22-if" pid=53944 uid=0 result="success"
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8035] audit: op="connection-delete" uuid="80d64d2d-b88f-378c-97fd-b46295de63bc" name="Wired connection 1" pid=53944 uid=0 result="success"
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8046] device (br-ex)[Open vSwitch Bridge]: state change: unmanaged -> unavailable (reason 'managed', managed-type: 'external')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8054] device (br-ex)[Open vSwitch Bridge]: state change: unavailable -> disconnected (reason 'user-requested', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8057] device (br-ex)[Open vSwitch Bridge]: Activation: starting connection 'br-ex-br' (b539370a-f4aa-47f0-9ed6-294ccf807f6e)
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8058] audit: op="connection-activate" uuid="b539370a-f4aa-47f0-9ed6-294ccf807f6e" name="br-ex-br" pid=53944 uid=0 result="success"
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8060] device (br-ex)[Open vSwitch Port]: state change: unmanaged -> unavailable (reason 'managed', managed-type: 'external')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8066] device (br-ex)[Open vSwitch Port]: state change: unavailable -> disconnected (reason 'user-requested', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8070] device (br-ex)[Open vSwitch Port]: Activation: starting connection 'br-ex-port' (fd80d2f3-e648-480c-ad22-0327d512696c)
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8071] device (eth1)[Open vSwitch Port]: state change: unmanaged -> unavailable (reason 'managed', managed-type: 'external')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8077] device (eth1)[Open vSwitch Port]: state change: unavailable -> disconnected (reason 'user-requested', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8080] device (eth1)[Open vSwitch Port]: Activation: starting connection 'eth1-port' (0867d214-fb8d-4391-86ed-a04fe3836a7b)
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8082] device (vlan20)[Open vSwitch Port]: state change: unmanaged -> unavailable (reason 'managed', managed-type: 'external')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8088] device (vlan20)[Open vSwitch Port]: state change: unavailable -> disconnected (reason 'user-requested', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8092] device (vlan20)[Open vSwitch Port]: Activation: starting connection 'vlan20-port' (654f8f11-37a5-4ef4-9c75-4ab908ddb4bd)
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8094] device (vlan21)[Open vSwitch Port]: state change: unmanaged -> unavailable (reason 'managed', managed-type: 'external')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8102] device (vlan21)[Open vSwitch Port]: state change: unavailable -> disconnected (reason 'user-requested', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8106] device (vlan21)[Open vSwitch Port]: Activation: starting connection 'vlan21-port' (b41ca4a8-302e-4a3f-a387-b1450bea72fa)
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8107] device (vlan22)[Open vSwitch Port]: state change: unmanaged -> unavailable (reason 'managed', managed-type: 'external')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8114] device (vlan22)[Open vSwitch Port]: state change: unavailable -> disconnected (reason 'user-requested', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8118] device (vlan22)[Open vSwitch Port]: Activation: starting connection 'vlan22-port' (81269ace-5339-47ab-9a01-3f536fc994a9)
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8118] device (br-ex)[Open vSwitch Bridge]: state change: disconnected -> prepare (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8121] device (br-ex)[Open vSwitch Bridge]: state change: prepare -> config (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8123] device (br-ex)[Open vSwitch Bridge]: state change: config -> ip-config (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8130] device (br-ex)[Open vSwitch Interface]: state change: unmanaged -> unavailable (reason 'managed', managed-type: 'external')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8135] device (br-ex)[Open vSwitch Interface]: state change: unavailable -> disconnected (reason 'user-requested', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8140] device (br-ex)[Open vSwitch Interface]: Activation: starting connection 'br-ex-if' (ad0f82c0-abb2-4bcd-af24-f51d47ea9286)
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8141] device (br-ex)[Open vSwitch Port]: state change: disconnected -> prepare (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8145] device (br-ex)[Open vSwitch Port]: state change: prepare -> config (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8146] device (br-ex)[Open vSwitch Port]: state change: config -> ip-config (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8147] device (br-ex)[Open vSwitch Port]: Activation: connection 'br-ex-port' attached as port, continuing activation
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8148] device (eth1): state change: activated -> deactivating (reason 'new-activation', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8159] device (eth1): disconnecting for new activation request.
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8159] device (eth1)[Open vSwitch Port]: state change: disconnected -> prepare (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8162] device (eth1)[Open vSwitch Port]: state change: prepare -> config (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8163] device (eth1)[Open vSwitch Port]: state change: config -> ip-config (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8164] device (eth1)[Open vSwitch Port]: Activation: connection 'eth1-port' attached as port, continuing activation
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8168] device (vlan20)[Open vSwitch Interface]: state change: unmanaged -> unavailable (reason 'managed', managed-type: 'external')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8173] device (vlan20)[Open vSwitch Interface]: state change: unavailable -> disconnected (reason 'user-requested', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8176] device (vlan20)[Open vSwitch Interface]: Activation: starting connection 'vlan20-if' (7b9d0101-a928-475a-8701-ed525cb3957e)
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8177] device (vlan20)[Open vSwitch Port]: state change: disconnected -> prepare (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8180] device (vlan20)[Open vSwitch Port]: state change: prepare -> config (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8182] device (vlan20)[Open vSwitch Port]: state change: config -> ip-config (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8182] device (vlan20)[Open vSwitch Port]: Activation: connection 'vlan20-port' attached as port, continuing activation
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8184] device (vlan21)[Open vSwitch Interface]: state change: unmanaged -> unavailable (reason 'managed', managed-type: 'external')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8188] device (vlan21)[Open vSwitch Interface]: state change: unavailable -> disconnected (reason 'user-requested', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8191] device (vlan21)[Open vSwitch Interface]: Activation: starting connection 'vlan21-if' (0acf3622-6537-4ad6-915a-dc1bc90ab27d)
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8192] device (vlan21)[Open vSwitch Port]: state change: disconnected -> prepare (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8195] device (vlan21)[Open vSwitch Port]: state change: prepare -> config (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8197] device (vlan21)[Open vSwitch Port]: state change: config -> ip-config (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8197] device (vlan21)[Open vSwitch Port]: Activation: connection 'vlan21-port' attached as port, continuing activation
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8199] device (vlan22)[Open vSwitch Interface]: state change: unmanaged -> unavailable (reason 'managed', managed-type: 'external')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8203] device (vlan22)[Open vSwitch Interface]: state change: unavailable -> disconnected (reason 'user-requested', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8207] device (vlan22)[Open vSwitch Interface]: Activation: starting connection 'vlan22-if' (4345813d-9939-4be1-a4ee-556e82bbe925)
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8208] device (vlan22)[Open vSwitch Port]: state change: disconnected -> prepare (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8211] device (vlan22)[Open vSwitch Port]: state change: prepare -> config (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8212] device (vlan22)[Open vSwitch Port]: state change: config -> ip-config (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8213] device (vlan22)[Open vSwitch Port]: Activation: connection 'vlan22-port' attached as port, continuing activation
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8214] device (br-ex)[Open vSwitch Bridge]: state change: ip-config -> ip-check (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8223] audit: op="device-reapply" interface="eth0" ifindex=2 args="connection.autoconnect-priority,ipv6.addr-gen-mode,ipv6.method,802-3-ethernet.mtu,ipv4.dhcp-client-id,ipv4.dhcp-timeout" pid=53944 uid=0 result="success"
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8225] device (br-ex)[Open vSwitch Interface]: state change: disconnected -> prepare (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8227] device (br-ex)[Open vSwitch Interface]: state change: prepare -> config (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8229] device (br-ex)[Open vSwitch Interface]: state change: config -> ip-config (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8242] device (br-ex)[Open vSwitch Port]: state change: ip-config -> ip-check (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 kernel: ovs-system: entered promiscuous mode
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8247] device (eth1)[Open vSwitch Port]: state change: ip-config -> ip-check (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8250] device (vlan20)[Open vSwitch Interface]: state change: disconnected -> prepare (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8253] device (vlan20)[Open vSwitch Interface]: state change: prepare -> config (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8256] device (vlan20)[Open vSwitch Interface]: state change: config -> ip-config (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8263] device (vlan20)[Open vSwitch Port]: state change: ip-config -> ip-check (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8267] device (vlan21)[Open vSwitch Interface]: state change: disconnected -> prepare (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8270] device (vlan21)[Open vSwitch Interface]: state change: prepare -> config (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8271] device (vlan21)[Open vSwitch Interface]: state change: config -> ip-config (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 systemd-udevd[53950]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 11:39:20 compute-0 kernel: Timeout policy base is empty
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8276] device (vlan21)[Open vSwitch Port]: state change: ip-config -> ip-check (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8282] device (vlan22)[Open vSwitch Interface]: state change: disconnected -> prepare (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8284] device (vlan22)[Open vSwitch Interface]: state change: prepare -> config (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8286] device (vlan22)[Open vSwitch Interface]: state change: config -> ip-config (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8290] device (vlan22)[Open vSwitch Port]: state change: ip-config -> ip-check (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8294] dhcp4 (eth0): canceled DHCP transaction
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8294] dhcp4 (eth0): activation: beginning transaction (timeout in 45 seconds)
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8294] dhcp4 (eth0): state changed no lease
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8296] dhcp4 (eth0): activation: beginning transaction (no timeout)
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8313] device (br-ex)[Open vSwitch Interface]: Activation: connection 'br-ex-if' attached as port, continuing activation
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8316] audit: op="device-reapply" interface="eth1" ifindex=3 pid=53944 uid=0 result="fail" reason="Device is not activated"
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8321] device (vlan20)[Open vSwitch Interface]: Activation: connection 'vlan20-if' attached as port, continuing activation
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8370] device (vlan21)[Open vSwitch Interface]: Activation: connection 'vlan21-if' attached as port, continuing activation
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8374] dhcp4 (eth0): state changed new lease, address=38.129.56.69
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8379] device (vlan22)[Open vSwitch Interface]: Activation: connection 'vlan22-if' attached as port, continuing activation
Oct 02 11:39:20 compute-0 systemd[1]: Starting Network Manager Script Dispatcher Service...
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8461] device (eth1): disconnecting for new activation request.
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8461] audit: op="connection-activate" uuid="7a49c97b-caea-555e-9162-65c1fd602491" name="ci-private-network" pid=53944 uid=0 result="success"
Oct 02 11:39:20 compute-0 systemd[1]: Started Network Manager Script Dispatcher Service.
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8482] audit: op="checkpoint-adjust-rollback-timeout" arg="/org/freedesktop/NetworkManager/Checkpoint/1" pid=53944 uid=0 result="success"
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8490] device (eth1): state change: deactivating -> disconnected (reason 'new-activation', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8577] device (eth1): Activation: starting connection 'ci-private-network' (7a49c97b-caea-555e-9162-65c1fd602491)
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8583] device (br-ex)[Open vSwitch Bridge]: state change: ip-check -> secondaries (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8593] device (eth1): state change: disconnected -> prepare (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8597] device (eth1): state change: prepare -> config (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8604] device (br-ex)[Open vSwitch Bridge]: state change: secondaries -> activated (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8608] device (br-ex)[Open vSwitch Bridge]: Activation: successful, device activated.
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8612] device (br-ex)[Open vSwitch Port]: state change: ip-check -> secondaries (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8613] device (eth1)[Open vSwitch Port]: state change: ip-check -> secondaries (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8614] device (vlan20)[Open vSwitch Port]: state change: ip-check -> secondaries (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8616] device (vlan21)[Open vSwitch Port]: state change: ip-check -> secondaries (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8617] device (vlan22)[Open vSwitch Port]: state change: ip-check -> secondaries (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 kernel: br-ex: entered promiscuous mode
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8631] device (eth1): state change: config -> ip-config (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8642] device (br-ex)[Open vSwitch Port]: state change: secondaries -> activated (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8646] device (br-ex)[Open vSwitch Port]: Activation: successful, device activated.
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8650] device (eth1)[Open vSwitch Port]: state change: secondaries -> activated (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8656] device (eth1)[Open vSwitch Port]: Activation: successful, device activated.
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8660] device (vlan20)[Open vSwitch Port]: state change: secondaries -> activated (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8665] device (vlan20)[Open vSwitch Port]: Activation: successful, device activated.
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8670] device (vlan21)[Open vSwitch Port]: state change: secondaries -> activated (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8675] device (vlan21)[Open vSwitch Port]: Activation: successful, device activated.
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8680] device (vlan22)[Open vSwitch Port]: state change: secondaries -> activated (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8684] device (vlan22)[Open vSwitch Port]: Activation: successful, device activated.
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8695] device (eth1): Activation: connection 'ci-private-network' attached as port, continuing activation
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8701] device (eth1): state change: ip-config -> ip-check (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 kernel: vlan22: entered promiscuous mode
Oct 02 11:39:20 compute-0 kernel: virtio_net virtio5 eth1: entered promiscuous mode
Oct 02 11:39:20 compute-0 systemd-udevd[53949]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8753] device (eth1): state change: ip-check -> secondaries (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8757] device (eth1): state change: secondaries -> activated (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8777] device (eth1): Activation: successful, device activated.
Oct 02 11:39:20 compute-0 kernel: vlan21: entered promiscuous mode
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8796] device (br-ex)[Open vSwitch Interface]: carrier: link connected
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8826] device (br-ex)[Open vSwitch Interface]: state change: ip-config -> ip-check (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8839] device (vlan22)[Open vSwitch Interface]: carrier: link connected
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8852] device (br-ex)[Open vSwitch Interface]: state change: ip-check -> secondaries (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8857] device (vlan22)[Open vSwitch Interface]: state change: ip-config -> ip-check (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8863] device (br-ex)[Open vSwitch Interface]: state change: secondaries -> activated (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 kernel: vlan20: entered promiscuous mode
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8868] device (br-ex)[Open vSwitch Interface]: Activation: successful, device activated.
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8901] device (vlan21)[Open vSwitch Interface]: carrier: link connected
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8901] device (vlan22)[Open vSwitch Interface]: state change: ip-check -> secondaries (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8906] device (vlan22)[Open vSwitch Interface]: state change: secondaries -> activated (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8911] device (vlan22)[Open vSwitch Interface]: Activation: successful, device activated.
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8927] device (vlan21)[Open vSwitch Interface]: state change: ip-config -> ip-check (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8975] device (vlan20)[Open vSwitch Interface]: carrier: link connected
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8975] device (vlan21)[Open vSwitch Interface]: state change: ip-check -> secondaries (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8978] device (vlan21)[Open vSwitch Interface]: state change: secondaries -> activated (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.8986] device (vlan21)[Open vSwitch Interface]: Activation: successful, device activated.
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.9003] device (vlan20)[Open vSwitch Interface]: state change: ip-config -> ip-check (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.9033] device (vlan20)[Open vSwitch Interface]: state change: ip-check -> secondaries (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.9035] device (vlan20)[Open vSwitch Interface]: state change: secondaries -> activated (reason 'none', managed-type: 'full')
Oct 02 11:39:20 compute-0 NetworkManager[51160]: <info>  [1759405160.9042] device (vlan20)[Open vSwitch Interface]: Activation: successful, device activated.
Oct 02 11:39:22 compute-0 NetworkManager[51160]: <info>  [1759405162.0171] audit: op="checkpoint-adjust-rollback-timeout" arg="/org/freedesktop/NetworkManager/Checkpoint/1" pid=53944 uid=0 result="success"
Oct 02 11:39:22 compute-0 NetworkManager[51160]: <info>  [1759405162.1573] checkpoint[0x55d0b9c27950]: destroy /org/freedesktop/NetworkManager/Checkpoint/1
Oct 02 11:39:22 compute-0 NetworkManager[51160]: <info>  [1759405162.1575] audit: op="checkpoint-destroy" arg="/org/freedesktop/NetworkManager/Checkpoint/1" pid=53944 uid=0 result="success"
Oct 02 11:39:22 compute-0 NetworkManager[51160]: <info>  [1759405162.4013] audit: op="checkpoint-create" arg="/org/freedesktop/NetworkManager/Checkpoint/2" pid=53944 uid=0 result="success"
Oct 02 11:39:22 compute-0 NetworkManager[51160]: <info>  [1759405162.4027] audit: op="checkpoint-adjust-rollback-timeout" arg="/org/freedesktop/NetworkManager/Checkpoint/2" pid=53944 uid=0 result="success"
Oct 02 11:39:22 compute-0 sudo[54276]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lwgloosgbnaxupfipmdfwhnfdsxnevhr ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405162.105101-849-28344254992208/AnsiballZ_async_status.py'
Oct 02 11:39:22 compute-0 sudo[54276]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:22 compute-0 NetworkManager[51160]: <info>  [1759405162.5703] audit: op="networking-control" arg="global-dns-configuration" pid=53944 uid=0 result="success"
Oct 02 11:39:22 compute-0 NetworkManager[51160]: <info>  [1759405162.5746] config: signal: SET_VALUES,values,values-intern,global-dns-config (/etc/NetworkManager/NetworkManager.conf, /run/NetworkManager/conf.d/15-carrier-timeout.conf)
Oct 02 11:39:22 compute-0 NetworkManager[51160]: <info>  [1759405162.5777] audit: op="networking-control" arg="global-dns-configuration" pid=53944 uid=0 result="success"
Oct 02 11:39:22 compute-0 NetworkManager[51160]: <info>  [1759405162.5800] audit: op="checkpoint-adjust-rollback-timeout" arg="/org/freedesktop/NetworkManager/Checkpoint/2" pid=53944 uid=0 result="success"
Oct 02 11:39:22 compute-0 python3.9[54278]: ansible-ansible.legacy.async_status Invoked with jid=j17321634878.53938 mode=status _async_dir=/root/.ansible_async
Oct 02 11:39:22 compute-0 sudo[54276]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:22 compute-0 NetworkManager[51160]: <info>  [1759405162.7079] checkpoint[0x55d0b9c27a20]: destroy /org/freedesktop/NetworkManager/Checkpoint/2
Oct 02 11:39:22 compute-0 NetworkManager[51160]: <info>  [1759405162.7085] audit: op="checkpoint-destroy" arg="/org/freedesktop/NetworkManager/Checkpoint/2" pid=53944 uid=0 result="success"
Oct 02 11:39:22 compute-0 ansible-async_wrapper.py[53942]: Module complete (53942)
Oct 02 11:39:23 compute-0 ansible-async_wrapper.py[53941]: Done in kid B.
Oct 02 11:39:25 compute-0 sudo[54380]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bgiatgzlfhfxwuvumghbfmtkuewswqfd ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405162.105101-849-28344254992208/AnsiballZ_async_status.py'
Oct 02 11:39:25 compute-0 sudo[54380]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:26 compute-0 python3.9[54382]: ansible-ansible.legacy.async_status Invoked with jid=j17321634878.53938 mode=status _async_dir=/root/.ansible_async
Oct 02 11:39:26 compute-0 sudo[54380]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:26 compute-0 sudo[54480]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hjyjvkoouljssefwynxmfytbajaylixm ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405162.105101-849-28344254992208/AnsiballZ_async_status.py'
Oct 02 11:39:26 compute-0 sudo[54480]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:26 compute-0 python3.9[54482]: ansible-ansible.legacy.async_status Invoked with jid=j17321634878.53938 mode=cleanup _async_dir=/root/.ansible_async
Oct 02 11:39:26 compute-0 sudo[54480]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:27 compute-0 sudo[54632]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-etcqkrpaxgwxicelpfbncgqegrpqvudm ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405167.0146964-925-79186638377060/AnsiballZ_stat.py'
Oct 02 11:39:27 compute-0 sudo[54632]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:27 compute-0 python3.9[54634]: ansible-ansible.legacy.stat Invoked with path=/var/lib/edpm-config/os-net-config.returncode follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:39:27 compute-0 sudo[54632]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:27 compute-0 sudo[54755]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-howgsanfgkqweubydxqlncpkcubfajyy ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405167.0146964-925-79186638377060/AnsiballZ_copy.py'
Oct 02 11:39:27 compute-0 sudo[54755]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:27 compute-0 systemd[1]: systemd-hostnamed.service: Deactivated successfully.
Oct 02 11:39:27 compute-0 python3.9[54757]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/edpm-config/os-net-config.returncode mode=0644 src=/home/zuul/.ansible/tmp/ansible-tmp-1759405167.0146964-925-79186638377060/.source.returncode _original_basename=.7fa4cavm follow=False checksum=b6589fc6ab0dc82cf12099d1c2d40ab994e8410c backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:39:27 compute-0 sudo[54755]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:28 compute-0 sudo[54909]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-uavqxwdrdbpaqtwprzvxsrxjozyewfts ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405168.2156425-973-243987086557092/AnsiballZ_stat.py'
Oct 02 11:39:28 compute-0 sudo[54909]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:28 compute-0 python3.9[54911]: ansible-ansible.legacy.stat Invoked with path=/etc/cloud/cloud.cfg.d/99-edpm-disable-network-config.cfg follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:39:28 compute-0 sudo[54909]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:28 compute-0 sudo[55032]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-thokzzglaspinlrjnryducautjueekup ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405168.2156425-973-243987086557092/AnsiballZ_copy.py'
Oct 02 11:39:28 compute-0 sudo[55032]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:29 compute-0 python3.9[55034]: ansible-ansible.legacy.copy Invoked with dest=/etc/cloud/cloud.cfg.d/99-edpm-disable-network-config.cfg mode=0644 src=/home/zuul/.ansible/tmp/ansible-tmp-1759405168.2156425-973-243987086557092/.source.cfg _original_basename=.j8na9hkb follow=False checksum=f3c5952a9cd4c6c31b314b25eb897168971cc86e backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:39:29 compute-0 sudo[55032]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:29 compute-0 sudo[55185]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dqtqbvqqayvwrzxtojwzizosenqkishh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405169.396908-1018-120035721725709/AnsiballZ_systemd.py'
Oct 02 11:39:29 compute-0 sudo[55185]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:29 compute-0 python3.9[55187]: ansible-ansible.builtin.systemd Invoked with name=NetworkManager state=reloaded daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Oct 02 11:39:29 compute-0 systemd[1]: Reloading Network Manager...
Oct 02 11:39:29 compute-0 NetworkManager[51160]: <info>  [1759405169.9855] audit: op="reload" arg="0" pid=55191 uid=0 result="success"
Oct 02 11:39:29 compute-0 NetworkManager[51160]: <info>  [1759405169.9865] config: signal: SIGHUP,config-files,values,values-user,no-auto-default (/etc/NetworkManager/NetworkManager.conf, /usr/lib/NetworkManager/conf.d/00-server.conf, /run/NetworkManager/conf.d/15-carrier-timeout.conf, /var/lib/NetworkManager/NetworkManager-intern.conf)
Oct 02 11:39:29 compute-0 systemd[1]: Reloaded Network Manager.
Oct 02 11:39:30 compute-0 sudo[55185]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:30 compute-0 sshd-session[47166]: Connection closed by 192.168.122.30 port 57256
Oct 02 11:39:30 compute-0 sshd-session[47163]: pam_unix(sshd:session): session closed for user zuul
Oct 02 11:39:30 compute-0 systemd[1]: session-11.scope: Deactivated successfully.
Oct 02 11:39:30 compute-0 systemd[1]: session-11.scope: Consumed 45.312s CPU time.
Oct 02 11:39:30 compute-0 systemd-logind[827]: Session 11 logged out. Waiting for processes to exit.
Oct 02 11:39:30 compute-0 systemd-logind[827]: Removed session 11.
Oct 02 11:39:35 compute-0 sshd-session[55221]: Accepted publickey for zuul from 192.168.122.30 port 52760 ssh2: ECDSA SHA256:tAEsFSop2MjuMQQ/UqKU2uCkxqWXGfsM5crdhd6tlI4
Oct 02 11:39:35 compute-0 systemd-logind[827]: New session 12 of user zuul.
Oct 02 11:39:35 compute-0 systemd[1]: Started Session 12 of User zuul.
Oct 02 11:39:35 compute-0 sshd-session[55221]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Oct 02 11:39:36 compute-0 python3.9[55375]: ansible-ansible.builtin.setup Invoked with gather_subset=['!all', '!min', 'local'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:39:37 compute-0 python3.9[55529]: ansible-ansible.builtin.setup Invoked with filter=['ansible_default_ipv4'] gather_subset=['!all', '!min', 'network'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Oct 02 11:39:38 compute-0 python3.9[55718]: ansible-ansible.legacy.command Invoked with _raw_params=hostname -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:39:39 compute-0 sshd-session[55224]: Connection closed by 192.168.122.30 port 52760
Oct 02 11:39:39 compute-0 sshd-session[55221]: pam_unix(sshd:session): session closed for user zuul
Oct 02 11:39:39 compute-0 systemd[1]: session-12.scope: Deactivated successfully.
Oct 02 11:39:39 compute-0 systemd[1]: session-12.scope: Consumed 2.416s CPU time.
Oct 02 11:39:39 compute-0 systemd-logind[827]: Session 12 logged out. Waiting for processes to exit.
Oct 02 11:39:39 compute-0 systemd-logind[827]: Removed session 12.
Oct 02 11:39:40 compute-0 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
Oct 02 11:39:44 compute-0 sshd-session[55748]: Accepted publickey for zuul from 192.168.122.30 port 38382 ssh2: ECDSA SHA256:tAEsFSop2MjuMQQ/UqKU2uCkxqWXGfsM5crdhd6tlI4
Oct 02 11:39:44 compute-0 systemd-logind[827]: New session 13 of user zuul.
Oct 02 11:39:44 compute-0 systemd[1]: Started Session 13 of User zuul.
Oct 02 11:39:44 compute-0 sshd-session[55748]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Oct 02 11:39:45 compute-0 python3.9[55901]: ansible-ansible.builtin.setup Invoked with gather_subset=['!all', '!min', 'local'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:39:46 compute-0 python3.9[56056]: ansible-ansible.builtin.setup Invoked with gather_subset=['!all', '!min', 'local'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:39:47 compute-0 sudo[56210]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wsapjmlzljqylhoowbpmkltmozbqsytw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405186.9512522-85-139106344794483/AnsiballZ_setup.py'
Oct 02 11:39:47 compute-0 sudo[56210]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:47 compute-0 python3.9[56212]: ansible-ansible.legacy.setup Invoked with filter=['ansible_pkg_mgr'] gather_subset=['!all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Oct 02 11:39:47 compute-0 sudo[56210]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:48 compute-0 sudo[56294]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zisfctmedlbkagzfdfvkotkurdxjtbhm ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405186.9512522-85-139106344794483/AnsiballZ_dnf.py'
Oct 02 11:39:48 compute-0 sudo[56294]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:48 compute-0 python3.9[56296]: ansible-ansible.legacy.dnf Invoked with name=['podman'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 02 11:39:49 compute-0 sudo[56294]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:49 compute-0 sudo[56448]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qcadmeoairpzcdhusoabyizsbudicxcz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405189.7406728-121-251670222085045/AnsiballZ_setup.py'
Oct 02 11:39:49 compute-0 sudo[56448]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:50 compute-0 python3.9[56450]: ansible-ansible.builtin.setup Invoked with filter=['ansible_interfaces'] gather_subset=['!all', '!min', 'network'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Oct 02 11:39:50 compute-0 sudo[56448]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:51 compute-0 sudo[56639]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ygaemexrmugevrsccuicjyvhucvyrrcq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405190.7985773-154-160289445835782/AnsiballZ_file.py'
Oct 02 11:39:51 compute-0 sudo[56639]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:51 compute-0 python3.9[56641]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/etc/containers/networks recurse=True state=directory force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:39:51 compute-0 sudo[56639]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:51 compute-0 sudo[56792]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kpvirfnqwolhwjtyzjfdyhvmjiylzqju ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405191.5755234-178-216979596352959/AnsiballZ_command.py'
Oct 02 11:39:51 compute-0 sudo[56792]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:52 compute-0 python3.9[56794]: ansible-ansible.legacy.command Invoked with _raw_params=podman network inspect podman
                                             _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:39:52 compute-0 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Oct 02 11:39:52 compute-0 sudo[56792]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:52 compute-0 sudo[56955]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jovljtyrarshzazxjwfpstnsuggvxkyw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405192.4252117-202-101805544892582/AnsiballZ_stat.py'
Oct 02 11:39:52 compute-0 sudo[56955]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:52 compute-0 python3.9[56957]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/networks/podman.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:39:53 compute-0 sudo[56955]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:53 compute-0 sudo[57033]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-egklnzvvuzpsigvkcezdohtqlkpbmgyu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405192.4252117-202-101805544892582/AnsiballZ_file.py'
Oct 02 11:39:53 compute-0 sudo[57033]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:53 compute-0 python3.9[57035]: ansible-ansible.legacy.file Invoked with group=root mode=0644 owner=root dest=/etc/containers/networks/podman.json _original_basename=podman_network_config.j2 recurse=False state=file path=/etc/containers/networks/podman.json force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:39:53 compute-0 sudo[57033]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:53 compute-0 sudo[57185]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jfdyumvyjfbcxmpwkajnccvghdviyhvj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405193.6358345-238-11605677939827/AnsiballZ_stat.py'
Oct 02 11:39:53 compute-0 sudo[57185]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:54 compute-0 python3.9[57187]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/20-edpm-podman-registries.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:39:54 compute-0 sudo[57185]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:54 compute-0 sudo[57263]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-innjucwcoaiufyxwvoounuvmgnyltvqn ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405193.6358345-238-11605677939827/AnsiballZ_file.py'
Oct 02 11:39:54 compute-0 sudo[57263]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:54 compute-0 python3.9[57265]: ansible-ansible.legacy.file Invoked with group=root mode=0644 owner=root setype=etc_t dest=/etc/containers/registries.conf.d/20-edpm-podman-registries.conf _original_basename=registries.conf.j2 recurse=False state=file path=/etc/containers/registries.conf.d/20-edpm-podman-registries.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:39:54 compute-0 sudo[57263]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:55 compute-0 sudo[57415]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tvihrkbjopvdypwzrajkwgbooowpcgdv ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405194.7508514-277-230303969903084/AnsiballZ_ini_file.py'
Oct 02 11:39:55 compute-0 sudo[57415]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:55 compute-0 python3.9[57417]: ansible-community.general.ini_file Invoked with create=True group=root mode=0644 option=pids_limit owner=root path=/etc/containers/containers.conf section=containers setype=etc_t value=4096 backup=False state=present exclusive=True no_extra_spaces=False ignore_spaces=False allow_no_value=False modify_inactive_option=True follow=False unsafe_writes=False section_has_values=None values=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:39:55 compute-0 sudo[57415]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:55 compute-0 sudo[57567]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gzjgeomyvbnmidyrxibslykmzplskacu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405195.4492667-277-56104119245382/AnsiballZ_ini_file.py'
Oct 02 11:39:55 compute-0 sudo[57567]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:55 compute-0 python3.9[57569]: ansible-community.general.ini_file Invoked with create=True group=root mode=0644 option=events_logger owner=root path=/etc/containers/containers.conf section=engine setype=etc_t value="journald" backup=False state=present exclusive=True no_extra_spaces=False ignore_spaces=False allow_no_value=False modify_inactive_option=True follow=False unsafe_writes=False section_has_values=None values=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:39:55 compute-0 sudo[57567]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:56 compute-0 sudo[57719]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-juqimcsigzbaxhbenuvtcdvtbaprrsri ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405195.9985058-277-237931365254430/AnsiballZ_ini_file.py'
Oct 02 11:39:56 compute-0 sudo[57719]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:56 compute-0 python3.9[57721]: ansible-community.general.ini_file Invoked with create=True group=root mode=0644 option=runtime owner=root path=/etc/containers/containers.conf section=engine setype=etc_t value="crun" backup=False state=present exclusive=True no_extra_spaces=False ignore_spaces=False allow_no_value=False modify_inactive_option=True follow=False unsafe_writes=False section_has_values=None values=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:39:56 compute-0 sudo[57719]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:56 compute-0 sudo[57871]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yjhuvvcifseeufjeyvguiycqlunjnedy ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405196.5256116-277-47296119145752/AnsiballZ_ini_file.py'
Oct 02 11:39:56 compute-0 sudo[57871]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:56 compute-0 python3.9[57873]: ansible-community.general.ini_file Invoked with create=True group=root mode=0644 option=network_backend owner=root path=/etc/containers/containers.conf section=network setype=etc_t value="netavark" backup=False state=present exclusive=True no_extra_spaces=False ignore_spaces=False allow_no_value=False modify_inactive_option=True follow=False unsafe_writes=False section_has_values=None values=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:39:56 compute-0 sudo[57871]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:57 compute-0 sudo[58023]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ofxtggiitstlxqakybgikmwcrlrbpmhu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405197.3846326-370-167297157530718/AnsiballZ_dnf.py'
Oct 02 11:39:57 compute-0 sudo[58023]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:39:57 compute-0 python3.9[58025]: ansible-ansible.legacy.dnf Invoked with name=['openssh-server'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 02 11:39:58 compute-0 sudo[58023]: pam_unix(sudo:session): session closed for user root
Oct 02 11:39:59 compute-0 sudo[58176]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gpacjgaxegbnniqzesxrfzdmtpjoigrj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405199.5508614-403-196519376810117/AnsiballZ_setup.py'
Oct 02 11:39:59 compute-0 sudo[58176]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:00 compute-0 python3.9[58178]: ansible-setup Invoked with gather_subset=['!all', '!min', 'distribution', 'distribution_major_version', 'distribution_version', 'os_family'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:40:00 compute-0 sudo[58176]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:00 compute-0 sudo[58330]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ffaghfnvnjtjdzpcgichncrrgineyhhp ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405200.376421-427-143493911029199/AnsiballZ_stat.py'
Oct 02 11:40:00 compute-0 sudo[58330]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:00 compute-0 python3.9[58332]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:40:00 compute-0 sudo[58330]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:01 compute-0 sudo[58482]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tnmdoatztwgndfmrjkvgirqtbgppcmhl ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405201.0513923-454-254914973027331/AnsiballZ_stat.py'
Oct 02 11:40:01 compute-0 sudo[58482]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:01 compute-0 python3.9[58484]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:40:01 compute-0 sudo[58482]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:02 compute-0 sudo[58634]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wdlydraoxgdirpulsoapsdmijbuzeosd ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405202.0236857-484-31425762216842/AnsiballZ_service_facts.py'
Oct 02 11:40:02 compute-0 sudo[58634]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:02 compute-0 python3.9[58636]: ansible-service_facts Invoked
Oct 02 11:40:02 compute-0 network[58653]: You are using 'network' service provided by 'network-scripts', which are now deprecated.
Oct 02 11:40:02 compute-0 network[58654]: 'network-scripts' will be removed from distribution in near future.
Oct 02 11:40:02 compute-0 network[58655]: It is advised to switch to 'NetworkManager' instead for network management.
Oct 02 11:40:06 compute-0 sudo[58634]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:07 compute-0 sudo[58940]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sfetczndubnhadgogovxbtodpqvltiyo ; /bin/bash /home/zuul/.ansible/tmp/ansible-tmp-1759405207.0520506-523-179356425216492/AnsiballZ_timesync_provider.sh /home/zuul/.ansible/tmp/ansible-tmp-1759405207.0520506-523-179356425216492/args'
Oct 02 11:40:07 compute-0 sudo[58940]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:07 compute-0 sudo[58940]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:08 compute-0 sudo[59107]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-uflujmydiajtjqrbiaguxmpaaycailsb ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405207.8410027-556-135126725908705/AnsiballZ_dnf.py'
Oct 02 11:40:08 compute-0 sudo[59107]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:08 compute-0 python3.9[59109]: ansible-ansible.legacy.dnf Invoked with name=['chrony'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 02 11:40:09 compute-0 sudo[59107]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:10 compute-0 sudo[59260]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vlulvoyoqhqtxswivwoenlvbzdweybtp ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405210.0057535-595-131810688705630/AnsiballZ_package_facts.py'
Oct 02 11:40:10 compute-0 sudo[59260]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:10 compute-0 python3.9[59262]: ansible-package_facts Invoked with manager=['auto'] strategy=first
Oct 02 11:40:11 compute-0 sudo[59260]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:12 compute-0 sudo[59412]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tmnmserkfbbtgmjcgbgfxkaxjxzckmtc ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405212.0421562-625-64315989789140/AnsiballZ_stat.py'
Oct 02 11:40:12 compute-0 sudo[59412]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:12 compute-0 python3.9[59414]: ansible-ansible.legacy.stat Invoked with path=/etc/chrony.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:40:12 compute-0 sudo[59412]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:13 compute-0 sudo[59537]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-eztumxqygrmnqsdzwodhlrdackwzkloj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405212.0421562-625-64315989789140/AnsiballZ_copy.py'
Oct 02 11:40:13 compute-0 sudo[59537]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:13 compute-0 python3.9[59539]: ansible-ansible.legacy.copy Invoked with backup=True dest=/etc/chrony.conf mode=0644 src=/home/zuul/.ansible/tmp/ansible-tmp-1759405212.0421562-625-64315989789140/.source.conf follow=False _original_basename=chrony.conf.j2 checksum=cfb003e56d02d0d2c65555452eb1a05073fecdad force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:40:13 compute-0 sudo[59537]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:13 compute-0 sudo[59692]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ijjiaubuussukwkhdkxgsdhsrktztxhp ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405213.5533724-670-36614791240665/AnsiballZ_stat.py'
Oct 02 11:40:13 compute-0 sudo[59692]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:13 compute-0 python3.9[59694]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/chronyd follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:40:14 compute-0 sudo[59692]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:14 compute-0 sudo[59817]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nzgmzssdfofpjzwlciaypqguemqgasnd ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405213.5533724-670-36614791240665/AnsiballZ_copy.py'
Oct 02 11:40:14 compute-0 sudo[59817]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:14 compute-0 python3.9[59819]: ansible-ansible.legacy.copy Invoked with backup=True dest=/etc/sysconfig/chronyd mode=0644 src=/home/zuul/.ansible/tmp/ansible-tmp-1759405213.5533724-670-36614791240665/.source follow=False _original_basename=chronyd.sysconfig.j2 checksum=dd196b1ff1f915b23eebc37ec77405b5dd3df76c force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:40:14 compute-0 sudo[59817]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:15 compute-0 sudo[59971]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-otvqvmxbgzifcgdvcurtiiknlbtogrif ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405215.4804702-733-12595694163243/AnsiballZ_lineinfile.py'
Oct 02 11:40:15 compute-0 sudo[59971]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:16 compute-0 python3.9[59973]: ansible-lineinfile Invoked with backup=True create=True dest=/etc/sysconfig/network line=PEERNTP=no mode=0644 regexp=^PEERNTP= state=present path=/etc/sysconfig/network backrefs=False firstmatch=False unsafe_writes=False search_string=None insertafter=None insertbefore=None validate=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:40:16 compute-0 sudo[59971]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:17 compute-0 sudo[60125]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kcxohciodewefyduqwkexvxxpvcbmoal ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405217.2775433-778-87392615441143/AnsiballZ_setup.py'
Oct 02 11:40:17 compute-0 sudo[60125]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:17 compute-0 python3.9[60127]: ansible-ansible.legacy.setup Invoked with gather_subset=['!all'] filter=['ansible_service_mgr'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Oct 02 11:40:18 compute-0 sudo[60125]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:18 compute-0 sudo[60209]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cpbjndyqbypxumqgivuvjfhyuoggwqcw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405217.2775433-778-87392615441143/AnsiballZ_systemd.py'
Oct 02 11:40:18 compute-0 sudo[60209]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:19 compute-0 python3.9[60211]: ansible-ansible.legacy.systemd Invoked with enabled=True name=chronyd state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:40:19 compute-0 sudo[60209]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:20 compute-0 sudo[60363]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tilpvobbtqzargjygrfkkxlnuvutfhqq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405219.9226742-826-172778603164365/AnsiballZ_setup.py'
Oct 02 11:40:20 compute-0 sudo[60363]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:20 compute-0 python3.9[60365]: ansible-ansible.legacy.setup Invoked with gather_subset=['!all'] filter=['ansible_service_mgr'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Oct 02 11:40:20 compute-0 sudo[60363]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:20 compute-0 sudo[60447]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-umcxpfvatmewspwfbtkqrdpegefdgfmx ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405219.9226742-826-172778603164365/AnsiballZ_systemd.py'
Oct 02 11:40:20 compute-0 sudo[60447]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:21 compute-0 python3.9[60449]: ansible-ansible.legacy.systemd Invoked with name=chronyd state=restarted daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Oct 02 11:40:21 compute-0 chronyd[837]: chronyd exiting
Oct 02 11:40:21 compute-0 systemd[1]: Stopping NTP client/server...
Oct 02 11:40:21 compute-0 systemd[1]: chronyd.service: Deactivated successfully.
Oct 02 11:40:21 compute-0 systemd[1]: Stopped NTP client/server.
Oct 02 11:40:21 compute-0 systemd[1]: Starting NTP client/server...
Oct 02 11:40:21 compute-0 chronyd[60458]: chronyd version 4.6.1 starting (+CMDMON +NTP +REFCLOCK +RTC +PRIVDROP +SCFILTER +SIGND +ASYNCDNS +NTS +SECHASH +IPV6 +DEBUG)
Oct 02 11:40:21 compute-0 chronyd[60458]: Frequency -27.195 +/- 0.172 ppm read from /var/lib/chrony/drift
Oct 02 11:40:21 compute-0 chronyd[60458]: Loaded seccomp filter (level 2)
Oct 02 11:40:21 compute-0 systemd[1]: Started NTP client/server.
Oct 02 11:40:21 compute-0 sudo[60447]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:21 compute-0 sshd-session[55751]: Connection closed by 192.168.122.30 port 38382
Oct 02 11:40:22 compute-0 sshd-session[55748]: pam_unix(sshd:session): session closed for user zuul
Oct 02 11:40:22 compute-0 systemd[1]: session-13.scope: Deactivated successfully.
Oct 02 11:40:22 compute-0 systemd[1]: session-13.scope: Consumed 23.048s CPU time.
Oct 02 11:40:22 compute-0 systemd-logind[827]: Session 13 logged out. Waiting for processes to exit.
Oct 02 11:40:22 compute-0 systemd-logind[827]: Removed session 13.
Oct 02 11:40:27 compute-0 sshd-session[60484]: Accepted publickey for zuul from 192.168.122.30 port 44478 ssh2: ECDSA SHA256:tAEsFSop2MjuMQQ/UqKU2uCkxqWXGfsM5crdhd6tlI4
Oct 02 11:40:27 compute-0 systemd-logind[827]: New session 14 of user zuul.
Oct 02 11:40:27 compute-0 systemd[1]: Started Session 14 of User zuul.
Oct 02 11:40:27 compute-0 sshd-session[60484]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Oct 02 11:40:28 compute-0 python3.9[60637]: ansible-ansible.builtin.setup Invoked with gather_subset=['!all', '!min', 'local'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:40:29 compute-0 sudo[60791]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hpyewbqlmwideruaonpfjblrvzcvgkgp ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405228.8575602-64-95536768149070/AnsiballZ_file.py'
Oct 02 11:40:29 compute-0 sudo[60791]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:29 compute-0 python3.9[60793]: ansible-ansible.builtin.file Invoked with group=zuul mode=0770 owner=zuul path=/root/.config/containers recurse=True state=directory force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:40:29 compute-0 sudo[60791]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:30 compute-0 sudo[60966]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nhwugqjslommnhyumkkcuqzuxfzaoutt ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405229.7425525-88-125206843161815/AnsiballZ_stat.py'
Oct 02 11:40:30 compute-0 sudo[60966]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:30 compute-0 python3.9[60968]: ansible-ansible.legacy.stat Invoked with path=/root/.config/containers/auth.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:40:30 compute-0 sudo[60966]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:30 compute-0 sudo[61044]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-oustgsucwphbqxxogoydvhjfnuiihwhz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405229.7425525-88-125206843161815/AnsiballZ_file.py'
Oct 02 11:40:30 compute-0 sudo[61044]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:30 compute-0 python3.9[61046]: ansible-ansible.legacy.file Invoked with group=zuul mode=0660 owner=zuul dest=/root/.config/containers/auth.json _original_basename=.phlreugb recurse=False state=file path=/root/.config/containers/auth.json force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:40:30 compute-0 sudo[61044]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:31 compute-0 sudo[61196]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ryqltpsfxbpfjhzfjkhyigblnbykupiw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405231.4659748-148-152406497157883/AnsiballZ_stat.py'
Oct 02 11:40:31 compute-0 sudo[61196]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:32 compute-0 python3.9[61198]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/podman_drop_in follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:40:32 compute-0 sudo[61196]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:32 compute-0 sudo[61319]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vxzrklnopljsaqwjyitjmjueknczunxe ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405231.4659748-148-152406497157883/AnsiballZ_copy.py'
Oct 02 11:40:32 compute-0 sudo[61319]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:32 compute-0 python3.9[61321]: ansible-ansible.legacy.copy Invoked with dest=/etc/sysconfig/podman_drop_in mode=0644 src=/home/zuul/.ansible/tmp/ansible-tmp-1759405231.4659748-148-152406497157883/.source _original_basename=.h4i2zh9j follow=False checksum=125299ce8dea7711a76292961206447f0043248b backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:40:32 compute-0 sudo[61319]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:33 compute-0 sudo[61471]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vinrnzgtwhcibosuhvvzmvcliqslvreu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405232.9863405-196-241022596977866/AnsiballZ_file.py'
Oct 02 11:40:33 compute-0 sudo[61471]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:33 compute-0 python3.9[61473]: ansible-ansible.builtin.file Invoked with path=/var/local/libexec recurse=True setype=container_file_t state=directory force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:40:33 compute-0 sudo[61471]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:34 compute-0 sudo[61623]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rjtetnnlymugcmiqngswpfwirsecbouq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405233.7330363-220-224422796597617/AnsiballZ_stat.py'
Oct 02 11:40:34 compute-0 sudo[61623]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:34 compute-0 python3.9[61625]: ansible-ansible.legacy.stat Invoked with path=/var/local/libexec/edpm-container-shutdown follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:40:34 compute-0 sudo[61623]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:34 compute-0 sudo[61746]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-uakugurqbwyrcvnlduxvopiixyhdwuvf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405233.7330363-220-224422796597617/AnsiballZ_copy.py'
Oct 02 11:40:34 compute-0 sudo[61746]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:34 compute-0 python3.9[61748]: ansible-ansible.legacy.copy Invoked with dest=/var/local/libexec/edpm-container-shutdown group=root mode=0700 owner=root setype=container_file_t src=/home/zuul/.ansible/tmp/ansible-tmp-1759405233.7330363-220-224422796597617/.source _original_basename=edpm-container-shutdown follow=False checksum=632c3792eb3dce4288b33ae7b265b71950d69f13 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:40:34 compute-0 sudo[61746]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:35 compute-0 sudo[61898]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kqlrnlvpzhmrwoonxtdnygqrcnnabyhm ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405235.0393248-220-23099751389131/AnsiballZ_stat.py'
Oct 02 11:40:35 compute-0 sudo[61898]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:35 compute-0 python3.9[61900]: ansible-ansible.legacy.stat Invoked with path=/var/local/libexec/edpm-start-podman-container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:40:35 compute-0 sudo[61898]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:35 compute-0 sudo[62021]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-aobowbxbpatjmzqfxbxucaxmnpannksy ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405235.0393248-220-23099751389131/AnsiballZ_copy.py'
Oct 02 11:40:35 compute-0 sudo[62021]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:36 compute-0 python3.9[62023]: ansible-ansible.legacy.copy Invoked with dest=/var/local/libexec/edpm-start-podman-container group=root mode=0700 owner=root setype=container_file_t src=/home/zuul/.ansible/tmp/ansible-tmp-1759405235.0393248-220-23099751389131/.source _original_basename=edpm-start-podman-container follow=False checksum=b963c569d75a655c0ccae95d9bb4a2a9a4df27d1 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:40:36 compute-0 sudo[62021]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:36 compute-0 sudo[62173]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pxmkivgiuczxurgidyeohnnfixcktwit ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405236.3782043-307-190907003707040/AnsiballZ_file.py'
Oct 02 11:40:36 compute-0 sudo[62173]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:36 compute-0 python3.9[62175]: ansible-ansible.builtin.file Invoked with mode=420 path=/etc/systemd/system-preset state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:40:36 compute-0 sudo[62173]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:37 compute-0 sudo[62325]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rxqikojqveqjhrhavsciaaurlwgxfsar ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405237.1549757-331-127416790770680/AnsiballZ_stat.py'
Oct 02 11:40:37 compute-0 sudo[62325]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:37 compute-0 python3.9[62327]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/edpm-container-shutdown.service follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:40:37 compute-0 sudo[62325]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:38 compute-0 sudo[62448]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-efapdrnlftokqeymdpehvavekfbpxzfv ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405237.1549757-331-127416790770680/AnsiballZ_copy.py'
Oct 02 11:40:38 compute-0 sudo[62448]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:38 compute-0 python3.9[62450]: ansible-ansible.legacy.copy Invoked with dest=/etc/systemd/system/edpm-container-shutdown.service group=root mode=0644 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405237.1549757-331-127416790770680/.source.service _original_basename=edpm-container-shutdown-service follow=False checksum=6336835cb0f888670cc99de31e19c8c071444d33 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:40:38 compute-0 sudo[62448]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:38 compute-0 sudo[62600]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wgrdpmhemjckbetiqsvqrbxttukthyoq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405238.4318767-376-4925412393171/AnsiballZ_stat.py'
Oct 02 11:40:38 compute-0 sudo[62600]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:38 compute-0 python3.9[62602]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system-preset/91-edpm-container-shutdown.preset follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:40:38 compute-0 sudo[62600]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:39 compute-0 sudo[62723]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-grcnywujyaumxqmxlfewukyoosbihvck ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405238.4318767-376-4925412393171/AnsiballZ_copy.py'
Oct 02 11:40:39 compute-0 sudo[62723]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:39 compute-0 python3.9[62725]: ansible-ansible.legacy.copy Invoked with dest=/etc/systemd/system-preset/91-edpm-container-shutdown.preset group=root mode=0644 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405238.4318767-376-4925412393171/.source.preset _original_basename=91-edpm-container-shutdown-preset follow=False checksum=b275e4375287528cb63464dd32f622c4f142a915 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:40:39 compute-0 sudo[62723]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:40 compute-0 sudo[62875]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-uhzwjtjtnlyvrlmzyrgkikzeheucjzjt ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405239.6859791-421-2862570091215/AnsiballZ_systemd.py'
Oct 02 11:40:40 compute-0 sudo[62875]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:40 compute-0 python3.9[62877]: ansible-ansible.builtin.systemd Invoked with daemon_reload=True enabled=True name=edpm-container-shutdown state=started daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:40:40 compute-0 systemd[1]: Reloading.
Oct 02 11:40:40 compute-0 systemd-rc-local-generator[62901]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:40:40 compute-0 systemd-sysv-generator[62904]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:40:40 compute-0 systemd[1]: Reloading.
Oct 02 11:40:40 compute-0 systemd-rc-local-generator[62945]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:40:40 compute-0 systemd-sysv-generator[62949]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:40:41 compute-0 systemd[1]: Starting EDPM Container Shutdown...
Oct 02 11:40:41 compute-0 systemd[1]: Finished EDPM Container Shutdown.
Oct 02 11:40:41 compute-0 sudo[62875]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:41 compute-0 sudo[63104]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-exvrfplpsuahxxvkthrmzevxyhqihgxe ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405241.2341638-445-123624639385957/AnsiballZ_stat.py'
Oct 02 11:40:41 compute-0 sudo[63104]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:41 compute-0 python3.9[63106]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/netns-placeholder.service follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:40:41 compute-0 sudo[63104]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:42 compute-0 sudo[63227]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hccgjsbdltwoptoobjbfajypmuyxzwgz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405241.2341638-445-123624639385957/AnsiballZ_copy.py'
Oct 02 11:40:42 compute-0 sudo[63227]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:42 compute-0 python3.9[63229]: ansible-ansible.legacy.copy Invoked with dest=/etc/systemd/system/netns-placeholder.service group=root mode=0644 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405241.2341638-445-123624639385957/.source.service _original_basename=netns-placeholder-service follow=False checksum=b61b1b5918c20c877b8b226fbf34ff89a082d972 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:40:42 compute-0 sudo[63227]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:42 compute-0 sudo[63379]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-atmffypvuozqgrurrikyxdnngnaqgput ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405242.5459433-490-235987748523173/AnsiballZ_stat.py'
Oct 02 11:40:42 compute-0 sudo[63379]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:43 compute-0 python3.9[63381]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system-preset/91-netns-placeholder.preset follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:40:43 compute-0 sudo[63379]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:43 compute-0 sudo[63502]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mnqohailismlnwkiyixajupporirprmq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405242.5459433-490-235987748523173/AnsiballZ_copy.py'
Oct 02 11:40:43 compute-0 sudo[63502]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:43 compute-0 python3.9[63504]: ansible-ansible.legacy.copy Invoked with dest=/etc/systemd/system-preset/91-netns-placeholder.preset group=root mode=0644 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405242.5459433-490-235987748523173/.source.preset _original_basename=91-netns-placeholder-preset follow=False checksum=28b7b9aa893525d134a1eeda8a0a48fb25b736b9 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:40:43 compute-0 sudo[63502]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:44 compute-0 sudo[63654]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mubejafmhfwljsjoimggizzofuejlqge ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405243.798896-535-183800585139838/AnsiballZ_systemd.py'
Oct 02 11:40:44 compute-0 sudo[63654]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:44 compute-0 python3.9[63656]: ansible-ansible.builtin.systemd Invoked with daemon_reload=True enabled=True name=netns-placeholder state=started daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:40:44 compute-0 systemd[1]: Reloading.
Oct 02 11:40:44 compute-0 systemd-sysv-generator[63688]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:40:44 compute-0 systemd-rc-local-generator[63684]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:40:44 compute-0 systemd[1]: Reloading.
Oct 02 11:40:44 compute-0 systemd-rc-local-generator[63722]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:40:44 compute-0 systemd-sysv-generator[63726]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:40:44 compute-0 systemd[1]: Starting Create netns directory...
Oct 02 11:40:44 compute-0 systemd[1]: run-netns-placeholder.mount: Deactivated successfully.
Oct 02 11:40:44 compute-0 systemd[1]: netns-placeholder.service: Deactivated successfully.
Oct 02 11:40:44 compute-0 systemd[1]: Finished Create netns directory.
Oct 02 11:40:44 compute-0 sudo[63654]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:45 compute-0 python3.9[63883]: ansible-ansible.builtin.service_facts Invoked
Oct 02 11:40:45 compute-0 network[63900]: You are using 'network' service provided by 'network-scripts', which are now deprecated.
Oct 02 11:40:45 compute-0 network[63901]: 'network-scripts' will be removed from distribution in near future.
Oct 02 11:40:45 compute-0 network[63902]: It is advised to switch to 'NetworkManager' instead for network management.
Oct 02 11:40:54 compute-0 sudo[64164]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jtvattlougbxwjesfccvrjbkhirvcetq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405253.8982272-583-95241203663535/AnsiballZ_systemd.py'
Oct 02 11:40:54 compute-0 sudo[64164]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:54 compute-0 python3.9[64166]: ansible-ansible.builtin.systemd Invoked with enabled=False name=iptables.service state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:40:54 compute-0 systemd[1]: Reloading.
Oct 02 11:40:54 compute-0 systemd-rc-local-generator[64196]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:40:54 compute-0 systemd-sysv-generator[64199]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:40:54 compute-0 systemd[1]: Stopping IPv4 firewall with iptables...
Oct 02 11:40:54 compute-0 iptables.init[64206]: iptables: Setting chains to policy ACCEPT: raw mangle filter nat [  OK  ]
Oct 02 11:40:54 compute-0 iptables.init[64206]: iptables: Flushing firewall rules: [  OK  ]
Oct 02 11:40:54 compute-0 systemd[1]: iptables.service: Deactivated successfully.
Oct 02 11:40:54 compute-0 systemd[1]: Stopped IPv4 firewall with iptables.
Oct 02 11:40:54 compute-0 sudo[64164]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:55 compute-0 sudo[64400]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ojvnxxluhlrbevjlpwhdjdmecvpszmfq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405255.1135762-583-268853271316155/AnsiballZ_systemd.py'
Oct 02 11:40:55 compute-0 sudo[64400]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:55 compute-0 python3.9[64402]: ansible-ansible.builtin.systemd Invoked with enabled=False name=ip6tables.service state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:40:55 compute-0 sudo[64400]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:56 compute-0 sudo[64554]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lmoakdtznyogdfzhkxribygfusoomdcb ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405256.0723507-631-136023636579065/AnsiballZ_systemd.py'
Oct 02 11:40:56 compute-0 sudo[64554]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:56 compute-0 python3.9[64556]: ansible-ansible.builtin.systemd Invoked with enabled=True name=nftables state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:40:56 compute-0 systemd[1]: Reloading.
Oct 02 11:40:56 compute-0 systemd-sysv-generator[64585]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:40:56 compute-0 systemd-rc-local-generator[64582]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:40:57 compute-0 systemd[1]: Starting Netfilter Tables...
Oct 02 11:40:57 compute-0 systemd[1]: Finished Netfilter Tables.
Oct 02 11:40:57 compute-0 sudo[64554]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:57 compute-0 sudo[64746]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wtvxioqghhmwyjzdfdrpmbaaizbtandl ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405257.2319202-655-49084509365033/AnsiballZ_command.py'
Oct 02 11:40:57 compute-0 sudo[64746]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:57 compute-0 python3.9[64748]: ansible-ansible.legacy.command Invoked with _raw_params=nft flush ruleset _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:40:57 compute-0 sudo[64746]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:58 compute-0 sudo[64899]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-csswspzwunnlvorfqnrybtfhsjjwbafj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405258.3400533-697-256760863529173/AnsiballZ_stat.py'
Oct 02 11:40:58 compute-0 sudo[64899]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:58 compute-0 python3.9[64901]: ansible-ansible.legacy.stat Invoked with path=/etc/ssh/sshd_config follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:40:58 compute-0 sudo[64899]: pam_unix(sudo:session): session closed for user root
Oct 02 11:40:59 compute-0 sudo[65024]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dfkbljktdycgdcaxcerxsmyqcokwqpti ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405258.3400533-697-256760863529173/AnsiballZ_copy.py'
Oct 02 11:40:59 compute-0 sudo[65024]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:40:59 compute-0 python3.9[65026]: ansible-ansible.legacy.copy Invoked with dest=/etc/ssh/sshd_config mode=0600 src=/home/zuul/.ansible/tmp/ansible-tmp-1759405258.3400533-697-256760863529173/.source validate=/usr/sbin/sshd -T -f %s follow=False _original_basename=sshd_config_block.j2 checksum=4729b6ffc5b555fa142bf0b6e6dc15609cb89a22 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:40:59 compute-0 sudo[65024]: pam_unix(sudo:session): session closed for user root
Oct 02 11:41:00 compute-0 python3.9[65177]: ansible-ansible.builtin.systemd Invoked with name=sshd state=reloaded daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Oct 02 11:41:00 compute-0 polkitd[6487]: Registered Authentication Agent for unix-process:65179:325776 (system bus name :1.550 [/usr/bin/pkttyagent --notify-fd 5 --fallback], object path /org/freedesktop/PolicyKit1/AuthenticationAgent, locale en_US.UTF-8)
Oct 02 11:41:01 compute-0 anacron[1094]: Job `cron.daily' started
Oct 02 11:41:01 compute-0 anacron[1094]: Job `cron.daily' terminated
Oct 02 11:41:25 compute-0 polkit-agent-helper-1[65191]: pam_unix(polkit-1:auth): conversation failed
Oct 02 11:41:25 compute-0 polkit-agent-helper-1[65191]: pam_unix(polkit-1:auth): auth could not identify password for [root]
Oct 02 11:41:25 compute-0 polkitd[6487]: Unregistered Authentication Agent for unix-process:65179:325776 (system bus name :1.550, object path /org/freedesktop/PolicyKit1/AuthenticationAgent, locale en_US.UTF-8) (disconnected from bus)
Oct 02 11:41:25 compute-0 polkitd[6487]: Operator of unix-process:65179:325776 FAILED to authenticate to gain authorization for action org.freedesktop.systemd1.manage-units for system-bus-name::1.549 [<unknown>] (owned by unix-user:zuul)
Oct 02 11:41:25 compute-0 sshd-session[60487]: Connection closed by 192.168.122.30 port 44478
Oct 02 11:41:25 compute-0 sshd-session[60484]: pam_unix(sshd:session): session closed for user zuul
Oct 02 11:41:25 compute-0 systemd-logind[827]: Session 14 logged out. Waiting for processes to exit.
Oct 02 11:41:25 compute-0 systemd[1]: session-14.scope: Deactivated successfully.
Oct 02 11:41:25 compute-0 systemd[1]: session-14.scope: Consumed 19.377s CPU time.
Oct 02 11:41:25 compute-0 systemd-logind[827]: Removed session 14.
Oct 02 11:41:38 compute-0 sshd-session[65219]: Accepted publickey for zuul from 192.168.122.30 port 50072 ssh2: ECDSA SHA256:tAEsFSop2MjuMQQ/UqKU2uCkxqWXGfsM5crdhd6tlI4
Oct 02 11:41:38 compute-0 systemd-logind[827]: New session 15 of user zuul.
Oct 02 11:41:38 compute-0 systemd[1]: Started Session 15 of User zuul.
Oct 02 11:41:38 compute-0 sshd-session[65219]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Oct 02 11:41:39 compute-0 python3.9[65372]: ansible-ansible.builtin.setup Invoked with gather_subset=['!all', '!min', 'local'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:41:39 compute-0 sudo[65526]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rilcmwqwcyzcrkpdphzjfcsvvhxictak ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405299.5007217-64-189429055140423/AnsiballZ_file.py'
Oct 02 11:41:39 compute-0 sudo[65526]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:41:40 compute-0 python3.9[65528]: ansible-ansible.builtin.file Invoked with group=zuul mode=0770 owner=zuul path=/root/.config/containers recurse=True state=directory force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:41:40 compute-0 sudo[65526]: pam_unix(sudo:session): session closed for user root
Oct 02 11:41:40 compute-0 sudo[65701]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-omtmkwiktzotoprfcwcjpkjewewmcnjm ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405300.345248-88-214073880913810/AnsiballZ_stat.py'
Oct 02 11:41:40 compute-0 sudo[65701]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:41:40 compute-0 python3.9[65703]: ansible-ansible.legacy.stat Invoked with path=/root/.config/containers/auth.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:41:41 compute-0 sudo[65701]: pam_unix(sudo:session): session closed for user root
Oct 02 11:41:41 compute-0 sudo[65779]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wmxnusrwkfyhiwtvxgqvcarvsnudjvey ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405300.345248-88-214073880913810/AnsiballZ_file.py'
Oct 02 11:41:41 compute-0 sudo[65779]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:41:41 compute-0 python3.9[65781]: ansible-ansible.legacy.file Invoked with group=zuul mode=0660 owner=zuul dest=/root/.config/containers/auth.json _original_basename=.3jvn513f recurse=False state=file path=/root/.config/containers/auth.json force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:41:41 compute-0 sudo[65779]: pam_unix(sudo:session): session closed for user root
Oct 02 11:41:42 compute-0 sudo[65931]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rthsvvmmsxtphqjdvlxcnljxbrgfhcfv ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405302.025312-148-229597703576632/AnsiballZ_stat.py'
Oct 02 11:41:42 compute-0 sudo[65931]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:41:42 compute-0 python3.9[65933]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/podman_drop_in follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:41:42 compute-0 sudo[65931]: pam_unix(sudo:session): session closed for user root
Oct 02 11:41:42 compute-0 sudo[66009]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wxffrypiqntvyympdtoekwzipjabrknp ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405302.025312-148-229597703576632/AnsiballZ_file.py'
Oct 02 11:41:42 compute-0 sudo[66009]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:41:42 compute-0 python3.9[66011]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/podman_drop_in _original_basename=.c5ruupg5 recurse=False state=file path=/etc/sysconfig/podman_drop_in force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:41:43 compute-0 sudo[66009]: pam_unix(sudo:session): session closed for user root
Oct 02 11:41:43 compute-0 sudo[66161]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rxchttdvdqgtrrcjtqiuguydxjqarbbc ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405303.2108207-187-38675306759556/AnsiballZ_file.py'
Oct 02 11:41:43 compute-0 sudo[66161]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:41:43 compute-0 python3.9[66163]: ansible-ansible.builtin.file Invoked with path=/var/local/libexec recurse=True setype=container_file_t state=directory force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:41:43 compute-0 sudo[66161]: pam_unix(sudo:session): session closed for user root
Oct 02 11:41:44 compute-0 sudo[66313]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cpknddxszdfzpgwpbdnmqykosugiusyb ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405303.9160225-211-227194833701951/AnsiballZ_stat.py'
Oct 02 11:41:44 compute-0 sudo[66313]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:41:44 compute-0 python3.9[66315]: ansible-ansible.legacy.stat Invoked with path=/var/local/libexec/edpm-container-shutdown follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:41:44 compute-0 sudo[66313]: pam_unix(sudo:session): session closed for user root
Oct 02 11:41:44 compute-0 sudo[66391]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-taxsnjvtnwprjvlwlgieinnmgzxdiyye ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405303.9160225-211-227194833701951/AnsiballZ_file.py'
Oct 02 11:41:44 compute-0 sudo[66391]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:41:44 compute-0 python3.9[66393]: ansible-ansible.legacy.file Invoked with group=root mode=0700 owner=root setype=container_file_t dest=/var/local/libexec/edpm-container-shutdown _original_basename=edpm-container-shutdown recurse=False state=file path=/var/local/libexec/edpm-container-shutdown force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:41:44 compute-0 sudo[66391]: pam_unix(sudo:session): session closed for user root
Oct 02 11:41:45 compute-0 sudo[66543]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jfhubrrgiyqyxwheevnexrgtcwetovqm ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405304.9257438-211-13954899262749/AnsiballZ_stat.py'
Oct 02 11:41:45 compute-0 sudo[66543]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:41:45 compute-0 python3.9[66545]: ansible-ansible.legacy.stat Invoked with path=/var/local/libexec/edpm-start-podman-container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:41:45 compute-0 sudo[66543]: pam_unix(sudo:session): session closed for user root
Oct 02 11:41:45 compute-0 sudo[66621]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dxnxumjfuziibufwlfycebiombfyqxup ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405304.9257438-211-13954899262749/AnsiballZ_file.py'
Oct 02 11:41:45 compute-0 sudo[66621]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:41:45 compute-0 python3.9[66623]: ansible-ansible.legacy.file Invoked with group=root mode=0700 owner=root setype=container_file_t dest=/var/local/libexec/edpm-start-podman-container _original_basename=edpm-start-podman-container recurse=False state=file path=/var/local/libexec/edpm-start-podman-container force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:41:45 compute-0 sudo[66621]: pam_unix(sudo:session): session closed for user root
Oct 02 11:41:46 compute-0 sudo[66773]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kmzctkzueohxnovlxdcbwhcppvnjzimg ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405306.3184924-280-142199223537070/AnsiballZ_file.py'
Oct 02 11:41:46 compute-0 sudo[66773]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:41:46 compute-0 python3.9[66775]: ansible-ansible.builtin.file Invoked with mode=420 path=/etc/systemd/system-preset state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:41:46 compute-0 sudo[66773]: pam_unix(sudo:session): session closed for user root
Oct 02 11:41:47 compute-0 sudo[66925]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rupkyhoqkihjnnngzddruajduhrtvkpn ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405306.9902935-304-112988916230673/AnsiballZ_stat.py'
Oct 02 11:41:47 compute-0 sudo[66925]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:41:47 compute-0 python3.9[66927]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/edpm-container-shutdown.service follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:41:47 compute-0 sudo[66925]: pam_unix(sudo:session): session closed for user root
Oct 02 11:41:47 compute-0 sudo[67003]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-spurstinjjrsjvtzjkeosvgphbfunivx ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405306.9902935-304-112988916230673/AnsiballZ_file.py'
Oct 02 11:41:47 compute-0 sudo[67003]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:41:47 compute-0 python3.9[67005]: ansible-ansible.legacy.file Invoked with group=root mode=0644 owner=root dest=/etc/systemd/system/edpm-container-shutdown.service _original_basename=edpm-container-shutdown-service recurse=False state=file path=/etc/systemd/system/edpm-container-shutdown.service force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:41:47 compute-0 sudo[67003]: pam_unix(sudo:session): session closed for user root
Oct 02 11:41:48 compute-0 sudo[67155]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vqrabusgeqahgbqjzftmjtbmzfmjmtno ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405308.0635073-340-169501281526645/AnsiballZ_stat.py'
Oct 02 11:41:48 compute-0 sudo[67155]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:41:48 compute-0 python3.9[67157]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system-preset/91-edpm-container-shutdown.preset follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:41:48 compute-0 sudo[67155]: pam_unix(sudo:session): session closed for user root
Oct 02 11:41:48 compute-0 sudo[67233]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vuvdimsgpgeeimteuxnumurskeyfcpvi ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405308.0635073-340-169501281526645/AnsiballZ_file.py'
Oct 02 11:41:48 compute-0 sudo[67233]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:41:48 compute-0 python3.9[67235]: ansible-ansible.legacy.file Invoked with group=root mode=0644 owner=root dest=/etc/systemd/system-preset/91-edpm-container-shutdown.preset _original_basename=91-edpm-container-shutdown-preset recurse=False state=file path=/etc/systemd/system-preset/91-edpm-container-shutdown.preset force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:41:48 compute-0 sudo[67233]: pam_unix(sudo:session): session closed for user root
Oct 02 11:41:49 compute-0 sudo[67385]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gpgeytccybchokmkrvnivotjhakrbvnw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405309.1284099-376-253945315195337/AnsiballZ_systemd.py'
Oct 02 11:41:49 compute-0 sudo[67385]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:41:50 compute-0 python3.9[67387]: ansible-ansible.builtin.systemd Invoked with daemon_reload=True enabled=True name=edpm-container-shutdown state=started daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:41:50 compute-0 systemd[1]: Reloading.
Oct 02 11:41:50 compute-0 systemd-rc-local-generator[67412]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:41:50 compute-0 systemd-sysv-generator[67417]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:41:50 compute-0 sudo[67385]: pam_unix(sudo:session): session closed for user root
Oct 02 11:41:50 compute-0 sudo[67574]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rmkwuakftnfzgtsiimtygucjnptekrfc ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405310.5211077-400-31893070110586/AnsiballZ_stat.py'
Oct 02 11:41:50 compute-0 sudo[67574]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:41:50 compute-0 python3.9[67576]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/netns-placeholder.service follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:41:51 compute-0 sudo[67574]: pam_unix(sudo:session): session closed for user root
Oct 02 11:41:51 compute-0 sudo[67652]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-anpxeqapelcazsuoeawzsxpoiikngrlm ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405310.5211077-400-31893070110586/AnsiballZ_file.py'
Oct 02 11:41:51 compute-0 sudo[67652]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:41:51 compute-0 python3.9[67654]: ansible-ansible.legacy.file Invoked with group=root mode=0644 owner=root dest=/etc/systemd/system/netns-placeholder.service _original_basename=netns-placeholder-service recurse=False state=file path=/etc/systemd/system/netns-placeholder.service force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:41:51 compute-0 sudo[67652]: pam_unix(sudo:session): session closed for user root
Oct 02 11:41:52 compute-0 sudo[67804]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cyenrjvrehqxyqoqrjlomtnlwshjmivz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405311.9079711-436-72769353310867/AnsiballZ_stat.py'
Oct 02 11:41:52 compute-0 sudo[67804]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:41:52 compute-0 python3.9[67806]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system-preset/91-netns-placeholder.preset follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:41:52 compute-0 sudo[67804]: pam_unix(sudo:session): session closed for user root
Oct 02 11:41:52 compute-0 sudo[67882]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dcwqrsxgszjodumzwequuobjtlidtojv ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405311.9079711-436-72769353310867/AnsiballZ_file.py'
Oct 02 11:41:52 compute-0 sudo[67882]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:41:52 compute-0 python3.9[67884]: ansible-ansible.legacy.file Invoked with group=root mode=0644 owner=root dest=/etc/systemd/system-preset/91-netns-placeholder.preset _original_basename=91-netns-placeholder-preset recurse=False state=file path=/etc/systemd/system-preset/91-netns-placeholder.preset force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:41:52 compute-0 sudo[67882]: pam_unix(sudo:session): session closed for user root
Oct 02 11:41:53 compute-0 sudo[68034]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-oveucxqdlmuxoajubxlksftsmxgtlpjm ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405313.08468-472-37889363212919/AnsiballZ_systemd.py'
Oct 02 11:41:53 compute-0 sudo[68034]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:41:53 compute-0 python3.9[68036]: ansible-ansible.builtin.systemd Invoked with daemon_reload=True enabled=True name=netns-placeholder state=started daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:41:53 compute-0 systemd[1]: Reloading.
Oct 02 11:41:53 compute-0 systemd-rc-local-generator[68064]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:41:53 compute-0 systemd-sysv-generator[68067]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:41:54 compute-0 systemd[1]: Starting Create netns directory...
Oct 02 11:41:54 compute-0 systemd[1]: run-netns-placeholder.mount: Deactivated successfully.
Oct 02 11:41:54 compute-0 systemd[1]: netns-placeholder.service: Deactivated successfully.
Oct 02 11:41:54 compute-0 systemd[1]: Finished Create netns directory.
Oct 02 11:41:55 compute-0 sudo[68034]: pam_unix(sudo:session): session closed for user root
Oct 02 11:41:55 compute-0 python3.9[68227]: ansible-ansible.builtin.service_facts Invoked
Oct 02 11:41:55 compute-0 network[68244]: You are using 'network' service provided by 'network-scripts', which are now deprecated.
Oct 02 11:41:55 compute-0 network[68245]: 'network-scripts' will be removed from distribution in near future.
Oct 02 11:41:55 compute-0 network[68246]: It is advised to switch to 'NetworkManager' instead for network management.
Oct 02 11:42:00 compute-0 sudo[68507]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-aezebsmecxdieikqgzjnpkmtwolratcc ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405319.8703263-550-158280419684764/AnsiballZ_stat.py'
Oct 02 11:42:00 compute-0 sudo[68507]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:00 compute-0 python3.9[68509]: ansible-ansible.legacy.stat Invoked with path=/etc/ssh/sshd_config follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:42:00 compute-0 sudo[68507]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:00 compute-0 sudo[68585]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-psctfenaggdgnmrxmjniggrjzrmwiwvj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405319.8703263-550-158280419684764/AnsiballZ_file.py'
Oct 02 11:42:00 compute-0 sudo[68585]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:00 compute-0 python3.9[68587]: ansible-ansible.legacy.file Invoked with mode=0600 dest=/etc/ssh/sshd_config _original_basename=sshd_config_block.j2 recurse=False state=file path=/etc/ssh/sshd_config force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:42:00 compute-0 sudo[68585]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:01 compute-0 sudo[68737]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lpbsailflqvgklssztotvtvfynzbdtgb ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405321.0909636-589-47941187448045/AnsiballZ_file.py'
Oct 02 11:42:01 compute-0 sudo[68737]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:01 compute-0 python3.9[68739]: ansible-ansible.builtin.file Invoked with group=root mode=0750 owner=root path=/var/lib/edpm-config/firewall state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:42:01 compute-0 sudo[68737]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:02 compute-0 sudo[68889]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sbkdihrnyerlpzkbsfvocodfvtkkljgb ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405321.8123894-613-195262141464937/AnsiballZ_stat.py'
Oct 02 11:42:02 compute-0 sudo[68889]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:02 compute-0 python3.9[68891]: ansible-ansible.legacy.stat Invoked with path=/var/lib/edpm-config/firewall/sshd-networks.yaml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:42:02 compute-0 sudo[68889]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:02 compute-0 sudo[69012]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-flygzsunaydwtcyaakuixkzsyqddlrvi ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405321.8123894-613-195262141464937/AnsiballZ_copy.py'
Oct 02 11:42:02 compute-0 sudo[69012]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:02 compute-0 python3.9[69014]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/edpm-config/firewall/sshd-networks.yaml group=root mode=0644 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405321.8123894-613-195262141464937/.source.yaml follow=False _original_basename=firewall.yaml.j2 checksum=0bfc8440fd8f39002ab90252479fb794f51b5ae8 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:42:02 compute-0 sudo[69012]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:03 compute-0 sudo[69164]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nekafttkaseikoofqehxyopvgotydsfg ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405323.3883328-667-50563237053643/AnsiballZ_timezone.py'
Oct 02 11:42:03 compute-0 sudo[69164]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:03 compute-0 python3.9[69166]: ansible-community.general.timezone Invoked with name=UTC hwclock=None
Oct 02 11:42:04 compute-0 systemd[1]: Starting Time & Date Service...
Oct 02 11:42:04 compute-0 systemd[1]: Started Time & Date Service.
Oct 02 11:42:04 compute-0 sudo[69164]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:04 compute-0 sudo[69320]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ltjotmhhruhetyfbuowpqvkjumuiznrh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405324.4825082-694-90038014432081/AnsiballZ_file.py'
Oct 02 11:42:04 compute-0 sudo[69320]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:04 compute-0 python3.9[69322]: ansible-ansible.builtin.file Invoked with group=root mode=0750 owner=root path=/var/lib/edpm-config/firewall state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:42:04 compute-0 sudo[69320]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:05 compute-0 sudo[69472]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hcmdmlbyubxhscsxfogfjzesbjfnaobm ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405325.2347984-718-80866976046138/AnsiballZ_stat.py'
Oct 02 11:42:05 compute-0 sudo[69472]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:05 compute-0 python3.9[69474]: ansible-ansible.legacy.stat Invoked with path=/var/lib/edpm-config/firewall/edpm-nftables-base.yaml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:42:05 compute-0 sudo[69472]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:06 compute-0 sudo[69595]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hhoselnamhpvjaohykehkuokmmhtyfos ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405325.2347984-718-80866976046138/AnsiballZ_copy.py'
Oct 02 11:42:06 compute-0 sudo[69595]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:06 compute-0 python3.9[69597]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/edpm-config/firewall/edpm-nftables-base.yaml mode=0644 src=/home/zuul/.ansible/tmp/ansible-tmp-1759405325.2347984-718-80866976046138/.source.yaml follow=False _original_basename=base-rules.yaml.j2 checksum=450456afcafded6d4bdecceec7a02e806eebd8b3 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:42:06 compute-0 sudo[69595]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:06 compute-0 sudo[69747]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nalzmyfsnpbactfpodhwdxjopjavtskm ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405326.4535792-763-129990355228632/AnsiballZ_stat.py'
Oct 02 11:42:06 compute-0 sudo[69747]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:06 compute-0 python3.9[69749]: ansible-ansible.legacy.stat Invoked with path=/var/lib/edpm-config/firewall/edpm-nftables-user-rules.yaml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:42:06 compute-0 sudo[69747]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:07 compute-0 sudo[69870]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vekbwzyojpitrixsqikmrmjqjwsresyf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405326.4535792-763-129990355228632/AnsiballZ_copy.py'
Oct 02 11:42:07 compute-0 sudo[69870]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:07 compute-0 python3.9[69872]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/edpm-config/firewall/edpm-nftables-user-rules.yaml mode=0644 src=/home/zuul/.ansible/tmp/ansible-tmp-1759405326.4535792-763-129990355228632/.source.yaml _original_basename=.e7ookkiq follow=False checksum=97d170e1550eee4afc0af065b78cda302a97674c backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:42:07 compute-0 sudo[69870]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:07 compute-0 sudo[70022]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fbvcxndrtvauruhacegrudptlqigyyvy ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405327.59465-808-79942579509128/AnsiballZ_stat.py'
Oct 02 11:42:07 compute-0 sudo[70022]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:08 compute-0 python3.9[70024]: ansible-ansible.legacy.stat Invoked with path=/etc/nftables/iptables.nft follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:42:08 compute-0 sudo[70022]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:08 compute-0 sudo[70145]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-aunhtrwbgpxgwgtgyhtzytidahpyhvvk ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405327.59465-808-79942579509128/AnsiballZ_copy.py'
Oct 02 11:42:08 compute-0 sudo[70145]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:08 compute-0 python3.9[70147]: ansible-ansible.legacy.copy Invoked with dest=/etc/nftables/iptables.nft group=root mode=0600 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405327.59465-808-79942579509128/.source.nft _original_basename=iptables.nft follow=False checksum=3e02df08f1f3ab4a513e94056dbd390e3d38fe30 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:42:08 compute-0 sudo[70145]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:09 compute-0 sudo[70297]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vdtrmxavxsjawacvzkbiuvcpjbzrchlz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405328.816678-853-89383682365509/AnsiballZ_command.py'
Oct 02 11:42:09 compute-0 sudo[70297]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:09 compute-0 python3.9[70299]: ansible-ansible.legacy.command Invoked with _raw_params=nft -f /etc/nftables/iptables.nft _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:42:09 compute-0 sudo[70297]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:09 compute-0 sudo[70450]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-biceeoqrcgxogowruxcuuzuefmvvxfki ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405329.6782105-877-253353161831473/AnsiballZ_command.py'
Oct 02 11:42:09 compute-0 sudo[70450]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:10 compute-0 python3.9[70452]: ansible-ansible.legacy.command Invoked with _raw_params=nft -j list ruleset _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:42:10 compute-0 sudo[70450]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:10 compute-0 sudo[70603]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lpjjdivbszzakdiugrdxyhkujrhqypvm ; /usr/bin/python3 /home/zuul/.ansible/tmp/ansible-tmp-1759405330.3580484-901-244141102321468/AnsiballZ_edpm_nftables_from_files.py'
Oct 02 11:42:10 compute-0 sudo[70603]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:10 compute-0 python3[70605]: ansible-edpm_nftables_from_files Invoked with src=/var/lib/edpm-config/firewall
Oct 02 11:42:10 compute-0 sudo[70603]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:11 compute-0 sudo[70755]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-uhebscuimapwdvuwutuhklcdxnqmjjzt ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405331.155697-925-200492242475233/AnsiballZ_stat.py'
Oct 02 11:42:11 compute-0 sudo[70755]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:11 compute-0 python3.9[70757]: ansible-ansible.legacy.stat Invoked with path=/etc/nftables/edpm-jumps.nft follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:42:11 compute-0 sudo[70755]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:12 compute-0 sudo[70878]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kanjcptcscxohyydstjysuyooadvbedh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405331.155697-925-200492242475233/AnsiballZ_copy.py'
Oct 02 11:42:12 compute-0 sudo[70878]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:12 compute-0 python3.9[70880]: ansible-ansible.legacy.copy Invoked with dest=/etc/nftables/edpm-jumps.nft group=root mode=0600 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405331.155697-925-200492242475233/.source.nft follow=False _original_basename=jump-chain.j2 checksum=4c6f036d2d5808f109acc0880c19aa74ca48c961 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:42:12 compute-0 sudo[70878]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:12 compute-0 sudo[71030]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rfcnvgukqszxmsbwwcadzjfuksftheqv ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405332.5080574-970-49346932233464/AnsiballZ_stat.py'
Oct 02 11:42:12 compute-0 sudo[71030]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:13 compute-0 python3.9[71032]: ansible-ansible.legacy.stat Invoked with path=/etc/nftables/edpm-update-jumps.nft follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:42:13 compute-0 sudo[71030]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:13 compute-0 sudo[71153]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wuuzxgdwrmahsezsteazenyconeslpmg ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405332.5080574-970-49346932233464/AnsiballZ_copy.py'
Oct 02 11:42:13 compute-0 sudo[71153]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:13 compute-0 python3.9[71155]: ansible-ansible.legacy.copy Invoked with dest=/etc/nftables/edpm-update-jumps.nft group=root mode=0600 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405332.5080574-970-49346932233464/.source.nft follow=False _original_basename=jump-chain.j2 checksum=4c6f036d2d5808f109acc0880c19aa74ca48c961 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:42:13 compute-0 sudo[71153]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:14 compute-0 sudo[71305]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-uuxwodqwbzvrkmqunwqeoldhxjliogtd ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405333.804351-1015-212800703303375/AnsiballZ_stat.py'
Oct 02 11:42:14 compute-0 sudo[71305]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:14 compute-0 python3.9[71307]: ansible-ansible.legacy.stat Invoked with path=/etc/nftables/edpm-flushes.nft follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:42:14 compute-0 sudo[71305]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:14 compute-0 sudo[71428]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xezgqijoscfiebepdgdupkgaytmyuigo ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405333.804351-1015-212800703303375/AnsiballZ_copy.py'
Oct 02 11:42:14 compute-0 sudo[71428]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:14 compute-0 python3.9[71430]: ansible-ansible.legacy.copy Invoked with dest=/etc/nftables/edpm-flushes.nft group=root mode=0600 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405333.804351-1015-212800703303375/.source.nft follow=False _original_basename=flush-chain.j2 checksum=d16337256a56373421842284fe09e4e6c7df417e backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:42:14 compute-0 sudo[71428]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:15 compute-0 sudo[71580]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-aombnsnrobogiihrzqiocdiuxhegultw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405335.5077586-1060-263864777306773/AnsiballZ_stat.py'
Oct 02 11:42:15 compute-0 sudo[71580]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:15 compute-0 python3.9[71582]: ansible-ansible.legacy.stat Invoked with path=/etc/nftables/edpm-chains.nft follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:42:16 compute-0 sudo[71580]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:16 compute-0 sudo[71703]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lemtkbabpjkvgzkrgrfrfowpskiulztq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405335.5077586-1060-263864777306773/AnsiballZ_copy.py'
Oct 02 11:42:16 compute-0 sudo[71703]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:16 compute-0 python3.9[71705]: ansible-ansible.legacy.copy Invoked with dest=/etc/nftables/edpm-chains.nft group=root mode=0600 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405335.5077586-1060-263864777306773/.source.nft follow=False _original_basename=chains.j2 checksum=2079f3b60590a165d1d502e763170876fc8e2984 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:42:16 compute-0 sudo[71703]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:17 compute-0 sudo[71855]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-daeqvaqzutjisslbywnkmjjzofnqrqeg ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405336.727732-1105-63099676496854/AnsiballZ_stat.py'
Oct 02 11:42:17 compute-0 sudo[71855]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:17 compute-0 python3.9[71857]: ansible-ansible.legacy.stat Invoked with path=/etc/nftables/edpm-rules.nft follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:42:17 compute-0 sudo[71855]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:17 compute-0 sudo[71979]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bzciibdwfmldhlbcaecaqfsgvgietuzw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405336.727732-1105-63099676496854/AnsiballZ_copy.py'
Oct 02 11:42:17 compute-0 sudo[71979]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:17 compute-0 python3.9[71981]: ansible-ansible.legacy.copy Invoked with dest=/etc/nftables/edpm-rules.nft group=root mode=0600 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405336.727732-1105-63099676496854/.source.nft follow=False _original_basename=ruleset.j2 checksum=15a82a0dc61abfd6aa593407582b5b950437eb80 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:42:17 compute-0 sudo[71979]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:18 compute-0 sudo[72131]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-iajkhfvwhwozfytzqikfguczhozkgkkw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405338.145342-1150-129914470955977/AnsiballZ_file.py'
Oct 02 11:42:18 compute-0 sudo[72131]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:18 compute-0 python3.9[72133]: ansible-ansible.builtin.file Invoked with group=root mode=0600 owner=root path=/etc/nftables/edpm-rules.nft.changed state=touch recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:42:18 compute-0 sudo[72131]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:19 compute-0 sudo[72283]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fenoswyfuqtzzfwvvqvsizekosxgjyuq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405338.872024-1174-78445007689614/AnsiballZ_command.py'
Oct 02 11:42:19 compute-0 sudo[72283]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:19 compute-0 python3.9[72285]: ansible-ansible.legacy.command Invoked with _raw_params=set -o pipefail; cat /etc/nftables/edpm-chains.nft /etc/nftables/edpm-flushes.nft /etc/nftables/edpm-rules.nft /etc/nftables/edpm-update-jumps.nft /etc/nftables/edpm-jumps.nft | nft -c -f - _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:42:19 compute-0 sudo[72283]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:20 compute-0 sudo[72442]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qxbkwjpxojjetsqejpyyaxnjulgnvppk ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405339.642728-1198-29851518195212/AnsiballZ_blockinfile.py'
Oct 02 11:42:20 compute-0 sudo[72442]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:20 compute-0 python3.9[72444]: ansible-ansible.builtin.blockinfile Invoked with backup=False block=include "/etc/nftables/iptables.nft"
                                            include "/etc/nftables/edpm-chains.nft"
                                            include "/etc/nftables/edpm-rules.nft"
                                            include "/etc/nftables/edpm-jumps.nft"
                                             path=/etc/sysconfig/nftables.conf validate=nft -c -f %s state=present marker=# {mark} ANSIBLE MANAGED BLOCK create=False marker_begin=BEGIN marker_end=END append_newline=False prepend_newline=False unsafe_writes=False insertafter=None insertbefore=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:42:20 compute-0 sudo[72442]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:20 compute-0 sudo[72595]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bxakhtcspptcuiaxwoadejdjqkgnsdgo ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405340.5996501-1225-53606972926561/AnsiballZ_file.py'
Oct 02 11:42:20 compute-0 sudo[72595]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:21 compute-0 python3.9[72597]: ansible-ansible.builtin.file Invoked with group=hugetlbfs mode=0775 owner=zuul path=/dev/hugepages1G state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:42:21 compute-0 sudo[72595]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:21 compute-0 sudo[72747]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fdmirterhgxbxlxyvancbuvoszavnxfm ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405341.2456553-1225-80881509359616/AnsiballZ_file.py'
Oct 02 11:42:21 compute-0 sudo[72747]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:21 compute-0 python3.9[72749]: ansible-ansible.builtin.file Invoked with group=hugetlbfs mode=0775 owner=zuul path=/dev/hugepages2M state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:42:21 compute-0 sudo[72747]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:22 compute-0 sudo[72899]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-narjdodtksfnuozdmvtdsjufzogltyft ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405342.097701-1270-252900367752771/AnsiballZ_mount.py'
Oct 02 11:42:22 compute-0 sudo[72899]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:23 compute-0 python3.9[72901]: ansible-ansible.posix.mount Invoked with fstype=hugetlbfs opts=pagesize=1G path=/dev/hugepages1G src=none state=mounted boot=True dump=0 opts_no_log=False passno=0 backup=False fstab=None
Oct 02 11:42:23 compute-0 sudo[72899]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:23 compute-0 rsyslogd[1013]: imjournal: journal files changed, reloading...  [v8.2506.0-2.el9 try https://www.rsyslog.com/e/0 ]
Oct 02 11:42:23 compute-0 rsyslogd[1013]: imjournal: journal files changed, reloading...  [v8.2506.0-2.el9 try https://www.rsyslog.com/e/0 ]
Oct 02 11:42:23 compute-0 sudo[73053]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nwlcrwxvcpjsrumjdobycbcamhmkfsou ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405343.3375735-1270-48502365626160/AnsiballZ_mount.py'
Oct 02 11:42:23 compute-0 sudo[73053]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:23 compute-0 python3.9[73055]: ansible-ansible.posix.mount Invoked with fstype=hugetlbfs opts=pagesize=2M path=/dev/hugepages2M src=none state=mounted boot=True dump=0 opts_no_log=False passno=0 backup=False fstab=None
Oct 02 11:42:23 compute-0 sudo[73053]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:24 compute-0 sshd-session[65222]: Connection closed by 192.168.122.30 port 50072
Oct 02 11:42:24 compute-0 sshd-session[65219]: pam_unix(sshd:session): session closed for user zuul
Oct 02 11:42:24 compute-0 systemd[1]: session-15.scope: Deactivated successfully.
Oct 02 11:42:24 compute-0 systemd[1]: session-15.scope: Consumed 29.193s CPU time.
Oct 02 11:42:24 compute-0 systemd-logind[827]: Session 15 logged out. Waiting for processes to exit.
Oct 02 11:42:24 compute-0 systemd-logind[827]: Removed session 15.
Oct 02 11:42:29 compute-0 sshd-session[73081]: Accepted publickey for zuul from 192.168.122.30 port 57844 ssh2: ECDSA SHA256:tAEsFSop2MjuMQQ/UqKU2uCkxqWXGfsM5crdhd6tlI4
Oct 02 11:42:29 compute-0 systemd-logind[827]: New session 16 of user zuul.
Oct 02 11:42:29 compute-0 systemd[1]: Started Session 16 of User zuul.
Oct 02 11:42:29 compute-0 sshd-session[73081]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Oct 02 11:42:29 compute-0 sudo[73234]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-muyvwfenroikbrndbjbkdfbpyehqwjst ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405349.2739668-23-99734278779344/AnsiballZ_tempfile.py'
Oct 02 11:42:29 compute-0 sudo[73234]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:29 compute-0 python3.9[73236]: ansible-ansible.builtin.tempfile Invoked with state=file prefix=ansible. suffix= path=None
Oct 02 11:42:29 compute-0 sudo[73234]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:30 compute-0 chronyd[60458]: Selected source 45.61.49.156 (pool.ntp.org)
Oct 02 11:42:30 compute-0 sudo[73386]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-olzmolkvacztlsshigiegupiqerooxzg ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405350.2367504-59-204160929048962/AnsiballZ_stat.py'
Oct 02 11:42:30 compute-0 sudo[73386]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:30 compute-0 python3.9[73388]: ansible-ansible.builtin.stat Invoked with path=/etc/ssh/ssh_known_hosts follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:42:30 compute-0 sudo[73386]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:31 compute-0 sudo[73538]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jnbsxdmbrjfleikpggijyocopjhdnqgp ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405351.3369842-89-266574020434141/AnsiballZ_setup.py'
Oct 02 11:42:31 compute-0 sudo[73538]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:32 compute-0 python3.9[73540]: ansible-ansible.builtin.setup Invoked with gather_subset=['!all', '!min', 'ssh_host_key_rsa_public', 'ssh_host_key_ed25519_public', 'ssh_host_key_ecdsa_public'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:42:32 compute-0 sudo[73538]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:33 compute-0 sudo[73690]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zoxayjrkurwjhycvkpyhnoouaoczpzjs ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405352.550044-114-116851296720662/AnsiballZ_blockinfile.py'
Oct 02 11:42:33 compute-0 sudo[73690]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:33 compute-0 python3.9[73692]: ansible-ansible.builtin.blockinfile Invoked with block=compute-2.ctlplane.example.com,192.168.122.102,compute-2* ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCVkUIA0SGLhushMFSLFSAWpWCX1FF5YUjql8/6tMZQcpzUyU7mJOEQY7Jf3ZvoRVMiETNv8NaicCQ10qaPGZQwEamylEkW24WAdEJ+0NDO/DPkUTIp6vmhyqMNK8IeoLM1RrAM82pBxdQ+jut498Pj6OeLzo75U5X+AQp3kNKD6nnt+JeBNs5kT35nF/5InhW1d2N5LWKKnnw2LJIgpPZkpDwuRAOTnEp/nyNR1NyRQY1VpGMuAXgEkvvu1no1xBYM2lnfNEwn46Bcfr8p+n5Jv3gJBcteKnTCaLF0CagpfSTcvar4pcN97zXX4Jlq0VyVjit+YemnX5EnCaQoK6sYtatkGsRooS56wc+WtVHhf155ZIAj8wPRwWpcXZq+EV0SwoTFwUUNTXToz7qscdq04OHTl0bFRFQevmks+w6V4a7CzQa1/eeGlYdGEUS1I0dC5eeHDewjoLwo5+ufxHrbBmxaZrgbtwk1E9MQqj3PmdFlh17a83VHQwat591/QWU=
                                            compute-2.ctlplane.example.com,192.168.122.102,compute-2* ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKeqMQGrAV3pXZcV6Ore8xolY214SO0KlbtK5lvj/17F
                                            compute-2.ctlplane.example.com,192.168.122.102,compute-2* ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBM+LLWBT7ZHcUDyX8Xq/MZx34NXsN2QLd9BzdUzQgHmTREhCHesKInMqP8HfljOxzmUfohPV1AQVEYpXvhkaaQM=
                                            compute-0.ctlplane.example.com,192.168.122.100,compute-0* ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCpSx4Pw7AZfRlGUxa5vBESKqssXVQvrJz1cHMKGIXXt2c6o14yjlDJQkQZMnezLwc2Chr5fJ4DbiWklwKtNLctbQXR5ygqWs0bxMEnUYw+SjtdNwhykNDKkOJF64+ZtokEdpLHge0NvMivE2EBqu3TeXUji1OpHV3NGMiFKFwb0YsujbJuPjzPh6igp8NPD3uwcNrf+rcVQz8qlT/9rxdBMoyNjDoha3HCOOQDoColV7DbtQNdDBy+PMi8DOqzRJ/iPi7C26lVo+1xQL/ZKdmOOijv/QkqsY3ejuzIO9w3z3+GuykWEdEzm3EkUZJ8Q48/OwksBIdmcOC2Ke46PTLmftlRsdK0YUy7UyzGX7HQ++JYiTXyXN92ieFxNY3MmKu/70/67TT90mqVUOkZ9C32ixYMvj2hhnxS5+bmnMjpwCkUvgS1BmmSof6ghFjYZsP6zgTonqOtP5gt5VLjy7xNuApqVGmSN09/ExnZcGBX3ymXsxepc6spJeZ7hw2P4E8=
                                            compute-0.ctlplane.example.com,192.168.122.100,compute-0* ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAILxXcYZxs19Ipxj4mIzt7SBi+8WzNq9W70+VNtppPYi1
                                            compute-0.ctlplane.example.com,192.168.122.100,compute-0* ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBDZWnRJzd2ypotxaluhYES+V8G+b3/YU1LqQdpTWOWSO1QiTR0RJRiCt3KgKfluISOv8H6sHrJ9PKv84heszJQY=
                                            compute-1.ctlplane.example.com,192.168.122.101,compute-1* ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC30XwCLl481RnJGbLpEu8HK5UD53phC9aWXrs97/vSr9LY5Wlu1FrcSpDZK7wWwcUjs+Ug5XBJCr6avXKE4rjPwk89lQ1q9g/H9bpxdA4xrV5Eoc6riCUU7Ig86tKNwjKxxe5YXXkbQXzO1m31FHYpGh6MsVqR+sdC4B+xoAW7BJ+sTbHJ0l17YcK68hwv9ZNXBecuDjZDvLtDNje8ZGmmlUIAQ9MfLqzQr0EclCOAdN+tu1Se7EQ/8vqrT6CSp6hCSBXg2bK7fPi0mqJ1MgA1xig5gH2fONZWMZ9gDEbfhr3UMzXKiB9YuhIx/xfPq174TvmMwN89+fteCUEl7FYK0+huTyjiBNyHBhniq+ndB0camrvH6y1i0qFjY2JAZ9zt1odn0an1VRX2fLnwHlbLgEzV7kFf7kzLvc38F4Hd2a4K7/W8rJ80hL4T0aYiPZvbt0T6Z8dKMiNdh5Uq6HXxMW3HhGZER30lJh4bTzzwRBwMlgFLe4nxKXKtNZggdHU=
                                            compute-1.ctlplane.example.com,192.168.122.101,compute-1* ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDQRjtGGFpYzrfHwb+9O0hMfMhijlzqGxkH0vMapGQGq
                                            compute-1.ctlplane.example.com,192.168.122.101,compute-1* ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBLy3xOsuqZD05zHjHYtORv2L5Dy5w2gv1l1NTxi4JLb2kboxAJmGY6ewcs/tttddwUtZ4hxQZpPqVyCmq+Pg//I=
                                             create=True mode=0644 path=/tmp/ansible.i11x9nlh state=present marker=# {mark} ANSIBLE MANAGED BLOCK backup=False marker_begin=BEGIN marker_end=END append_newline=False prepend_newline=False unsafe_writes=False insertafter=None insertbefore=None validate=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:42:33 compute-0 sudo[73690]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:33 compute-0 sudo[73842]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tpsgryzsusddceojwneygdsitazrrggr ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405353.4448283-138-35174348749081/AnsiballZ_command.py'
Oct 02 11:42:33 compute-0 sudo[73842]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:34 compute-0 python3.9[73844]: ansible-ansible.legacy.command Invoked with _raw_params=cat '/tmp/ansible.i11x9nlh' > /etc/ssh/ssh_known_hosts _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:42:34 compute-0 sudo[73842]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:34 compute-0 systemd[1]: systemd-timedated.service: Deactivated successfully.
Oct 02 11:42:34 compute-0 sudo[73998]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qdwzxxhyaesfpajnorjobicyqruplwiz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405354.2842536-162-186509789781448/AnsiballZ_file.py'
Oct 02 11:42:34 compute-0 sudo[73998]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:34 compute-0 python3.9[74000]: ansible-ansible.builtin.file Invoked with path=/tmp/ansible.i11x9nlh state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:42:34 compute-0 sudo[73998]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:35 compute-0 sshd-session[73084]: Connection closed by 192.168.122.30 port 57844
Oct 02 11:42:35 compute-0 sshd-session[73081]: pam_unix(sshd:session): session closed for user zuul
Oct 02 11:42:35 compute-0 systemd[1]: session-16.scope: Deactivated successfully.
Oct 02 11:42:35 compute-0 systemd[1]: session-16.scope: Consumed 3.180s CPU time.
Oct 02 11:42:35 compute-0 systemd-logind[827]: Session 16 logged out. Waiting for processes to exit.
Oct 02 11:42:35 compute-0 systemd-logind[827]: Removed session 16.
Oct 02 11:42:42 compute-0 sshd-session[74025]: Accepted publickey for zuul from 192.168.122.30 port 39172 ssh2: ECDSA SHA256:tAEsFSop2MjuMQQ/UqKU2uCkxqWXGfsM5crdhd6tlI4
Oct 02 11:42:42 compute-0 systemd-logind[827]: New session 17 of user zuul.
Oct 02 11:42:42 compute-0 systemd[1]: Started Session 17 of User zuul.
Oct 02 11:42:42 compute-0 sshd-session[74025]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Oct 02 11:42:43 compute-0 python3.9[74178]: ansible-ansible.builtin.setup Invoked with gather_subset=['!all', '!min', 'local'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:42:44 compute-0 sudo[74332]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ccwuiuorjccxbbabkqnooeayilcyrdki ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405363.5755599-61-37358945418974/AnsiballZ_systemd.py'
Oct 02 11:42:44 compute-0 sudo[74332]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:44 compute-0 python3.9[74334]: ansible-ansible.builtin.systemd Invoked with enabled=True name=sshd daemon_reload=False daemon_reexec=False scope=system no_block=False state=None force=None masked=None
Oct 02 11:42:44 compute-0 sudo[74332]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:45 compute-0 sudo[74486]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ncxoemajvovtybazwqhmwlvswgdtbuyd ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405364.8320074-85-230179007257570/AnsiballZ_systemd.py'
Oct 02 11:42:45 compute-0 sudo[74486]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:45 compute-0 python3.9[74488]: ansible-ansible.builtin.systemd Invoked with name=sshd state=started daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Oct 02 11:42:45 compute-0 sudo[74486]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:46 compute-0 sudo[74639]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xhgbmjfagasdeisoxjugvafyustbotie ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405365.6589782-112-64283169642312/AnsiballZ_command.py'
Oct 02 11:42:46 compute-0 sudo[74639]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:46 compute-0 python3.9[74641]: ansible-ansible.legacy.command Invoked with _raw_params=nft -f /etc/nftables/edpm-chains.nft _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:42:46 compute-0 sudo[74639]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:46 compute-0 sudo[74792]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ryyezskldnjcbjpuedrstsjepepvljoy ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405366.486558-136-210889234594235/AnsiballZ_stat.py'
Oct 02 11:42:46 compute-0 sudo[74792]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:47 compute-0 python3.9[74794]: ansible-ansible.builtin.stat Invoked with path=/etc/nftables/edpm-rules.nft.changed follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:42:47 compute-0 sudo[74792]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:47 compute-0 sudo[74946]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bkekrwosxwdjkkgnclaemlwnsffrcxhn ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405367.461512-160-165745804060537/AnsiballZ_command.py'
Oct 02 11:42:47 compute-0 sudo[74946]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:47 compute-0 python3.9[74948]: ansible-ansible.legacy.command Invoked with _raw_params=set -o pipefail; cat /etc/nftables/edpm-flushes.nft /etc/nftables/edpm-rules.nft /etc/nftables/edpm-update-jumps.nft | nft -f - _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:42:47 compute-0 sudo[74946]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:48 compute-0 sudo[75101]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sxpbzgserwjqtgtcxodxsvubsblonmzj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405368.1293812-184-113236910504160/AnsiballZ_file.py'
Oct 02 11:42:48 compute-0 sudo[75101]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:48 compute-0 python3.9[75103]: ansible-ansible.builtin.file Invoked with path=/etc/nftables/edpm-rules.nft.changed state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:42:48 compute-0 sudo[75101]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:49 compute-0 sshd-session[74028]: Connection closed by 192.168.122.30 port 39172
Oct 02 11:42:49 compute-0 sshd-session[74025]: pam_unix(sshd:session): session closed for user zuul
Oct 02 11:42:49 compute-0 systemd[1]: session-17.scope: Deactivated successfully.
Oct 02 11:42:49 compute-0 systemd[1]: session-17.scope: Consumed 4.149s CPU time.
Oct 02 11:42:49 compute-0 systemd-logind[827]: Session 17 logged out. Waiting for processes to exit.
Oct 02 11:42:49 compute-0 systemd-logind[827]: Removed session 17.
Oct 02 11:42:55 compute-0 sshd-session[75128]: Accepted publickey for zuul from 192.168.122.30 port 54638 ssh2: ECDSA SHA256:tAEsFSop2MjuMQQ/UqKU2uCkxqWXGfsM5crdhd6tlI4
Oct 02 11:42:55 compute-0 systemd-logind[827]: New session 18 of user zuul.
Oct 02 11:42:55 compute-0 systemd[1]: Started Session 18 of User zuul.
Oct 02 11:42:55 compute-0 sshd-session[75128]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Oct 02 11:42:56 compute-0 python3.9[75281]: ansible-ansible.builtin.setup Invoked with gather_subset=['!all', '!min', 'local'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:42:57 compute-0 sudo[75435]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-phgvborznhmlmeqmigiuyfhhmlaakdho ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405376.9691136-67-177034619742248/AnsiballZ_setup.py'
Oct 02 11:42:57 compute-0 sudo[75435]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:57 compute-0 python3.9[75437]: ansible-ansible.legacy.setup Invoked with filter=['ansible_pkg_mgr'] gather_subset=['!all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Oct 02 11:42:57 compute-0 sudo[75435]: pam_unix(sudo:session): session closed for user root
Oct 02 11:42:58 compute-0 sudo[75519]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yibgqjjknvoxdqhscldfywsoxggyihca ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405376.9691136-67-177034619742248/AnsiballZ_dnf.py'
Oct 02 11:42:58 compute-0 sudo[75519]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:42:58 compute-0 python3.9[75521]: ansible-ansible.legacy.dnf Invoked with name=['yum-utils'] allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None state=None
Oct 02 11:42:59 compute-0 sudo[75519]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:00 compute-0 python3.9[75672]: ansible-ansible.legacy.command Invoked with _raw_params=needs-restarting -r _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:43:02 compute-0 python3.9[75823]: ansible-ansible.builtin.find Invoked with paths=['/var/lib/openstack/reboot_required/'] patterns=[] read_whole_file=False file_type=file age_stamp=mtime recurse=False hidden=False follow=False get_checksum=False checksum_algorithm=sha1 use_regex=False exact_mode=True excludes=None contains=None age=None size=None depth=None mode=None encoding=None limit=None
Oct 02 11:43:03 compute-0 python3.9[75974]: ansible-ansible.builtin.stat Invoked with path=/var/lib/config-data/puppet-generated follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:43:03 compute-0 python3.9[76124]: ansible-ansible.builtin.stat Invoked with path=/var/lib/openstack/config follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:43:04 compute-0 sshd-session[75131]: Connection closed by 192.168.122.30 port 54638
Oct 02 11:43:04 compute-0 sshd-session[75128]: pam_unix(sshd:session): session closed for user zuul
Oct 02 11:43:04 compute-0 systemd[1]: session-18.scope: Deactivated successfully.
Oct 02 11:43:04 compute-0 systemd[1]: session-18.scope: Consumed 5.530s CPU time.
Oct 02 11:43:04 compute-0 systemd-logind[827]: Session 18 logged out. Waiting for processes to exit.
Oct 02 11:43:04 compute-0 systemd-logind[827]: Removed session 18.
Oct 02 11:43:11 compute-0 sshd-session[76149]: Accepted publickey for zuul from 192.168.122.30 port 32916 ssh2: ECDSA SHA256:tAEsFSop2MjuMQQ/UqKU2uCkxqWXGfsM5crdhd6tlI4
Oct 02 11:43:11 compute-0 systemd-logind[827]: New session 19 of user zuul.
Oct 02 11:43:11 compute-0 systemd[1]: Started Session 19 of User zuul.
Oct 02 11:43:11 compute-0 sshd-session[76149]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Oct 02 11:43:12 compute-0 python3.9[76302]: ansible-ansible.builtin.setup Invoked with gather_subset=['!all', '!min', 'local'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:43:14 compute-0 sudo[76456]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cxgstjalbfeblueoaciuattqubhltqyr ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405393.8223386-116-215795757253335/AnsiballZ_file.py'
Oct 02 11:43:14 compute-0 sudo[76456]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:14 compute-0 python3.9[76458]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/var/lib/openstack/certs/libvirt/default setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:43:14 compute-0 sudo[76456]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:15 compute-0 sudo[76608]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hvwukwnunietbkakhwbkuhuyrenyuuld ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405394.9485414-116-204560293396532/AnsiballZ_file.py'
Oct 02 11:43:15 compute-0 sudo[76608]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:15 compute-0 python3.9[76610]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/var/lib/openstack/certs/libvirt/default setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:43:15 compute-0 sudo[76608]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:16 compute-0 sudo[76760]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ofauiomoojkwmdukakimmspaoqxlfanc ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405395.7027106-162-274375420219137/AnsiballZ_stat.py'
Oct 02 11:43:16 compute-0 sudo[76760]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:16 compute-0 python3.9[76762]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/certs/libvirt/default/tls.crt follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:43:16 compute-0 sudo[76760]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:17 compute-0 sudo[76883]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mqffxabprwcftedituxdobnxisyvunsg ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405395.7027106-162-274375420219137/AnsiballZ_copy.py'
Oct 02 11:43:17 compute-0 sudo[76883]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:17 compute-0 python3.9[76885]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/certs/libvirt/default/tls.crt group=root mode=0600 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405395.7027106-162-274375420219137/.source.crt _original_basename=compute-0.ctlplane.example.com-tls.crt follow=False checksum=fe1cbdef25c0de3cbc8ad94d3a53af8c09f81847 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:43:17 compute-0 sudo[76883]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:17 compute-0 sudo[77035]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-awbexpytrjftxnwszrczruedccfihfif ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405397.6022556-162-65404517469504/AnsiballZ_stat.py'
Oct 02 11:43:17 compute-0 sudo[77035]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:18 compute-0 python3.9[77037]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/certs/libvirt/default/ca.crt follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:43:18 compute-0 sudo[77035]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:18 compute-0 sudo[77158]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lhhcaxzdguovtlqkzeksfuezhwdkyprs ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405397.6022556-162-65404517469504/AnsiballZ_copy.py'
Oct 02 11:43:18 compute-0 sudo[77158]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:18 compute-0 python3.9[77160]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/certs/libvirt/default/ca.crt group=root mode=0600 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405397.6022556-162-65404517469504/.source.crt _original_basename=compute-0.ctlplane.example.com-ca.crt follow=False checksum=618966fd8924c3b9caddce17df39815c03c6e5f3 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:43:18 compute-0 sudo[77158]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:19 compute-0 sudo[77310]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qtlpsjgjjdfuzbtaybtplowehrawjlyn ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405398.7787354-162-80875616087450/AnsiballZ_stat.py'
Oct 02 11:43:19 compute-0 sudo[77310]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:19 compute-0 python3.9[77312]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/certs/libvirt/default/tls.key follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:43:19 compute-0 sudo[77310]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:19 compute-0 sudo[77433]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jwxfycwwsyzkfdcclykgwursnzwxuhtx ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405398.7787354-162-80875616087450/AnsiballZ_copy.py'
Oct 02 11:43:19 compute-0 sudo[77433]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:19 compute-0 python3.9[77435]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/certs/libvirt/default/tls.key group=root mode=0600 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405398.7787354-162-80875616087450/.source.key _original_basename=compute-0.ctlplane.example.com-tls.key follow=False checksum=56f80086aa41088f7c3496979500da605f9c895a backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:43:19 compute-0 sudo[77433]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:20 compute-0 sudo[77585]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vwzompmkigtoozuyobneiyjfjuusljmr ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405400.053086-291-108428431597192/AnsiballZ_file.py'
Oct 02 11:43:20 compute-0 sudo[77585]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:20 compute-0 python3.9[77587]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/var/lib/openstack/certs/telemetry/default setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:43:20 compute-0 sudo[77585]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:21 compute-0 sudo[77737]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jklroqvrzunqfdhzfaffdncxakvrokog ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405400.723544-291-194468018726968/AnsiballZ_file.py'
Oct 02 11:43:21 compute-0 sudo[77737]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:21 compute-0 python3.9[77739]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/var/lib/openstack/certs/telemetry/default setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:43:21 compute-0 sudo[77737]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:21 compute-0 sudo[77889]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ljpdvclazstlrhyluzwwpuiwgwqlorle ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405401.429971-335-221735324283009/AnsiballZ_stat.py'
Oct 02 11:43:21 compute-0 sudo[77889]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:21 compute-0 python3.9[77891]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/certs/telemetry/default/tls.crt follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:43:21 compute-0 sudo[77889]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:22 compute-0 sudo[78012]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vulxzlsyxbjpvrgacqrhuiypqxdufygk ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405401.429971-335-221735324283009/AnsiballZ_copy.py'
Oct 02 11:43:22 compute-0 sudo[78012]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:22 compute-0 python3.9[78014]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/certs/telemetry/default/tls.crt group=root mode=0600 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405401.429971-335-221735324283009/.source.crt _original_basename=compute-0.ctlplane.example.com-tls.crt follow=False checksum=837dc72623c5ac38218c5112174f8d8e268a6813 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:43:22 compute-0 sudo[78012]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:22 compute-0 sudo[78164]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pdqjuxicrezhiwydfpswjahbuzminnbg ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405402.50537-335-10140773276709/AnsiballZ_stat.py'
Oct 02 11:43:22 compute-0 sudo[78164]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:22 compute-0 python3.9[78166]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/certs/telemetry/default/ca.crt follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:43:22 compute-0 sudo[78164]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:23 compute-0 sudo[78287]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fsddxirhztrtwvdloogtefewzgizcdon ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405402.50537-335-10140773276709/AnsiballZ_copy.py'
Oct 02 11:43:23 compute-0 sudo[78287]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:23 compute-0 python3.9[78289]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/certs/telemetry/default/ca.crt group=root mode=0600 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405402.50537-335-10140773276709/.source.crt _original_basename=compute-0.ctlplane.example.com-ca.crt follow=False checksum=cef26c6879264807de4e1e28241ed8a223aa26e4 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:43:23 compute-0 sudo[78287]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:23 compute-0 sudo[78439]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-veraqkofnjmxmhxdgfowiggpheaaofjd ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405403.568231-335-182538542394346/AnsiballZ_stat.py'
Oct 02 11:43:23 compute-0 sudo[78439]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:24 compute-0 python3.9[78441]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/certs/telemetry/default/tls.key follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:43:24 compute-0 sudo[78439]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:24 compute-0 sudo[78562]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wnhwdfzeazkyoopijreilxdcwztszbjd ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405403.568231-335-182538542394346/AnsiballZ_copy.py'
Oct 02 11:43:24 compute-0 sudo[78562]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:24 compute-0 python3.9[78564]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/certs/telemetry/default/tls.key group=root mode=0600 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405403.568231-335-182538542394346/.source.key _original_basename=compute-0.ctlplane.example.com-tls.key follow=False checksum=75d12749a45e302cf00185d6b1a745f5effa7a06 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:43:24 compute-0 sudo[78562]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:24 compute-0 sudo[78714]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ulbwwbsilalvtklzixmmqlnntcxmxmpm ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405404.738378-465-4415915396210/AnsiballZ_file.py'
Oct 02 11:43:24 compute-0 sudo[78714]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:25 compute-0 python3.9[78716]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/var/lib/openstack/certs/neutron-metadata/default setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:43:25 compute-0 sudo[78714]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:25 compute-0 sudo[78866]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bzdmhivokgahqwmdfrqeujhnzesweqdw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405405.3073218-465-28676860154737/AnsiballZ_file.py'
Oct 02 11:43:25 compute-0 sudo[78866]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:25 compute-0 python3.9[78868]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/var/lib/openstack/certs/neutron-metadata/default setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:43:25 compute-0 sudo[78866]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:26 compute-0 sudo[79018]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-arpdddgusutwhuszanozxfzyecxrygbs ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405405.9266906-511-216580642909006/AnsiballZ_stat.py'
Oct 02 11:43:26 compute-0 sudo[79018]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:26 compute-0 python3.9[79020]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/certs/neutron-metadata/default/tls.crt follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:43:26 compute-0 sudo[79018]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:26 compute-0 sudo[79141]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zwxqpgagbbankpatwkkheccxuaigeexl ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405405.9266906-511-216580642909006/AnsiballZ_copy.py'
Oct 02 11:43:26 compute-0 sudo[79141]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:26 compute-0 python3.9[79143]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/certs/neutron-metadata/default/tls.crt group=root mode=0600 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405405.9266906-511-216580642909006/.source.crt _original_basename=compute-0.ctlplane.example.com-tls.crt follow=False checksum=cc6a1b6f84c50fe2b9a753232e0bbff8832051ab backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:43:26 compute-0 sudo[79141]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:27 compute-0 sudo[79293]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-iylrgpafhzixhtbhiqekqdkrletikyxw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405407.0209703-511-176734041245339/AnsiballZ_stat.py'
Oct 02 11:43:27 compute-0 sudo[79293]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:27 compute-0 python3.9[79295]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/certs/neutron-metadata/default/ca.crt follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:43:27 compute-0 sudo[79293]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:27 compute-0 sudo[79416]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sgsgxtlsnsbvzmzacvgskjidtzqhbarl ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405407.0209703-511-176734041245339/AnsiballZ_copy.py'
Oct 02 11:43:27 compute-0 sudo[79416]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:27 compute-0 python3.9[79418]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/certs/neutron-metadata/default/ca.crt group=root mode=0600 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405407.0209703-511-176734041245339/.source.crt _original_basename=compute-0.ctlplane.example.com-ca.crt follow=False checksum=b4329abfe8c8dfc3dff902009782a13facac4ae4 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:43:28 compute-0 sudo[79416]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:28 compute-0 sudo[79568]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-apwfdaassjisjohlowssnfmqgptgflra ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405408.1563768-511-196553882432322/AnsiballZ_stat.py'
Oct 02 11:43:28 compute-0 sudo[79568]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:28 compute-0 python3.9[79570]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/certs/neutron-metadata/default/tls.key follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:43:28 compute-0 sudo[79568]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:28 compute-0 sudo[79691]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zmuivwfabkpgrmwmjhvwwqwbzbawqtva ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405408.1563768-511-196553882432322/AnsiballZ_copy.py'
Oct 02 11:43:28 compute-0 sudo[79691]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:29 compute-0 python3.9[79693]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/certs/neutron-metadata/default/tls.key group=root mode=0600 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405408.1563768-511-196553882432322/.source.key _original_basename=compute-0.ctlplane.example.com-tls.key follow=False checksum=7c9985ab7b008a407d879c8c3035727bade5e0bb backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:43:29 compute-0 sudo[79691]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:29 compute-0 sudo[79843]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rmyeewegvflgczwaqlnblqvgchrsgkhl ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405409.4228156-641-162188447781508/AnsiballZ_file.py'
Oct 02 11:43:29 compute-0 sudo[79843]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:29 compute-0 python3.9[79845]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/var/lib/openstack/certs/ovn/default setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:43:29 compute-0 sudo[79843]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:30 compute-0 sudo[79995]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ccftlaoqzhewofjkfpvaslfzyhpjyxtb ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405410.049992-641-195545885451517/AnsiballZ_file.py'
Oct 02 11:43:30 compute-0 sudo[79995]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:30 compute-0 python3.9[79997]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/var/lib/openstack/certs/ovn/default setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:43:30 compute-0 sudo[79995]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:30 compute-0 sudo[80147]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kumhfaubeippntvcercdjpoywiouhgfs ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405410.6846745-688-141228578977924/AnsiballZ_stat.py'
Oct 02 11:43:30 compute-0 sudo[80147]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:31 compute-0 python3.9[80149]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/certs/ovn/default/tls.crt follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:43:31 compute-0 sudo[80147]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:31 compute-0 sudo[80270]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hizuvdtxztekezprjjfuhpfhdzfgidgo ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405410.6846745-688-141228578977924/AnsiballZ_copy.py'
Oct 02 11:43:31 compute-0 sudo[80270]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:31 compute-0 python3.9[80272]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/certs/ovn/default/tls.crt group=root mode=0600 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405410.6846745-688-141228578977924/.source.crt _original_basename=compute-0.ctlplane.example.com-tls.crt follow=False checksum=8c5b5a08465e94e8fa81a56b497215871bcb0520 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:43:31 compute-0 sudo[80270]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:32 compute-0 sudo[80422]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ttmjnaakwzdyvzchufvuspmsonxyfvpo ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405411.9797559-688-211523607830070/AnsiballZ_stat.py'
Oct 02 11:43:32 compute-0 sudo[80422]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:32 compute-0 python3.9[80424]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/certs/ovn/default/ca.crt follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:43:32 compute-0 sudo[80422]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:32 compute-0 sudo[80545]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-heblkpyfyrbujzmxdpbakwxgcdxggbvq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405411.9797559-688-211523607830070/AnsiballZ_copy.py'
Oct 02 11:43:32 compute-0 sudo[80545]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:32 compute-0 python3.9[80547]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/certs/ovn/default/ca.crt group=root mode=0600 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405411.9797559-688-211523607830070/.source.crt _original_basename=compute-0.ctlplane.example.com-ca.crt follow=False checksum=b4329abfe8c8dfc3dff902009782a13facac4ae4 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:43:32 compute-0 sudo[80545]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:33 compute-0 sudo[80697]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pgiicynonsoycwilxkvaxfworqvuufeh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405413.0854213-688-184883267956840/AnsiballZ_stat.py'
Oct 02 11:43:33 compute-0 sudo[80697]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:33 compute-0 python3.9[80699]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/certs/ovn/default/tls.key follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:43:33 compute-0 sudo[80697]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:33 compute-0 sudo[80820]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gfwbzriubdxzqbsrjrjsemuipzxbnwlv ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405413.0854213-688-184883267956840/AnsiballZ_copy.py'
Oct 02 11:43:33 compute-0 sudo[80820]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:33 compute-0 python3.9[80822]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/certs/ovn/default/tls.key group=root mode=0600 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405413.0854213-688-184883267956840/.source.key _original_basename=compute-0.ctlplane.example.com-tls.key follow=False checksum=4bb7b9429d4d22bafdc995bbc29289541668507c backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:43:34 compute-0 sudo[80820]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:34 compute-0 sudo[80972]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tkndiryckogfbtqmtmtxtxuhsxggpqot ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405414.6929624-850-33932387405931/AnsiballZ_file.py'
Oct 02 11:43:34 compute-0 sudo[80972]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:35 compute-0 python3.9[80974]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/var/lib/openstack/cacerts/telemetry setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:43:35 compute-0 sudo[80972]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:35 compute-0 sudo[81124]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xurgcspulsnswrwslstzgvgupqntvgdv ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405415.3341825-889-249676224132880/AnsiballZ_stat.py'
Oct 02 11:43:35 compute-0 sudo[81124]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:35 compute-0 python3.9[81126]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:43:35 compute-0 sudo[81124]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:36 compute-0 sudo[81247]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dzdggbdlfwqipojbsprunbdtbvztmtvh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405415.3341825-889-249676224132880/AnsiballZ_copy.py'
Oct 02 11:43:36 compute-0 sudo[81247]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:36 compute-0 python3.9[81249]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem group=root mode=0644 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405415.3341825-889-249676224132880/.source.pem _original_basename=tls-ca-bundle.pem follow=False checksum=74de1ba89bc28b0be0e3b8a77822f232ede7d253 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:43:36 compute-0 sudo[81247]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:36 compute-0 sudo[81399]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xwtbtxrkrrxosstrqffcftigvujvacfh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405416.6063423-937-1409814581338/AnsiballZ_file.py'
Oct 02 11:43:36 compute-0 sudo[81399]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:37 compute-0 python3.9[81401]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/var/lib/openstack/cacerts/libvirt setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:43:37 compute-0 sudo[81399]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:37 compute-0 sudo[81551]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-drsvdsonjhbufktwykxhorxtplpsmwlz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405417.2736874-963-21088752487910/AnsiballZ_stat.py'
Oct 02 11:43:37 compute-0 sudo[81551]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:37 compute-0 python3.9[81553]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/cacerts/libvirt/tls-ca-bundle.pem follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:43:37 compute-0 sudo[81551]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:38 compute-0 sudo[81674]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vouucyooynhycynaqynqtwlvsnsztlri ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405417.2736874-963-21088752487910/AnsiballZ_copy.py'
Oct 02 11:43:38 compute-0 sudo[81674]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:38 compute-0 python3.9[81676]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/cacerts/libvirt/tls-ca-bundle.pem group=root mode=0644 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405417.2736874-963-21088752487910/.source.pem _original_basename=tls-ca-bundle.pem follow=False checksum=74de1ba89bc28b0be0e3b8a77822f232ede7d253 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:43:38 compute-0 sudo[81674]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:38 compute-0 sudo[81826]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hagllhgoaufmmrbrfjnsjeqgpnlzdkto ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405418.5288808-1010-246061967898773/AnsiballZ_file.py'
Oct 02 11:43:38 compute-0 sudo[81826]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:39 compute-0 python3.9[81828]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/var/lib/openstack/cacerts/repo-setup setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:43:39 compute-0 sudo[81826]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:39 compute-0 sudo[81978]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-trpdadwkaabcljmojyffpowbzuafexis ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405419.2777293-1035-36598773206867/AnsiballZ_stat.py'
Oct 02 11:43:39 compute-0 sudo[81978]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:39 compute-0 python3.9[81980]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/cacerts/repo-setup/tls-ca-bundle.pem follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:43:39 compute-0 sudo[81978]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:40 compute-0 sudo[82101]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wxpbczepebpzrgvbrwnfmyvlifmwmapb ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405419.2777293-1035-36598773206867/AnsiballZ_copy.py'
Oct 02 11:43:40 compute-0 sudo[82101]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:40 compute-0 python3.9[82103]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/cacerts/repo-setup/tls-ca-bundle.pem group=root mode=0644 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405419.2777293-1035-36598773206867/.source.pem _original_basename=tls-ca-bundle.pem follow=False checksum=74de1ba89bc28b0be0e3b8a77822f232ede7d253 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:43:40 compute-0 sudo[82101]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:40 compute-0 sudo[82253]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fzqzzzkwnakzuvbfpshhlpnwunsqhhig ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405420.6319404-1079-201194268652499/AnsiballZ_file.py'
Oct 02 11:43:40 compute-0 sudo[82253]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:41 compute-0 python3.9[82255]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/var/lib/openstack/cacerts/neutron-metadata setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:43:41 compute-0 sudo[82253]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:41 compute-0 sudo[82405]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-egdxyvmydkelebcpaapqxsbnliodrldt ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405421.3777452-1106-67658258739852/AnsiballZ_stat.py'
Oct 02 11:43:41 compute-0 sudo[82405]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:41 compute-0 python3.9[82407]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:43:41 compute-0 sudo[82405]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:42 compute-0 sudo[82528]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-aouetarxobsqmcilvtmuibaungardlka ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405421.3777452-1106-67658258739852/AnsiballZ_copy.py'
Oct 02 11:43:42 compute-0 sudo[82528]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:42 compute-0 python3.9[82530]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem group=root mode=0644 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405421.3777452-1106-67658258739852/.source.pem _original_basename=tls-ca-bundle.pem follow=False checksum=74de1ba89bc28b0be0e3b8a77822f232ede7d253 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:43:42 compute-0 sudo[82528]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:42 compute-0 sudo[82680]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bjtyrxbfrukvyuacrwhuyhlxngoyzmql ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405422.6869972-1155-41146512139775/AnsiballZ_file.py'
Oct 02 11:43:42 compute-0 sudo[82680]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:43 compute-0 python3.9[82682]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/var/lib/openstack/cacerts/ovn setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:43:43 compute-0 sudo[82680]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:43 compute-0 sudo[82832]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jyjrqbyijguybxqjdcehdpihycvscsqw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405423.2796323-1180-33299663685705/AnsiballZ_stat.py'
Oct 02 11:43:43 compute-0 sudo[82832]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:43 compute-0 python3.9[82834]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:43:43 compute-0 sudo[82832]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:43 compute-0 sudo[82955]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ljihuzsqibngzpvhvkqtvqmtdtwfcsyv ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405423.2796323-1180-33299663685705/AnsiballZ_copy.py'
Oct 02 11:43:43 compute-0 sudo[82955]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:44 compute-0 python3.9[82957]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem group=root mode=0644 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405423.2796323-1180-33299663685705/.source.pem _original_basename=tls-ca-bundle.pem follow=False checksum=74de1ba89bc28b0be0e3b8a77822f232ede7d253 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:43:44 compute-0 sudo[82955]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:44 compute-0 sudo[83107]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rklysfsrpzdkspregajzcpyhdjxzlqzn ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405424.3828354-1223-48938511385560/AnsiballZ_file.py'
Oct 02 11:43:44 compute-0 sudo[83107]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:44 compute-0 python3.9[83109]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/var/lib/openstack/cacerts/bootstrap setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:43:44 compute-0 sudo[83107]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:45 compute-0 sudo[83259]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xqzqnwcaihleeprqxsebobvxiluhlmuw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405424.947928-1248-113845568298449/AnsiballZ_stat.py'
Oct 02 11:43:45 compute-0 sudo[83259]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:45 compute-0 python3.9[83261]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/cacerts/bootstrap/tls-ca-bundle.pem follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:43:45 compute-0 sudo[83259]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:45 compute-0 sudo[83382]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mcjfbocfeopmznclqaocxdgkblgazefh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405424.947928-1248-113845568298449/AnsiballZ_copy.py'
Oct 02 11:43:45 compute-0 sudo[83382]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:45 compute-0 python3.9[83384]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/cacerts/bootstrap/tls-ca-bundle.pem group=root mode=0644 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405424.947928-1248-113845568298449/.source.pem _original_basename=tls-ca-bundle.pem follow=False checksum=74de1ba89bc28b0be0e3b8a77822f232ede7d253 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:43:46 compute-0 sudo[83382]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:46 compute-0 sudo[83534]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cujnkelkotwxpgdbczklyuzgxqllnixm ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405426.1988091-1294-115865366607185/AnsiballZ_file.py'
Oct 02 11:43:46 compute-0 sudo[83534]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:46 compute-0 python3.9[83536]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/var/lib/openstack/cacerts/nova setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:43:46 compute-0 sudo[83534]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:47 compute-0 sudo[83686]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rwyivrpvwentpjhwyezrvzkdjgvtlonu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405426.8329499-1319-96096740879505/AnsiballZ_stat.py'
Oct 02 11:43:47 compute-0 sudo[83686]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:47 compute-0 python3.9[83688]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/cacerts/nova/tls-ca-bundle.pem follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:43:47 compute-0 sudo[83686]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:47 compute-0 sudo[83809]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lmmyecnlsazdhbuzeynidryduwlqsyax ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405426.8329499-1319-96096740879505/AnsiballZ_copy.py'
Oct 02 11:43:47 compute-0 sudo[83809]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:47 compute-0 python3.9[83811]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/cacerts/nova/tls-ca-bundle.pem group=root mode=0644 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405426.8329499-1319-96096740879505/.source.pem _original_basename=tls-ca-bundle.pem follow=False checksum=74de1ba89bc28b0be0e3b8a77822f232ede7d253 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:43:47 compute-0 sudo[83809]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:49 compute-0 sshd-session[76152]: Connection closed by 192.168.122.30 port 32916
Oct 02 11:43:49 compute-0 sshd-session[76149]: pam_unix(sshd:session): session closed for user zuul
Oct 02 11:43:49 compute-0 systemd[1]: session-19.scope: Deactivated successfully.
Oct 02 11:43:49 compute-0 systemd[1]: session-19.scope: Consumed 27.790s CPU time.
Oct 02 11:43:49 compute-0 systemd-logind[827]: Session 19 logged out. Waiting for processes to exit.
Oct 02 11:43:49 compute-0 systemd-logind[827]: Removed session 19.
Oct 02 11:43:54 compute-0 sshd-session[83836]: Accepted publickey for zuul from 192.168.122.30 port 43304 ssh2: ECDSA SHA256:tAEsFSop2MjuMQQ/UqKU2uCkxqWXGfsM5crdhd6tlI4
Oct 02 11:43:54 compute-0 systemd-logind[827]: New session 20 of user zuul.
Oct 02 11:43:54 compute-0 systemd[1]: Started Session 20 of User zuul.
Oct 02 11:43:54 compute-0 sshd-session[83836]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Oct 02 11:43:55 compute-0 python3.9[83989]: ansible-ansible.builtin.setup Invoked with gather_subset=['!all', '!min', 'local'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:43:56 compute-0 sudo[84143]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mtvajayxnhedvmhuxurdmxuxcyzsyajr ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405435.7621086-67-13742444921112/AnsiballZ_file.py'
Oct 02 11:43:56 compute-0 sudo[84143]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:56 compute-0 python3.9[84145]: ansible-ansible.builtin.file Invoked with group=zuul mode=0750 owner=zuul path=/var/lib/edpm-config/firewall setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:43:56 compute-0 sudo[84143]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:56 compute-0 sudo[84295]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-puzcxzmnkqcaqspavwstiazdmlqxtofs ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405436.5392852-67-16199910721867/AnsiballZ_file.py'
Oct 02 11:43:56 compute-0 sudo[84295]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:56 compute-0 python3.9[84297]: ansible-ansible.builtin.file Invoked with group=openvswitch owner=openvswitch path=/var/lib/openvswitch/ovn setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:43:56 compute-0 sudo[84295]: pam_unix(sudo:session): session closed for user root
Oct 02 11:43:57 compute-0 python3.9[84447]: ansible-ansible.builtin.setup Invoked with gather_subset=['!all', '!min', 'selinux'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:43:58 compute-0 sudo[84597]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ebpftqnrfuaprpafzluhpcbwovgpwbjk ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405438.0094726-136-208380070319177/AnsiballZ_seboolean.py'
Oct 02 11:43:58 compute-0 sudo[84597]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:43:58 compute-0 python3.9[84599]: ansible-ansible.posix.seboolean Invoked with name=virt_sandbox_use_netlink persistent=True state=True ignore_selinux_state=False
Oct 02 11:43:59 compute-0 sudo[84597]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:00 compute-0 sudo[84753]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vzjndywjtrgemiwrvpmyrgwwrxbqelsr ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405440.2737567-166-277724130894797/AnsiballZ_setup.py'
Oct 02 11:44:00 compute-0 dbus-broker-launch[818]: avc:  op=load_policy lsm=selinux seqno=11 res=1
Oct 02 11:44:00 compute-0 sudo[84753]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:00 compute-0 python3.9[84755]: ansible-ansible.legacy.setup Invoked with filter=['ansible_pkg_mgr'] gather_subset=['!all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Oct 02 11:44:01 compute-0 sudo[84753]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:01 compute-0 sudo[84837]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-weaqgjvevphixoutjnhqjwptqimekoke ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405440.2737567-166-277724130894797/AnsiballZ_dnf.py'
Oct 02 11:44:01 compute-0 sudo[84837]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:01 compute-0 python3.9[84839]: ansible-ansible.legacy.dnf Invoked with name=['openvswitch'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 02 11:44:03 compute-0 sudo[84837]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:03 compute-0 sudo[84990]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ajhqpgmbprdwwphlbucxkpablbnfgjwc ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405443.1923914-202-227828477754783/AnsiballZ_systemd.py'
Oct 02 11:44:03 compute-0 sudo[84990]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:04 compute-0 python3.9[84992]: ansible-ansible.builtin.systemd Invoked with enabled=True masked=False name=openvswitch.service state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None
Oct 02 11:44:04 compute-0 sudo[84990]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:04 compute-0 sudo[85145]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xldqypddygngvcymqqxbekbolqyvabpi ; /usr/bin/python3 /home/zuul/.ansible/tmp/ansible-tmp-1759405444.3533726-226-24439418950874/AnsiballZ_edpm_nftables_snippet.py'
Oct 02 11:44:04 compute-0 sudo[85145]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:05 compute-0 python3[85147]: ansible-osp.edpm.edpm_nftables_snippet Invoked with content=- rule_name: 118 neutron vxlan networks
                                            rule:
                                              proto: udp
                                              dport: 4789
                                          - rule_name: 119 neutron geneve networks
                                            rule:
                                              proto: udp
                                              dport: 6081
                                              state: ["UNTRACKED"]
                                          - rule_name: 120 neutron geneve networks no conntrack
                                            rule:
                                              proto: udp
                                              dport: 6081
                                              table: raw
                                              chain: OUTPUT
                                              jump: NOTRACK
                                              action: append
                                              state: []
                                          - rule_name: 121 neutron geneve networks no conntrack
                                            rule:
                                              proto: udp
                                              dport: 6081
                                              table: raw
                                              chain: PREROUTING
                                              jump: NOTRACK
                                              action: append
                                              state: []
                                           dest=/var/lib/edpm-config/firewall/ovn.yaml state=present
Oct 02 11:44:05 compute-0 sudo[85145]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:05 compute-0 sudo[85297]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-styyicggiyryoojypzmclrtxkclvngxz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405445.3475943-253-142583967705955/AnsiballZ_file.py'
Oct 02 11:44:05 compute-0 sudo[85297]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:05 compute-0 python3.9[85299]: ansible-ansible.builtin.file Invoked with group=root mode=0750 owner=root path=/var/lib/edpm-config/firewall state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:44:05 compute-0 sudo[85297]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:06 compute-0 sudo[85449]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qvufjfslfikjvxcqebwljmvjehjxfqvk ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405446.0083766-277-158859554762941/AnsiballZ_stat.py'
Oct 02 11:44:06 compute-0 sudo[85449]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:06 compute-0 python3.9[85451]: ansible-ansible.legacy.stat Invoked with path=/var/lib/edpm-config/firewall/edpm-nftables-base.yaml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:44:06 compute-0 sudo[85449]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:06 compute-0 sudo[85527]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mnpclearlfiiivdizqlqzucnwekrpeuh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405446.0083766-277-158859554762941/AnsiballZ_file.py'
Oct 02 11:44:06 compute-0 sudo[85527]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:07 compute-0 python3.9[85529]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/var/lib/edpm-config/firewall/edpm-nftables-base.yaml _original_basename=base-rules.yaml.j2 recurse=False state=file path=/var/lib/edpm-config/firewall/edpm-nftables-base.yaml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:44:07 compute-0 sudo[85527]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:07 compute-0 PackageKit[31115]: daemon quit
Oct 02 11:44:07 compute-0 systemd[1]: packagekit.service: Deactivated successfully.
Oct 02 11:44:07 compute-0 sudo[85679]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xpgaxrawbqpdfvolioxawymwqqdkdrlf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405447.3056414-313-236353366820517/AnsiballZ_stat.py'
Oct 02 11:44:07 compute-0 sudo[85679]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:07 compute-0 python3.9[85681]: ansible-ansible.legacy.stat Invoked with path=/var/lib/edpm-config/firewall/edpm-nftables-user-rules.yaml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:44:07 compute-0 sudo[85679]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:08 compute-0 sudo[85757]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wccohmzcuafirhukyeeprmrrfuzdnhsy ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405447.3056414-313-236353366820517/AnsiballZ_file.py'
Oct 02 11:44:08 compute-0 sudo[85757]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:08 compute-0 python3.9[85759]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/var/lib/edpm-config/firewall/edpm-nftables-user-rules.yaml _original_basename=.emtr1hbz recurse=False state=file path=/var/lib/edpm-config/firewall/edpm-nftables-user-rules.yaml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:44:08 compute-0 sudo[85757]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:08 compute-0 sudo[85909]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zahsvegsecqlphjlfmxklcwcejfhmzlx ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405448.480148-349-2506320509785/AnsiballZ_stat.py'
Oct 02 11:44:08 compute-0 sudo[85909]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:08 compute-0 python3.9[85911]: ansible-ansible.legacy.stat Invoked with path=/etc/nftables/iptables.nft follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:44:08 compute-0 sudo[85909]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:09 compute-0 sudo[85987]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yhuvfemgrbubncfrbzffhizvxfpaaibb ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405448.480148-349-2506320509785/AnsiballZ_file.py'
Oct 02 11:44:09 compute-0 sudo[85987]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:09 compute-0 python3.9[85989]: ansible-ansible.legacy.file Invoked with group=root mode=0600 owner=root dest=/etc/nftables/iptables.nft _original_basename=iptables.nft recurse=False state=file path=/etc/nftables/iptables.nft force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:44:09 compute-0 sudo[85987]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:10 compute-0 sudo[86139]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ulrxbupapujnmabmethuxdqhitmevwyy ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405449.625597-388-211638561392236/AnsiballZ_command.py'
Oct 02 11:44:10 compute-0 sudo[86139]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:10 compute-0 python3.9[86141]: ansible-ansible.legacy.command Invoked with _raw_params=nft -j list ruleset _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:44:10 compute-0 sudo[86139]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:10 compute-0 sudo[86292]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mtipvcrpfrzytkqjijqczdfshjziwmbq ; /usr/bin/python3 /home/zuul/.ansible/tmp/ansible-tmp-1759405450.4629884-412-131352673145888/AnsiballZ_edpm_nftables_from_files.py'
Oct 02 11:44:10 compute-0 sudo[86292]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:11 compute-0 python3[86294]: ansible-edpm_nftables_from_files Invoked with src=/var/lib/edpm-config/firewall
Oct 02 11:44:11 compute-0 sudo[86292]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:11 compute-0 sudo[86444]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gzorggoixodfiudwvbzpfjneyorbqhrv ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405451.4346585-436-268032046298318/AnsiballZ_stat.py'
Oct 02 11:44:11 compute-0 sudo[86444]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:11 compute-0 python3.9[86446]: ansible-ansible.legacy.stat Invoked with path=/etc/nftables/edpm-jumps.nft follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:44:11 compute-0 sudo[86444]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:12 compute-0 sudo[86569]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nscpxcqechillvzkaphbzbyzsfzeutyf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405451.4346585-436-268032046298318/AnsiballZ_copy.py'
Oct 02 11:44:12 compute-0 sudo[86569]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:12 compute-0 python3.9[86571]: ansible-ansible.legacy.copy Invoked with dest=/etc/nftables/edpm-jumps.nft group=root mode=0600 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405451.4346585-436-268032046298318/.source.nft follow=False _original_basename=jump-chain.j2 checksum=81c2fc96c23335ffe374f9b064e885d5d971ddf9 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:44:12 compute-0 sudo[86569]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:13 compute-0 sudo[86721]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ujifyhrjyromxzmejzpnmcurkuytndxk ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405452.7993324-481-203896507379008/AnsiballZ_stat.py'
Oct 02 11:44:13 compute-0 sudo[86721]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:13 compute-0 python3.9[86723]: ansible-ansible.legacy.stat Invoked with path=/etc/nftables/edpm-update-jumps.nft follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:44:13 compute-0 sudo[86721]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:13 compute-0 sudo[86846]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jwmiihhjwbwisewvfgihddgotscbwgzv ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405452.7993324-481-203896507379008/AnsiballZ_copy.py'
Oct 02 11:44:13 compute-0 sudo[86846]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:13 compute-0 python3.9[86848]: ansible-ansible.legacy.copy Invoked with dest=/etc/nftables/edpm-update-jumps.nft group=root mode=0600 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405452.7993324-481-203896507379008/.source.nft follow=False _original_basename=jump-chain.j2 checksum=81c2fc96c23335ffe374f9b064e885d5d971ddf9 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:44:13 compute-0 sudo[86846]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:14 compute-0 sudo[86998]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jzddsemaigtdpxswybwbdsndjydwuedw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405454.0880015-526-140850098342662/AnsiballZ_stat.py'
Oct 02 11:44:14 compute-0 sudo[86998]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:14 compute-0 python3.9[87000]: ansible-ansible.legacy.stat Invoked with path=/etc/nftables/edpm-flushes.nft follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:44:14 compute-0 sudo[86998]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:14 compute-0 sudo[87123]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zygssujsgkpeepenpxgxzfomtjewqeez ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405454.0880015-526-140850098342662/AnsiballZ_copy.py'
Oct 02 11:44:14 compute-0 sudo[87123]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:15 compute-0 python3.9[87125]: ansible-ansible.legacy.copy Invoked with dest=/etc/nftables/edpm-flushes.nft group=root mode=0600 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405454.0880015-526-140850098342662/.source.nft follow=False _original_basename=flush-chain.j2 checksum=4d3ffec49c8eb1a9b80d2f1e8cd64070063a87b4 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:44:15 compute-0 sudo[87123]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:15 compute-0 sudo[87275]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gbcdcudkddxguliidknkoxfsldekaisb ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405455.3105404-571-103355755173489/AnsiballZ_stat.py'
Oct 02 11:44:15 compute-0 sudo[87275]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:15 compute-0 python3.9[87277]: ansible-ansible.legacy.stat Invoked with path=/etc/nftables/edpm-chains.nft follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:44:15 compute-0 sudo[87275]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:16 compute-0 sudo[87400]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lwzflxyepvlvxuxflpsikvckhgqufxtg ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405455.3105404-571-103355755173489/AnsiballZ_copy.py'
Oct 02 11:44:16 compute-0 sudo[87400]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:16 compute-0 python3.9[87402]: ansible-ansible.legacy.copy Invoked with dest=/etc/nftables/edpm-chains.nft group=root mode=0600 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405455.3105404-571-103355755173489/.source.nft follow=False _original_basename=chains.j2 checksum=298ada419730ec15df17ded0cc50c97a4014a591 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:44:16 compute-0 sudo[87400]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:16 compute-0 sudo[87552]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fczckcygsqrwnwtthkopkaockzmnqxld ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405456.509585-616-16236029589193/AnsiballZ_stat.py'
Oct 02 11:44:16 compute-0 sudo[87552]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:17 compute-0 python3.9[87554]: ansible-ansible.legacy.stat Invoked with path=/etc/nftables/edpm-rules.nft follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:44:17 compute-0 sudo[87552]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:17 compute-0 sudo[87677]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wimygydzqlymmseyisjhmnrkbvktvpta ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405456.509585-616-16236029589193/AnsiballZ_copy.py'
Oct 02 11:44:17 compute-0 sudo[87677]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:17 compute-0 python3.9[87679]: ansible-ansible.legacy.copy Invoked with dest=/etc/nftables/edpm-rules.nft group=root mode=0600 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405456.509585-616-16236029589193/.source.nft follow=False _original_basename=ruleset.j2 checksum=eb691bdb7d792c5f8ff0d719e807fe1c95b09438 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:44:17 compute-0 sudo[87677]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:18 compute-0 sudo[87829]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wmnaxfbsitxmxuxtphvxzzyqxflmdaay ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405457.9122217-661-235419255343237/AnsiballZ_file.py'
Oct 02 11:44:18 compute-0 sudo[87829]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:18 compute-0 python3.9[87831]: ansible-ansible.builtin.file Invoked with group=root mode=0600 owner=root path=/etc/nftables/edpm-rules.nft.changed state=touch recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:44:18 compute-0 sudo[87829]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:18 compute-0 sudo[87981]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rgsrpxtkmztivlhamadfchhxfixsawbk ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405458.5867205-685-232069339267345/AnsiballZ_command.py'
Oct 02 11:44:18 compute-0 sudo[87981]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:19 compute-0 python3.9[87983]: ansible-ansible.legacy.command Invoked with _raw_params=set -o pipefail; cat /etc/nftables/edpm-chains.nft /etc/nftables/edpm-flushes.nft /etc/nftables/edpm-rules.nft /etc/nftables/edpm-update-jumps.nft /etc/nftables/edpm-jumps.nft | nft -c -f - _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:44:19 compute-0 sudo[87981]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:19 compute-0 sudo[88136]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-oyauvvsthksptksnpcakmyifcnodpxaj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405459.3127997-709-231584620234064/AnsiballZ_blockinfile.py'
Oct 02 11:44:19 compute-0 sudo[88136]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:19 compute-0 python3.9[88138]: ansible-ansible.builtin.blockinfile Invoked with backup=False block=include "/etc/nftables/iptables.nft"
                                            include "/etc/nftables/edpm-chains.nft"
                                            include "/etc/nftables/edpm-rules.nft"
                                            include "/etc/nftables/edpm-jumps.nft"
                                             path=/etc/sysconfig/nftables.conf validate=nft -c -f %s state=present marker=# {mark} ANSIBLE MANAGED BLOCK create=False marker_begin=BEGIN marker_end=END append_newline=False prepend_newline=False unsafe_writes=False insertafter=None insertbefore=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:44:19 compute-0 sudo[88136]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:20 compute-0 sudo[88288]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ootkmjdppcvpqxfrcbodlcgtjsqycpdx ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405460.201413-736-228478529494340/AnsiballZ_command.py'
Oct 02 11:44:20 compute-0 sudo[88288]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:20 compute-0 python3.9[88290]: ansible-ansible.legacy.command Invoked with _raw_params=nft -f /etc/nftables/edpm-chains.nft _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:44:20 compute-0 sudo[88288]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:21 compute-0 sudo[88441]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-uknwxqdofqcvljunmlnclcperdkufzcy ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405460.9577138-760-36230171909756/AnsiballZ_stat.py'
Oct 02 11:44:21 compute-0 sudo[88441]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:21 compute-0 python3.9[88443]: ansible-ansible.builtin.stat Invoked with path=/etc/nftables/edpm-rules.nft.changed follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:44:21 compute-0 sudo[88441]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:22 compute-0 sudo[88595]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jrdgzoerfmwfumvfmqrknrygovgaceyx ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405461.7811604-784-4436939891578/AnsiballZ_command.py'
Oct 02 11:44:22 compute-0 sudo[88595]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:22 compute-0 python3.9[88597]: ansible-ansible.legacy.command Invoked with _raw_params=set -o pipefail; cat /etc/nftables/edpm-flushes.nft /etc/nftables/edpm-rules.nft /etc/nftables/edpm-update-jumps.nft | nft -f - _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:44:22 compute-0 sudo[88595]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:22 compute-0 sudo[88750]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gramtnypcrdyyqysqcmsywdddeqdzpdq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405462.559211-808-90330476179949/AnsiballZ_file.py'
Oct 02 11:44:22 compute-0 sudo[88750]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:23 compute-0 python3.9[88752]: ansible-ansible.builtin.file Invoked with path=/etc/nftables/edpm-rules.nft.changed state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:44:23 compute-0 sudo[88750]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:24 compute-0 python3.9[88902]: ansible-ansible.builtin.setup Invoked with gather_subset=['!all', '!min', 'machine'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:44:25 compute-0 sudo[89053]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lmmkuhkttppxvxnsyeevbfdyidyhrwtl ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405465.1355531-928-9239988422430/AnsiballZ_command.py'
Oct 02 11:44:25 compute-0 sudo[89053]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:25 compute-0 python3.9[89055]: ansible-ansible.legacy.command Invoked with _raw_params=ovs-vsctl set open . external_ids:hostname=compute-0.ctlplane.example.com external_ids:ovn-bridge=br-int external_ids:ovn-bridge-mappings=datacentre:br-ex external_ids:ovn-chassis-mac-mappings="datacentre:3e:0a:d8:76:c8:90" external_ids:ovn-encap-ip=172.19.0.100 external_ids:ovn-encap-type=geneve external_ids:ovn-encap-tos=0 external_ids:ovn-match-northd-version=False external_ids:ovn-monitor-all=True external_ids:ovn-remote=ssl:ovsdbserver-sb.openstack.svc:6642 external_ids:ovn-remote-probe-interval=60000 external_ids:ovn-ofctrl-wait-before-clear=8000 external_ids:rundir=/var/run/openvswitch 
                                             _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:44:25 compute-0 ovs-vsctl[89056]: ovs|00001|vsctl|INFO|Called as ovs-vsctl set open . external_ids:hostname=compute-0.ctlplane.example.com external_ids:ovn-bridge=br-int external_ids:ovn-bridge-mappings=datacentre:br-ex external_ids:ovn-chassis-mac-mappings=datacentre:3e:0a:d8:76:c8:90 external_ids:ovn-encap-ip=172.19.0.100 external_ids:ovn-encap-type=geneve external_ids:ovn-encap-tos=0 external_ids:ovn-match-northd-version=False external_ids:ovn-monitor-all=True external_ids:ovn-remote=ssl:ovsdbserver-sb.openstack.svc:6642 external_ids:ovn-remote-probe-interval=60000 external_ids:ovn-ofctrl-wait-before-clear=8000 external_ids:rundir=/var/run/openvswitch
Oct 02 11:44:25 compute-0 sudo[89053]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:26 compute-0 sudo[89206]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yflxyqwnsfjolybyycnhkjetdjiuvmxu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405465.8386173-955-117802453723657/AnsiballZ_command.py'
Oct 02 11:44:26 compute-0 sudo[89206]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:26 compute-0 python3.9[89208]: ansible-ansible.legacy.command Invoked with _raw_params=set -o pipefail
                                            ovs-vsctl show | grep -q "Manager"
                                             _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:44:26 compute-0 sudo[89206]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:26 compute-0 sudo[89361]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cnhufdqnxpclymjicieolpgageogatpj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405466.5781512-979-50712833995511/AnsiballZ_command.py'
Oct 02 11:44:26 compute-0 sudo[89361]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:27 compute-0 python3.9[89363]: ansible-ansible.legacy.command Invoked with _raw_params=ovs-vsctl --timeout=5 --id=@manager -- create Manager target=\"ptcp:********@manager
                                             _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:44:27 compute-0 ovs-vsctl[89364]: ovs|00001|vsctl|INFO|Called as ovs-vsctl --timeout=5 --id=@manager -- create Manager "target=\"ptcp:6640:127.0.0.1\"" -- add Open_vSwitch . manager_options @manager
Oct 02 11:44:27 compute-0 sudo[89361]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:27 compute-0 python3.9[89514]: ansible-ansible.builtin.stat Invoked with path=/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:44:28 compute-0 sudo[89666]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zedpfykutjdguulvpybhtknbuvykcpha ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405468.0828571-1030-120962821205486/AnsiballZ_file.py'
Oct 02 11:44:28 compute-0 sudo[89666]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:28 compute-0 python3.9[89668]: ansible-ansible.builtin.file Invoked with path=/var/local/libexec recurse=True setype=container_file_t state=directory force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:44:28 compute-0 sudo[89666]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:29 compute-0 sudo[89818]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-avocklfsmlkmbszlzoyvnzhqvywrtuvq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405468.7987154-1054-251646466370989/AnsiballZ_stat.py'
Oct 02 11:44:29 compute-0 sudo[89818]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:29 compute-0 python3.9[89820]: ansible-ansible.legacy.stat Invoked with path=/var/local/libexec/edpm-container-shutdown follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:44:29 compute-0 sudo[89818]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:29 compute-0 sudo[89896]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lbpxiltoncrdrfvbmrrrchjgxnwacctx ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405468.7987154-1054-251646466370989/AnsiballZ_file.py'
Oct 02 11:44:29 compute-0 sudo[89896]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:29 compute-0 python3.9[89898]: ansible-ansible.legacy.file Invoked with group=root mode=0700 owner=root setype=container_file_t dest=/var/local/libexec/edpm-container-shutdown _original_basename=edpm-container-shutdown recurse=False state=file path=/var/local/libexec/edpm-container-shutdown force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:44:29 compute-0 sudo[89896]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:30 compute-0 sudo[90048]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fvhblguvmitefufdfjtpushwohdlvfpe ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405469.815999-1054-123042719953508/AnsiballZ_stat.py'
Oct 02 11:44:30 compute-0 sudo[90048]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:30 compute-0 python3.9[90050]: ansible-ansible.legacy.stat Invoked with path=/var/local/libexec/edpm-start-podman-container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:44:30 compute-0 sudo[90048]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:30 compute-0 sudo[90126]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bplimxvehspkydilukjfzxztwrqcczzp ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405469.815999-1054-123042719953508/AnsiballZ_file.py'
Oct 02 11:44:30 compute-0 sudo[90126]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:30 compute-0 python3.9[90128]: ansible-ansible.legacy.file Invoked with group=root mode=0700 owner=root setype=container_file_t dest=/var/local/libexec/edpm-start-podman-container _original_basename=edpm-start-podman-container recurse=False state=file path=/var/local/libexec/edpm-start-podman-container force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:44:30 compute-0 sudo[90126]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:31 compute-0 sudo[90278]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zfxceudxjrwnkzdfkiggvxizwgyxdfhi ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405470.956464-1123-222490980556170/AnsiballZ_file.py'
Oct 02 11:44:31 compute-0 sudo[90278]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:31 compute-0 python3.9[90280]: ansible-ansible.builtin.file Invoked with mode=420 path=/etc/systemd/system-preset state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:44:31 compute-0 sudo[90278]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:32 compute-0 sudo[90430]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gpdcwgwuzwgywovyllcvfcksjhtzbihi ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405471.8102903-1147-686149001120/AnsiballZ_stat.py'
Oct 02 11:44:32 compute-0 sudo[90430]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:32 compute-0 python3.9[90432]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/edpm-container-shutdown.service follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:44:32 compute-0 sudo[90430]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:32 compute-0 sudo[90508]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nddlfjqlrrixftrpmaxvqmqgskhvdvtt ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405471.8102903-1147-686149001120/AnsiballZ_file.py'
Oct 02 11:44:32 compute-0 sudo[90508]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:32 compute-0 python3.9[90510]: ansible-ansible.legacy.file Invoked with group=root mode=0644 owner=root dest=/etc/systemd/system/edpm-container-shutdown.service _original_basename=edpm-container-shutdown-service recurse=False state=file path=/etc/systemd/system/edpm-container-shutdown.service force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:44:32 compute-0 sudo[90508]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:33 compute-0 sudo[90660]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-clzqehnwxnumhaycyipxxmfhstdfycvi ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405472.9354978-1183-113341081603362/AnsiballZ_stat.py'
Oct 02 11:44:33 compute-0 sudo[90660]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:33 compute-0 python3.9[90662]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system-preset/91-edpm-container-shutdown.preset follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:44:33 compute-0 sudo[90660]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:33 compute-0 sudo[90738]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-krzcgrngokfhnsicbwckawsleculvkyp ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405472.9354978-1183-113341081603362/AnsiballZ_file.py'
Oct 02 11:44:33 compute-0 sudo[90738]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:33 compute-0 python3.9[90740]: ansible-ansible.legacy.file Invoked with group=root mode=0644 owner=root dest=/etc/systemd/system-preset/91-edpm-container-shutdown.preset _original_basename=91-edpm-container-shutdown-preset recurse=False state=file path=/etc/systemd/system-preset/91-edpm-container-shutdown.preset force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:44:33 compute-0 sudo[90738]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:34 compute-0 sudo[90890]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-soeoareogufibmyjvclysdqprkeyzrok ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405474.116717-1219-83919490793270/AnsiballZ_systemd.py'
Oct 02 11:44:34 compute-0 sudo[90890]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:34 compute-0 python3.9[90892]: ansible-ansible.builtin.systemd Invoked with daemon_reload=True enabled=True name=edpm-container-shutdown state=started daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:44:34 compute-0 systemd[1]: Reloading.
Oct 02 11:44:34 compute-0 systemd-rc-local-generator[90921]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:44:34 compute-0 systemd-sysv-generator[90924]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:44:34 compute-0 sudo[90890]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:35 compute-0 sudo[91080]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ndmfszkmbpilwyxhxzxtuzgaccmpgpft ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405475.1004066-1243-175509873373674/AnsiballZ_stat.py'
Oct 02 11:44:35 compute-0 sudo[91080]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:35 compute-0 python3.9[91082]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/netns-placeholder.service follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:44:35 compute-0 sudo[91080]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:35 compute-0 sudo[91158]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fzlugyznpykbuuvbvuyfsbrscvtbxzqd ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405475.1004066-1243-175509873373674/AnsiballZ_file.py'
Oct 02 11:44:35 compute-0 sudo[91158]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:36 compute-0 python3.9[91160]: ansible-ansible.legacy.file Invoked with group=root mode=0644 owner=root dest=/etc/systemd/system/netns-placeholder.service _original_basename=netns-placeholder-service recurse=False state=file path=/etc/systemd/system/netns-placeholder.service force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:44:36 compute-0 sudo[91158]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:36 compute-0 sudo[91310]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-uenuypiwzisftuqiskbocpzunhrzeztg ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405476.294204-1279-228283503854883/AnsiballZ_stat.py'
Oct 02 11:44:36 compute-0 sudo[91310]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:36 compute-0 python3.9[91312]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system-preset/91-netns-placeholder.preset follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:44:36 compute-0 sudo[91310]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:37 compute-0 sudo[91388]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-czjamzrknwpyvgzujjgixevfezycvyht ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405476.294204-1279-228283503854883/AnsiballZ_file.py'
Oct 02 11:44:37 compute-0 sudo[91388]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:37 compute-0 python3.9[91390]: ansible-ansible.legacy.file Invoked with group=root mode=0644 owner=root dest=/etc/systemd/system-preset/91-netns-placeholder.preset _original_basename=91-netns-placeholder-preset recurse=False state=file path=/etc/systemd/system-preset/91-netns-placeholder.preset force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:44:37 compute-0 sudo[91388]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:37 compute-0 sudo[91540]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rpkzdktllnjqzetnkjssdipskvjmphpf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405477.4921465-1315-86218341945988/AnsiballZ_systemd.py'
Oct 02 11:44:37 compute-0 sudo[91540]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:38 compute-0 python3.9[91542]: ansible-ansible.builtin.systemd Invoked with daemon_reload=True enabled=True name=netns-placeholder state=started daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:44:38 compute-0 systemd[1]: Reloading.
Oct 02 11:44:38 compute-0 systemd-rc-local-generator[91570]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:44:38 compute-0 systemd-sysv-generator[91574]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:44:38 compute-0 systemd[1]: Starting Create netns directory...
Oct 02 11:44:38 compute-0 systemd[1]: run-netns-placeholder.mount: Deactivated successfully.
Oct 02 11:44:38 compute-0 systemd[1]: netns-placeholder.service: Deactivated successfully.
Oct 02 11:44:38 compute-0 systemd[1]: Finished Create netns directory.
Oct 02 11:44:38 compute-0 sudo[91540]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:39 compute-0 sudo[91735]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ibswzulzxjqpuahkvarxlhmvebbmjfcn ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405479.0092764-1345-34233441509364/AnsiballZ_file.py'
Oct 02 11:44:39 compute-0 sudo[91735]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:39 compute-0 python3.9[91737]: ansible-ansible.builtin.file Invoked with group=zuul mode=0755 owner=zuul path=/var/lib/openstack/healthchecks setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:44:39 compute-0 sudo[91735]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:40 compute-0 sudo[91887]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yszxuvztvmgxjidrlrobjfqinptsvedy ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405479.830467-1369-97920341984742/AnsiballZ_stat.py'
Oct 02 11:44:40 compute-0 sudo[91887]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:40 compute-0 python3.9[91889]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/healthchecks/ovn_controller/healthcheck follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:44:40 compute-0 sudo[91887]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:40 compute-0 sudo[92010]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-drilogpnoaousqxzuvwecgvhbniuhmvg ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405479.830467-1369-97920341984742/AnsiballZ_copy.py'
Oct 02 11:44:40 compute-0 sudo[92010]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:40 compute-0 python3.9[92012]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/healthchecks/ovn_controller/ group=zuul mode=0700 owner=zuul setype=container_file_t src=/home/zuul/.ansible/tmp/ansible-tmp-1759405479.830467-1369-97920341984742/.source _original_basename=healthcheck follow=False checksum=4098dd010265fabdf5c26b97d169fc4e575ff457 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:44:40 compute-0 sudo[92010]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:42 compute-0 sudo[92162]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xvboaabtfrvextswmdiqesorilhnwoou ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405481.813497-1420-235329749885321/AnsiballZ_file.py'
Oct 02 11:44:42 compute-0 sudo[92162]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:42 compute-0 python3.9[92164]: ansible-ansible.builtin.file Invoked with path=/var/lib/kolla/config_files recurse=True setype=container_file_t state=directory force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:44:42 compute-0 sudo[92162]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:43 compute-0 sudo[92314]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-inmchpfvbjvtydpivlxxdxfsqfrnefmp ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405482.9187248-1444-218350296293/AnsiballZ_stat.py'
Oct 02 11:44:43 compute-0 sudo[92314]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:43 compute-0 python3.9[92316]: ansible-ansible.legacy.stat Invoked with path=/var/lib/kolla/config_files/ovn_controller.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:44:43 compute-0 sudo[92314]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:43 compute-0 sudo[92437]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fecxlyezmxarjhqkklaszwfoqdklbrwp ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405482.9187248-1444-218350296293/AnsiballZ_copy.py'
Oct 02 11:44:43 compute-0 sudo[92437]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:44 compute-0 python3.9[92439]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/kolla/config_files/ovn_controller.json mode=0600 src=/home/zuul/.ansible/tmp/ansible-tmp-1759405482.9187248-1444-218350296293/.source.json _original_basename=.txxy5hc2 follow=False checksum=2328fc98619beeb08ee32b01f15bb43094c10b61 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:44:44 compute-0 sudo[92437]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:44 compute-0 sudo[92589]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gowjhvwuubzxjsgqzpocshglywbydcnl ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405484.4724596-1489-44400808453403/AnsiballZ_file.py'
Oct 02 11:44:44 compute-0 sudo[92589]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:45 compute-0 python3.9[92591]: ansible-ansible.builtin.file Invoked with mode=0755 path=/var/lib/edpm-config/container-startup-config/ovn_controller state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:44:45 compute-0 sudo[92589]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:45 compute-0 sudo[92741]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-atrihfnipjcyladjcxrxpxaomaxaeuzx ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405485.234373-1513-155894681565879/AnsiballZ_stat.py'
Oct 02 11:44:45 compute-0 sudo[92741]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:45 compute-0 sudo[92741]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:46 compute-0 sudo[92864]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-voosagpysykfmmrtxsposbmwmizsnajt ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405485.234373-1513-155894681565879/AnsiballZ_copy.py'
Oct 02 11:44:46 compute-0 sudo[92864]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:46 compute-0 sudo[92864]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:47 compute-0 sudo[93016]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lxrgldoxyvtpqgnfyorwcirmnabfzcba ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405486.7412539-1564-211778017701305/AnsiballZ_container_config_data.py'
Oct 02 11:44:47 compute-0 sudo[93016]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:47 compute-0 python3.9[93018]: ansible-container_config_data Invoked with config_overrides={} config_path=/var/lib/edpm-config/container-startup-config/ovn_controller config_pattern=*.json debug=False
Oct 02 11:44:47 compute-0 sudo[93016]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:48 compute-0 sudo[93168]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ojqupufjlwigwltupxwxnnuiwshzztuw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405487.930006-1591-240893536974999/AnsiballZ_container_config_hash.py'
Oct 02 11:44:48 compute-0 sudo[93168]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:48 compute-0 python3.9[93170]: ansible-container_config_hash Invoked with check_mode=False config_vol_prefix=/var/lib/config-data
Oct 02 11:44:48 compute-0 sudo[93168]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:49 compute-0 sudo[93320]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fvfgsgvfhjugnfuhturivyozwlgjwyxc ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405488.8381863-1618-140724264411366/AnsiballZ_podman_container_info.py'
Oct 02 11:44:49 compute-0 sudo[93320]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:49 compute-0 python3.9[93322]: ansible-containers.podman.podman_container_info Invoked with executable=podman name=None
Oct 02 11:44:49 compute-0 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Oct 02 11:44:49 compute-0 sudo[93320]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:50 compute-0 sudo[93483]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sffjesnkksuvtndcddnptoczlmjpnkle ; /usr/bin/python3 /home/zuul/.ansible/tmp/ansible-tmp-1759405490.114609-1657-167060550590173/AnsiballZ_edpm_container_manage.py'
Oct 02 11:44:50 compute-0 sudo[93483]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:50 compute-0 python3[93485]: ansible-edpm_container_manage Invoked with concurrency=1 config_dir=/var/lib/edpm-config/container-startup-config/ovn_controller config_id=ovn_controller config_overrides={} config_patterns=*.json log_base_path=/var/log/containers/stdouts debug=False
Oct 02 11:44:51 compute-0 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Oct 02 11:44:51 compute-0 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Oct 02 11:44:51 compute-0 podman[93520]: 2025-10-02 11:44:51.270878115 +0000 UTC m=+0.038448867 image pull ae232aa720979600656d94fc26ba957f1cdf5bca825fe9b57990f60c6534611f quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified
Oct 02 11:44:51 compute-0 podman[93520]: 2025-10-02 11:44:51.387183554 +0000 UTC m=+0.154754276 container create 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, container_name=ovn_controller, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.build-date=20251001, config_id=ovn_controller, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS)
Oct 02 11:44:51 compute-0 python3[93485]: ansible-edpm_container_manage PODMAN-CONTAINER-DEBUG: podman create --name ovn_controller --conmon-pidfile /run/ovn_controller.pid --env KOLLA_CONFIG_STRATEGY=COPY_ALWAYS --healthcheck-command /openstack/healthcheck --label config_id=ovn_controller --label container_name=ovn_controller --label managed_by=edpm_ansible --label config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']} --log-driver journald --log-level info --network host --privileged=True --user root --volume /lib/modules:/lib/modules:ro --volume /run:/run --volume /var/lib/openvswitch/ovn:/run/ovn:shared,z --volume /var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro --volume /var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z --volume /var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z --volume /var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z --volume 
/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z --volume /var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified
Oct 02 11:44:51 compute-0 sudo[93483]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:52 compute-0 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
Oct 02 11:44:52 compute-0 sudo[93708]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nazzmnntwfekwoookjnluhxxfztblpdr ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405491.7004838-1681-228492410928551/AnsiballZ_stat.py'
Oct 02 11:44:52 compute-0 sudo[93708]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:52 compute-0 python3.9[93710]: ansible-ansible.builtin.stat Invoked with path=/etc/sysconfig/podman_drop_in follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:44:52 compute-0 sudo[93708]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:53 compute-0 sudo[93862]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wgvumtdrkvnxsixezhyxoukdiuqbtdoy ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405492.7298362-1708-20133605271685/AnsiballZ_file.py'
Oct 02 11:44:53 compute-0 sudo[93862]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:53 compute-0 python3.9[93864]: ansible-file Invoked with path=/etc/systemd/system/edpm_ovn_controller.requires state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:44:53 compute-0 sudo[93862]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:53 compute-0 sudo[93938]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zkshnjnsxcfzppuqtjgchzflhmugvdkf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405492.7298362-1708-20133605271685/AnsiballZ_stat.py'
Oct 02 11:44:53 compute-0 sudo[93938]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:53 compute-0 python3.9[93940]: ansible-stat Invoked with path=/etc/systemd/system/edpm_ovn_controller_healthcheck.timer follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:44:53 compute-0 sudo[93938]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:54 compute-0 sudo[94089]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dhjcarautvuxoihbvrjnexiaikvubszu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405493.8086407-1708-99761625005176/AnsiballZ_copy.py'
Oct 02 11:44:54 compute-0 sudo[94089]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:54 compute-0 python3.9[94091]: ansible-copy Invoked with src=/home/zuul/.ansible/tmp/ansible-tmp-1759405493.8086407-1708-99761625005176/source dest=/etc/systemd/system/edpm_ovn_controller.service mode=0644 owner=root group=root backup=False force=True remote_src=False follow=False unsafe_writes=False _original_basename=None content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None checksum=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:44:54 compute-0 sudo[94089]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:54 compute-0 sudo[94165]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-uffgdlzeqknqtzyuacwgtvhgrsromeab ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405493.8086407-1708-99761625005176/AnsiballZ_systemd.py'
Oct 02 11:44:54 compute-0 sudo[94165]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:55 compute-0 python3.9[94167]: ansible-systemd Invoked with daemon_reload=True daemon_reexec=False scope=system no_block=False name=None state=None enabled=None force=None masked=None
Oct 02 11:44:55 compute-0 systemd[1]: Reloading.
Oct 02 11:44:55 compute-0 systemd-rc-local-generator[94190]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:44:55 compute-0 systemd-sysv-generator[94196]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:44:55 compute-0 sudo[94165]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:55 compute-0 sudo[94276]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rmhaqxbnroygzzmjsjydryepuhblbwpg ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405493.8086407-1708-99761625005176/AnsiballZ_systemd.py'
Oct 02 11:44:55 compute-0 sudo[94276]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:55 compute-0 python3.9[94278]: ansible-systemd Invoked with state=restarted name=edpm_ovn_controller.service enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:44:55 compute-0 systemd[1]: Reloading.
Oct 02 11:44:55 compute-0 systemd-rc-local-generator[94312]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:44:55 compute-0 systemd-sysv-generator[94316]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:44:56 compute-0 systemd[1]: Starting ovn_controller container...
Oct 02 11:44:56 compute-0 systemd[1]: Created slice Virtual Machine and Container Slice.
Oct 02 11:44:56 compute-0 systemd[1]: Started libcrun container.
Oct 02 11:44:56 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/fb8b1d7593ee21a078794ae8adec343d77f7eb15ef167774a1453c99a025ca39/merged/run/ovn supports timestamps until 2038 (0x7fffffff)
Oct 02 11:44:56 compute-0 systemd[1]: Started /usr/bin/podman healthcheck run 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d.
Oct 02 11:44:56 compute-0 podman[94321]: 2025-10-02 11:44:56.364564396 +0000 UTC m=+0.193260146 container init 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_id=ovn_controller, container_name=ovn_controller, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 11:44:56 compute-0 ovn_controller[94336]: + sudo -E kolla_set_configs
Oct 02 11:44:56 compute-0 podman[94321]: 2025-10-02 11:44:56.395994198 +0000 UTC m=+0.224689928 container start 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_id=ovn_controller, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.schema-version=1.0, container_name=ovn_controller, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3)
Oct 02 11:44:56 compute-0 edpm-start-podman-container[94321]: ovn_controller
Oct 02 11:44:56 compute-0 systemd[1]: Created slice User Slice of UID 0.
Oct 02 11:44:56 compute-0 systemd[1]: Starting User Runtime Directory /run/user/0...
Oct 02 11:44:56 compute-0 systemd[1]: Finished User Runtime Directory /run/user/0.
Oct 02 11:44:56 compute-0 systemd[1]: Starting User Manager for UID 0...
Oct 02 11:44:56 compute-0 systemd[94364]: pam_unix(systemd-user:session): session opened for user root(uid=0) by root(uid=0)
Oct 02 11:44:56 compute-0 edpm-start-podman-container[94320]: Creating additional drop-in dependency for "ovn_controller" (9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d)
Oct 02 11:44:56 compute-0 systemd[1]: Reloading.
Oct 02 11:44:56 compute-0 podman[94343]: 2025-10-02 11:44:56.508747188 +0000 UTC m=+0.103331230 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=starting, health_failing_streak=1, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_managed=true, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_id=ovn_controller, io.buildah.version=1.41.3)
Oct 02 11:44:56 compute-0 systemd[94364]: Queued start job for default target Main User Target.
Oct 02 11:44:56 compute-0 systemd[94364]: Created slice User Application Slice.
Oct 02 11:44:56 compute-0 systemd[94364]: Mark boot as successful after the user session has run 2 minutes was skipped because of an unmet condition check (ConditionUser=!@system).
Oct 02 11:44:56 compute-0 systemd[94364]: Started Daily Cleanup of User's Temporary Directories.
Oct 02 11:44:56 compute-0 systemd[94364]: Reached target Paths.
Oct 02 11:44:56 compute-0 systemd[94364]: Reached target Timers.
Oct 02 11:44:56 compute-0 systemd-sysv-generator[94426]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:44:56 compute-0 systemd-rc-local-generator[94420]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:44:56 compute-0 systemd[94364]: Starting D-Bus User Message Bus Socket...
Oct 02 11:44:56 compute-0 systemd[94364]: Starting Create User's Volatile Files and Directories...
Oct 02 11:44:56 compute-0 systemd[94364]: Listening on D-Bus User Message Bus Socket.
Oct 02 11:44:56 compute-0 systemd[94364]: Reached target Sockets.
Oct 02 11:44:56 compute-0 systemd[94364]: Finished Create User's Volatile Files and Directories.
Oct 02 11:44:56 compute-0 systemd[94364]: Reached target Basic System.
Oct 02 11:44:56 compute-0 systemd[94364]: Reached target Main User Target.
Oct 02 11:44:56 compute-0 systemd[94364]: Startup finished in 114ms.
Oct 02 11:44:56 compute-0 systemd[1]: Started User Manager for UID 0.
Oct 02 11:44:56 compute-0 systemd[1]: Started ovn_controller container.
Oct 02 11:44:56 compute-0 systemd[1]: 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d-77a6539b54108d5a.service: Main process exited, code=exited, status=1/FAILURE
Oct 02 11:44:56 compute-0 systemd[1]: 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d-77a6539b54108d5a.service: Failed with result 'exit-code'.
Oct 02 11:44:56 compute-0 systemd[1]: Started Session c1 of User root.
Oct 02 11:44:56 compute-0 sudo[94276]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:56 compute-0 ovn_controller[94336]: INFO:__main__:Loading config file at /var/lib/kolla/config_files/config.json
Oct 02 11:44:56 compute-0 ovn_controller[94336]: INFO:__main__:Validating config file
Oct 02 11:44:56 compute-0 ovn_controller[94336]: INFO:__main__:Kolla config strategy set to: COPY_ALWAYS
Oct 02 11:44:56 compute-0 ovn_controller[94336]: INFO:__main__:Writing out command to execute
Oct 02 11:44:56 compute-0 systemd[1]: session-c1.scope: Deactivated successfully.
Oct 02 11:44:56 compute-0 ovn_controller[94336]: ++ cat /run_command
Oct 02 11:44:56 compute-0 ovn_controller[94336]: + CMD='/usr/bin/ovn-controller --pidfile unix:/run/openvswitch/db.sock  -p /etc/pki/tls/private/ovndb.key -c /etc/pki/tls/certs/ovndb.crt -C /etc/pki/tls/certs/ovndbca.crt '
Oct 02 11:44:56 compute-0 ovn_controller[94336]: + ARGS=
Oct 02 11:44:56 compute-0 ovn_controller[94336]: + sudo kolla_copy_cacerts
Oct 02 11:44:56 compute-0 systemd[1]: Started Session c2 of User root.
Oct 02 11:44:56 compute-0 systemd[1]: session-c2.scope: Deactivated successfully.
Oct 02 11:44:56 compute-0 ovn_controller[94336]: + [[ ! -n '' ]]
Oct 02 11:44:56 compute-0 ovn_controller[94336]: + . kolla_extend_start
Oct 02 11:44:56 compute-0 ovn_controller[94336]: + echo 'Running command: '\''/usr/bin/ovn-controller --pidfile unix:/run/openvswitch/db.sock  -p /etc/pki/tls/private/ovndb.key -c /etc/pki/tls/certs/ovndb.crt -C /etc/pki/tls/certs/ovndbca.crt '\'''
Oct 02 11:44:56 compute-0 ovn_controller[94336]: + umask 0022
Oct 02 11:44:56 compute-0 ovn_controller[94336]: + exec /usr/bin/ovn-controller --pidfile unix:/run/openvswitch/db.sock -p /etc/pki/tls/private/ovndb.key -c /etc/pki/tls/certs/ovndb.crt -C /etc/pki/tls/certs/ovndbca.crt
Oct 02 11:44:56 compute-0 ovn_controller[94336]: Running command: '/usr/bin/ovn-controller --pidfile unix:/run/openvswitch/db.sock  -p /etc/pki/tls/private/ovndb.key -c /etc/pki/tls/certs/ovndb.crt -C /etc/pki/tls/certs/ovndbca.crt '
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00001|reconnect|INFO|unix:/run/openvswitch/db.sock: connecting...
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00002|reconnect|INFO|unix:/run/openvswitch/db.sock: connected
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00003|main|INFO|OVN internal version is : [24.03.7-20.33.0-76.8]
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00004|main|INFO|OVS IDL reconnected, force recompute.
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00005|reconnect|INFO|ssl:ovsdbserver-sb.openstack.svc:6642: connecting...
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00006|main|INFO|OVNSB IDL reconnected, force recompute.
Oct 02 11:44:56 compute-0 NetworkManager[51160]: <info>  [1759405496.8994] manager: (br-int): new Open vSwitch Interface device (/org/freedesktop/NetworkManager/Devices/14)
Oct 02 11:44:56 compute-0 NetworkManager[51160]: <info>  [1759405496.8999] device (br-int)[Open vSwitch Interface]: state change: unmanaged -> unavailable (reason 'managed', managed-type: 'external')
Oct 02 11:44:56 compute-0 NetworkManager[51160]: <info>  [1759405496.9007] manager: (br-int): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/15)
Oct 02 11:44:56 compute-0 NetworkManager[51160]: <info>  [1759405496.9011] manager: (br-int): new Open vSwitch Bridge device (/org/freedesktop/NetworkManager/Devices/16)
Oct 02 11:44:56 compute-0 NetworkManager[51160]: <info>  [1759405496.9014] device (br-int)[Open vSwitch Interface]: state change: unavailable -> disconnected (reason 'none', managed-type: 'full')
Oct 02 11:44:56 compute-0 kernel: br-int: entered promiscuous mode
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00007|reconnect|INFO|ssl:ovsdbserver-sb.openstack.svc:6642: connected
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00008|features|INFO|unix:/var/run/openvswitch/br-int.mgmt: connecting to switch
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00009|rconn|INFO|unix:/var/run/openvswitch/br-int.mgmt: connecting...
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00010|features|INFO|OVS Feature: ct_zero_snat, state: supported
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00011|features|INFO|OVS Feature: ct_flush, state: supported
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00012|features|INFO|OVS Feature: dp_hash_l4_sym_support, state: supported
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00013|reconnect|INFO|unix:/run/openvswitch/db.sock: connecting...
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00014|main|INFO|OVS feature set changed, force recompute.
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00015|ofctrl|INFO|unix:/var/run/openvswitch/br-int.mgmt: connecting to switch
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00016|rconn|INFO|unix:/var/run/openvswitch/br-int.mgmt: connecting...
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00017|rconn|INFO|unix:/var/run/openvswitch/br-int.mgmt: connected
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00018|ofctrl|INFO|ofctrl-wait-before-clear is now 8000 ms (was 0 ms)
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00019|main|INFO|OVS OpenFlow connection reconnected,force recompute.
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00020|rconn|INFO|unix:/var/run/openvswitch/br-int.mgmt: connected
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00021|reconnect|INFO|unix:/run/openvswitch/db.sock: connected
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00022|main|INFO|OVS feature set changed, force recompute.
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00023|features|INFO|OVS DB schema supports 4 flow table prefixes, our IDL supports: 4
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00024|main|INFO|Setting flow table prefixes: ip_src, ip_dst, ipv6_src, ipv6_dst.
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00001|pinctrl(ovn_pinctrl0)|INFO|unix:/var/run/openvswitch/br-int.mgmt: connecting to switch
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00002|rconn(ovn_pinctrl0)|INFO|unix:/var/run/openvswitch/br-int.mgmt: connecting...
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00001|statctrl(ovn_statctrl3)|INFO|unix:/var/run/openvswitch/br-int.mgmt: connecting to switch
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00002|rconn(ovn_statctrl3)|INFO|unix:/var/run/openvswitch/br-int.mgmt: connecting...
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00003|rconn(ovn_pinctrl0)|INFO|unix:/var/run/openvswitch/br-int.mgmt: connected
Oct 02 11:44:56 compute-0 ovn_controller[94336]: 2025-10-02T11:44:56Z|00003|rconn(ovn_statctrl3)|INFO|unix:/var/run/openvswitch/br-int.mgmt: connected
Oct 02 11:44:56 compute-0 systemd-udevd[94472]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 11:44:56 compute-0 NetworkManager[51160]: <info>  [1759405496.9332] manager: (ovn-1fc220-0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/17)
Oct 02 11:44:56 compute-0 NetworkManager[51160]: <info>  [1759405496.9338] manager: (ovn-3ff68c-0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/18)
Oct 02 11:44:56 compute-0 NetworkManager[51160]: <info>  [1759405496.9342] manager: (ovn-ef6a8b-0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/19)
Oct 02 11:44:56 compute-0 kernel: genev_sys_6081: entered promiscuous mode
Oct 02 11:44:56 compute-0 NetworkManager[51160]: <info>  [1759405496.9519] device (genev_sys_6081): carrier: link connected
Oct 02 11:44:56 compute-0 NetworkManager[51160]: <info>  [1759405496.9522] manager: (genev_sys_6081): new Generic device (/org/freedesktop/NetworkManager/Devices/20)
Oct 02 11:44:56 compute-0 systemd-udevd[94477]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 11:44:57 compute-0 sudo[94604]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jvtpzxderqwazqtaznsjtmnwmdzswclj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405497.0815306-1792-249031519933278/AnsiballZ_command.py'
Oct 02 11:44:57 compute-0 sudo[94604]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:57 compute-0 python3.9[94606]: ansible-ansible.legacy.command Invoked with _raw_params=ovs-vsctl remove open . other_config hw-offload
                                             _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:44:57 compute-0 ovs-vsctl[94607]: ovs|00001|vsctl|INFO|Called as ovs-vsctl remove open . other_config hw-offload
Oct 02 11:44:57 compute-0 sudo[94604]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:58 compute-0 sudo[94757]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lskytcevghexbjurmyvzjpfxbtpaulfc ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405497.8094609-1816-119113806281352/AnsiballZ_command.py'
Oct 02 11:44:58 compute-0 sudo[94757]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:58 compute-0 python3.9[94759]: ansible-ansible.legacy.command Invoked with _raw_params=ovs-vsctl get Open_vSwitch . external_ids:ovn-cms-options | sed 's/\"//g'
                                             _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:44:58 compute-0 ovs-vsctl[94761]: ovs|00001|db_ctl_base|ERR|no key "ovn-cms-options" in Open_vSwitch record "." column external_ids
Oct 02 11:44:58 compute-0 sudo[94757]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:59 compute-0 sudo[94912]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-scbhhynnrnunpapzustzdmvfelhhquck ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405498.8096628-1858-238157689661787/AnsiballZ_command.py'
Oct 02 11:44:59 compute-0 sudo[94912]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:44:59 compute-0 python3.9[94914]: ansible-ansible.legacy.command Invoked with _raw_params=ovs-vsctl remove Open_vSwitch . external_ids ovn-cms-options
                                             _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:44:59 compute-0 ovs-vsctl[94915]: ovs|00001|vsctl|INFO|Called as ovs-vsctl remove Open_vSwitch . external_ids ovn-cms-options
Oct 02 11:44:59 compute-0 sudo[94912]: pam_unix(sudo:session): session closed for user root
Oct 02 11:44:59 compute-0 sshd-session[83839]: Connection closed by 192.168.122.30 port 43304
Oct 02 11:44:59 compute-0 sshd-session[83836]: pam_unix(sshd:session): session closed for user zuul
Oct 02 11:44:59 compute-0 systemd[1]: session-20.scope: Deactivated successfully.
Oct 02 11:44:59 compute-0 systemd-logind[827]: Session 20 logged out. Waiting for processes to exit.
Oct 02 11:44:59 compute-0 systemd[1]: session-20.scope: Consumed 44.556s CPU time.
Oct 02 11:44:59 compute-0 systemd-logind[827]: Removed session 20.
Oct 02 11:45:04 compute-0 sshd-session[94940]: Accepted publickey for zuul from 192.168.122.30 port 37164 ssh2: ECDSA SHA256:tAEsFSop2MjuMQQ/UqKU2uCkxqWXGfsM5crdhd6tlI4
Oct 02 11:45:04 compute-0 systemd-logind[827]: New session 22 of user zuul.
Oct 02 11:45:04 compute-0 systemd[1]: Started Session 22 of User zuul.
Oct 02 11:45:04 compute-0 sshd-session[94940]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Oct 02 11:45:06 compute-0 python3.9[95093]: ansible-ansible.builtin.setup Invoked with gather_subset=['!all', '!min', 'local'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:45:06 compute-0 systemd[1]: Stopping User Manager for UID 0...
Oct 02 11:45:06 compute-0 systemd[94364]: Activating special unit Exit the Session...
Oct 02 11:45:06 compute-0 systemd[94364]: Stopped target Main User Target.
Oct 02 11:45:06 compute-0 systemd[94364]: Stopped target Basic System.
Oct 02 11:45:06 compute-0 systemd[94364]: Stopped target Paths.
Oct 02 11:45:06 compute-0 systemd[94364]: Stopped target Sockets.
Oct 02 11:45:06 compute-0 systemd[94364]: Stopped target Timers.
Oct 02 11:45:06 compute-0 systemd[94364]: Stopped Daily Cleanup of User's Temporary Directories.
Oct 02 11:45:06 compute-0 systemd[94364]: Closed D-Bus User Message Bus Socket.
Oct 02 11:45:06 compute-0 systemd[94364]: Stopped Create User's Volatile Files and Directories.
Oct 02 11:45:06 compute-0 systemd[94364]: Removed slice User Application Slice.
Oct 02 11:45:06 compute-0 systemd[94364]: Reached target Shutdown.
Oct 02 11:45:06 compute-0 systemd[94364]: Finished Exit the Session.
Oct 02 11:45:06 compute-0 systemd[94364]: Reached target Exit the Session.
Oct 02 11:45:06 compute-0 systemd[1]: user@0.service: Deactivated successfully.
Oct 02 11:45:06 compute-0 systemd[1]: Stopped User Manager for UID 0.
Oct 02 11:45:06 compute-0 systemd[1]: Stopping User Runtime Directory /run/user/0...
Oct 02 11:45:06 compute-0 systemd[1]: run-user-0.mount: Deactivated successfully.
Oct 02 11:45:06 compute-0 systemd[1]: user-runtime-dir@0.service: Deactivated successfully.
Oct 02 11:45:06 compute-0 systemd[1]: Stopped User Runtime Directory /run/user/0.
Oct 02 11:45:06 compute-0 systemd[1]: Removed slice User Slice of UID 0.
Oct 02 11:45:07 compute-0 sudo[95250]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ntxbxqgvoonebvfrjtbhwgmbkzmhcagg ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405506.5459733-67-189076233376834/AnsiballZ_file.py'
Oct 02 11:45:07 compute-0 sudo[95250]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:07 compute-0 python3.9[95252]: ansible-ansible.builtin.file Invoked with group=zuul owner=zuul path=/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:45:07 compute-0 sudo[95250]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:07 compute-0 sudo[95402]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mcyengchylydmuekozbkmokmluolrtbj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405507.343276-67-36938105903377/AnsiballZ_file.py'
Oct 02 11:45:07 compute-0 sudo[95402]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:07 compute-0 python3.9[95404]: ansible-ansible.builtin.file Invoked with group=zuul mode=0755 owner=zuul path=/var/lib/neutron setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:45:07 compute-0 sudo[95402]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:08 compute-0 sudo[95554]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-chqlgqrrvhvjjsnkjosjeheccbflqexh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405507.9540904-67-163956793037455/AnsiballZ_file.py'
Oct 02 11:45:08 compute-0 sudo[95554]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:08 compute-0 python3.9[95556]: ansible-ansible.builtin.file Invoked with group=zuul mode=0755 owner=zuul path=/var/lib/neutron/kill_scripts setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:45:08 compute-0 sudo[95554]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:08 compute-0 sudo[95706]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ekoopcpuihhnsahqvbutlczkbvpyaiul ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405508.552032-67-280392961812688/AnsiballZ_file.py'
Oct 02 11:45:08 compute-0 sudo[95706]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:09 compute-0 python3.9[95708]: ansible-ansible.builtin.file Invoked with group=zuul mode=0755 owner=zuul path=/var/lib/neutron/ovn-metadata-proxy setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:45:09 compute-0 sudo[95706]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:09 compute-0 sudo[95858]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vvohsqiowsjkvkamqinivjvjubggloan ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405509.1495886-67-93487406573803/AnsiballZ_file.py'
Oct 02 11:45:09 compute-0 sudo[95858]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:09 compute-0 python3.9[95860]: ansible-ansible.builtin.file Invoked with group=zuul mode=0755 owner=zuul path=/var/lib/neutron/external/pids setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:45:09 compute-0 sudo[95858]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:10 compute-0 python3.9[96010]: ansible-ansible.builtin.setup Invoked with gather_subset=['!all', '!min', 'selinux'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:45:11 compute-0 sudo[96160]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zbhqrbpfgygqpwokuliptelhdtbeesmf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405510.7718496-199-14254579431407/AnsiballZ_seboolean.py'
Oct 02 11:45:11 compute-0 sudo[96160]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:11 compute-0 python3.9[96162]: ansible-ansible.posix.seboolean Invoked with name=virt_sandbox_use_netlink persistent=True state=True ignore_selinux_state=False
Oct 02 11:45:12 compute-0 sudo[96160]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:13 compute-0 python3.9[96312]: ansible-ansible.legacy.stat Invoked with path=/var/lib/neutron/ovn_metadata_haproxy_wrapper follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:45:13 compute-0 python3.9[96433]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/neutron/ovn_metadata_haproxy_wrapper mode=0755 setype=container_file_t src=/home/zuul/.ansible/tmp/ansible-tmp-1759405512.3602457-223-120423557898595/.source follow=False _original_basename=haproxy.j2 checksum=95c62e64c8f82dd9393a560d1b052dc98d38f810 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:45:14 compute-0 python3.9[96583]: ansible-ansible.legacy.stat Invoked with path=/var/lib/neutron/kill_scripts/haproxy-kill follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:45:15 compute-0 python3.9[96704]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/neutron/kill_scripts/haproxy-kill mode=0755 setype=container_file_t src=/home/zuul/.ansible/tmp/ansible-tmp-1759405514.04843-268-165586257581403/.source follow=False _original_basename=kill-script.j2 checksum=2dfb5489f491f61b95691c3bf95fa1fe48ff3700 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:45:15 compute-0 sudo[96855]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zpkgrgopfaipushqifknnqwnusnqfkvn ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405515.4800773-319-210668893644402/AnsiballZ_setup.py'
Oct 02 11:45:15 compute-0 sudo[96855]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:16 compute-0 python3.9[96857]: ansible-ansible.legacy.setup Invoked with filter=['ansible_pkg_mgr'] gather_subset=['!all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Oct 02 11:45:16 compute-0 sudo[96855]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:16 compute-0 sudo[96939]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dcgvtabfnzsddlhvnzpsgegqrzbovvls ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405515.4800773-319-210668893644402/AnsiballZ_dnf.py'
Oct 02 11:45:16 compute-0 sudo[96939]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:16 compute-0 python3.9[96941]: ansible-ansible.legacy.dnf Invoked with name=['openvswitch'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 02 11:45:18 compute-0 sudo[96939]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:19 compute-0 sudo[97092]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yoptzwlwkeulcmsbtsdxtqqqxiruuhcn ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405518.4807954-355-110555307867315/AnsiballZ_systemd.py'
Oct 02 11:45:19 compute-0 sudo[97092]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:19 compute-0 python3.9[97094]: ansible-ansible.builtin.systemd Invoked with enabled=True masked=False name=openvswitch.service state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None
Oct 02 11:45:19 compute-0 sudo[97092]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:20 compute-0 python3.9[97247]: ansible-ansible.legacy.stat Invoked with path=/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent/01-rootwrap.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:45:20 compute-0 python3.9[97368]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent/01-rootwrap.conf mode=0644 setype=container_file_t src=/home/zuul/.ansible/tmp/ansible-tmp-1759405519.749638-379-216867036562840/.source.conf follow=False _original_basename=rootwrap.conf.j2 checksum=11f2cfb4b7d97b2cef3c2c2d88089e6999cffe22 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:45:21 compute-0 python3.9[97518]: ansible-ansible.legacy.stat Invoked with path=/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent/01-neutron-ovn-metadata-agent.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:45:21 compute-0 python3.9[97639]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent/01-neutron-ovn-metadata-agent.conf mode=0644 setype=container_file_t src=/home/zuul/.ansible/tmp/ansible-tmp-1759405520.8969774-379-201558476676055/.source.conf follow=False _original_basename=neutron-ovn-metadata-agent.conf.j2 checksum=8bc979abbe81c2cf3993a225517a7e2483e20443 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:45:23 compute-0 python3.9[97789]: ansible-ansible.legacy.stat Invoked with path=/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent/10-neutron-metadata.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:45:23 compute-0 python3.9[97910]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent/10-neutron-metadata.conf mode=0644 setype=container_file_t src=/home/zuul/.ansible/tmp/ansible-tmp-1759405522.8126838-511-95309638885221/.source.conf _original_basename=10-neutron-metadata.conf follow=False checksum=ca7d4d155f5b812fab1a3b70e34adb495d291b8d backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:45:24 compute-0 python3.9[98060]: ansible-ansible.legacy.stat Invoked with path=/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent/05-nova-metadata.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:45:24 compute-0 python3.9[98181]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent/05-nova-metadata.conf mode=0644 setype=container_file_t src=/home/zuul/.ansible/tmp/ansible-tmp-1759405523.8474355-511-3583729364526/.source.conf _original_basename=05-nova-metadata.conf follow=False checksum=3fd0bbe67f8d6b170421a2b4395a288aa69eaea2 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:45:25 compute-0 python3.9[98331]: ansible-ansible.builtin.stat Invoked with path=/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:45:26 compute-0 sudo[98483]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xbrycomzlrizhgwgqwuyzmujzchqzudd ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405526.0045135-625-5042051853445/AnsiballZ_file.py'
Oct 02 11:45:26 compute-0 sudo[98483]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:26 compute-0 python3.9[98485]: ansible-ansible.builtin.file Invoked with path=/var/local/libexec recurse=True setype=container_file_t state=directory force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:45:26 compute-0 sudo[98483]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:27 compute-0 sudo[98644]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qojqbkrqvwhclenusnzslbjmvibrjecf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405526.8071516-649-170230107185715/AnsiballZ_stat.py'
Oct 02 11:45:27 compute-0 sudo[98644]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:27 compute-0 ovn_controller[94336]: 2025-10-02T11:45:27Z|00025|memory|INFO|16128 kB peak resident set size after 30.3 seconds
Oct 02 11:45:27 compute-0 ovn_controller[94336]: 2025-10-02T11:45:27Z|00026|memory|INFO|idl-cells-OVN_Southbound:273 idl-cells-Open_vSwitch:585 ofctrl_desired_flow_usage-KB:7 ofctrl_installed_flow_usage-KB:5 ofctrl_sb_flow_ref_usage-KB:3
Oct 02 11:45:27 compute-0 podman[98609]: 2025-10-02 11:45:27.194145129 +0000 UTC m=+0.123604772 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_controller, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, config_id=ovn_controller, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS)
Oct 02 11:45:27 compute-0 python3.9[98652]: ansible-ansible.legacy.stat Invoked with path=/var/local/libexec/edpm-container-shutdown follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:45:27 compute-0 sudo[98644]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:27 compute-0 sudo[98739]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-brrrycljrxidtahznrbybjibigastamh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405526.8071516-649-170230107185715/AnsiballZ_file.py'
Oct 02 11:45:27 compute-0 sudo[98739]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:27 compute-0 python3.9[98741]: ansible-ansible.legacy.file Invoked with group=root mode=0700 owner=root setype=container_file_t dest=/var/local/libexec/edpm-container-shutdown _original_basename=edpm-container-shutdown recurse=False state=file path=/var/local/libexec/edpm-container-shutdown force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:45:27 compute-0 sudo[98739]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:28 compute-0 sudo[98891]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ujsolcadaikmkbftdfwubddxqizpbizs ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405527.8453372-649-167127175163493/AnsiballZ_stat.py'
Oct 02 11:45:28 compute-0 sudo[98891]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:28 compute-0 python3.9[98893]: ansible-ansible.legacy.stat Invoked with path=/var/local/libexec/edpm-start-podman-container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:45:28 compute-0 sudo[98891]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:28 compute-0 sudo[98969]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zmkpnnaelmgioaefjehejlswpfhzegec ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405527.8453372-649-167127175163493/AnsiballZ_file.py'
Oct 02 11:45:28 compute-0 sudo[98969]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:28 compute-0 python3.9[98971]: ansible-ansible.legacy.file Invoked with group=root mode=0700 owner=root setype=container_file_t dest=/var/local/libexec/edpm-start-podman-container _original_basename=edpm-start-podman-container recurse=False state=file path=/var/local/libexec/edpm-start-podman-container force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:45:28 compute-0 sudo[98969]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:29 compute-0 sudo[99121]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yuheeircypneocmtkjyzqdssweodufkg ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405528.9303992-718-265287493989711/AnsiballZ_file.py'
Oct 02 11:45:29 compute-0 sudo[99121]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:29 compute-0 python3.9[99123]: ansible-ansible.builtin.file Invoked with mode=420 path=/etc/systemd/system-preset state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:45:29 compute-0 sudo[99121]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:29 compute-0 sudo[99273]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-eapxruwctnvjgdmxiqneqewchkpjxokv ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405529.6649516-742-102943118657672/AnsiballZ_stat.py'
Oct 02 11:45:29 compute-0 sudo[99273]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:30 compute-0 python3.9[99275]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/edpm-container-shutdown.service follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:45:30 compute-0 sudo[99273]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:30 compute-0 sudo[99351]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lekfscjaxdyqvxmrufwtqpvzsbgifglv ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405529.6649516-742-102943118657672/AnsiballZ_file.py'
Oct 02 11:45:30 compute-0 sudo[99351]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:30 compute-0 python3.9[99353]: ansible-ansible.legacy.file Invoked with group=root mode=0644 owner=root dest=/etc/systemd/system/edpm-container-shutdown.service _original_basename=edpm-container-shutdown-service recurse=False state=file path=/etc/systemd/system/edpm-container-shutdown.service force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:45:30 compute-0 sudo[99351]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:31 compute-0 sudo[99503]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cszhbfoptwklmlndfybitmjwrnmwjzib ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405530.7918627-778-11273497200714/AnsiballZ_stat.py'
Oct 02 11:45:31 compute-0 sudo[99503]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:31 compute-0 python3.9[99505]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system-preset/91-edpm-container-shutdown.preset follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:45:31 compute-0 sudo[99503]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:31 compute-0 sudo[99581]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pqsiybwepemgtqftnxsdfkadazsafeyq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405530.7918627-778-11273497200714/AnsiballZ_file.py'
Oct 02 11:45:31 compute-0 sudo[99581]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:31 compute-0 python3.9[99583]: ansible-ansible.legacy.file Invoked with group=root mode=0644 owner=root dest=/etc/systemd/system-preset/91-edpm-container-shutdown.preset _original_basename=91-edpm-container-shutdown-preset recurse=False state=file path=/etc/systemd/system-preset/91-edpm-container-shutdown.preset force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:45:31 compute-0 sudo[99581]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:32 compute-0 sudo[99733]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xvzeajlewaxsvjxbsudusqdyzbtgmrla ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405531.9519699-814-201334042501104/AnsiballZ_systemd.py'
Oct 02 11:45:32 compute-0 sudo[99733]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:32 compute-0 python3.9[99735]: ansible-ansible.builtin.systemd Invoked with daemon_reload=True enabled=True name=edpm-container-shutdown state=started daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:45:32 compute-0 systemd[1]: Reloading.
Oct 02 11:45:32 compute-0 systemd-sysv-generator[99765]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:45:32 compute-0 systemd-rc-local-generator[99762]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:45:32 compute-0 sudo[99733]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:33 compute-0 sudo[99921]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-axkenpazgijelgdlfgpkvlorlzxqgrhy ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405533.0682461-838-11661482901764/AnsiballZ_stat.py'
Oct 02 11:45:33 compute-0 sudo[99921]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:33 compute-0 python3.9[99923]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/netns-placeholder.service follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:45:33 compute-0 sudo[99921]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:33 compute-0 sudo[99999]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fmkjmlkpicapaoogwvqxyozoticbysba ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405533.0682461-838-11661482901764/AnsiballZ_file.py'
Oct 02 11:45:33 compute-0 sudo[99999]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:34 compute-0 python3.9[100001]: ansible-ansible.legacy.file Invoked with group=root mode=0644 owner=root dest=/etc/systemd/system/netns-placeholder.service _original_basename=netns-placeholder-service recurse=False state=file path=/etc/systemd/system/netns-placeholder.service force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:45:34 compute-0 sudo[99999]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:34 compute-0 sudo[100151]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pizsrnjqckclrazurlgbxhbbunjvivyj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405534.2237587-874-69159767612567/AnsiballZ_stat.py'
Oct 02 11:45:34 compute-0 sudo[100151]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:34 compute-0 python3.9[100153]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system-preset/91-netns-placeholder.preset follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:45:34 compute-0 sudo[100151]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:34 compute-0 sudo[100229]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-flusmehezjcvcztymdojsjszggcjkbty ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405534.2237587-874-69159767612567/AnsiballZ_file.py'
Oct 02 11:45:34 compute-0 sudo[100229]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:35 compute-0 python3.9[100231]: ansible-ansible.legacy.file Invoked with group=root mode=0644 owner=root dest=/etc/systemd/system-preset/91-netns-placeholder.preset _original_basename=91-netns-placeholder-preset recurse=False state=file path=/etc/systemd/system-preset/91-netns-placeholder.preset force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:45:35 compute-0 sudo[100229]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:35 compute-0 sudo[100381]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pywiiadpnmbsglkkzpltcibhxrpbvfwu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405535.3903518-910-15922292834948/AnsiballZ_systemd.py'
Oct 02 11:45:35 compute-0 sudo[100381]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:35 compute-0 python3.9[100383]: ansible-ansible.builtin.systemd Invoked with daemon_reload=True enabled=True name=netns-placeholder state=started daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:45:35 compute-0 systemd[1]: Reloading.
Oct 02 11:45:36 compute-0 systemd-sysv-generator[100408]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:45:36 compute-0 systemd-rc-local-generator[100404]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:45:36 compute-0 systemd[1]: Starting Create netns directory...
Oct 02 11:45:36 compute-0 systemd[1]: run-netns-placeholder.mount: Deactivated successfully.
Oct 02 11:45:36 compute-0 systemd[1]: netns-placeholder.service: Deactivated successfully.
Oct 02 11:45:36 compute-0 systemd[1]: Finished Create netns directory.
Oct 02 11:45:36 compute-0 sudo[100381]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:38 compute-0 sudo[100574]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dlhrjoftdlzxbeuuawclmjirgsdjvoap ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405537.7136545-940-223392705409635/AnsiballZ_file.py'
Oct 02 11:45:38 compute-0 sudo[100574]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:38 compute-0 python3.9[100576]: ansible-ansible.builtin.file Invoked with group=zuul mode=0755 owner=zuul path=/var/lib/openstack/healthchecks setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:45:38 compute-0 sudo[100574]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:38 compute-0 sudo[100726]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qljnzppegpimwjyvjvpboflnfasgkiqs ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405538.497977-964-23409788220187/AnsiballZ_stat.py'
Oct 02 11:45:38 compute-0 sudo[100726]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:39 compute-0 python3.9[100728]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/healthchecks/ovn_metadata_agent/healthcheck follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:45:39 compute-0 sudo[100726]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:39 compute-0 sudo[100849]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rmtdgeawykxqbldpwgshoszdmaepmepu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405538.497977-964-23409788220187/AnsiballZ_copy.py'
Oct 02 11:45:39 compute-0 sudo[100849]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:39 compute-0 python3.9[100851]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/healthchecks/ovn_metadata_agent/ group=zuul mode=0700 owner=zuul setype=container_file_t src=/home/zuul/.ansible/tmp/ansible-tmp-1759405538.497977-964-23409788220187/.source _original_basename=healthcheck follow=False checksum=898a5a1fcd473cf731177fc866e3bd7ebf20a131 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:45:39 compute-0 sudo[100849]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:40 compute-0 sudo[101001]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pbgoiaqkoiqbvklrkugnatkqsvqkfecd ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405540.1425068-1015-231378700356279/AnsiballZ_file.py'
Oct 02 11:45:40 compute-0 sudo[101001]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:40 compute-0 python3.9[101003]: ansible-ansible.builtin.file Invoked with path=/var/lib/kolla/config_files recurse=True setype=container_file_t state=directory force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:45:40 compute-0 sudo[101001]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:41 compute-0 sudo[101153]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ehgpocgeulvixtbofprojolzkyeuqofp ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405540.9627213-1039-58980907585550/AnsiballZ_stat.py'
Oct 02 11:45:41 compute-0 sudo[101153]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:41 compute-0 python3.9[101155]: ansible-ansible.legacy.stat Invoked with path=/var/lib/kolla/config_files/ovn_metadata_agent.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:45:41 compute-0 sudo[101153]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:41 compute-0 sudo[101276]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ctuczyzdfowwcwsanqfgkyovnaxhtpgs ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405540.9627213-1039-58980907585550/AnsiballZ_copy.py'
Oct 02 11:45:41 compute-0 sudo[101276]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:41 compute-0 python3.9[101278]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/kolla/config_files/ovn_metadata_agent.json mode=0600 src=/home/zuul/.ansible/tmp/ansible-tmp-1759405540.9627213-1039-58980907585550/.source.json _original_basename=.y0sx5pzd follow=False checksum=a908ef151ded3a33ae6c9ac8be72a35e5e33b9dc backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:45:42 compute-0 sudo[101276]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:42 compute-0 sudo[101428]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hbwbbnemexlumodoxigtvoeyyypykqow ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405542.2056944-1084-239075662015734/AnsiballZ_file.py'
Oct 02 11:45:42 compute-0 sudo[101428]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:42 compute-0 python3.9[101430]: ansible-ansible.builtin.file Invoked with mode=0755 path=/var/lib/edpm-config/container-startup-config/ovn_metadata_agent state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:45:42 compute-0 sudo[101428]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:43 compute-0 sudo[101580]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ekjtwkuvgjwezbsqeisiskcqaoqfcpoh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405543.014723-1108-117500703866922/AnsiballZ_stat.py'
Oct 02 11:45:43 compute-0 sudo[101580]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:43 compute-0 sudo[101580]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:43 compute-0 sudo[101703]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xopxzfwknhybrzbumdznxcgzsesamjel ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405543.014723-1108-117500703866922/AnsiballZ_copy.py'
Oct 02 11:45:43 compute-0 sudo[101703]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:44 compute-0 sudo[101703]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:44 compute-0 sudo[101855]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nerslzkewptommvenzydklwanaotmwor ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405544.5616698-1159-235204150448142/AnsiballZ_container_config_data.py'
Oct 02 11:45:44 compute-0 sudo[101855]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:45 compute-0 python3.9[101857]: ansible-container_config_data Invoked with config_overrides={} config_path=/var/lib/edpm-config/container-startup-config/ovn_metadata_agent config_pattern=*.json debug=False
Oct 02 11:45:45 compute-0 sudo[101855]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:45 compute-0 sudo[102007]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-soivjdphawsozyfawmasgdcwihdggvjk ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405545.4747884-1186-135981357792758/AnsiballZ_container_config_hash.py'
Oct 02 11:45:45 compute-0 sudo[102007]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:46 compute-0 python3.9[102009]: ansible-container_config_hash Invoked with check_mode=False config_vol_prefix=/var/lib/config-data
Oct 02 11:45:46 compute-0 sudo[102007]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:46 compute-0 sudo[102159]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-iphajxbpxhlpojzcgrmlqjoznxdbygdc ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405546.401813-1213-65408214499535/AnsiballZ_podman_container_info.py'
Oct 02 11:45:46 compute-0 sudo[102159]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:47 compute-0 python3.9[102161]: ansible-containers.podman.podman_container_info Invoked with executable=podman name=None
Oct 02 11:45:47 compute-0 sudo[102159]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:48 compute-0 sudo[102337]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dtjuyolnsyfdcoryobtlcfipnmhigdsh ; /usr/bin/python3 /home/zuul/.ansible/tmp/ansible-tmp-1759405547.771871-1252-127380507099437/AnsiballZ_edpm_container_manage.py'
Oct 02 11:45:48 compute-0 sudo[102337]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:48 compute-0 python3[102339]: ansible-edpm_container_manage Invoked with concurrency=1 config_dir=/var/lib/edpm-config/container-startup-config/ovn_metadata_agent config_id=ovn_metadata_agent config_overrides={} config_patterns=*.json log_base_path=/var/log/containers/stdouts debug=False
Oct 02 11:45:54 compute-0 podman[102351]: 2025-10-02 11:45:54.49414483 +0000 UTC m=+5.816836333 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 11:45:54 compute-0 podman[102448]: 2025-10-02 11:45:54.623129601 +0000 UTC m=+0.047959416 container create 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_id=ovn_metadata_agent, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, io.buildah.version=1.41.3, 
org.label-schema.schema-version=1.0, tcib_managed=true, container_name=ovn_metadata_agent, org.label-schema.build-date=20251001, managed_by=edpm_ansible, maintainer=OpenStack Kubernetes Operator team)
Oct 02 11:45:54 compute-0 podman[102448]: 2025-10-02 11:45:54.59758053 +0000 UTC m=+0.022410405 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 11:45:54 compute-0 python3[102339]: ansible-edpm_container_manage PODMAN-CONTAINER-DEBUG: podman create --name ovn_metadata_agent --cgroupns=host --conmon-pidfile /run/ovn_metadata_agent.pid --env KOLLA_CONFIG_STRATEGY=COPY_ALWAYS --env EDPM_CONFIG_HASH=509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a --healthcheck-command /openstack/healthcheck --label config_id=ovn_metadata_agent --label container_name=ovn_metadata_agent --label managed_by=edpm_ansible --label config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']} --log-driver journald --log-level info --network host 
--pid host --privileged=True --user root --volume /run/openvswitch:/run/openvswitch:z --volume /var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z --volume /run/netns:/run/netns:shared --volume /var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro --volume /var/lib/neutron:/var/lib/neutron:shared,z --volume /var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro --volume /var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro --volume /var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z --volume /var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z --volume /var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z --volume /var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z --volume /var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 11:45:54 compute-0 sudo[102337]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:56 compute-0 sudo[102637]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fnqqqmgozfrelslrfcnlaooibrdkmqkj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405556.1084342-1276-170328228154549/AnsiballZ_stat.py'
Oct 02 11:45:56 compute-0 sudo[102637]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:56 compute-0 python3.9[102639]: ansible-ansible.builtin.stat Invoked with path=/etc/sysconfig/podman_drop_in follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:45:56 compute-0 sudo[102637]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:57 compute-0 sudo[102791]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-imgutynvcnberaziwtavteiuvfxfnyok ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405556.8825867-1303-267806203156828/AnsiballZ_file.py'
Oct 02 11:45:57 compute-0 sudo[102791]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:57 compute-0 python3.9[102793]: ansible-file Invoked with path=/etc/systemd/system/edpm_ovn_metadata_agent.requires state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:45:57 compute-0 sudo[102791]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:57 compute-0 sudo[102877]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qfplhvqtkxcgrbldgjyfbbvmxmlhdzay ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405556.8825867-1303-267806203156828/AnsiballZ_stat.py'
Oct 02 11:45:57 compute-0 sudo[102877]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:57 compute-0 podman[102841]: 2025-10-02 11:45:57.583071332 +0000 UTC m=+0.078541829 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller, container_name=ovn_controller, org.label-schema.license=GPLv2)
Oct 02 11:45:57 compute-0 python3.9[102884]: ansible-stat Invoked with path=/etc/systemd/system/edpm_ovn_metadata_agent_healthcheck.timer follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:45:57 compute-0 sudo[102877]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:58 compute-0 sudo[103041]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vqoakqlqgeiyvrwwkjknucuqovfpzjqu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405557.7942789-1303-123041129918847/AnsiballZ_copy.py'
Oct 02 11:45:58 compute-0 sudo[103041]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:58 compute-0 python3.9[103043]: ansible-copy Invoked with src=/home/zuul/.ansible/tmp/ansible-tmp-1759405557.7942789-1303-123041129918847/source dest=/etc/systemd/system/edpm_ovn_metadata_agent.service mode=0644 owner=root group=root backup=False force=True remote_src=False follow=False unsafe_writes=False _original_basename=None content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None checksum=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:45:58 compute-0 sudo[103041]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:58 compute-0 sudo[103117]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-klgjbouojxhbwwjjdaodnslzlwhqyqtg ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405557.7942789-1303-123041129918847/AnsiballZ_systemd.py'
Oct 02 11:45:58 compute-0 sudo[103117]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:58 compute-0 python3.9[103119]: ansible-systemd Invoked with daemon_reload=True daemon_reexec=False scope=system no_block=False name=None state=None enabled=None force=None masked=None
Oct 02 11:45:58 compute-0 systemd[1]: Reloading.
Oct 02 11:45:59 compute-0 systemd-sysv-generator[103149]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:45:59 compute-0 systemd-rc-local-generator[103146]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:45:59 compute-0 sudo[103117]: pam_unix(sudo:session): session closed for user root
Oct 02 11:45:59 compute-0 sudo[103229]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cqajzkrxzlhsellxukzsftihbozzoqdl ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405557.7942789-1303-123041129918847/AnsiballZ_systemd.py'
Oct 02 11:45:59 compute-0 sudo[103229]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:45:59 compute-0 python3.9[103231]: ansible-systemd Invoked with state=restarted name=edpm_ovn_metadata_agent.service enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:45:59 compute-0 systemd[1]: Reloading.
Oct 02 11:45:59 compute-0 systemd-sysv-generator[103266]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:45:59 compute-0 systemd-rc-local-generator[103260]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:46:00 compute-0 systemd[1]: Starting ovn_metadata_agent container...
Oct 02 11:46:00 compute-0 systemd[1]: Started libcrun container.
Oct 02 11:46:00 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/e843323166b6178016afad9bb5be95f8f18eb3477dfffdd867bf300056f7455c/merged/etc/neutron.conf.d supports timestamps until 2038 (0x7fffffff)
Oct 02 11:46:00 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/e843323166b6178016afad9bb5be95f8f18eb3477dfffdd867bf300056f7455c/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 11:46:00 compute-0 systemd[1]: Started /usr/bin/podman healthcheck run 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3.
Oct 02 11:46:00 compute-0 podman[103273]: 2025-10-02 11:46:00.299694408 +0000 UTC m=+0.203911428 container init 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, config_id=ovn_metadata_agent, managed_by=edpm_ansible, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', 
'/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 11:46:00 compute-0 ovn_metadata_agent[103289]: + sudo -E kolla_set_configs
Oct 02 11:46:00 compute-0 podman[103273]: 2025-10-02 11:46:00.333458607 +0000 UTC m=+0.237675667 container start 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, 
org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=ovn_metadata_agent, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 11:46:00 compute-0 edpm-start-podman-container[103273]: ovn_metadata_agent
Oct 02 11:46:00 compute-0 edpm-start-podman-container[103272]: Creating additional drop-in dependency for "ovn_metadata_agent" (02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3)
Oct 02 11:46:00 compute-0 podman[103296]: 2025-10-02 11:46:00.39785406 +0000 UTC m=+0.055531767 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, container_name=ovn_metadata_agent, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_id=ovn_metadata_agent, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, io.buildah.version=1.41.3)
Oct 02 11:46:00 compute-0 ovn_metadata_agent[103289]: INFO:__main__:Loading config file at /var/lib/kolla/config_files/config.json
Oct 02 11:46:00 compute-0 ovn_metadata_agent[103289]: INFO:__main__:Validating config file
Oct 02 11:46:00 compute-0 ovn_metadata_agent[103289]: INFO:__main__:Kolla config strategy set to: COPY_ALWAYS
Oct 02 11:46:00 compute-0 ovn_metadata_agent[103289]: INFO:__main__:Copying service configuration files
Oct 02 11:46:00 compute-0 ovn_metadata_agent[103289]: INFO:__main__:Deleting /etc/neutron/rootwrap.conf
Oct 02 11:46:00 compute-0 ovn_metadata_agent[103289]: INFO:__main__:Copying /etc/neutron.conf.d/01-rootwrap.conf to /etc/neutron/rootwrap.conf
Oct 02 11:46:00 compute-0 ovn_metadata_agent[103289]: INFO:__main__:Setting permission for /etc/neutron/rootwrap.conf
Oct 02 11:46:00 compute-0 ovn_metadata_agent[103289]: INFO:__main__:Writing out command to execute
Oct 02 11:46:00 compute-0 ovn_metadata_agent[103289]: INFO:__main__:Setting permission for /var/lib/neutron
Oct 02 11:46:00 compute-0 ovn_metadata_agent[103289]: INFO:__main__:Setting permission for /var/lib/neutron/kill_scripts
Oct 02 11:46:00 compute-0 ovn_metadata_agent[103289]: INFO:__main__:Setting permission for /var/lib/neutron/ovn-metadata-proxy
Oct 02 11:46:00 compute-0 ovn_metadata_agent[103289]: INFO:__main__:Setting permission for /var/lib/neutron/external
Oct 02 11:46:00 compute-0 ovn_metadata_agent[103289]: INFO:__main__:Setting permission for /var/lib/neutron/ovn_metadata_haproxy_wrapper
Oct 02 11:46:00 compute-0 ovn_metadata_agent[103289]: INFO:__main__:Setting permission for /var/lib/neutron/kill_scripts/haproxy-kill
Oct 02 11:46:00 compute-0 ovn_metadata_agent[103289]: INFO:__main__:Setting permission for /var/lib/neutron/external/pids
Oct 02 11:46:00 compute-0 systemd[1]: Reloading.
Oct 02 11:46:00 compute-0 ovn_metadata_agent[103289]: ++ cat /run_command
Oct 02 11:46:00 compute-0 ovn_metadata_agent[103289]: + CMD=neutron-ovn-metadata-agent
Oct 02 11:46:00 compute-0 ovn_metadata_agent[103289]: + ARGS=
Oct 02 11:46:00 compute-0 ovn_metadata_agent[103289]: + sudo kolla_copy_cacerts
Oct 02 11:46:00 compute-0 ovn_metadata_agent[103289]: Running command: 'neutron-ovn-metadata-agent'
Oct 02 11:46:00 compute-0 ovn_metadata_agent[103289]: + [[ ! -n '' ]]
Oct 02 11:46:00 compute-0 ovn_metadata_agent[103289]: + . kolla_extend_start
Oct 02 11:46:00 compute-0 ovn_metadata_agent[103289]: + echo 'Running command: '\''neutron-ovn-metadata-agent'\'''
Oct 02 11:46:00 compute-0 ovn_metadata_agent[103289]: + umask 0022
Oct 02 11:46:00 compute-0 ovn_metadata_agent[103289]: + exec neutron-ovn-metadata-agent
Oct 02 11:46:00 compute-0 systemd-rc-local-generator[103361]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:46:00 compute-0 systemd-sysv-generator[103365]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:46:00 compute-0 systemd[1]: Started ovn_metadata_agent container.
Oct 02 11:46:00 compute-0 sudo[103229]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:01 compute-0 sshd-session[94943]: Connection closed by 192.168.122.30 port 37164
Oct 02 11:46:01 compute-0 sshd-session[94940]: pam_unix(sshd:session): session closed for user zuul
Oct 02 11:46:01 compute-0 systemd[1]: session-22.scope: Deactivated successfully.
Oct 02 11:46:01 compute-0 systemd[1]: session-22.scope: Consumed 47.510s CPU time.
Oct 02 11:46:01 compute-0 systemd-logind[827]: Session 22 logged out. Waiting for processes to exit.
Oct 02 11:46:01 compute-0 systemd-logind[827]: Removed session 22.
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.151 103294 INFO neutron.common.config [-] Logging enabled!
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.152 103294 INFO neutron.common.config [-] /usr/bin/neutron-ovn-metadata-agent version 22.2.2.dev43
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.152 103294 DEBUG neutron.common.config [-] command line: /usr/bin/neutron-ovn-metadata-agent setup_logging /usr/lib/python3.9/site-packages/neutron/common/config.py:123
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.152 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ******************************************************************************** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2589
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.152 103294 DEBUG neutron.agent.ovn.metadata_agent [-] Configuration options gathered from: log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2590
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.152 103294 DEBUG neutron.agent.ovn.metadata_agent [-] command line args: [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2591
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.153 103294 DEBUG neutron.agent.ovn.metadata_agent [-] config files: ['/etc/neutron/neutron.conf'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2592
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.153 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ================================================================================ log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2594
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.153 103294 DEBUG neutron.agent.ovn.metadata_agent [-] agent_down_time                = 75 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.153 103294 DEBUG neutron.agent.ovn.metadata_agent [-] allow_bulk                     = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.153 103294 DEBUG neutron.agent.ovn.metadata_agent [-] api_extensions_path            =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.153 103294 DEBUG neutron.agent.ovn.metadata_agent [-] api_paste_config               = api-paste.ini log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.153 103294 DEBUG neutron.agent.ovn.metadata_agent [-] api_workers                    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.153 103294 DEBUG neutron.agent.ovn.metadata_agent [-] auth_ca_cert                   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.153 103294 DEBUG neutron.agent.ovn.metadata_agent [-] auth_strategy                  = keystone log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.154 103294 DEBUG neutron.agent.ovn.metadata_agent [-] backlog                        = 4096 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.154 103294 DEBUG neutron.agent.ovn.metadata_agent [-] base_mac                       = fa:16:3e:00:00:00 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.154 103294 DEBUG neutron.agent.ovn.metadata_agent [-] bind_host                      = 0.0.0.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.154 103294 DEBUG neutron.agent.ovn.metadata_agent [-] bind_port                      = 9696 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.154 103294 DEBUG neutron.agent.ovn.metadata_agent [-] client_socket_timeout          = 900 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.154 103294 DEBUG neutron.agent.ovn.metadata_agent [-] config_dir                     = ['/etc/neutron.conf.d'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.154 103294 DEBUG neutron.agent.ovn.metadata_agent [-] config_file                    = ['/etc/neutron/neutron.conf'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.154 103294 DEBUG neutron.agent.ovn.metadata_agent [-] config_source                  = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.154 103294 DEBUG neutron.agent.ovn.metadata_agent [-] control_exchange               = neutron log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.155 103294 DEBUG neutron.agent.ovn.metadata_agent [-] core_plugin                    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.155 103294 DEBUG neutron.agent.ovn.metadata_agent [-] debug                          = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.155 103294 DEBUG neutron.agent.ovn.metadata_agent [-] default_availability_zones     = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.155 103294 DEBUG neutron.agent.ovn.metadata_agent [-] default_log_levels             = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'OFPHandler=INFO', 'OfctlService=INFO', 'os_ken.base.app_manager=INFO', 'os_ken.controller.controller=INFO'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.155 103294 DEBUG neutron.agent.ovn.metadata_agent [-] dhcp_agent_notification        = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.155 103294 DEBUG neutron.agent.ovn.metadata_agent [-] dhcp_lease_duration            = 86400 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.155 103294 DEBUG neutron.agent.ovn.metadata_agent [-] dhcp_load_type                 = networks log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.155 103294 DEBUG neutron.agent.ovn.metadata_agent [-] dns_domain                     = openstacklocal log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.155 103294 DEBUG neutron.agent.ovn.metadata_agent [-] enable_new_agents              = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.156 103294 DEBUG neutron.agent.ovn.metadata_agent [-] enable_traditional_dhcp        = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.156 103294 DEBUG neutron.agent.ovn.metadata_agent [-] external_dns_driver            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.156 103294 DEBUG neutron.agent.ovn.metadata_agent [-] external_pids                  = /var/lib/neutron/external/pids log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.156 103294 DEBUG neutron.agent.ovn.metadata_agent [-] filter_validation              = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.156 103294 DEBUG neutron.agent.ovn.metadata_agent [-] global_physnet_mtu             = 1500 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.156 103294 DEBUG neutron.agent.ovn.metadata_agent [-] host                           = compute-0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.156 103294 DEBUG neutron.agent.ovn.metadata_agent [-] http_retries                   = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.156 103294 DEBUG neutron.agent.ovn.metadata_agent [-] instance_format                = [instance: %(uuid)s]  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.157 103294 DEBUG neutron.agent.ovn.metadata_agent [-] instance_uuid_format           = [instance: %(uuid)s]  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.157 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ipam_driver                    = internal log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.157 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ipv6_pd_enabled                = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.157 103294 DEBUG neutron.agent.ovn.metadata_agent [-] log_config_append              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.157 103294 DEBUG neutron.agent.ovn.metadata_agent [-] log_date_format                = %Y-%m-%d %H:%M:%S log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.157 103294 DEBUG neutron.agent.ovn.metadata_agent [-] log_dir                        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.157 103294 DEBUG neutron.agent.ovn.metadata_agent [-] log_file                       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.157 103294 DEBUG neutron.agent.ovn.metadata_agent [-] log_rotate_interval            = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.157 103294 DEBUG neutron.agent.ovn.metadata_agent [-] log_rotate_interval_type       = days log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.157 103294 DEBUG neutron.agent.ovn.metadata_agent [-] log_rotation_type              = none log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.158 103294 DEBUG neutron.agent.ovn.metadata_agent [-] logging_context_format_string  = %(asctime)s.%(msecs)03d %(process)d %(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(user_identity)s] %(instance)s%(message)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.158 103294 DEBUG neutron.agent.ovn.metadata_agent [-] logging_debug_format_suffix    = %(funcName)s %(pathname)s:%(lineno)d log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.158 103294 DEBUG neutron.agent.ovn.metadata_agent [-] logging_default_format_string  = %(asctime)s.%(msecs)03d %(process)d %(levelname)s %(name)s [-] %(instance)s%(message)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.158 103294 DEBUG neutron.agent.ovn.metadata_agent [-] logging_exception_prefix       = %(asctime)s.%(msecs)03d %(process)d ERROR %(name)s %(instance)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.158 103294 DEBUG neutron.agent.ovn.metadata_agent [-] logging_user_identity_format   = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.158 103294 DEBUG neutron.agent.ovn.metadata_agent [-] max_dns_nameservers            = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.158 103294 DEBUG neutron.agent.ovn.metadata_agent [-] max_header_line                = 16384 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.158 103294 DEBUG neutron.agent.ovn.metadata_agent [-] max_logfile_count              = 30 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.158 103294 DEBUG neutron.agent.ovn.metadata_agent [-] max_logfile_size_mb            = 200 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.158 103294 DEBUG neutron.agent.ovn.metadata_agent [-] max_subnet_host_routes         = 20 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.159 103294 DEBUG neutron.agent.ovn.metadata_agent [-] metadata_backlog               = 4096 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.159 103294 DEBUG neutron.agent.ovn.metadata_agent [-] metadata_proxy_group           =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.159 103294 DEBUG neutron.agent.ovn.metadata_agent [-] metadata_proxy_shared_secret   = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.159 103294 DEBUG neutron.agent.ovn.metadata_agent [-] metadata_proxy_socket          = /var/lib/neutron/metadata_proxy log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.159 103294 DEBUG neutron.agent.ovn.metadata_agent [-] metadata_proxy_socket_mode     = deduce log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.159 103294 DEBUG neutron.agent.ovn.metadata_agent [-] metadata_proxy_user            =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.159 103294 DEBUG neutron.agent.ovn.metadata_agent [-] metadata_workers               = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.159 103294 DEBUG neutron.agent.ovn.metadata_agent [-] network_link_prefix            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.160 103294 DEBUG neutron.agent.ovn.metadata_agent [-] notify_nova_on_port_data_changes = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.160 103294 DEBUG neutron.agent.ovn.metadata_agent [-] notify_nova_on_port_status_changes = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.160 103294 DEBUG neutron.agent.ovn.metadata_agent [-] nova_client_cert               =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.160 103294 DEBUG neutron.agent.ovn.metadata_agent [-] nova_client_priv_key           =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.160 103294 DEBUG neutron.agent.ovn.metadata_agent [-] nova_metadata_host             = nova-metadata-cell1-internal.openstack.svc log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.160 103294 DEBUG neutron.agent.ovn.metadata_agent [-] nova_metadata_insecure         = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.160 103294 DEBUG neutron.agent.ovn.metadata_agent [-] nova_metadata_port             = 8775 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.160 103294 DEBUG neutron.agent.ovn.metadata_agent [-] nova_metadata_protocol         = https log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.160 103294 DEBUG neutron.agent.ovn.metadata_agent [-] pagination_max_limit           = -1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.161 103294 DEBUG neutron.agent.ovn.metadata_agent [-] periodic_fuzzy_delay           = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.161 103294 DEBUG neutron.agent.ovn.metadata_agent [-] periodic_interval              = 40 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.161 103294 DEBUG neutron.agent.ovn.metadata_agent [-] publish_errors                 = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.161 103294 DEBUG neutron.agent.ovn.metadata_agent [-] rate_limit_burst               = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.161 103294 DEBUG neutron.agent.ovn.metadata_agent [-] rate_limit_except_level        = CRITICAL log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.161 103294 DEBUG neutron.agent.ovn.metadata_agent [-] rate_limit_interval            = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.161 103294 DEBUG neutron.agent.ovn.metadata_agent [-] retry_until_window             = 30 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.161 103294 DEBUG neutron.agent.ovn.metadata_agent [-] rpc_resources_processing_step  = 20 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.161 103294 DEBUG neutron.agent.ovn.metadata_agent [-] rpc_response_max_timeout       = 600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.161 103294 DEBUG neutron.agent.ovn.metadata_agent [-] rpc_state_report_workers       = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.162 103294 DEBUG neutron.agent.ovn.metadata_agent [-] rpc_workers                    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.162 103294 DEBUG neutron.agent.ovn.metadata_agent [-] send_events_interval           = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.162 103294 DEBUG neutron.agent.ovn.metadata_agent [-] service_plugins                = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.162 103294 DEBUG neutron.agent.ovn.metadata_agent [-] setproctitle                   = on log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.162 103294 DEBUG neutron.agent.ovn.metadata_agent [-] state_path                     = /var/lib/neutron log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.162 103294 DEBUG neutron.agent.ovn.metadata_agent [-] syslog_log_facility            = syslog log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.162 103294 DEBUG neutron.agent.ovn.metadata_agent [-] tcp_keepidle                   = 600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.162 103294 DEBUG neutron.agent.ovn.metadata_agent [-] transport_url                  = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.162 103294 DEBUG neutron.agent.ovn.metadata_agent [-] use_eventlog                   = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.163 103294 DEBUG neutron.agent.ovn.metadata_agent [-] use_journal                    = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.163 103294 DEBUG neutron.agent.ovn.metadata_agent [-] use_json                       = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.163 103294 DEBUG neutron.agent.ovn.metadata_agent [-] use_ssl                        = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.163 103294 DEBUG neutron.agent.ovn.metadata_agent [-] use_stderr                     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.163 103294 DEBUG neutron.agent.ovn.metadata_agent [-] use_syslog                     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.163 103294 DEBUG neutron.agent.ovn.metadata_agent [-] vlan_transparent               = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.163 103294 DEBUG neutron.agent.ovn.metadata_agent [-] watch_log_file                 = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.163 103294 DEBUG neutron.agent.ovn.metadata_agent [-] wsgi_default_pool_size         = 100 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.163 103294 DEBUG neutron.agent.ovn.metadata_agent [-] wsgi_keep_alive                = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.163 103294 DEBUG neutron.agent.ovn.metadata_agent [-] wsgi_log_format                = %(client_ip)s "%(request_line)s" status: %(status_code)s  len: %(body_length)s time: %(wall_seconds).7f log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.164 103294 DEBUG neutron.agent.ovn.metadata_agent [-] wsgi_server_debug              = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.164 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_concurrency.disable_process_locking = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.164 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_concurrency.lock_path     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.164 103294 DEBUG neutron.agent.ovn.metadata_agent [-] profiler.connection_string     = messaging:// log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.164 103294 DEBUG neutron.agent.ovn.metadata_agent [-] profiler.enabled               = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.164 103294 DEBUG neutron.agent.ovn.metadata_agent [-] profiler.es_doc_type           = notification log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.164 103294 DEBUG neutron.agent.ovn.metadata_agent [-] profiler.es_scroll_size        = 10000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.164 103294 DEBUG neutron.agent.ovn.metadata_agent [-] profiler.es_scroll_time        = 2m log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.164 103294 DEBUG neutron.agent.ovn.metadata_agent [-] profiler.filter_error_trace    = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.165 103294 DEBUG neutron.agent.ovn.metadata_agent [-] profiler.hmac_keys             = SECRET_KEY log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.165 103294 DEBUG neutron.agent.ovn.metadata_agent [-] profiler.sentinel_service_name = mymaster log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.165 103294 DEBUG neutron.agent.ovn.metadata_agent [-] profiler.socket_timeout        = 0.1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.165 103294 DEBUG neutron.agent.ovn.metadata_agent [-] profiler.trace_sqlalchemy      = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.165 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_policy.enforce_new_defaults = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.165 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_policy.enforce_scope      = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.165 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_policy.policy_default_rule = default log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.165 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_policy.policy_dirs        = ['policy.d'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.166 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_policy.policy_file        = policy.yaml log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.166 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_policy.remote_content_type = application/x-www-form-urlencoded log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.166 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_policy.remote_ssl_ca_crt_file = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.166 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_policy.remote_ssl_client_crt_file = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.166 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_policy.remote_ssl_client_key_file = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.166 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_policy.remote_ssl_verify_server_crt = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.166 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_metrics.metrics_buffer_size = 1000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.166 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_metrics.metrics_enabled = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.166 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_metrics.metrics_process_name =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.167 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.167 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.167 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.167 103294 DEBUG neutron.agent.ovn.metadata_agent [-] service_providers.service_provider = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.167 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep.capabilities           = [21, 12, 1, 2, 19] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.167 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep.group                  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.167 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep.helper_command         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.167 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep.logger_name            = oslo_privsep.daemon log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.167 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep.thread_pool_size       = 8 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.167 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep.user                   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.168 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_dhcp_release.capabilities = [21, 12] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.168 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_dhcp_release.group     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.168 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_dhcp_release.helper_command = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.168 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_dhcp_release.logger_name = oslo_privsep.daemon log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.168 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_dhcp_release.thread_pool_size = 8 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.168 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_dhcp_release.user      = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.168 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_ovs_vsctl.capabilities = [21, 12] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.168 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_ovs_vsctl.group        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.168 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_ovs_vsctl.helper_command = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.169 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_ovs_vsctl.logger_name  = oslo_privsep.daemon log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.169 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_ovs_vsctl.thread_pool_size = 8 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.169 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_ovs_vsctl.user         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.169 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_namespace.capabilities = [21] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.169 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_namespace.group        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.169 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_namespace.helper_command = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.169 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_namespace.logger_name  = oslo_privsep.daemon log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.169 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_namespace.thread_pool_size = 8 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.169 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_namespace.user         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.170 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_conntrack.capabilities = [12] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.170 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_conntrack.group        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.170 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_conntrack.helper_command = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.170 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_conntrack.logger_name  = oslo_privsep.daemon log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.170 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_conntrack.thread_pool_size = 8 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.170 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_conntrack.user         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.170 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_link.capabilities      = [12, 21] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.170 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_link.group             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.171 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_link.helper_command    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.171 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_link.logger_name       = oslo_privsep.daemon log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.171 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_link.thread_pool_size  = 8 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.171 103294 DEBUG neutron.agent.ovn.metadata_agent [-] privsep_link.user              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.171 103294 DEBUG neutron.agent.ovn.metadata_agent [-] AGENT.check_child_processes_action = respawn log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.171 103294 DEBUG neutron.agent.ovn.metadata_agent [-] AGENT.check_child_processes_interval = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.172 103294 DEBUG neutron.agent.ovn.metadata_agent [-] AGENT.comment_iptables_rules   = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.172 103294 DEBUG neutron.agent.ovn.metadata_agent [-] AGENT.debug_iptables_rules     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.172 103294 DEBUG neutron.agent.ovn.metadata_agent [-] AGENT.kill_scripts_path        = /etc/neutron/kill_scripts/ log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.172 103294 DEBUG neutron.agent.ovn.metadata_agent [-] AGENT.root_helper              = sudo neutron-rootwrap /etc/neutron/rootwrap.conf log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.172 103294 DEBUG neutron.agent.ovn.metadata_agent [-] AGENT.root_helper_daemon       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.172 103294 DEBUG neutron.agent.ovn.metadata_agent [-] AGENT.use_helper_for_ns_read   = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.172 103294 DEBUG neutron.agent.ovn.metadata_agent [-] AGENT.use_random_fully         = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.172 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_versionedobjects.fatal_exception_format_errors = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.172 103294 DEBUG neutron.agent.ovn.metadata_agent [-] QUOTAS.default_quota           = -1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.173 103294 DEBUG neutron.agent.ovn.metadata_agent [-] QUOTAS.quota_driver            = neutron.db.quota.driver_nolock.DbQuotaNoLockDriver log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.173 103294 DEBUG neutron.agent.ovn.metadata_agent [-] QUOTAS.quota_network           = 100 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.173 103294 DEBUG neutron.agent.ovn.metadata_agent [-] QUOTAS.quota_port              = 500 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.173 103294 DEBUG neutron.agent.ovn.metadata_agent [-] QUOTAS.quota_security_group    = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.173 103294 DEBUG neutron.agent.ovn.metadata_agent [-] QUOTAS.quota_security_group_rule = 100 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.173 103294 DEBUG neutron.agent.ovn.metadata_agent [-] QUOTAS.quota_subnet            = 100 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.173 103294 DEBUG neutron.agent.ovn.metadata_agent [-] QUOTAS.track_quota_usage       = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.173 103294 DEBUG neutron.agent.ovn.metadata_agent [-] nova.auth_section              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.173 103294 DEBUG neutron.agent.ovn.metadata_agent [-] nova.auth_type                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.174 103294 DEBUG neutron.agent.ovn.metadata_agent [-] nova.cafile                    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.174 103294 DEBUG neutron.agent.ovn.metadata_agent [-] nova.certfile                  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.174 103294 DEBUG neutron.agent.ovn.metadata_agent [-] nova.collect_timing            = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.174 103294 DEBUG neutron.agent.ovn.metadata_agent [-] nova.endpoint_type             = public log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.174 103294 DEBUG neutron.agent.ovn.metadata_agent [-] nova.insecure                  = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.174 103294 DEBUG neutron.agent.ovn.metadata_agent [-] nova.keyfile                   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.174 103294 DEBUG neutron.agent.ovn.metadata_agent [-] nova.region_name               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.174 103294 DEBUG neutron.agent.ovn.metadata_agent [-] nova.split_loggers             = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.175 103294 DEBUG neutron.agent.ovn.metadata_agent [-] nova.timeout                   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.175 103294 DEBUG neutron.agent.ovn.metadata_agent [-] placement.auth_section         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.175 103294 DEBUG neutron.agent.ovn.metadata_agent [-] placement.auth_type            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.175 103294 DEBUG neutron.agent.ovn.metadata_agent [-] placement.cafile               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.175 103294 DEBUG neutron.agent.ovn.metadata_agent [-] placement.certfile             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.175 103294 DEBUG neutron.agent.ovn.metadata_agent [-] placement.collect_timing       = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.175 103294 DEBUG neutron.agent.ovn.metadata_agent [-] placement.endpoint_type        = public log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.175 103294 DEBUG neutron.agent.ovn.metadata_agent [-] placement.insecure             = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.175 103294 DEBUG neutron.agent.ovn.metadata_agent [-] placement.keyfile              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.176 103294 DEBUG neutron.agent.ovn.metadata_agent [-] placement.region_name          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.176 103294 DEBUG neutron.agent.ovn.metadata_agent [-] placement.split_loggers        = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.176 103294 DEBUG neutron.agent.ovn.metadata_agent [-] placement.timeout              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.176 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ironic.auth_section            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.176 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ironic.auth_type               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.176 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ironic.cafile                  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.176 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ironic.certfile                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.176 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ironic.collect_timing          = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.176 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ironic.connect_retries         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.176 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ironic.connect_retry_delay     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.177 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ironic.enable_notifications    = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.177 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ironic.endpoint_override       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.177 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ironic.insecure                = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.177 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ironic.interface               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.177 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ironic.keyfile                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.177 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ironic.max_version             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.177 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ironic.min_version             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.178 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ironic.region_name             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.178 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ironic.service_name            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.178 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ironic.service_type            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.178 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ironic.split_loggers           = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.178 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ironic.status_code_retries     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.178 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ironic.status_code_retry_delay = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.178 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ironic.timeout                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.178 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ironic.valid_interfaces        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.178 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ironic.version                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.178 103294 DEBUG neutron.agent.ovn.metadata_agent [-] cli_script.dry_run             = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.179 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ovn.allow_stateless_action_supported = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.179 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ovn.dhcp_default_lease_time    = 43200 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.179 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ovn.disable_ovn_dhcp_for_baremetal_ports = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.179 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ovn.dns_servers                = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.179 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ovn.enable_distributed_floating_ip = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.179 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ovn.neutron_sync_mode          = log log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.179 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ovn.ovn_dhcp4_global_options   = {} log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.179 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ovn.ovn_dhcp6_global_options   = {} log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.180 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ovn.ovn_emit_need_to_frag      = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.180 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ovn.ovn_l3_mode                = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.180 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ovn.ovn_l3_scheduler           = leastloaded log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.180 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ovn.ovn_metadata_enabled       = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.180 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ovn.ovn_nb_ca_cert             =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.180 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ovn.ovn_nb_certificate         =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.180 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ovn.ovn_nb_connection          = tcp:127.0.0.1:6641 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.180 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ovn.ovn_nb_private_key         =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.180 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ovn.ovn_sb_ca_cert             = /etc/pki/tls/certs/ovndbca.crt log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.180 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ovn.ovn_sb_certificate         = /etc/pki/tls/certs/ovndb.crt log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.181 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ovn.ovn_sb_connection          = ssl:ovsdbserver-sb.openstack.svc:6642 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.181 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ovn.ovn_sb_private_key         = /etc/pki/tls/private/ovndb.key log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.181 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ovn.ovsdb_connection_timeout   = 180 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.181 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ovn.ovsdb_log_level            = INFO log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.181 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ovn.ovsdb_probe_interval       = 60000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.181 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ovn.ovsdb_retry_max_interval   = 180 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.181 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ovn.vhost_sock_dir             = /var/run/openvswitch log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.181 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ovn.vif_type                   = ovs log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.181 103294 DEBUG neutron.agent.ovn.metadata_agent [-] OVS.bridge_mac_table_size      = 50000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.182 103294 DEBUG neutron.agent.ovn.metadata_agent [-] OVS.igmp_snooping_enable       = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.182 103294 DEBUG neutron.agent.ovn.metadata_agent [-] OVS.ovsdb_timeout              = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.182 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ovs.ovsdb_connection           = tcp:127.0.0.1:6640 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.182 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ovs.ovsdb_connection_timeout   = 180 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.182 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.amqp_auto_delete = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.182 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.amqp_durable_queues = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.182 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.conn_pool_min_size = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.182 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.conn_pool_ttl = 1200 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.182 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.direct_mandatory_flag = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.183 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.enable_cancel_on_failover = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.183 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.heartbeat_in_pthread = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.183 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.heartbeat_rate = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.183 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.183 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.kombu_compression = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.183 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.kombu_failover_strategy = round-robin log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.183 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.183 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.184 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.rabbit_ha_queues = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.184 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.rabbit_interval_max = 30 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.184 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.184 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.184 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.184 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.184 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.184 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.rabbit_quorum_queue = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.184 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.rabbit_retry_backoff = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.185 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.rabbit_retry_interval = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.185 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.185 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.rpc_conn_pool_size = 30 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.185 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.ssl      = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.185 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.ssl_ca_file =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.185 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.ssl_cert_file =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.185 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.ssl_enforce_fips_mode = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.185 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.ssl_key_file =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.185 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_rabbit.ssl_version =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.185 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_notifications.driver = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.186 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_notifications.retry = -1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.186 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_notifications.topics = ['notifications'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.186 103294 DEBUG neutron.agent.ovn.metadata_agent [-] oslo_messaging_notifications.transport_url = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.186 103294 DEBUG neutron.agent.ovn.metadata_agent [-] ******************************************************************************** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2613
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.194 103294 DEBUG ovsdbapp.backend.ovs_idl [-] Created schema index Bridge.name autocreate_indices /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/__init__.py:106
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.194 103294 DEBUG ovsdbapp.backend.ovs_idl [-] Created schema index Port.name autocreate_indices /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/__init__.py:106
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.194 103294 DEBUG ovsdbapp.backend.ovs_idl [-] Created schema index Interface.name autocreate_indices /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/__init__.py:106
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.195 103294 INFO ovsdbapp.backend.ovs_idl.vlog [-] tcp:127.0.0.1:6640: connecting...
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.195 103294 INFO ovsdbapp.backend.ovs_idl.vlog [-] tcp:127.0.0.1:6640: connected
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.207 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Loaded chassis name c9f3d658-5c7a-4803-9bbb-01adfb7e88ca (UUID: c9f3d658-5c7a-4803-9bbb-01adfb7e88ca) and ovn bridge br-int. _load_config /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:309
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.236 103294 INFO neutron.agent.ovn.metadata.ovsdb [-] Getting OvsdbSbOvnIdl for MetadataAgent with retry
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.236 103294 DEBUG ovsdbapp.backend.ovs_idl [-] Created lookup_table index Chassis.name autocreate_indices /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/__init__.py:87
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.236 103294 DEBUG ovsdbapp.backend.ovs_idl [-] Created schema index Datapath_Binding.tunnel_key autocreate_indices /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/__init__.py:106
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.237 103294 DEBUG ovsdbapp.backend.ovs_idl [-] Created schema index Chassis_Private.name autocreate_indices /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/__init__.py:106
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.242 103294 INFO ovsdbapp.backend.ovs_idl.vlog [-] ssl:ovsdbserver-sb.openstack.svc:6642: connecting...
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.248 103294 INFO ovsdbapp.backend.ovs_idl.vlog [-] ssl:ovsdbserver-sb.openstack.svc:6642: connected
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.253 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched CREATE: ChassisPrivateCreateEvent(events=('create',), table='Chassis_Private', conditions=(('name', '=', 'c9f3d658-5c7a-4803-9bbb-01adfb7e88ca'),), old_conditions=None), priority=20 to row=Chassis_Private(chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], external_ids={}, name=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, nb_cfg_timestamp=1759405504919, nb_cfg=1) old= matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.254 103294 DEBUG neutron_lib.callbacks.manager [-] Subscribe: <bound method MetadataProxyHandler.post_fork_initialize of <neutron.agent.ovn.metadata.server.MetadataProxyHandler object at 0x7ff20b152bb0>> process after_init 55550000, False subscribe /usr/lib/python3.9/site-packages/neutron_lib/callbacks/manager.py:52
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.255 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "singleton_lock" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.255 103294 DEBUG oslo_concurrency.lockutils [-] Acquired lock "singleton_lock" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.255 103294 DEBUG oslo_concurrency.lockutils [-] Releasing lock "singleton_lock" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.255 103294 INFO oslo_service.service [-] Starting 1 workers
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.259 103294 DEBUG oslo_service.service [-] Started child 103399 _start_child /usr/lib/python3.9/site-packages/oslo_service/service.py:575
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.262 103294 INFO oslo.privsep.daemon [-] Running privsep helper: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'privsep-helper', '--config-file', '/etc/neutron/neutron.conf', '--config-dir', '/etc/neutron.conf.d', '--privsep_context', 'neutron.privileged.namespace_cmd', '--privsep_sock_path', '/tmp/tmpbhv2iuz5/privsep.sock']
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.262 103399 DEBUG neutron_lib.callbacks.manager [-] Publish callbacks ['neutron.agent.ovn.metadata.server.MetadataProxyHandler.post_fork_initialize-168621'] for process (None), after_init _notify_loop /usr/lib/python3.9/site-packages/neutron_lib/callbacks/manager.py:184
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.287 103399 INFO neutron.agent.ovn.metadata.ovsdb [-] Getting OvsdbSbOvnIdl for MetadataAgent with retry
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.288 103399 DEBUG ovsdbapp.backend.ovs_idl [-] Created lookup_table index Chassis.name autocreate_indices /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/__init__.py:87
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.288 103399 DEBUG ovsdbapp.backend.ovs_idl [-] Created schema index Datapath_Binding.tunnel_key autocreate_indices /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/__init__.py:106
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.291 103399 INFO ovsdbapp.backend.ovs_idl.vlog [-] ssl:ovsdbserver-sb.openstack.svc:6642: connecting...
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.321 103399 INFO ovsdbapp.backend.ovs_idl.vlog [-] ssl:ovsdbserver-sb.openstack.svc:6642: connected
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.333 103399 INFO eventlet.wsgi.server [-] (103399) wsgi starting up on http:/var/lib/neutron/metadata_proxy
Oct 02 11:46:02 compute-0 kernel: capability: warning: `privsep-helper' uses deprecated v2 capabilities in a way that may be insecure
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.902 103294 INFO oslo.privsep.daemon [-] Spawned new privsep daemon via rootwrap
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.903 103294 DEBUG oslo.privsep.daemon [-] Accepted privsep connection to /tmp/tmpbhv2iuz5/privsep.sock __init__ /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:362
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.787 103404 INFO oslo.privsep.daemon [-] privsep daemon starting
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.791 103404 INFO oslo.privsep.daemon [-] privsep process running with uid/gid: 0/0
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.794 103404 INFO oslo.privsep.daemon [-] privsep process running with capabilities (eff/prm/inh): CAP_SYS_ADMIN/CAP_SYS_ADMIN/none
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.795 103404 INFO oslo.privsep.daemon [-] privsep daemon running as pid 103404
Oct 02 11:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:02.906 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[e4feef4e-a90f-4394-8543-3da49f1e33b0]: (2,) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.391 103404 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "context-manager" by "neutron_lib.db.api._create_context_manager" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.391 103404 DEBUG oslo_concurrency.lockutils [-] Lock "context-manager" acquired by "neutron_lib.db.api._create_context_manager" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.392 103404 DEBUG oslo_concurrency.lockutils [-] Lock "context-manager" "released" by "neutron_lib.db.api._create_context_manager" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.946 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[63510f15-d6ad-4a70-936a-c90872a61501]: (4, []) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.947 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbAddCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, column=external_ids, values=({'neutron:ovn-metadata-id': 'ec37080c-22e0-5d37-93ed-d993f0363b0b'},)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.961 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-bridge': 'br-int'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.967 103294 DEBUG oslo_service.service [-] Full set of CONF: wait /usr/lib/python3.9/site-packages/oslo_service/service.py:649
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.967 103294 DEBUG oslo_service.service [-] ******************************************************************************** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2589
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.967 103294 DEBUG oslo_service.service [-] Configuration options gathered from: log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2590
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.967 103294 DEBUG oslo_service.service [-] command line args: [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2591
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.968 103294 DEBUG oslo_service.service [-] config files: ['/etc/neutron/neutron.conf'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2592
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.968 103294 DEBUG oslo_service.service [-] ================================================================================ log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2594
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.968 103294 DEBUG oslo_service.service [-] agent_down_time                = 75 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.968 103294 DEBUG oslo_service.service [-] allow_bulk                     = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.968 103294 DEBUG oslo_service.service [-] api_extensions_path            =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.968 103294 DEBUG oslo_service.service [-] api_paste_config               = api-paste.ini log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.969 103294 DEBUG oslo_service.service [-] api_workers                    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.969 103294 DEBUG oslo_service.service [-] auth_ca_cert                   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.969 103294 DEBUG oslo_service.service [-] auth_strategy                  = keystone log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.969 103294 DEBUG oslo_service.service [-] backlog                        = 4096 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.969 103294 DEBUG oslo_service.service [-] base_mac                       = fa:16:3e:00:00:00 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.969 103294 DEBUG oslo_service.service [-] bind_host                      = 0.0.0.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.969 103294 DEBUG oslo_service.service [-] bind_port                      = 9696 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.969 103294 DEBUG oslo_service.service [-] client_socket_timeout          = 900 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.970 103294 DEBUG oslo_service.service [-] config_dir                     = ['/etc/neutron.conf.d'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.970 103294 DEBUG oslo_service.service [-] config_file                    = ['/etc/neutron/neutron.conf'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.970 103294 DEBUG oslo_service.service [-] config_source                  = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.970 103294 DEBUG oslo_service.service [-] control_exchange               = neutron log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.970 103294 DEBUG oslo_service.service [-] core_plugin                    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.970 103294 DEBUG oslo_service.service [-] debug                          = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.970 103294 DEBUG oslo_service.service [-] default_availability_zones     = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.970 103294 DEBUG oslo_service.service [-] default_log_levels             = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'OFPHandler=INFO', 'OfctlService=INFO', 'os_ken.base.app_manager=INFO', 'os_ken.controller.controller=INFO'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.971 103294 DEBUG oslo_service.service [-] dhcp_agent_notification        = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.971 103294 DEBUG oslo_service.service [-] dhcp_lease_duration            = 86400 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.971 103294 DEBUG oslo_service.service [-] dhcp_load_type                 = networks log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.971 103294 DEBUG oslo_service.service [-] dns_domain                     = openstacklocal log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.971 103294 DEBUG oslo_service.service [-] enable_new_agents              = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.971 103294 DEBUG oslo_service.service [-] enable_traditional_dhcp        = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.971 103294 DEBUG oslo_service.service [-] external_dns_driver            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.972 103294 DEBUG oslo_service.service [-] external_pids                  = /var/lib/neutron/external/pids log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.972 103294 DEBUG oslo_service.service [-] filter_validation              = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.972 103294 DEBUG oslo_service.service [-] global_physnet_mtu             = 1500 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.972 103294 DEBUG oslo_service.service [-] graceful_shutdown_timeout      = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.972 103294 DEBUG oslo_service.service [-] host                           = compute-0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.972 103294 DEBUG oslo_service.service [-] http_retries                   = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.972 103294 DEBUG oslo_service.service [-] instance_format                = [instance: %(uuid)s]  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.972 103294 DEBUG oslo_service.service [-] instance_uuid_format           = [instance: %(uuid)s]  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.973 103294 DEBUG oslo_service.service [-] ipam_driver                    = internal log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.973 103294 DEBUG oslo_service.service [-] ipv6_pd_enabled                = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.973 103294 DEBUG oslo_service.service [-] log_config_append              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.973 103294 DEBUG oslo_service.service [-] log_date_format                = %Y-%m-%d %H:%M:%S log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.973 103294 DEBUG oslo_service.service [-] log_dir                        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.973 103294 DEBUG oslo_service.service [-] log_file                       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.973 103294 DEBUG oslo_service.service [-] log_options                    = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.973 103294 DEBUG oslo_service.service [-] log_rotate_interval            = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.973 103294 DEBUG oslo_service.service [-] log_rotate_interval_type       = days log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.974 103294 DEBUG oslo_service.service [-] log_rotation_type              = none log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.974 103294 DEBUG oslo_service.service [-] logging_context_format_string  = %(asctime)s.%(msecs)03d %(process)d %(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(user_identity)s] %(instance)s%(message)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.974 103294 DEBUG oslo_service.service [-] logging_debug_format_suffix    = %(funcName)s %(pathname)s:%(lineno)d log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.974 103294 DEBUG oslo_service.service [-] logging_default_format_string  = %(asctime)s.%(msecs)03d %(process)d %(levelname)s %(name)s [-] %(instance)s%(message)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.974 103294 DEBUG oslo_service.service [-] logging_exception_prefix       = %(asctime)s.%(msecs)03d %(process)d ERROR %(name)s %(instance)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.974 103294 DEBUG oslo_service.service [-] logging_user_identity_format   = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.974 103294 DEBUG oslo_service.service [-] max_dns_nameservers            = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.974 103294 DEBUG oslo_service.service [-] max_header_line                = 16384 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.974 103294 DEBUG oslo_service.service [-] max_logfile_count              = 30 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.974 103294 DEBUG oslo_service.service [-] max_logfile_size_mb            = 200 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.975 103294 DEBUG oslo_service.service [-] max_subnet_host_routes         = 20 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.975 103294 DEBUG oslo_service.service [-] metadata_backlog               = 4096 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.975 103294 DEBUG oslo_service.service [-] metadata_proxy_group           =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.975 103294 DEBUG oslo_service.service [-] metadata_proxy_shared_secret   = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.975 103294 DEBUG oslo_service.service [-] metadata_proxy_socket          = /var/lib/neutron/metadata_proxy log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.975 103294 DEBUG oslo_service.service [-] metadata_proxy_socket_mode     = deduce log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.975 103294 DEBUG oslo_service.service [-] metadata_proxy_user            =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.975 103294 DEBUG oslo_service.service [-] metadata_workers               = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.976 103294 DEBUG oslo_service.service [-] network_link_prefix            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.976 103294 DEBUG oslo_service.service [-] notify_nova_on_port_data_changes = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.976 103294 DEBUG oslo_service.service [-] notify_nova_on_port_status_changes = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.976 103294 DEBUG oslo_service.service [-] nova_client_cert               =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.976 103294 DEBUG oslo_service.service [-] nova_client_priv_key           =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.976 103294 DEBUG oslo_service.service [-] nova_metadata_host             = nova-metadata-cell1-internal.openstack.svc log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.976 103294 DEBUG oslo_service.service [-] nova_metadata_insecure         = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.976 103294 DEBUG oslo_service.service [-] nova_metadata_port             = 8775 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.976 103294 DEBUG oslo_service.service [-] nova_metadata_protocol         = https log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.977 103294 DEBUG oslo_service.service [-] pagination_max_limit           = -1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.977 103294 DEBUG oslo_service.service [-] periodic_fuzzy_delay           = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.977 103294 DEBUG oslo_service.service [-] periodic_interval              = 40 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.977 103294 DEBUG oslo_service.service [-] publish_errors                 = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.977 103294 DEBUG oslo_service.service [-] rate_limit_burst               = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.977 103294 DEBUG oslo_service.service [-] rate_limit_except_level        = CRITICAL log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.977 103294 DEBUG oslo_service.service [-] rate_limit_interval            = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.977 103294 DEBUG oslo_service.service [-] retry_until_window             = 30 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.978 103294 DEBUG oslo_service.service [-] rpc_resources_processing_step  = 20 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.978 103294 DEBUG oslo_service.service [-] rpc_response_max_timeout       = 600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.978 103294 DEBUG oslo_service.service [-] rpc_state_report_workers       = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.978 103294 DEBUG oslo_service.service [-] rpc_workers                    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.978 103294 DEBUG oslo_service.service [-] send_events_interval           = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.978 103294 DEBUG oslo_service.service [-] service_plugins                = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.978 103294 DEBUG oslo_service.service [-] setproctitle                   = on log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.978 103294 DEBUG oslo_service.service [-] state_path                     = /var/lib/neutron log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.978 103294 DEBUG oslo_service.service [-] syslog_log_facility            = syslog log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.979 103294 DEBUG oslo_service.service [-] tcp_keepidle                   = 600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.979 103294 DEBUG oslo_service.service [-] transport_url                  = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.979 103294 DEBUG oslo_service.service [-] use_eventlog                   = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.979 103294 DEBUG oslo_service.service [-] use_journal                    = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.979 103294 DEBUG oslo_service.service [-] use_json                       = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.979 103294 DEBUG oslo_service.service [-] use_ssl                        = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.979 103294 DEBUG oslo_service.service [-] use_stderr                     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.979 103294 DEBUG oslo_service.service [-] use_syslog                     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.979 103294 DEBUG oslo_service.service [-] vlan_transparent               = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.979 103294 DEBUG oslo_service.service [-] watch_log_file                 = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.980 103294 DEBUG oslo_service.service [-] wsgi_default_pool_size         = 100 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.980 103294 DEBUG oslo_service.service [-] wsgi_keep_alive                = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.980 103294 DEBUG oslo_service.service [-] wsgi_log_format                = %(client_ip)s "%(request_line)s" status: %(status_code)s  len: %(body_length)s time: %(wall_seconds).7f log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.980 103294 DEBUG oslo_service.service [-] wsgi_server_debug              = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.980 103294 DEBUG oslo_service.service [-] oslo_concurrency.disable_process_locking = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.980 103294 DEBUG oslo_service.service [-] oslo_concurrency.lock_path     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.980 103294 DEBUG oslo_service.service [-] profiler.connection_string     = messaging:// log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.980 103294 DEBUG oslo_service.service [-] profiler.enabled               = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.981 103294 DEBUG oslo_service.service [-] profiler.es_doc_type           = notification log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.981 103294 DEBUG oslo_service.service [-] profiler.es_scroll_size        = 10000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.981 103294 DEBUG oslo_service.service [-] profiler.es_scroll_time        = 2m log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.981 103294 DEBUG oslo_service.service [-] profiler.filter_error_trace    = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.981 103294 DEBUG oslo_service.service [-] profiler.hmac_keys             = SECRET_KEY log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.981 103294 DEBUG oslo_service.service [-] profiler.sentinel_service_name = mymaster log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.981 103294 DEBUG oslo_service.service [-] profiler.socket_timeout        = 0.1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.981 103294 DEBUG oslo_service.service [-] profiler.trace_sqlalchemy      = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.981 103294 DEBUG oslo_service.service [-] oslo_policy.enforce_new_defaults = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.982 103294 DEBUG oslo_service.service [-] oslo_policy.enforce_scope      = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.982 103294 DEBUG oslo_service.service [-] oslo_policy.policy_default_rule = default log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.982 103294 DEBUG oslo_service.service [-] oslo_policy.policy_dirs        = ['policy.d'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.982 103294 DEBUG oslo_service.service [-] oslo_policy.policy_file        = policy.yaml log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.982 103294 DEBUG oslo_service.service [-] oslo_policy.remote_content_type = application/x-www-form-urlencoded log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.982 103294 DEBUG oslo_service.service [-] oslo_policy.remote_ssl_ca_crt_file = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.982 103294 DEBUG oslo_service.service [-] oslo_policy.remote_ssl_client_crt_file = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.982 103294 DEBUG oslo_service.service [-] oslo_policy.remote_ssl_client_key_file = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.983 103294 DEBUG oslo_service.service [-] oslo_policy.remote_ssl_verify_server_crt = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.983 103294 DEBUG oslo_service.service [-] oslo_messaging_metrics.metrics_buffer_size = 1000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.983 103294 DEBUG oslo_service.service [-] oslo_messaging_metrics.metrics_enabled = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.983 103294 DEBUG oslo_service.service [-] oslo_messaging_metrics.metrics_process_name =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.983 103294 DEBUG oslo_service.service [-] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.983 103294 DEBUG oslo_service.service [-] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.984 103294 DEBUG oslo_service.service [-] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.984 103294 DEBUG oslo_service.service [-] service_providers.service_provider = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.984 103294 DEBUG oslo_service.service [-] privsep.capabilities           = [21, 12, 1, 2, 19] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.984 103294 DEBUG oslo_service.service [-] privsep.group                  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.984 103294 DEBUG oslo_service.service [-] privsep.helper_command         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.984 103294 DEBUG oslo_service.service [-] privsep.logger_name            = oslo_privsep.daemon log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.984 103294 DEBUG oslo_service.service [-] privsep.thread_pool_size       = 8 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.984 103294 DEBUG oslo_service.service [-] privsep.user                   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.984 103294 DEBUG oslo_service.service [-] privsep_dhcp_release.capabilities = [21, 12] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.985 103294 DEBUG oslo_service.service [-] privsep_dhcp_release.group     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.985 103294 DEBUG oslo_service.service [-] privsep_dhcp_release.helper_command = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.985 103294 DEBUG oslo_service.service [-] privsep_dhcp_release.logger_name = oslo_privsep.daemon log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.985 103294 DEBUG oslo_service.service [-] privsep_dhcp_release.thread_pool_size = 8 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.985 103294 DEBUG oslo_service.service [-] privsep_dhcp_release.user      = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.985 103294 DEBUG oslo_service.service [-] privsep_ovs_vsctl.capabilities = [21, 12] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.985 103294 DEBUG oslo_service.service [-] privsep_ovs_vsctl.group        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.985 103294 DEBUG oslo_service.service [-] privsep_ovs_vsctl.helper_command = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.985 103294 DEBUG oslo_service.service [-] privsep_ovs_vsctl.logger_name  = oslo_privsep.daemon log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.985 103294 DEBUG oslo_service.service [-] privsep_ovs_vsctl.thread_pool_size = 8 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.986 103294 DEBUG oslo_service.service [-] privsep_ovs_vsctl.user         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.986 103294 DEBUG oslo_service.service [-] privsep_namespace.capabilities = [21] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.986 103294 DEBUG oslo_service.service [-] privsep_namespace.group        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.986 103294 DEBUG oslo_service.service [-] privsep_namespace.helper_command = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.986 103294 DEBUG oslo_service.service [-] privsep_namespace.logger_name  = oslo_privsep.daemon log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.986 103294 DEBUG oslo_service.service [-] privsep_namespace.thread_pool_size = 8 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.986 103294 DEBUG oslo_service.service [-] privsep_namespace.user         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.986 103294 DEBUG oslo_service.service [-] privsep_conntrack.capabilities = [12] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.986 103294 DEBUG oslo_service.service [-] privsep_conntrack.group        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.987 103294 DEBUG oslo_service.service [-] privsep_conntrack.helper_command = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.987 103294 DEBUG oslo_service.service [-] privsep_conntrack.logger_name  = oslo_privsep.daemon log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.987 103294 DEBUG oslo_service.service [-] privsep_conntrack.thread_pool_size = 8 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.987 103294 DEBUG oslo_service.service [-] privsep_conntrack.user         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.987 103294 DEBUG oslo_service.service [-] privsep_link.capabilities      = [12, 21] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.987 103294 DEBUG oslo_service.service [-] privsep_link.group             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.987 103294 DEBUG oslo_service.service [-] privsep_link.helper_command    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.987 103294 DEBUG oslo_service.service [-] privsep_link.logger_name       = oslo_privsep.daemon log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.987 103294 DEBUG oslo_service.service [-] privsep_link.thread_pool_size  = 8 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.988 103294 DEBUG oslo_service.service [-] privsep_link.user              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.988 103294 DEBUG oslo_service.service [-] AGENT.check_child_processes_action = respawn log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.988 103294 DEBUG oslo_service.service [-] AGENT.check_child_processes_interval = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.988 103294 DEBUG oslo_service.service [-] AGENT.comment_iptables_rules   = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.988 103294 DEBUG oslo_service.service [-] AGENT.debug_iptables_rules     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.988 103294 DEBUG oslo_service.service [-] AGENT.kill_scripts_path        = /etc/neutron/kill_scripts/ log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.988 103294 DEBUG oslo_service.service [-] AGENT.root_helper              = sudo neutron-rootwrap /etc/neutron/rootwrap.conf log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.988 103294 DEBUG oslo_service.service [-] AGENT.root_helper_daemon       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.988 103294 DEBUG oslo_service.service [-] AGENT.use_helper_for_ns_read   = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.988 103294 DEBUG oslo_service.service [-] AGENT.use_random_fully         = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.989 103294 DEBUG oslo_service.service [-] oslo_versionedobjects.fatal_exception_format_errors = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.989 103294 DEBUG oslo_service.service [-] QUOTAS.default_quota           = -1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.989 103294 DEBUG oslo_service.service [-] QUOTAS.quota_driver            = neutron.db.quota.driver_nolock.DbQuotaNoLockDriver log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.989 103294 DEBUG oslo_service.service [-] QUOTAS.quota_network           = 100 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.989 103294 DEBUG oslo_service.service [-] QUOTAS.quota_port              = 500 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.989 103294 DEBUG oslo_service.service [-] QUOTAS.quota_security_group    = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.989 103294 DEBUG oslo_service.service [-] QUOTAS.quota_security_group_rule = 100 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.989 103294 DEBUG oslo_service.service [-] QUOTAS.quota_subnet            = 100 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.990 103294 DEBUG oslo_service.service [-] QUOTAS.track_quota_usage       = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.990 103294 DEBUG oslo_service.service [-] nova.auth_section              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.990 103294 DEBUG oslo_service.service [-] nova.auth_type                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.990 103294 DEBUG oslo_service.service [-] nova.cafile                    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.990 103294 DEBUG oslo_service.service [-] nova.certfile                  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.990 103294 DEBUG oslo_service.service [-] nova.collect_timing            = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.990 103294 DEBUG oslo_service.service [-] nova.endpoint_type             = public log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.990 103294 DEBUG oslo_service.service [-] nova.insecure                  = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.990 103294 DEBUG oslo_service.service [-] nova.keyfile                   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.990 103294 DEBUG oslo_service.service [-] nova.region_name               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.991 103294 DEBUG oslo_service.service [-] nova.split_loggers             = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.991 103294 DEBUG oslo_service.service [-] nova.timeout                   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.991 103294 DEBUG oslo_service.service [-] placement.auth_section         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.991 103294 DEBUG oslo_service.service [-] placement.auth_type            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.991 103294 DEBUG oslo_service.service [-] placement.cafile               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.991 103294 DEBUG oslo_service.service [-] placement.certfile             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.991 103294 DEBUG oslo_service.service [-] placement.collect_timing       = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.991 103294 DEBUG oslo_service.service [-] placement.endpoint_type        = public log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.992 103294 DEBUG oslo_service.service [-] placement.insecure             = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.992 103294 DEBUG oslo_service.service [-] placement.keyfile              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.992 103294 DEBUG oslo_service.service [-] placement.region_name          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.992 103294 DEBUG oslo_service.service [-] placement.split_loggers        = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.992 103294 DEBUG oslo_service.service [-] placement.timeout              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.992 103294 DEBUG oslo_service.service [-] ironic.auth_section            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.993 103294 DEBUG oslo_service.service [-] ironic.auth_type               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.993 103294 DEBUG oslo_service.service [-] ironic.cafile                  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.993 103294 DEBUG oslo_service.service [-] ironic.certfile                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.993 103294 DEBUG oslo_service.service [-] ironic.collect_timing          = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.993 103294 DEBUG oslo_service.service [-] ironic.connect_retries         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.993 103294 DEBUG oslo_service.service [-] ironic.connect_retry_delay     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.993 103294 DEBUG oslo_service.service [-] ironic.enable_notifications    = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.993 103294 DEBUG oslo_service.service [-] ironic.endpoint_override       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.994 103294 DEBUG oslo_service.service [-] ironic.insecure                = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.994 103294 DEBUG oslo_service.service [-] ironic.interface               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.994 103294 DEBUG oslo_service.service [-] ironic.keyfile                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.994 103294 DEBUG oslo_service.service [-] ironic.max_version             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.994 103294 DEBUG oslo_service.service [-] ironic.min_version             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.994 103294 DEBUG oslo_service.service [-] ironic.region_name             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.994 103294 DEBUG oslo_service.service [-] ironic.service_name            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.995 103294 DEBUG oslo_service.service [-] ironic.service_type            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.995 103294 DEBUG oslo_service.service [-] ironic.split_loggers           = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.995 103294 DEBUG oslo_service.service [-] ironic.status_code_retries     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.995 103294 DEBUG oslo_service.service [-] ironic.status_code_retry_delay = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.995 103294 DEBUG oslo_service.service [-] ironic.timeout                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.995 103294 DEBUG oslo_service.service [-] ironic.valid_interfaces        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.995 103294 DEBUG oslo_service.service [-] ironic.version                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.995 103294 DEBUG oslo_service.service [-] cli_script.dry_run             = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.996 103294 DEBUG oslo_service.service [-] ovn.allow_stateless_action_supported = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.996 103294 DEBUG oslo_service.service [-] ovn.dhcp_default_lease_time    = 43200 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.996 103294 DEBUG oslo_service.service [-] ovn.disable_ovn_dhcp_for_baremetal_ports = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.996 103294 DEBUG oslo_service.service [-] ovn.dns_servers                = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.996 103294 DEBUG oslo_service.service [-] ovn.enable_distributed_floating_ip = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.996 103294 DEBUG oslo_service.service [-] ovn.neutron_sync_mode          = log log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.996 103294 DEBUG oslo_service.service [-] ovn.ovn_dhcp4_global_options   = {} log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.997 103294 DEBUG oslo_service.service [-] ovn.ovn_dhcp6_global_options   = {} log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.997 103294 DEBUG oslo_service.service [-] ovn.ovn_emit_need_to_frag      = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.997 103294 DEBUG oslo_service.service [-] ovn.ovn_l3_mode                = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.997 103294 DEBUG oslo_service.service [-] ovn.ovn_l3_scheduler           = leastloaded log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.997 103294 DEBUG oslo_service.service [-] ovn.ovn_metadata_enabled       = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.997 103294 DEBUG oslo_service.service [-] ovn.ovn_nb_ca_cert             =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.997 103294 DEBUG oslo_service.service [-] ovn.ovn_nb_certificate         =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.998 103294 DEBUG oslo_service.service [-] ovn.ovn_nb_connection          = tcp:127.0.0.1:6641 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.998 103294 DEBUG oslo_service.service [-] ovn.ovn_nb_private_key         =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.998 103294 DEBUG oslo_service.service [-] ovn.ovn_sb_ca_cert             = /etc/pki/tls/certs/ovndbca.crt log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.998 103294 DEBUG oslo_service.service [-] ovn.ovn_sb_certificate         = /etc/pki/tls/certs/ovndb.crt log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.998 103294 DEBUG oslo_service.service [-] ovn.ovn_sb_connection          = ssl:ovsdbserver-sb.openstack.svc:6642 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.998 103294 DEBUG oslo_service.service [-] ovn.ovn_sb_private_key         = /etc/pki/tls/private/ovndb.key log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.998 103294 DEBUG oslo_service.service [-] ovn.ovsdb_connection_timeout   = 180 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.998 103294 DEBUG oslo_service.service [-] ovn.ovsdb_log_level            = INFO log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.998 103294 DEBUG oslo_service.service [-] ovn.ovsdb_probe_interval       = 60000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.999 103294 DEBUG oslo_service.service [-] ovn.ovsdb_retry_max_interval   = 180 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.999 103294 DEBUG oslo_service.service [-] ovn.vhost_sock_dir             = /var/run/openvswitch log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.999 103294 DEBUG oslo_service.service [-] ovn.vif_type                   = ovs log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.999 103294 DEBUG oslo_service.service [-] OVS.bridge_mac_table_size      = 50000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.999 103294 DEBUG oslo_service.service [-] OVS.igmp_snooping_enable       = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.999 103294 DEBUG oslo_service.service [-] OVS.ovsdb_timeout              = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.999 103294 DEBUG oslo_service.service [-] ovs.ovsdb_connection           = tcp:127.0.0.1:6640 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.999 103294 DEBUG oslo_service.service [-] ovs.ovsdb_connection_timeout   = 180 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:03.999 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.amqp_auto_delete = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.000 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.amqp_durable_queues = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.000 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.conn_pool_min_size = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.000 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.conn_pool_ttl = 1200 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.000 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.direct_mandatory_flag = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.000 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.enable_cancel_on_failover = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.000 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.heartbeat_in_pthread = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.000 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.heartbeat_rate = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.000 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.000 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.kombu_compression = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.001 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.kombu_failover_strategy = round-robin log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.001 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.001 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.001 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.rabbit_ha_queues = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.001 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.rabbit_interval_max = 30 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.001 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.001 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.001 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.001 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.001 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.002 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.rabbit_quorum_queue = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.002 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.rabbit_retry_backoff = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.002 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.rabbit_retry_interval = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.002 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.002 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.rpc_conn_pool_size = 30 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.002 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.ssl      = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.002 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.ssl_ca_file =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.002 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.ssl_cert_file =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.002 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.ssl_enforce_fips_mode = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.002 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.ssl_key_file =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.003 103294 DEBUG oslo_service.service [-] oslo_messaging_rabbit.ssl_version =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.003 103294 DEBUG oslo_service.service [-] oslo_messaging_notifications.driver = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.003 103294 DEBUG oslo_service.service [-] oslo_messaging_notifications.retry = -1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.003 103294 DEBUG oslo_service.service [-] oslo_messaging_notifications.topics = ['notifications'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.003 103294 DEBUG oslo_service.service [-] oslo_messaging_notifications.transport_url = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:46:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:46:04.003 103294 DEBUG oslo_service.service [-] ******************************************************************************** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2613
Oct 02 11:46:06 compute-0 sshd-session[103409]: Accepted publickey for zuul from 192.168.122.30 port 53524 ssh2: ECDSA SHA256:tAEsFSop2MjuMQQ/UqKU2uCkxqWXGfsM5crdhd6tlI4
Oct 02 11:46:06 compute-0 systemd-logind[827]: New session 23 of user zuul.
Oct 02 11:46:06 compute-0 systemd[1]: Started Session 23 of User zuul.
Oct 02 11:46:06 compute-0 sshd-session[103409]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Oct 02 11:46:07 compute-0 python3.9[103562]: ansible-ansible.builtin.setup Invoked with gather_subset=['!all', '!min', 'local'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:46:08 compute-0 sudo[103716]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cbsycpumzpvvrqbpmgnqitkahvnidusi ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405567.7425566-67-38284611170666/AnsiballZ_command.py'
Oct 02 11:46:08 compute-0 sudo[103716]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:08 compute-0 python3.9[103718]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a --filter name=^nova_virtlogd$ --format \{\{.Names\}\} _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:46:08 compute-0 sudo[103716]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:09 compute-0 sudo[103881]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kwqyhgcwhukgwosulgablkuhppiujsmr ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405568.9135904-100-170214836281166/AnsiballZ_systemd_service.py'
Oct 02 11:46:09 compute-0 sudo[103881]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:09 compute-0 python3.9[103883]: ansible-ansible.builtin.systemd_service Invoked with daemon_reload=True daemon_reexec=False scope=system no_block=False name=None state=None enabled=None force=None masked=None
Oct 02 11:46:09 compute-0 systemd[1]: Reloading.
Oct 02 11:46:09 compute-0 systemd-rc-local-generator[103908]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:46:09 compute-0 systemd-sysv-generator[103913]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:46:10 compute-0 sudo[103881]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:10 compute-0 python3.9[104067]: ansible-ansible.builtin.service_facts Invoked
Oct 02 11:46:10 compute-0 network[104084]: You are using 'network' service provided by 'network-scripts', which are now deprecated.
Oct 02 11:46:10 compute-0 network[104085]: 'network-scripts' will be removed from distribution in near future.
Oct 02 11:46:10 compute-0 network[104086]: It is advised to switch to 'NetworkManager' instead for network management.
Oct 02 11:46:17 compute-0 sudo[104348]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dfotjbqptwgrmlozanrisfeokyjxuhnb ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405577.4179146-157-159196398148447/AnsiballZ_systemd_service.py'
Oct 02 11:46:17 compute-0 sudo[104348]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:18 compute-0 python3.9[104350]: ansible-ansible.builtin.systemd_service Invoked with enabled=False name=tripleo_nova_libvirt.target state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:46:18 compute-0 sudo[104348]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:18 compute-0 sudo[104501]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jjwggaaarltwnipgyujmbejcvsiqhmge ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405578.4459293-157-214833549867654/AnsiballZ_systemd_service.py'
Oct 02 11:46:18 compute-0 sudo[104501]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:18 compute-0 python3.9[104503]: ansible-ansible.builtin.systemd_service Invoked with enabled=False name=tripleo_nova_virtlogd_wrapper.service state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:46:19 compute-0 sudo[104501]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:19 compute-0 sudo[104654]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kceimjyamcxxnxvgbxlkupmqlxmybjep ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405579.2068887-157-110392571877099/AnsiballZ_systemd_service.py'
Oct 02 11:46:19 compute-0 sudo[104654]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:19 compute-0 python3.9[104656]: ansible-ansible.builtin.systemd_service Invoked with enabled=False name=tripleo_nova_virtnodedevd.service state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:46:19 compute-0 sudo[104654]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:20 compute-0 sudo[104807]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lcrtsyuydfuowrmgmbfznlhkgrcsgwkt ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405579.9330187-157-215472053995835/AnsiballZ_systemd_service.py'
Oct 02 11:46:20 compute-0 sudo[104807]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:20 compute-0 python3.9[104809]: ansible-ansible.builtin.systemd_service Invoked with enabled=False name=tripleo_nova_virtproxyd.service state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:46:20 compute-0 sudo[104807]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:21 compute-0 sudo[104960]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xlpwpkxcrulmpkwiegtvgirbfxgqxvoz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405580.6936057-157-49835219903691/AnsiballZ_systemd_service.py'
Oct 02 11:46:21 compute-0 sudo[104960]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:21 compute-0 python3.9[104962]: ansible-ansible.builtin.systemd_service Invoked with enabled=False name=tripleo_nova_virtqemud.service state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:46:21 compute-0 sudo[104960]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:21 compute-0 sudo[105113]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gsvxcdrbswizvytvtwyonjujhvoscgnu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405581.5342724-157-108401812071606/AnsiballZ_systemd_service.py'
Oct 02 11:46:21 compute-0 sudo[105113]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:22 compute-0 python3.9[105115]: ansible-ansible.builtin.systemd_service Invoked with enabled=False name=tripleo_nova_virtsecretd.service state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:46:22 compute-0 sudo[105113]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:22 compute-0 sudo[105266]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zvnsmkcgksvkpohkqegtjfofptukaeot ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405582.4227772-157-181449444874475/AnsiballZ_systemd_service.py'
Oct 02 11:46:22 compute-0 sudo[105266]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:23 compute-0 python3.9[105268]: ansible-ansible.builtin.systemd_service Invoked with enabled=False name=tripleo_nova_virtstoraged.service state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:46:23 compute-0 sudo[105266]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:24 compute-0 sudo[105419]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ueamawrgsusqzhbvtkffnpribllwgymi ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405583.5547838-313-125678099828847/AnsiballZ_file.py'
Oct 02 11:46:24 compute-0 sudo[105419]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:24 compute-0 python3.9[105421]: ansible-ansible.builtin.file Invoked with path=/usr/lib/systemd/system/tripleo_nova_libvirt.target state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:46:24 compute-0 sudo[105419]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:24 compute-0 sudo[105571]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rjgfazuhzyudmfqqyvpmumrrneduelhu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405584.4825714-313-223667945566617/AnsiballZ_file.py'
Oct 02 11:46:24 compute-0 sudo[105571]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:24 compute-0 python3.9[105573]: ansible-ansible.builtin.file Invoked with path=/usr/lib/systemd/system/tripleo_nova_virtlogd_wrapper.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:46:24 compute-0 sudo[105571]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:25 compute-0 sudo[105723]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dwmzeyudrtjaqtjnyrlxxdynkbwrjjvr ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405585.103243-313-106146453122054/AnsiballZ_file.py'
Oct 02 11:46:25 compute-0 sudo[105723]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:25 compute-0 python3.9[105725]: ansible-ansible.builtin.file Invoked with path=/usr/lib/systemd/system/tripleo_nova_virtnodedevd.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:46:25 compute-0 sudo[105723]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:25 compute-0 sudo[105876]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-otdyxnucxfxuarprfnxmsuzwtszvpfxz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405585.713217-313-56414716344327/AnsiballZ_file.py'
Oct 02 11:46:25 compute-0 sudo[105876]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:26 compute-0 python3.9[105878]: ansible-ansible.builtin.file Invoked with path=/usr/lib/systemd/system/tripleo_nova_virtproxyd.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:46:26 compute-0 sudo[105876]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:27 compute-0 sudo[106028]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gaoubwhudoefjovvuyxaajyilfwsxcxj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405586.9081888-313-6403190478899/AnsiballZ_file.py'
Oct 02 11:46:27 compute-0 sudo[106028]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:27 compute-0 python3.9[106030]: ansible-ansible.builtin.file Invoked with path=/usr/lib/systemd/system/tripleo_nova_virtqemud.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:46:27 compute-0 sudo[106028]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:27 compute-0 sudo[106198]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jdqxyjbkwcnyggzobdtarvfgubqbcjvt ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405587.4379663-313-140789197782142/AnsiballZ_file.py'
Oct 02 11:46:27 compute-0 sudo[106198]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:27 compute-0 podman[106154]: 2025-10-02 11:46:27.842591217 +0000 UTC m=+0.124814785 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, config_id=ovn_controller, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, tcib_managed=true, container_name=ovn_controller, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']})
Oct 02 11:46:27 compute-0 python3.9[106204]: ansible-ansible.builtin.file Invoked with path=/usr/lib/systemd/system/tripleo_nova_virtsecretd.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:46:27 compute-0 sudo[106198]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:28 compute-0 sudo[106359]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-peeimdyfcxqnqjgjajqhhudfawfmzzdl ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405588.0807633-313-94700933748139/AnsiballZ_file.py'
Oct 02 11:46:28 compute-0 sudo[106359]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:28 compute-0 python3.9[106361]: ansible-ansible.builtin.file Invoked with path=/usr/lib/systemd/system/tripleo_nova_virtstoraged.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:46:28 compute-0 sudo[106359]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:28 compute-0 sudo[106511]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rwxmiwddycylgamyzuowqygrspvuoccr ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405588.7427104-463-164494438333981/AnsiballZ_file.py'
Oct 02 11:46:28 compute-0 sudo[106511]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:29 compute-0 python3.9[106513]: ansible-ansible.builtin.file Invoked with path=/etc/systemd/system/tripleo_nova_libvirt.target state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:46:29 compute-0 sudo[106511]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:29 compute-0 sudo[106663]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bzlqgqhahzdzklaggxrzvnlykvqvyyjj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405589.3537657-463-103841820759938/AnsiballZ_file.py'
Oct 02 11:46:29 compute-0 sudo[106663]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:29 compute-0 python3.9[106665]: ansible-ansible.builtin.file Invoked with path=/etc/systemd/system/tripleo_nova_virtlogd_wrapper.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:46:29 compute-0 sudo[106663]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:30 compute-0 sudo[106815]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cqpjurettxftsmpdcoxrxymlmtukywir ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405590.0152247-463-28533743614439/AnsiballZ_file.py'
Oct 02 11:46:30 compute-0 sudo[106815]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:30 compute-0 python3.9[106817]: ansible-ansible.builtin.file Invoked with path=/etc/systemd/system/tripleo_nova_virtnodedevd.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:46:30 compute-0 sudo[106815]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:31 compute-0 podman[106922]: 2025-10-02 11:46:31.139817407 +0000 UTC m=+0.057464521 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, container_name=ovn_metadata_agent, 
tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible, config_id=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 11:46:31 compute-0 sudo[106985]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wsugzxyjcbtckhyyuthtddwrsrefcwch ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405590.7592394-463-57039618554802/AnsiballZ_file.py'
Oct 02 11:46:31 compute-0 sudo[106985]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:31 compute-0 python3.9[106987]: ansible-ansible.builtin.file Invoked with path=/etc/systemd/system/tripleo_nova_virtproxyd.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:46:31 compute-0 sudo[106985]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:31 compute-0 sudo[107137]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hfskrpfbtamcnptusponpsujwdzfrgij ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405591.51576-463-48035610954143/AnsiballZ_file.py'
Oct 02 11:46:31 compute-0 sudo[107137]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:32 compute-0 python3.9[107139]: ansible-ansible.builtin.file Invoked with path=/etc/systemd/system/tripleo_nova_virtqemud.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:46:32 compute-0 sudo[107137]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:32 compute-0 sudo[107289]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ckhptlhvhxcciyjppnflhgrkprojoyue ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405592.2245486-463-26186468956866/AnsiballZ_file.py'
Oct 02 11:46:32 compute-0 sudo[107289]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:32 compute-0 python3.9[107291]: ansible-ansible.builtin.file Invoked with path=/etc/systemd/system/tripleo_nova_virtsecretd.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:46:32 compute-0 sudo[107289]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:33 compute-0 sudo[107441]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-iznukwbivnezsumsdzjsxmrszobvutcx ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405592.8594086-463-40129736494482/AnsiballZ_file.py'
Oct 02 11:46:33 compute-0 sudo[107441]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:33 compute-0 python3.9[107443]: ansible-ansible.builtin.file Invoked with path=/etc/systemd/system/tripleo_nova_virtstoraged.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:46:33 compute-0 sudo[107441]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:33 compute-0 sudo[107593]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cohwtjuzgjdxmxbuyfrzqeuqiybanhcu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405593.5863552-616-240054628815057/AnsiballZ_command.py'
Oct 02 11:46:33 compute-0 sudo[107593]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:34 compute-0 python3.9[107595]: ansible-ansible.legacy.command Invoked with _raw_params=if systemctl is-active certmonger.service; then
                                               systemctl disable --now certmonger.service
                                               test -f /etc/systemd/system/certmonger.service || systemctl mask certmonger.service
                                             fi
                                              _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:46:34 compute-0 sudo[107593]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:35 compute-0 python3.9[107747]: ansible-ansible.builtin.find Invoked with file_type=any hidden=True paths=['/var/lib/certmonger/requests'] patterns=[] read_whole_file=False age_stamp=mtime recurse=False follow=False get_checksum=False checksum_algorithm=sha1 use_regex=False exact_mode=True excludes=None contains=None age=None size=None depth=None mode=None encoding=None limit=None
Oct 02 11:46:35 compute-0 sudo[107897]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nidyxjqjerbxgabikmrpntvfteszpfni ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405595.3232524-670-15096705216525/AnsiballZ_systemd_service.py'
Oct 02 11:46:35 compute-0 sudo[107897]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:35 compute-0 python3.9[107899]: ansible-ansible.builtin.systemd_service Invoked with daemon_reload=True daemon_reexec=False scope=system no_block=False name=None state=None enabled=None force=None masked=None
Oct 02 11:46:35 compute-0 systemd[1]: Reloading.
Oct 02 11:46:35 compute-0 systemd-rc-local-generator[107920]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:46:35 compute-0 systemd-sysv-generator[107926]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:46:36 compute-0 sudo[107897]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:36 compute-0 sudo[108084]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-epehgthvfifmepecpxbbiffflsforyxw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405596.4123712-694-100094583496845/AnsiballZ_command.py'
Oct 02 11:46:36 compute-0 sudo[108084]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:36 compute-0 python3.9[108086]: ansible-ansible.legacy.command Invoked with cmd=/usr/bin/systemctl reset-failed tripleo_nova_libvirt.target _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True _raw_params=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:46:36 compute-0 sudo[108084]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:37 compute-0 sudo[108237]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lfdfzeoyliamspsmybyecvsynlsdrhoj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405597.1026309-694-173689904305172/AnsiballZ_command.py'
Oct 02 11:46:37 compute-0 sudo[108237]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:37 compute-0 python3.9[108239]: ansible-ansible.legacy.command Invoked with cmd=/usr/bin/systemctl reset-failed tripleo_nova_virtlogd_wrapper.service _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True _raw_params=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:46:37 compute-0 sudo[108237]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:38 compute-0 sudo[108390]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cvvtndyuvznhyxaerbuznzicwosfjabn ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405597.761269-694-255764838195258/AnsiballZ_command.py'
Oct 02 11:46:38 compute-0 sudo[108390]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:38 compute-0 python3.9[108392]: ansible-ansible.legacy.command Invoked with cmd=/usr/bin/systemctl reset-failed tripleo_nova_virtnodedevd.service _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True _raw_params=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:46:38 compute-0 sudo[108390]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:38 compute-0 sudo[108543]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pgeabxofaitplatwwkqxrktqkmavlrcu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405598.408846-694-1444636988160/AnsiballZ_command.py'
Oct 02 11:46:38 compute-0 sudo[108543]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:38 compute-0 python3.9[108545]: ansible-ansible.legacy.command Invoked with cmd=/usr/bin/systemctl reset-failed tripleo_nova_virtproxyd.service _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True _raw_params=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:46:38 compute-0 sudo[108543]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:39 compute-0 sudo[108696]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-alqlyggoekqnzqlsnclgfwqvhprtvgwf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405599.0787961-694-58768238515465/AnsiballZ_command.py'
Oct 02 11:46:39 compute-0 sudo[108696]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:39 compute-0 python3.9[108698]: ansible-ansible.legacy.command Invoked with cmd=/usr/bin/systemctl reset-failed tripleo_nova_virtqemud.service _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True _raw_params=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:46:39 compute-0 sudo[108696]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:40 compute-0 sudo[108849]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ggftfeaockzgtxwuavndatcpcqidztdw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405599.8899062-694-15616440812051/AnsiballZ_command.py'
Oct 02 11:46:40 compute-0 sudo[108849]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:40 compute-0 python3.9[108851]: ansible-ansible.legacy.command Invoked with cmd=/usr/bin/systemctl reset-failed tripleo_nova_virtsecretd.service _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True _raw_params=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:46:40 compute-0 sudo[108849]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:40 compute-0 sudo[109002]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-saiugxxnoyjywstpqwsziqmtidofqxqu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405600.4966578-694-100485398051307/AnsiballZ_command.py'
Oct 02 11:46:40 compute-0 sudo[109002]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:40 compute-0 python3.9[109004]: ansible-ansible.legacy.command Invoked with cmd=/usr/bin/systemctl reset-failed tripleo_nova_virtstoraged.service _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True _raw_params=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:46:41 compute-0 sudo[109002]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:43 compute-0 sudo[109155]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hgfewvrafqxpedyttufjazvgrymaqagd ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405602.626329-856-221417091673476/AnsiballZ_getent.py'
Oct 02 11:46:43 compute-0 sudo[109155]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:43 compute-0 python3.9[109157]: ansible-ansible.builtin.getent Invoked with database=passwd key=libvirt fail_key=True service=None split=None
Oct 02 11:46:43 compute-0 sudo[109155]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:44 compute-0 sudo[109308]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jfyevpdgdcvtfvqqyfkxorevcdkgxozz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405603.5957265-880-37699175840613/AnsiballZ_group.py'
Oct 02 11:46:44 compute-0 sudo[109308]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:44 compute-0 python3.9[109310]: ansible-ansible.builtin.group Invoked with gid=42473 name=libvirt state=present force=False system=False local=False non_unique=False gid_min=None gid_max=None
Oct 02 11:46:44 compute-0 groupadd[109311]: group added to /etc/group: name=libvirt, GID=42473
Oct 02 11:46:44 compute-0 groupadd[109311]: group added to /etc/gshadow: name=libvirt
Oct 02 11:46:44 compute-0 groupadd[109311]: new group: name=libvirt, GID=42473
Oct 02 11:46:44 compute-0 sudo[109308]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:45 compute-0 sudo[109466]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tluzaxwuvugucconijfwvitnimlyjwcf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405604.7983334-904-105897252057503/AnsiballZ_user.py'
Oct 02 11:46:45 compute-0 sudo[109466]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:45 compute-0 python3.9[109468]: ansible-ansible.builtin.user Invoked with comment=libvirt user group=libvirt groups=[''] name=libvirt shell=/sbin/nologin state=present uid=42473 non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on compute-0 update_password=always home=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None password_expire_account_disable=None uid_min=None uid_max=None
Oct 02 11:46:45 compute-0 useradd[109470]: new user: name=libvirt, UID=42473, GID=42473, home=/home/libvirt, shell=/sbin/nologin, from=/dev/pts/0
Oct 02 11:46:45 compute-0 sudo[109466]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:46 compute-0 sudo[109626]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zneoohbxtymagxrjdrbgkfrnptoitdox ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405606.093929-937-266391871552767/AnsiballZ_setup.py'
Oct 02 11:46:46 compute-0 sudo[109626]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:46 compute-0 python3.9[109628]: ansible-ansible.legacy.setup Invoked with filter=['ansible_pkg_mgr'] gather_subset=['!all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Oct 02 11:46:46 compute-0 sudo[109626]: pam_unix(sudo:session): session closed for user root
Oct 02 11:46:47 compute-0 sudo[109710]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fhfvsifhsfoizsvhyiycxgbsjuhtblkr ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405606.093929-937-266391871552767/AnsiballZ_dnf.py'
Oct 02 11:46:47 compute-0 sudo[109710]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:46:47 compute-0 python3.9[109712]: ansible-ansible.legacy.dnf Invoked with name=['libvirt ', 'libvirt-admin ', 'libvirt-client ', 'libvirt-daemon ', 'qemu-kvm', 'qemu-img', 'libguestfs', 'libseccomp', 'swtpm', 'swtpm-tools', 'edk2-ovmf', 'ceph-common', 'cyrus-sasl-scram'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 02 11:46:58 compute-0 podman[109724]: 2025-10-02 11:46:58.172931196 +0000 UTC m=+0.084103375 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, container_name=ovn_controller, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team)
Oct 02 11:47:02 compute-0 podman[109891]: 2025-10-02 11:47:02.136731601 +0000 UTC m=+0.051630412 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_managed=true, container_name=ovn_metadata_agent, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_metadata_agent, io.buildah.version=1.41.3, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible)
Oct 02 11:47:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:47:02.187 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 11:47:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:47:02.188 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 11:47:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:47:02.188 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 11:47:17 compute-0 kernel: SELinux:  Converting 2752 SID table entries...
Oct 02 11:47:17 compute-0 kernel: SELinux:  policy capability network_peer_controls=1
Oct 02 11:47:17 compute-0 kernel: SELinux:  policy capability open_perms=1
Oct 02 11:47:17 compute-0 kernel: SELinux:  policy capability extended_socket_class=1
Oct 02 11:47:17 compute-0 kernel: SELinux:  policy capability always_check_network=0
Oct 02 11:47:17 compute-0 kernel: SELinux:  policy capability cgroup_seclabel=1
Oct 02 11:47:17 compute-0 kernel: SELinux:  policy capability nnp_nosuid_transition=1
Oct 02 11:47:17 compute-0 kernel: SELinux:  policy capability genfs_seclabel_symlinks=1
Oct 02 11:47:27 compute-0 kernel: SELinux:  Converting 2752 SID table entries...
Oct 02 11:47:27 compute-0 kernel: SELinux:  policy capability network_peer_controls=1
Oct 02 11:47:27 compute-0 kernel: SELinux:  policy capability open_perms=1
Oct 02 11:47:27 compute-0 kernel: SELinux:  policy capability extended_socket_class=1
Oct 02 11:47:27 compute-0 kernel: SELinux:  policy capability always_check_network=0
Oct 02 11:47:27 compute-0 kernel: SELinux:  policy capability cgroup_seclabel=1
Oct 02 11:47:27 compute-0 kernel: SELinux:  policy capability nnp_nosuid_transition=1
Oct 02 11:47:27 compute-0 kernel: SELinux:  policy capability genfs_seclabel_symlinks=1
Oct 02 11:47:29 compute-0 dbus-broker-launch[818]: avc:  op=load_policy lsm=selinux seqno=13 res=1
Oct 02 11:47:29 compute-0 podman[109964]: 2025-10-02 11:47:29.195103708 +0000 UTC m=+0.104153799 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_controller, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller)
Oct 02 11:47:33 compute-0 podman[109990]: 2025-10-02 11:47:33.135608219 +0000 UTC m=+0.052319062 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_id=ovn_metadata_agent, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', 
'/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS)
Oct 02 11:48:00 compute-0 podman[121949]: 2025-10-02 11:48:00.233267316 +0000 UTC m=+0.142700941 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_controller, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 11:48:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:48:02.188 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 11:48:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:48:02.189 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 11:48:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:48:02.189 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 11:48:04 compute-0 podman[124831]: 2025-10-02 11:48:04.123251902 +0000 UTC m=+0.042226471 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=ovn_metadata_agent, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.build-date=20251001, container_name=ovn_metadata_agent, io.buildah.version=1.41.3)
Oct 02 11:48:19 compute-0 kernel: SELinux:  Converting 2753 SID table entries...
Oct 02 11:48:19 compute-0 kernel: SELinux:  policy capability network_peer_controls=1
Oct 02 11:48:19 compute-0 kernel: SELinux:  policy capability open_perms=1
Oct 02 11:48:19 compute-0 kernel: SELinux:  policy capability extended_socket_class=1
Oct 02 11:48:19 compute-0 kernel: SELinux:  policy capability always_check_network=0
Oct 02 11:48:19 compute-0 kernel: SELinux:  policy capability cgroup_seclabel=1
Oct 02 11:48:19 compute-0 kernel: SELinux:  policy capability nnp_nosuid_transition=1
Oct 02 11:48:19 compute-0 kernel: SELinux:  policy capability genfs_seclabel_symlinks=1
Oct 02 11:48:20 compute-0 groupadd[126814]: group added to /etc/group: name=dnsmasq, GID=992
Oct 02 11:48:20 compute-0 groupadd[126814]: group added to /etc/gshadow: name=dnsmasq
Oct 02 11:48:20 compute-0 groupadd[126814]: new group: name=dnsmasq, GID=992
Oct 02 11:48:20 compute-0 useradd[126821]: new user: name=dnsmasq, UID=992, GID=992, home=/var/lib/dnsmasq, shell=/usr/sbin/nologin, from=none
Oct 02 11:48:20 compute-0 dbus-broker-launch[817]: Noticed file-system modification, trigger reload.
Oct 02 11:48:20 compute-0 dbus-broker-launch[818]: avc:  op=load_policy lsm=selinux seqno=14 res=1
Oct 02 11:48:20 compute-0 dbus-broker-launch[817]: Noticed file-system modification, trigger reload.
Oct 02 11:48:21 compute-0 groupadd[126834]: group added to /etc/group: name=clevis, GID=991
Oct 02 11:48:21 compute-0 groupadd[126834]: group added to /etc/gshadow: name=clevis
Oct 02 11:48:21 compute-0 groupadd[126834]: new group: name=clevis, GID=991
Oct 02 11:48:21 compute-0 useradd[126841]: new user: name=clevis, UID=991, GID=991, home=/var/cache/clevis, shell=/usr/sbin/nologin, from=none
Oct 02 11:48:21 compute-0 usermod[126851]: add 'clevis' to group 'tss'
Oct 02 11:48:21 compute-0 usermod[126851]: add 'clevis' to shadow group 'tss'
Oct 02 11:48:23 compute-0 polkitd[6487]: Reloading rules
Oct 02 11:48:23 compute-0 polkitd[6487]: Collecting garbage unconditionally...
Oct 02 11:48:23 compute-0 polkitd[6487]: Loading rules from directory /etc/polkit-1/rules.d
Oct 02 11:48:23 compute-0 polkitd[6487]: Loading rules from directory /usr/share/polkit-1/rules.d
Oct 02 11:48:23 compute-0 polkitd[6487]: Finished loading, compiling and executing 4 rules
Oct 02 11:48:23 compute-0 polkitd[6487]: Reloading rules
Oct 02 11:48:23 compute-0 polkitd[6487]: Collecting garbage unconditionally...
Oct 02 11:48:23 compute-0 polkitd[6487]: Loading rules from directory /etc/polkit-1/rules.d
Oct 02 11:48:23 compute-0 polkitd[6487]: Loading rules from directory /usr/share/polkit-1/rules.d
Oct 02 11:48:23 compute-0 polkitd[6487]: Finished loading, compiling and executing 4 rules
Oct 02 11:48:24 compute-0 groupadd[127038]: group added to /etc/group: name=ceph, GID=167
Oct 02 11:48:24 compute-0 groupadd[127038]: group added to /etc/gshadow: name=ceph
Oct 02 11:48:24 compute-0 groupadd[127038]: new group: name=ceph, GID=167
Oct 02 11:48:24 compute-0 useradd[127044]: new user: name=ceph, UID=167, GID=167, home=/var/lib/ceph, shell=/sbin/nologin, from=none
Oct 02 11:48:27 compute-0 systemd[1]: Stopping OpenSSH server daemon...
Oct 02 11:48:27 compute-0 sshd[1014]: Received signal 15; terminating.
Oct 02 11:48:27 compute-0 systemd[1]: sshd.service: Deactivated successfully.
Oct 02 11:48:27 compute-0 systemd[1]: Stopped OpenSSH server daemon.
Oct 02 11:48:27 compute-0 systemd[1]: sshd.service: Consumed 1.192s CPU time, no IO.
Oct 02 11:48:27 compute-0 systemd[1]: Stopped target sshd-keygen.target.
Oct 02 11:48:27 compute-0 systemd[1]: Stopping sshd-keygen.target...
Oct 02 11:48:27 compute-0 systemd[1]: OpenSSH ecdsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
Oct 02 11:48:27 compute-0 systemd[1]: OpenSSH ed25519 Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
Oct 02 11:48:27 compute-0 systemd[1]: OpenSSH rsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
Oct 02 11:48:27 compute-0 systemd[1]: Reached target sshd-keygen.target.
Oct 02 11:48:27 compute-0 systemd[1]: Starting OpenSSH server daemon...
Oct 02 11:48:27 compute-0 sshd[127563]: Server listening on 0.0.0.0 port 22.
Oct 02 11:48:27 compute-0 sshd[127563]: Server listening on :: port 22.
Oct 02 11:48:27 compute-0 systemd[1]: Started OpenSSH server daemon.
Oct 02 11:48:29 compute-0 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update.
Oct 02 11:48:29 compute-0 systemd[1]: Starting man-db-cache-update.service...
Oct 02 11:48:29 compute-0 systemd[1]: Reloading.
Oct 02 11:48:29 compute-0 systemd-sysv-generator[127824]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:48:29 compute-0 systemd-rc-local-generator[127820]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:48:29 compute-0 systemd[1]: Queuing reload/restart jobs for marked units…
Oct 02 11:48:31 compute-0 podman[130103]: 2025-10-02 11:48:31.172841688 +0000 UTC m=+0.085855020 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_controller, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']})
Oct 02 11:48:33 compute-0 systemd[1]: Starting PackageKit Daemon...
Oct 02 11:48:33 compute-0 PackageKit[132561]: daemon start
Oct 02 11:48:33 compute-0 systemd[1]: Started PackageKit Daemon.
Oct 02 11:48:33 compute-0 sudo[109710]: pam_unix(sudo:session): session closed for user root
Oct 02 11:48:35 compute-0 podman[134966]: 2025-10-02 11:48:35.128699504 +0000 UTC m=+0.047120495 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, 
org.label-schema.name=CentOS Stream 9 Base Image, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_metadata_agent, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001)
Oct 02 11:48:36 compute-0 systemd[1]: man-db-cache-update.service: Deactivated successfully.
Oct 02 11:48:36 compute-0 systemd[1]: Finished man-db-cache-update.service.
Oct 02 11:48:36 compute-0 systemd[1]: man-db-cache-update.service: Consumed 9.031s CPU time.
Oct 02 11:48:36 compute-0 systemd[1]: run-r5cf214cbd88d44e88fa578da5eb9072d.service: Deactivated successfully.
Oct 02 11:48:40 compute-0 sudo[136265]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-inqeymhqrhixjfybqibkiyqzsciaogqk ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405720.3373156-973-259873317727202/AnsiballZ_systemd.py'
Oct 02 11:48:40 compute-0 sudo[136265]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:48:41 compute-0 python3.9[136267]: ansible-ansible.builtin.systemd Invoked with enabled=False masked=True name=libvirtd state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False force=None
Oct 02 11:48:41 compute-0 systemd[1]: Reloading.
Oct 02 11:48:41 compute-0 systemd-sysv-generator[136298]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:48:41 compute-0 systemd-rc-local-generator[136291]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:48:41 compute-0 sudo[136265]: pam_unix(sudo:session): session closed for user root
Oct 02 11:48:42 compute-0 sudo[136454]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-olzkcimkyfexzeprnkkkjushgwsllugf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405721.7619495-973-80828064364094/AnsiballZ_systemd.py'
Oct 02 11:48:42 compute-0 sudo[136454]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:48:42 compute-0 python3.9[136456]: ansible-ansible.builtin.systemd Invoked with enabled=False masked=True name=libvirtd-tcp.socket state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False force=None
Oct 02 11:48:42 compute-0 systemd[1]: Reloading.
Oct 02 11:48:42 compute-0 systemd-rc-local-generator[136483]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:48:42 compute-0 systemd-sysv-generator[136488]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:48:42 compute-0 sudo[136454]: pam_unix(sudo:session): session closed for user root
Oct 02 11:48:43 compute-0 sudo[136644]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-chdxfvbjwqjurmvlzmwkwbcfzdterlsk ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405722.8247764-973-136970688217587/AnsiballZ_systemd.py'
Oct 02 11:48:43 compute-0 sudo[136644]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:48:43 compute-0 python3.9[136646]: ansible-ansible.builtin.systemd Invoked with enabled=False masked=True name=libvirtd-tls.socket state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False force=None
Oct 02 11:48:43 compute-0 systemd[1]: Reloading.
Oct 02 11:48:43 compute-0 systemd-rc-local-generator[136670]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:48:43 compute-0 systemd-sysv-generator[136676]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:48:44 compute-0 sudo[136644]: pam_unix(sudo:session): session closed for user root
Oct 02 11:48:45 compute-0 sudo[136834]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zbkfmujngtuasearvzkemurdqlipxndy ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405725.0352418-973-61801956167169/AnsiballZ_systemd.py'
Oct 02 11:48:45 compute-0 sudo[136834]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:48:45 compute-0 python3.9[136836]: ansible-ansible.builtin.systemd Invoked with enabled=False masked=True name=virtproxyd-tcp.socket state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False force=None
Oct 02 11:48:45 compute-0 systemd[1]: Reloading.
Oct 02 11:48:45 compute-0 systemd-sysv-generator[136868]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:48:45 compute-0 systemd-rc-local-generator[136865]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:48:46 compute-0 sudo[136834]: pam_unix(sudo:session): session closed for user root
Oct 02 11:48:46 compute-0 sudo[137023]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rlpiyiiynbjvgmpmxyezylwjlykeeqso ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405726.2301037-1060-92883467539548/AnsiballZ_systemd.py'
Oct 02 11:48:46 compute-0 sudo[137023]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:48:46 compute-0 python3.9[137025]: ansible-ansible.builtin.systemd Invoked with enabled=True masked=False name=virtlogd.service daemon_reload=False daemon_reexec=False scope=system no_block=False state=None force=None
Oct 02 11:48:46 compute-0 systemd[1]: Reloading.
Oct 02 11:48:47 compute-0 systemd-sysv-generator[137061]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:48:47 compute-0 systemd-rc-local-generator[137056]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:48:47 compute-0 sudo[137023]: pam_unix(sudo:session): session closed for user root
Oct 02 11:48:47 compute-0 sudo[137214]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yjkggvbbavbvwjjqprupwyqzefreieiq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405727.4187033-1060-173374112160536/AnsiballZ_systemd.py'
Oct 02 11:48:47 compute-0 sudo[137214]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:48:47 compute-0 python3.9[137216]: ansible-ansible.builtin.systemd Invoked with enabled=True masked=False name=virtnodedevd.service daemon_reload=False daemon_reexec=False scope=system no_block=False state=None force=None
Oct 02 11:48:48 compute-0 systemd[1]: Reloading.
Oct 02 11:48:48 compute-0 systemd-rc-local-generator[137245]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:48:48 compute-0 systemd-sysv-generator[137248]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:48:48 compute-0 sudo[137214]: pam_unix(sudo:session): session closed for user root
Oct 02 11:48:48 compute-0 sudo[137404]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yqgppkkpmoxwcxovxbchqzhvglrvoniz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405728.5165575-1060-51826492225377/AnsiballZ_systemd.py'
Oct 02 11:48:48 compute-0 sudo[137404]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:48:49 compute-0 python3.9[137406]: ansible-ansible.builtin.systemd Invoked with enabled=True masked=False name=virtproxyd.service daemon_reload=False daemon_reexec=False scope=system no_block=False state=None force=None
Oct 02 11:48:49 compute-0 systemd[1]: Reloading.
Oct 02 11:48:49 compute-0 systemd-rc-local-generator[137436]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:48:49 compute-0 systemd-sysv-generator[137439]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:48:49 compute-0 sudo[137404]: pam_unix(sudo:session): session closed for user root
Oct 02 11:48:49 compute-0 sudo[137594]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ggmnuiozoylairhkswgkreckslolghyr ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405729.5923057-1060-139140570943602/AnsiballZ_systemd.py'
Oct 02 11:48:49 compute-0 sudo[137594]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:48:50 compute-0 python3.9[137596]: ansible-ansible.builtin.systemd Invoked with enabled=True masked=False name=virtqemud.service daemon_reload=False daemon_reexec=False scope=system no_block=False state=None force=None
Oct 02 11:48:50 compute-0 sudo[137594]: pam_unix(sudo:session): session closed for user root
Oct 02 11:48:50 compute-0 sudo[137749]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vgceqvfpumcxwpscysivrbqvmlkdyjjs ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405730.3722203-1060-3135047104349/AnsiballZ_systemd.py'
Oct 02 11:48:50 compute-0 sudo[137749]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:48:50 compute-0 python3.9[137751]: ansible-ansible.builtin.systemd Invoked with enabled=True masked=False name=virtsecretd.service daemon_reload=False daemon_reexec=False scope=system no_block=False state=None force=None
Oct 02 11:48:51 compute-0 systemd[1]: Reloading.
Oct 02 11:48:51 compute-0 systemd-rc-local-generator[137775]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:48:51 compute-0 systemd-sysv-generator[137782]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:48:51 compute-0 sudo[137749]: pam_unix(sudo:session): session closed for user root
Oct 02 11:48:51 compute-0 sudo[137939]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mwydvfjfgcvtzaxhblcfbiuxghfvnhwa ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405731.5394437-1168-32035511888084/AnsiballZ_systemd.py'
Oct 02 11:48:51 compute-0 sudo[137939]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:48:52 compute-0 python3.9[137941]: ansible-ansible.builtin.systemd Invoked with enabled=True masked=False name=virtproxyd-tls.socket state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None
Oct 02 11:48:52 compute-0 systemd[1]: Reloading.
Oct 02 11:48:52 compute-0 systemd-rc-local-generator[137973]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:48:52 compute-0 systemd-sysv-generator[137976]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:48:52 compute-0 systemd[1]: Listening on libvirt proxy daemon socket.
Oct 02 11:48:52 compute-0 systemd[1]: Listening on libvirt proxy daemon TLS IP socket.
Oct 02 11:48:52 compute-0 sudo[137939]: pam_unix(sudo:session): session closed for user root
Oct 02 11:48:52 compute-0 sudo[138133]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-oyklqatmschglugwgmarzxdvgvdnhzos ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405732.6966934-1192-234279305007097/AnsiballZ_systemd.py'
Oct 02 11:48:52 compute-0 sudo[138133]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:48:53 compute-0 python3.9[138135]: ansible-ansible.builtin.systemd Invoked with enabled=True masked=False name=virtlogd.socket daemon_reload=False daemon_reexec=False scope=system no_block=False state=None force=None
Oct 02 11:48:53 compute-0 sudo[138133]: pam_unix(sudo:session): session closed for user root
Oct 02 11:48:53 compute-0 sudo[138288]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-afgaehutikwyjyqckpbxkklylsmchdnz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405733.5405328-1192-228958686809720/AnsiballZ_systemd.py'
Oct 02 11:48:53 compute-0 sudo[138288]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:48:54 compute-0 python3.9[138290]: ansible-ansible.builtin.systemd Invoked with enabled=True masked=False name=virtlogd-admin.socket daemon_reload=False daemon_reexec=False scope=system no_block=False state=None force=None
Oct 02 11:48:54 compute-0 sudo[138288]: pam_unix(sudo:session): session closed for user root
Oct 02 11:48:54 compute-0 sudo[138443]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rakdryocccwzygmotcfcnbavsflzhwfk ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405734.3864417-1192-228483197553750/AnsiballZ_systemd.py'
Oct 02 11:48:54 compute-0 sudo[138443]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:48:54 compute-0 python3.9[138445]: ansible-ansible.builtin.systemd Invoked with enabled=True masked=False name=virtnodedevd.socket daemon_reload=False daemon_reexec=False scope=system no_block=False state=None force=None
Oct 02 11:48:55 compute-0 sudo[138443]: pam_unix(sudo:session): session closed for user root
Oct 02 11:48:55 compute-0 sudo[138598]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cwkptnzupznrcnvcmsddyhgfqyqccfpe ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405735.1922445-1192-23064997762049/AnsiballZ_systemd.py'
Oct 02 11:48:55 compute-0 sudo[138598]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:48:55 compute-0 python3.9[138600]: ansible-ansible.builtin.systemd Invoked with enabled=True masked=False name=virtnodedevd-ro.socket daemon_reload=False daemon_reexec=False scope=system no_block=False state=None force=None
Oct 02 11:48:55 compute-0 sudo[138598]: pam_unix(sudo:session): session closed for user root
Oct 02 11:48:56 compute-0 sudo[138753]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sykkazdlmrchpbnxkcorpytqnmblpueb ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405735.9552743-1192-71763712732683/AnsiballZ_systemd.py'
Oct 02 11:48:56 compute-0 sudo[138753]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:48:56 compute-0 python3.9[138755]: ansible-ansible.builtin.systemd Invoked with enabled=True masked=False name=virtnodedevd-admin.socket daemon_reload=False daemon_reexec=False scope=system no_block=False state=None force=None
Oct 02 11:48:56 compute-0 sudo[138753]: pam_unix(sudo:session): session closed for user root
Oct 02 11:48:57 compute-0 sudo[138908]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zbpiivoluzapfhhtmauxhykmcwjvndga ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405736.731164-1192-257222875913423/AnsiballZ_systemd.py'
Oct 02 11:48:57 compute-0 sudo[138908]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:48:57 compute-0 python3.9[138910]: ansible-ansible.builtin.systemd Invoked with enabled=True masked=False name=virtproxyd.socket daemon_reload=False daemon_reexec=False scope=system no_block=False state=None force=None
Oct 02 11:48:57 compute-0 sudo[138908]: pam_unix(sudo:session): session closed for user root
Oct 02 11:48:57 compute-0 sudo[139063]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cppwmkrzdspwmfytufqctxjlgrmmvwii ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405737.570761-1192-123042559806338/AnsiballZ_systemd.py'
Oct 02 11:48:57 compute-0 sudo[139063]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:48:58 compute-0 python3.9[139065]: ansible-ansible.builtin.systemd Invoked with enabled=True masked=False name=virtproxyd-ro.socket daemon_reload=False daemon_reexec=False scope=system no_block=False state=None force=None
Oct 02 11:48:58 compute-0 sudo[139063]: pam_unix(sudo:session): session closed for user root
Oct 02 11:48:58 compute-0 sudo[139218]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gokjdqyditwokgviqymxghabasyvnqyn ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405738.4181483-1192-163198930815745/AnsiballZ_systemd.py'
Oct 02 11:48:58 compute-0 sudo[139218]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:48:59 compute-0 python3.9[139220]: ansible-ansible.builtin.systemd Invoked with enabled=True masked=False name=virtproxyd-admin.socket daemon_reload=False daemon_reexec=False scope=system no_block=False state=None force=None
Oct 02 11:48:59 compute-0 sudo[139218]: pam_unix(sudo:session): session closed for user root
Oct 02 11:48:59 compute-0 sudo[139373]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mlzifxyeglkrkxzjuhtcmxntnjjkzzhj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405739.4223711-1192-49387136193815/AnsiballZ_systemd.py'
Oct 02 11:48:59 compute-0 sudo[139373]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:48:59 compute-0 python3.9[139375]: ansible-ansible.builtin.systemd Invoked with enabled=True masked=False name=virtqemud.socket daemon_reload=False daemon_reexec=False scope=system no_block=False state=None force=None
Oct 02 11:49:00 compute-0 sudo[139373]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:00 compute-0 sudo[139528]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dvokppuougtgosxubcetacndvwozqxjd ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405740.1692889-1192-111261989901515/AnsiballZ_systemd.py'
Oct 02 11:49:00 compute-0 sudo[139528]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:00 compute-0 python3.9[139530]: ansible-ansible.builtin.systemd Invoked with enabled=True masked=False name=virtqemud-ro.socket daemon_reload=False daemon_reexec=False scope=system no_block=False state=None force=None
Oct 02 11:49:00 compute-0 sudo[139528]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:01 compute-0 sudo[139696]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jyaviazpyqvplurcjjbjxxdlkqphkdnp ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405740.9714172-1192-260781180731984/AnsiballZ_systemd.py'
Oct 02 11:49:01 compute-0 sudo[139696]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:01 compute-0 podman[139657]: 2025-10-02 11:49:01.334831025 +0000 UTC m=+0.121352406 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.license=GPLv2, container_name=ovn_controller, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']})
Oct 02 11:49:01 compute-0 python3.9[139702]: ansible-ansible.builtin.systemd Invoked with enabled=True masked=False name=virtqemud-admin.socket daemon_reload=False daemon_reexec=False scope=system no_block=False state=None force=None
Oct 02 11:49:01 compute-0 sudo[139696]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:02 compute-0 sudo[139865]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-iewhtponunykfffnszmkapqxyqjhkayk ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405741.775569-1192-148478098394381/AnsiballZ_systemd.py'
Oct 02 11:49:02 compute-0 sudo[139865]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:49:02.189 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 11:49:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:49:02.190 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 11:49:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:49:02.190 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 11:49:02 compute-0 python3.9[139867]: ansible-ansible.builtin.systemd Invoked with enabled=True masked=False name=virtsecretd.socket daemon_reload=False daemon_reexec=False scope=system no_block=False state=None force=None
Oct 02 11:49:02 compute-0 sudo[139865]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:02 compute-0 sudo[140020]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-druyhbtfgxrbbmbmylppqduaixvfwzzb ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405742.5569246-1192-265615626512588/AnsiballZ_systemd.py'
Oct 02 11:49:02 compute-0 sudo[140020]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:03 compute-0 python3.9[140022]: ansible-ansible.builtin.systemd Invoked with enabled=True masked=False name=virtsecretd-ro.socket daemon_reload=False daemon_reexec=False scope=system no_block=False state=None force=None
Oct 02 11:49:03 compute-0 sudo[140020]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:03 compute-0 sudo[140175]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vqbrldbkzncgdmnstaycqqddsrnuczcq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405743.3420405-1192-103748690839093/AnsiballZ_systemd.py'
Oct 02 11:49:03 compute-0 sudo[140175]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:04 compute-0 python3.9[140177]: ansible-ansible.builtin.systemd Invoked with enabled=True masked=False name=virtsecretd-admin.socket daemon_reload=False daemon_reexec=False scope=system no_block=False state=None force=None
Oct 02 11:49:04 compute-0 sudo[140175]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:04 compute-0 sudo[140330]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bfsandsudyhtsymxyvvygnnkjinrebvk ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405744.5488873-1498-135478190812997/AnsiballZ_file.py'
Oct 02 11:49:04 compute-0 sudo[140330]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:05 compute-0 python3.9[140332]: ansible-ansible.builtin.file Invoked with group=root owner=root path=/etc/tmpfiles.d/ setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:49:05 compute-0 sudo[140330]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:05 compute-0 sudo[140493]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ifqlvlnlgnccmzfrqrgfseemctwyfybt ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405745.1830935-1498-93782490927854/AnsiballZ_file.py'
Oct 02 11:49:05 compute-0 sudo[140493]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:05 compute-0 podman[140456]: 2025-10-02 11:49:05.526310097 +0000 UTC m=+0.069082121 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, config_id=ovn_metadata_agent, container_name=ovn_metadata_agent, 
managed_by=edpm_ansible, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0)
Oct 02 11:49:05 compute-0 python3.9[140501]: ansible-ansible.builtin.file Invoked with group=root owner=root path=/var/lib/edpm-config/firewall setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:49:05 compute-0 sudo[140493]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:06 compute-0 sudo[140653]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ggtgeiovhzmfyvzzycyrbdkrbauniuye ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405745.8485181-1498-225114284373421/AnsiballZ_file.py'
Oct 02 11:49:06 compute-0 sudo[140653]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:06 compute-0 python3.9[140655]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/etc/pki/libvirt setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:49:06 compute-0 sudo[140653]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:06 compute-0 sudo[140805]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hkfulakfqecxbgqqbanytxprqktaurno ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405746.5077558-1498-253420968605925/AnsiballZ_file.py'
Oct 02 11:49:06 compute-0 sudo[140805]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:06 compute-0 python3.9[140807]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/etc/pki/libvirt/private setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:49:06 compute-0 sudo[140805]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:07 compute-0 sudo[140957]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cwnrowzgggehieamcucpfgoijwxnxpdg ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405747.085309-1498-92731085750110/AnsiballZ_file.py'
Oct 02 11:49:07 compute-0 sudo[140957]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:07 compute-0 python3.9[140959]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/etc/pki/CA setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:49:07 compute-0 sudo[140957]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:07 compute-0 sudo[141109]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zxstfhygebfkilgrtplxlrkeumatwmra ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405747.7300565-1498-278753102318058/AnsiballZ_file.py'
Oct 02 11:49:07 compute-0 sudo[141109]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:08 compute-0 python3.9[141111]: ansible-ansible.builtin.file Invoked with group=qemu owner=root path=/etc/pki/qemu setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:49:08 compute-0 sudo[141109]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:09 compute-0 sudo[141261]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fdjwiouttywycsekgdonyylamhhztefl ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405748.391608-1627-182662463377178/AnsiballZ_stat.py'
Oct 02 11:49:09 compute-0 sudo[141261]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:09 compute-0 python3.9[141263]: ansible-ansible.legacy.stat Invoked with path=/etc/libvirt/virtlogd.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:49:09 compute-0 sudo[141261]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:09 compute-0 sudo[141386]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vgdriffpvdoprbuatsijytuafowwfupy ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405748.391608-1627-182662463377178/AnsiballZ_copy.py'
Oct 02 11:49:09 compute-0 sudo[141386]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:09 compute-0 python3.9[141388]: ansible-ansible.legacy.copy Invoked with dest=/etc/libvirt/virtlogd.conf group=libvirt mode=0640 owner=libvirt src=/home/zuul/.ansible/tmp/ansible-tmp-1759405748.391608-1627-182662463377178/.source.conf follow=False _original_basename=virtlogd.conf checksum=d7a72ae92c2c205983b029473e05a6aa4c58ec24 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:09 compute-0 sudo[141386]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:10 compute-0 sudo[141538]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bsxjrkjrnvehcwcicfolxqaextiuuohl ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405750.0792534-1627-206348441594298/AnsiballZ_stat.py'
Oct 02 11:49:10 compute-0 sudo[141538]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:10 compute-0 python3.9[141540]: ansible-ansible.legacy.stat Invoked with path=/etc/libvirt/virtnodedevd.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:49:10 compute-0 sudo[141538]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:10 compute-0 sudo[141663]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lchnydwmcnnaqecexidjpuvcwtkjjbhs ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405750.0792534-1627-206348441594298/AnsiballZ_copy.py'
Oct 02 11:49:10 compute-0 sudo[141663]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:11 compute-0 python3.9[141665]: ansible-ansible.legacy.copy Invoked with dest=/etc/libvirt/virtnodedevd.conf group=libvirt mode=0640 owner=libvirt src=/home/zuul/.ansible/tmp/ansible-tmp-1759405750.0792534-1627-206348441594298/.source.conf follow=False _original_basename=virtnodedevd.conf checksum=7a604468adb2868f1ab6ebd0fd4622286e6373e2 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:11 compute-0 sudo[141663]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:11 compute-0 sudo[141815]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-krkkmchxdbslrmxyekiwklibyasdurha ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405751.235011-1627-213943353728116/AnsiballZ_stat.py'
Oct 02 11:49:11 compute-0 sudo[141815]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:11 compute-0 python3.9[141817]: ansible-ansible.legacy.stat Invoked with path=/etc/libvirt/virtproxyd.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:49:11 compute-0 sudo[141815]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:12 compute-0 sudo[141940]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-twtyrwxdrzydluawunftmwgrfkrfymxp ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405751.235011-1627-213943353728116/AnsiballZ_copy.py'
Oct 02 11:49:12 compute-0 sudo[141940]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:12 compute-0 python3.9[141942]: ansible-ansible.legacy.copy Invoked with dest=/etc/libvirt/virtproxyd.conf group=libvirt mode=0640 owner=libvirt src=/home/zuul/.ansible/tmp/ansible-tmp-1759405751.235011-1627-213943353728116/.source.conf follow=False _original_basename=virtproxyd.conf checksum=28bc484b7c9988e03de49d4fcc0a088ea975f716 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:12 compute-0 sudo[141940]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:12 compute-0 sudo[142092]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qqwdwvfsiyxastrpnbismkgramhasfay ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405752.4841137-1627-14673421539059/AnsiballZ_stat.py'
Oct 02 11:49:12 compute-0 sudo[142092]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:12 compute-0 python3.9[142094]: ansible-ansible.legacy.stat Invoked with path=/etc/libvirt/virtqemud.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:49:12 compute-0 sudo[142092]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:13 compute-0 sudo[142217]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qymdbwerhyjkiilgquedfjhznibtfjve ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405752.4841137-1627-14673421539059/AnsiballZ_copy.py'
Oct 02 11:49:13 compute-0 sudo[142217]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:13 compute-0 python3.9[142219]: ansible-ansible.legacy.copy Invoked with dest=/etc/libvirt/virtqemud.conf group=libvirt mode=0640 owner=libvirt src=/home/zuul/.ansible/tmp/ansible-tmp-1759405752.4841137-1627-14673421539059/.source.conf follow=False _original_basename=virtqemud.conf checksum=7a604468adb2868f1ab6ebd0fd4622286e6373e2 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:13 compute-0 sudo[142217]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:13 compute-0 sudo[142369]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-djxvpetkpfxqxwtvjagslcaqjlulgjco ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405753.7435956-1627-71930485298931/AnsiballZ_stat.py'
Oct 02 11:49:13 compute-0 sudo[142369]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:14 compute-0 python3.9[142371]: ansible-ansible.legacy.stat Invoked with path=/etc/libvirt/qemu.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:49:14 compute-0 sudo[142369]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:14 compute-0 sudo[142494]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zmeskijcmdtcgjqcouhrbefgtmaiuezm ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405753.7435956-1627-71930485298931/AnsiballZ_copy.py'
Oct 02 11:49:14 compute-0 sudo[142494]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:14 compute-0 python3.9[142496]: ansible-ansible.legacy.copy Invoked with dest=/etc/libvirt/qemu.conf group=libvirt mode=0640 owner=libvirt src=/home/zuul/.ansible/tmp/ansible-tmp-1759405753.7435956-1627-71930485298931/.source.conf follow=False _original_basename=qemu.conf.j2 checksum=c44de21af13c90603565570f09ff60c6a41ed8df backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:14 compute-0 sudo[142494]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:15 compute-0 sudo[142646]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yjnejxmnwlsgankeyhhpjrhiiqdxdchf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405754.8069987-1627-81323318551639/AnsiballZ_stat.py'
Oct 02 11:49:15 compute-0 sudo[142646]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:15 compute-0 python3.9[142648]: ansible-ansible.legacy.stat Invoked with path=/etc/libvirt/virtsecretd.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:49:15 compute-0 sudo[142646]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:15 compute-0 sudo[142771]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kgepqhqmowdarpjhozfsyovxgfyktwzu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405754.8069987-1627-81323318551639/AnsiballZ_copy.py'
Oct 02 11:49:15 compute-0 sudo[142771]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:15 compute-0 python3.9[142773]: ansible-ansible.legacy.copy Invoked with dest=/etc/libvirt/virtsecretd.conf group=libvirt mode=0640 owner=libvirt src=/home/zuul/.ansible/tmp/ansible-tmp-1759405754.8069987-1627-81323318551639/.source.conf follow=False _original_basename=virtsecretd.conf checksum=7a604468adb2868f1ab6ebd0fd4622286e6373e2 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:15 compute-0 sudo[142771]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:16 compute-0 sudo[142923]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mbhdjgynlqkrzhxpdqmldwguvkwjjorb ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405755.9302192-1627-16066282863848/AnsiballZ_stat.py'
Oct 02 11:49:16 compute-0 sudo[142923]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:16 compute-0 python3.9[142925]: ansible-ansible.legacy.stat Invoked with path=/etc/libvirt/auth.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:49:16 compute-0 sudo[142923]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:16 compute-0 sudo[143046]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bcjzgqtmgaaswrbtjkmvrsxisrecdemt ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405755.9302192-1627-16066282863848/AnsiballZ_copy.py'
Oct 02 11:49:16 compute-0 sudo[143046]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:16 compute-0 python3.9[143048]: ansible-ansible.legacy.copy Invoked with dest=/etc/libvirt/auth.conf group=libvirt mode=0600 owner=libvirt src=/home/zuul/.ansible/tmp/ansible-tmp-1759405755.9302192-1627-16066282863848/.source.conf follow=False _original_basename=auth.conf checksum=a94cd818c374cec2c8425b70d2e0e2f41b743ae4 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:17 compute-0 sudo[143046]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:17 compute-0 sudo[143198]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ulggbrerqczembyjbgylyogblmgarvpt ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405757.129539-1627-191419103778389/AnsiballZ_stat.py'
Oct 02 11:49:17 compute-0 sudo[143198]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:17 compute-0 python3.9[143200]: ansible-ansible.legacy.stat Invoked with path=/etc/sasl2/libvirt.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:49:17 compute-0 sudo[143198]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:17 compute-0 sudo[143323]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cyxedrualmbksdwbmqmjnlcbsroqhdtk ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405757.129539-1627-191419103778389/AnsiballZ_copy.py'
Oct 02 11:49:17 compute-0 sudo[143323]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:18 compute-0 python3.9[143325]: ansible-ansible.legacy.copy Invoked with dest=/etc/sasl2/libvirt.conf group=libvirt mode=0640 owner=libvirt src=/home/zuul/.ansible/tmp/ansible-tmp-1759405757.129539-1627-191419103778389/.source.conf follow=False _original_basename=sasl_libvirt.conf checksum=652e4d404bf79253d06956b8e9847c9364979d4a backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:18 compute-0 sudo[143323]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:19 compute-0 sudo[143475]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gioddpszjlrckwpqwxqjtspygroxioez ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405758.9203703-1966-116592553225958/AnsiballZ_command.py'
Oct 02 11:49:19 compute-0 sudo[143475]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:19 compute-0 python3.9[143477]: ansible-ansible.legacy.command Invoked with cmd=saslpasswd2 -f /etc/libvirt/passwd.db -p -a libvirt -u openstack migration stdin=12345678 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True _raw_params=None argv=None chdir=None executable=None creates=None removes=None
Oct 02 11:49:19 compute-0 sudo[143475]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:19 compute-0 sudo[143628]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xgikxqpxmaablgmtlqtoejuvlkawsesb ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405759.6854994-1993-231755287649408/AnsiballZ_file.py'
Oct 02 11:49:19 compute-0 sudo[143628]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:20 compute-0 python3.9[143630]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/etc/systemd/system/virtlogd.socket.d state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:20 compute-0 sudo[143628]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:20 compute-0 sudo[143780]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wuolxkhojtvaqqfdvvqvjqacttltvbmf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405760.2874415-1993-40768876006835/AnsiballZ_file.py'
Oct 02 11:49:20 compute-0 sudo[143780]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:20 compute-0 python3.9[143782]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/etc/systemd/system/virtlogd-admin.socket.d state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:20 compute-0 sudo[143780]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:21 compute-0 sudo[143932]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ihprngutiasgpbyotzogiiodomcymihq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405760.8696914-1993-269588524790277/AnsiballZ_file.py'
Oct 02 11:49:21 compute-0 sudo[143932]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:21 compute-0 python3.9[143934]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/etc/systemd/system/virtnodedevd.socket.d state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:21 compute-0 sudo[143932]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:21 compute-0 sudo[144084]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qeeyhfvrekprqrwrxwcxhkuwosmyfezv ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405761.4617836-1993-144384126081367/AnsiballZ_file.py'
Oct 02 11:49:21 compute-0 sudo[144084]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:21 compute-0 python3.9[144086]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/etc/systemd/system/virtnodedevd-ro.socket.d state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:21 compute-0 sudo[144084]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:22 compute-0 sudo[144236]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-eyhuookqjbpuqminnppgbkybpklkptte ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405762.0248647-1993-122170837359715/AnsiballZ_file.py'
Oct 02 11:49:22 compute-0 sudo[144236]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:22 compute-0 python3.9[144238]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/etc/systemd/system/virtnodedevd-admin.socket.d state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:22 compute-0 sudo[144236]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:23 compute-0 sudo[144388]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gzpwuiugocwrmjvyjbweilmaqjgeagak ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405762.7413702-1993-249605987475512/AnsiballZ_file.py'
Oct 02 11:49:23 compute-0 sudo[144388]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:23 compute-0 python3.9[144390]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/etc/systemd/system/virtproxyd.socket.d state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:23 compute-0 sudo[144388]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:23 compute-0 sudo[144540]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vhdhiqvkkahtvhvnvwgzigntgraumhef ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405763.344464-1993-153680318170938/AnsiballZ_file.py'
Oct 02 11:49:23 compute-0 sudo[144540]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:23 compute-0 python3.9[144542]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/etc/systemd/system/virtproxyd-ro.socket.d state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:23 compute-0 sudo[144540]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:24 compute-0 sudo[144692]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-liqmvrnwrgbolbqqnjvbqhisdfusbjha ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405763.915088-1993-208970073367586/AnsiballZ_file.py'
Oct 02 11:49:24 compute-0 sudo[144692]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:24 compute-0 python3.9[144694]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/etc/systemd/system/virtproxyd-admin.socket.d state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:24 compute-0 sudo[144692]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:24 compute-0 sudo[144844]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-srjqpzvqldlhojnoksyypedomagtstdb ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405764.5204985-1993-79549440086918/AnsiballZ_file.py'
Oct 02 11:49:24 compute-0 sudo[144844]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:24 compute-0 python3.9[144846]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/etc/systemd/system/virtqemud.socket.d state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:24 compute-0 sudo[144844]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:25 compute-0 sudo[144996]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ntvsflcviuelpfxupqwealsqpaqrqckf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405765.1156476-1993-48057567764834/AnsiballZ_file.py'
Oct 02 11:49:25 compute-0 sudo[144996]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:25 compute-0 python3.9[144998]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/etc/systemd/system/virtqemud-ro.socket.d state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:25 compute-0 sudo[144996]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:25 compute-0 sudo[145148]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tikbbgexjquxxlwtuxdpootqymzhradg ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405765.67715-1993-144194595519599/AnsiballZ_file.py'
Oct 02 11:49:25 compute-0 sudo[145148]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:26 compute-0 python3.9[145150]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/etc/systemd/system/virtqemud-admin.socket.d state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:26 compute-0 sudo[145148]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:26 compute-0 sudo[145300]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hrmijnqvdgmlcwstohmhykkvucaoaaji ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405766.221725-1993-21064293924507/AnsiballZ_file.py'
Oct 02 11:49:26 compute-0 sudo[145300]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:26 compute-0 python3.9[145302]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/etc/systemd/system/virtsecretd.socket.d state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:26 compute-0 sudo[145300]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:26 compute-0 sudo[145452]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ghqxwtjkmtcdemeppkffqdbthvsjiyzt ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405766.7755027-1993-267390189043283/AnsiballZ_file.py'
Oct 02 11:49:26 compute-0 sudo[145452]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:27 compute-0 python3.9[145454]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/etc/systemd/system/virtsecretd-ro.socket.d state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:27 compute-0 sudo[145452]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:27 compute-0 sudo[145604]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jefcykuvhnnsrlmotidlcdgwxycntbln ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405767.339603-1993-161886254995412/AnsiballZ_file.py'
Oct 02 11:49:27 compute-0 sudo[145604]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:27 compute-0 python3.9[145606]: ansible-ansible.builtin.file Invoked with group=root mode=0755 owner=root path=/etc/systemd/system/virtsecretd-admin.socket.d state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:27 compute-0 sudo[145604]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:28 compute-0 sudo[145756]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fdaxbjklutcvhtrmsmonqolwmzjpqlca ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405768.5217597-2290-77718581116755/AnsiballZ_stat.py'
Oct 02 11:49:28 compute-0 sudo[145756]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:28 compute-0 python3.9[145758]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/virtlogd.socket.d/override.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:49:28 compute-0 sudo[145756]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:29 compute-0 sudo[145879]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hkmgfjypzlynazcaylbocjnixwhnemyn ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405768.5217597-2290-77718581116755/AnsiballZ_copy.py'
Oct 02 11:49:29 compute-0 sudo[145879]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:29 compute-0 python3.9[145881]: ansible-ansible.legacy.copy Invoked with dest=/etc/systemd/system/virtlogd.socket.d/override.conf group=root mode=0644 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405768.5217597-2290-77718581116755/.source.conf follow=False _original_basename=libvirt-socket.unit.j2 checksum=0bad41f409b4ee7e780a2a59dc18f5c84ed99826 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:29 compute-0 sudo[145879]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:30 compute-0 sudo[146031]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ccfwedatlwxziopbqgzyrvlevuefmhow ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405769.8197417-2290-23729912497091/AnsiballZ_stat.py'
Oct 02 11:49:30 compute-0 sudo[146031]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:30 compute-0 python3.9[146033]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/virtlogd-admin.socket.d/override.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:49:30 compute-0 sudo[146031]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:30 compute-0 sudo[146154]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cvibwjfibxxrvyxirnybrxafkumluavs ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405769.8197417-2290-23729912497091/AnsiballZ_copy.py'
Oct 02 11:49:30 compute-0 sudo[146154]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:30 compute-0 python3.9[146156]: ansible-ansible.legacy.copy Invoked with dest=/etc/systemd/system/virtlogd-admin.socket.d/override.conf group=root mode=0644 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405769.8197417-2290-23729912497091/.source.conf follow=False _original_basename=libvirt-socket.unit.j2 checksum=0bad41f409b4ee7e780a2a59dc18f5c84ed99826 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:30 compute-0 sudo[146154]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:31 compute-0 sudo[146306]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ktpceyzdjtexuptudiqcqjtemndvhlye ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405771.0013065-2290-206553085835415/AnsiballZ_stat.py'
Oct 02 11:49:31 compute-0 sudo[146306]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:31 compute-0 python3.9[146308]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/virtnodedevd.socket.d/override.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:49:31 compute-0 sudo[146306]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:31 compute-0 sudo[146440]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rdjcidzmydfiztxtzfqwtkxiaeaerzhb ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405771.0013065-2290-206553085835415/AnsiballZ_copy.py'
Oct 02 11:49:31 compute-0 sudo[146440]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:31 compute-0 podman[146403]: 2025-10-02 11:49:31.801832193 +0000 UTC m=+0.084747405 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, container_name=ovn_controller, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_id=ovn_controller, io.buildah.version=1.41.3, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']})
Oct 02 11:49:31 compute-0 python3.9[146448]: ansible-ansible.legacy.copy Invoked with dest=/etc/systemd/system/virtnodedevd.socket.d/override.conf group=root mode=0644 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405771.0013065-2290-206553085835415/.source.conf follow=False _original_basename=libvirt-socket.unit.j2 checksum=0bad41f409b4ee7e780a2a59dc18f5c84ed99826 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:31 compute-0 sudo[146440]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:32 compute-0 sudo[146607]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vbfemgxgouzcvtjspswkdyjdnlupkpob ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405772.0776327-2290-118163469193938/AnsiballZ_stat.py'
Oct 02 11:49:32 compute-0 sudo[146607]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:32 compute-0 python3.9[146609]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/virtnodedevd-ro.socket.d/override.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:49:32 compute-0 sudo[146607]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:32 compute-0 sudo[146730]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-knrdcwvgfceitanjtqdgnxtlkukzotch ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405772.0776327-2290-118163469193938/AnsiballZ_copy.py'
Oct 02 11:49:32 compute-0 sudo[146730]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:33 compute-0 python3.9[146732]: ansible-ansible.legacy.copy Invoked with dest=/etc/systemd/system/virtnodedevd-ro.socket.d/override.conf group=root mode=0644 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405772.0776327-2290-118163469193938/.source.conf follow=False _original_basename=libvirt-socket.unit.j2 checksum=0bad41f409b4ee7e780a2a59dc18f5c84ed99826 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:33 compute-0 sudo[146730]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:33 compute-0 sudo[146882]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zojxklyejtpbjetfovtdpcmpywrnsrhi ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405773.1916215-2290-208640709272627/AnsiballZ_stat.py'
Oct 02 11:49:33 compute-0 sudo[146882]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:33 compute-0 python3.9[146884]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/virtnodedevd-admin.socket.d/override.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:49:33 compute-0 sudo[146882]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:33 compute-0 sudo[147005]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yrxfmhtmwsxgpdzpkjugwutymdssjmnt ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405773.1916215-2290-208640709272627/AnsiballZ_copy.py'
Oct 02 11:49:33 compute-0 sudo[147005]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:34 compute-0 python3.9[147007]: ansible-ansible.legacy.copy Invoked with dest=/etc/systemd/system/virtnodedevd-admin.socket.d/override.conf group=root mode=0644 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405773.1916215-2290-208640709272627/.source.conf follow=False _original_basename=libvirt-socket.unit.j2 checksum=0bad41f409b4ee7e780a2a59dc18f5c84ed99826 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:34 compute-0 sudo[147005]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:34 compute-0 sudo[147157]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wnqrvznjkeyfvxqebldwimqsybpihdni ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405774.300045-2290-234413136814556/AnsiballZ_stat.py'
Oct 02 11:49:34 compute-0 sudo[147157]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:34 compute-0 python3.9[147159]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/virtproxyd.socket.d/override.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:49:34 compute-0 sudo[147157]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:35 compute-0 sudo[147280]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-poquzcqtjqmzqhmfjrhfpkjuvrrsuhej ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405774.300045-2290-234413136814556/AnsiballZ_copy.py'
Oct 02 11:49:35 compute-0 sudo[147280]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:35 compute-0 python3.9[147282]: ansible-ansible.legacy.copy Invoked with dest=/etc/systemd/system/virtproxyd.socket.d/override.conf group=root mode=0644 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405774.300045-2290-234413136814556/.source.conf follow=False _original_basename=libvirt-socket.unit.j2 checksum=0bad41f409b4ee7e780a2a59dc18f5c84ed99826 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:35 compute-0 sudo[147280]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:35 compute-0 sudo[147449]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bhnopbmovpmagqbacmsbavutpdtdgyad ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405775.4432225-2290-250321258811747/AnsiballZ_stat.py'
Oct 02 11:49:35 compute-0 sudo[147449]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:35 compute-0 podman[147406]: 2025-10-02 11:49:35.739866505 +0000 UTC m=+0.044304930 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_id=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, container_name=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', 
'/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0)
Oct 02 11:49:35 compute-0 python3.9[147453]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/virtproxyd-ro.socket.d/override.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:49:35 compute-0 sudo[147449]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:36 compute-0 sudo[147574]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-iqnsldfkxeociivqgyqobsvxrikltfel ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405775.4432225-2290-250321258811747/AnsiballZ_copy.py'
Oct 02 11:49:36 compute-0 sudo[147574]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:36 compute-0 python3.9[147576]: ansible-ansible.legacy.copy Invoked with dest=/etc/systemd/system/virtproxyd-ro.socket.d/override.conf group=root mode=0644 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405775.4432225-2290-250321258811747/.source.conf follow=False _original_basename=libvirt-socket.unit.j2 checksum=0bad41f409b4ee7e780a2a59dc18f5c84ed99826 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:36 compute-0 sudo[147574]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:36 compute-0 sudo[147726]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-afetmjqhrjqgccnbdxfxebllambutwku ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405776.599403-2290-155494014173743/AnsiballZ_stat.py'
Oct 02 11:49:36 compute-0 sudo[147726]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:37 compute-0 python3.9[147728]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/virtproxyd-admin.socket.d/override.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:49:37 compute-0 sudo[147726]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:37 compute-0 sudo[147849]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nuomiszeyzzlzfeaoquughpcmatelncx ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405776.599403-2290-155494014173743/AnsiballZ_copy.py'
Oct 02 11:49:37 compute-0 sudo[147849]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:37 compute-0 python3.9[147851]: ansible-ansible.legacy.copy Invoked with dest=/etc/systemd/system/virtproxyd-admin.socket.d/override.conf group=root mode=0644 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405776.599403-2290-155494014173743/.source.conf follow=False _original_basename=libvirt-socket.unit.j2 checksum=0bad41f409b4ee7e780a2a59dc18f5c84ed99826 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:37 compute-0 sudo[147849]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:37 compute-0 sudo[148001]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mlxlixvwzrbdyqcrxwrnyfzptgqjrfnf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405777.632149-2290-247784415913576/AnsiballZ_stat.py'
Oct 02 11:49:37 compute-0 sudo[148001]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:38 compute-0 python3.9[148003]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/virtqemud.socket.d/override.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:49:38 compute-0 sudo[148001]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:38 compute-0 sudo[148124]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xsufymobhrcxrgxiwbmqllmlpnigbrpy ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405777.632149-2290-247784415913576/AnsiballZ_copy.py'
Oct 02 11:49:38 compute-0 sudo[148124]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:38 compute-0 python3.9[148126]: ansible-ansible.legacy.copy Invoked with dest=/etc/systemd/system/virtqemud.socket.d/override.conf group=root mode=0644 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405777.632149-2290-247784415913576/.source.conf follow=False _original_basename=libvirt-socket.unit.j2 checksum=0bad41f409b4ee7e780a2a59dc18f5c84ed99826 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:38 compute-0 sudo[148124]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:39 compute-0 sudo[148276]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-anzrsxwdvowtozzteoowygjgmumgxckd ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405778.7308376-2290-3203353894314/AnsiballZ_stat.py'
Oct 02 11:49:39 compute-0 sudo[148276]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:39 compute-0 python3.9[148278]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/virtqemud-ro.socket.d/override.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:49:39 compute-0 sudo[148276]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:39 compute-0 sudo[148399]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jzqoqqnryyxbnezgsmkbmmbvvayziuob ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405778.7308376-2290-3203353894314/AnsiballZ_copy.py'
Oct 02 11:49:39 compute-0 sudo[148399]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:39 compute-0 python3.9[148401]: ansible-ansible.legacy.copy Invoked with dest=/etc/systemd/system/virtqemud-ro.socket.d/override.conf group=root mode=0644 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405778.7308376-2290-3203353894314/.source.conf follow=False _original_basename=libvirt-socket.unit.j2 checksum=0bad41f409b4ee7e780a2a59dc18f5c84ed99826 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:39 compute-0 sudo[148399]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:40 compute-0 sudo[148551]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qvpbhgmktlctuoeywmpoykikzqcvuvxj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405779.916292-2290-133577446590359/AnsiballZ_stat.py'
Oct 02 11:49:40 compute-0 sudo[148551]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:40 compute-0 python3.9[148553]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/virtqemud-admin.socket.d/override.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:49:40 compute-0 sudo[148551]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:40 compute-0 sudo[148674]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mbjwqmpswpobvryictfdbafhlujpvewu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405779.916292-2290-133577446590359/AnsiballZ_copy.py'
Oct 02 11:49:40 compute-0 sudo[148674]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:40 compute-0 python3.9[148676]: ansible-ansible.legacy.copy Invoked with dest=/etc/systemd/system/virtqemud-admin.socket.d/override.conf group=root mode=0644 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405779.916292-2290-133577446590359/.source.conf follow=False _original_basename=libvirt-socket.unit.j2 checksum=0bad41f409b4ee7e780a2a59dc18f5c84ed99826 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:40 compute-0 sudo[148674]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:41 compute-0 sudo[148826]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dvqkgbdmrrjhzobzszbdaysritjbdwfj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405781.048207-2290-10451018363629/AnsiballZ_stat.py'
Oct 02 11:49:41 compute-0 sudo[148826]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:41 compute-0 python3.9[148828]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/virtsecretd.socket.d/override.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:49:41 compute-0 sudo[148826]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:41 compute-0 sudo[148949]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fayleiqnayapkcbcsnheilfemkrabwuq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405781.048207-2290-10451018363629/AnsiballZ_copy.py'
Oct 02 11:49:41 compute-0 sudo[148949]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:41 compute-0 python3.9[148951]: ansible-ansible.legacy.copy Invoked with dest=/etc/systemd/system/virtsecretd.socket.d/override.conf group=root mode=0644 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405781.048207-2290-10451018363629/.source.conf follow=False _original_basename=libvirt-socket.unit.j2 checksum=0bad41f409b4ee7e780a2a59dc18f5c84ed99826 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:42 compute-0 sudo[148949]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:42 compute-0 sudo[149101]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qqouifvmjpehspiqyxktdkhhjjsolebq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405782.1306736-2290-242100583502263/AnsiballZ_stat.py'
Oct 02 11:49:42 compute-0 sudo[149101]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:42 compute-0 python3.9[149103]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/virtsecretd-ro.socket.d/override.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:49:42 compute-0 sudo[149101]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:42 compute-0 sudo[149224]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-slltijzjxycsiumnkdthbvpjsmizbpwu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405782.1306736-2290-242100583502263/AnsiballZ_copy.py'
Oct 02 11:49:42 compute-0 sudo[149224]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:43 compute-0 python3.9[149226]: ansible-ansible.legacy.copy Invoked with dest=/etc/systemd/system/virtsecretd-ro.socket.d/override.conf group=root mode=0644 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405782.1306736-2290-242100583502263/.source.conf follow=False _original_basename=libvirt-socket.unit.j2 checksum=0bad41f409b4ee7e780a2a59dc18f5c84ed99826 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:43 compute-0 sudo[149224]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:43 compute-0 sudo[149376]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xcfsomxbbxgizyktssjbjdcokgfrefyg ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405783.2511587-2290-136607544449179/AnsiballZ_stat.py'
Oct 02 11:49:43 compute-0 sudo[149376]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:43 compute-0 python3.9[149378]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/virtsecretd-admin.socket.d/override.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:49:43 compute-0 sudo[149376]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:44 compute-0 sudo[149499]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-niolpkpluxrumeaajrrpivpewmxeopoe ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405783.2511587-2290-136607544449179/AnsiballZ_copy.py'
Oct 02 11:49:44 compute-0 sudo[149499]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:44 compute-0 python3.9[149501]: ansible-ansible.legacy.copy Invoked with dest=/etc/systemd/system/virtsecretd-admin.socket.d/override.conf group=root mode=0644 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405783.2511587-2290-136607544449179/.source.conf follow=False _original_basename=libvirt-socket.unit.j2 checksum=0bad41f409b4ee7e780a2a59dc18f5c84ed99826 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:44 compute-0 sudo[149499]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:45 compute-0 python3.9[149651]: ansible-ansible.legacy.command Invoked with _raw_params=set -o pipefail
                                             ls -lRZ /run/libvirt | grep -E ':container_\S+_t'
                                              _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:49:46 compute-0 sudo[149804]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gisvbwmkpkpssgldsyetvdkttzydfeli ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405785.8854644-2908-83868285558289/AnsiballZ_seboolean.py'
Oct 02 11:49:46 compute-0 sudo[149804]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:46 compute-0 python3.9[149806]: ansible-ansible.posix.seboolean Invoked with name=os_enable_vtpm persistent=True state=True ignore_selinux_state=False
Oct 02 11:49:47 compute-0 sudo[149804]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:48 compute-0 sudo[149960]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dixwnxcspnzeasqhrnjhdulpsxfkpuvq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405788.07235-2932-10776453864885/AnsiballZ_copy.py'
Oct 02 11:49:48 compute-0 dbus-broker-launch[818]: avc:  op=load_policy lsm=selinux seqno=15 res=1
Oct 02 11:49:48 compute-0 sudo[149960]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:48 compute-0 python3.9[149962]: ansible-ansible.legacy.copy Invoked with dest=/etc/pki/libvirt/servercert.pem group=root mode=0644 owner=root remote_src=True src=/var/lib/openstack/certs/libvirt/default/tls.crt backup=False force=True follow=False unsafe_writes=False _original_basename=None content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None checksum=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:48 compute-0 sudo[149960]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:48 compute-0 sudo[150112]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fyoxrydtpasijfoczumdvobttlgdtrqm ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405788.6499467-2932-191286792204352/AnsiballZ_copy.py'
Oct 02 11:49:48 compute-0 sudo[150112]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:49 compute-0 python3.9[150114]: ansible-ansible.legacy.copy Invoked with dest=/etc/pki/libvirt/private/serverkey.pem group=root mode=0600 owner=root remote_src=True src=/var/lib/openstack/certs/libvirt/default/tls.key backup=False force=True follow=False unsafe_writes=False _original_basename=None content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None checksum=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:49 compute-0 sudo[150112]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:49 compute-0 sudo[150264]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wbpjkoxnyudwagwfugdkoqknqdiuqkus ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405789.2852912-2932-165912820475526/AnsiballZ_copy.py'
Oct 02 11:49:49 compute-0 sudo[150264]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:49 compute-0 python3.9[150266]: ansible-ansible.legacy.copy Invoked with dest=/etc/pki/libvirt/clientcert.pem group=root mode=0644 owner=root remote_src=True src=/var/lib/openstack/certs/libvirt/default/tls.crt backup=False force=True follow=False unsafe_writes=False _original_basename=None content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None checksum=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:49 compute-0 sudo[150264]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:50 compute-0 sudo[150416]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lokjytqubfgpuisutceuxytmstmbbvmh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405789.89678-2932-236068468761194/AnsiballZ_copy.py'
Oct 02 11:49:50 compute-0 sudo[150416]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:50 compute-0 python3.9[150418]: ansible-ansible.legacy.copy Invoked with dest=/etc/pki/libvirt/private/clientkey.pem group=root mode=0644 owner=root remote_src=True src=/var/lib/openstack/certs/libvirt/default/tls.key backup=False force=True follow=False unsafe_writes=False _original_basename=None content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None checksum=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:50 compute-0 sudo[150416]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:50 compute-0 sudo[150568]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lnyugickqwsuqcnmrcxbcquelrcrjhvo ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405790.4927762-2932-160378809661505/AnsiballZ_copy.py'
Oct 02 11:49:50 compute-0 sudo[150568]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:50 compute-0 python3.9[150570]: ansible-ansible.legacy.copy Invoked with dest=/etc/pki/CA/cacert.pem group=root mode=0644 owner=root remote_src=True src=/var/lib/openstack/certs/libvirt/default/ca.crt backup=False force=True follow=False unsafe_writes=False _original_basename=None content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None checksum=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:50 compute-0 sudo[150568]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:52 compute-0 sudo[150720]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-txbkbffigkfhtneitkgoddszsrultuwk ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405791.9022937-3040-689542177739/AnsiballZ_copy.py'
Oct 02 11:49:52 compute-0 sudo[150720]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:52 compute-0 python3.9[150722]: ansible-ansible.legacy.copy Invoked with dest=/etc/pki/qemu/server-cert.pem group=qemu mode=0640 owner=root remote_src=True src=/var/lib/openstack/certs/libvirt/default/tls.crt backup=False force=True follow=False unsafe_writes=False _original_basename=None content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None checksum=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:52 compute-0 sudo[150720]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:52 compute-0 sudo[150872]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rkpczampmqwgmtrflmimpagtwmffhwlw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405792.5122726-3040-124360516126711/AnsiballZ_copy.py'
Oct 02 11:49:52 compute-0 sudo[150872]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:52 compute-0 python3.9[150874]: ansible-ansible.legacy.copy Invoked with dest=/etc/pki/qemu/server-key.pem group=qemu mode=0640 owner=root remote_src=True src=/var/lib/openstack/certs/libvirt/default/tls.key backup=False force=True follow=False unsafe_writes=False _original_basename=None content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None checksum=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:52 compute-0 sudo[150872]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:53 compute-0 sudo[151024]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wxkjbakbbhccffpnhpoxyzizmsoxvuju ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405793.0798993-3040-94026440902728/AnsiballZ_copy.py'
Oct 02 11:49:53 compute-0 sudo[151024]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:53 compute-0 python3.9[151026]: ansible-ansible.legacy.copy Invoked with dest=/etc/pki/qemu/client-cert.pem group=qemu mode=0640 owner=root remote_src=True src=/var/lib/openstack/certs/libvirt/default/tls.crt backup=False force=True follow=False unsafe_writes=False _original_basename=None content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None checksum=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:53 compute-0 sudo[151024]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:53 compute-0 sudo[151176]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vxyiqiconmupdasxrtxesuysjqjzxvvc ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405793.7078283-3040-65001538784826/AnsiballZ_copy.py'
Oct 02 11:49:53 compute-0 sudo[151176]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:54 compute-0 python3.9[151178]: ansible-ansible.legacy.copy Invoked with dest=/etc/pki/qemu/client-key.pem group=qemu mode=0640 owner=root remote_src=True src=/var/lib/openstack/certs/libvirt/default/tls.key backup=False force=True follow=False unsafe_writes=False _original_basename=None content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None checksum=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:54 compute-0 sudo[151176]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:54 compute-0 sudo[151328]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-epsoucecgqpyzufiamgjmnzuupxnfuwh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405794.326577-3040-171177603776604/AnsiballZ_copy.py'
Oct 02 11:49:54 compute-0 sudo[151328]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:54 compute-0 python3.9[151330]: ansible-ansible.legacy.copy Invoked with dest=/etc/pki/qemu/ca-cert.pem group=qemu mode=0640 owner=root remote_src=True src=/var/lib/openstack/certs/libvirt/default/ca.crt backup=False force=True follow=False unsafe_writes=False _original_basename=None content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None checksum=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:49:54 compute-0 sudo[151328]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:55 compute-0 sudo[151480]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hraykdtjvnhcqrghmhmytbgnwmrquzfc ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405795.0697834-3148-184723232939071/AnsiballZ_systemd.py'
Oct 02 11:49:55 compute-0 sudo[151480]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:55 compute-0 python3.9[151482]: ansible-ansible.builtin.systemd Invoked with daemon_reload=True name=virtlogd.service state=restarted daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Oct 02 11:49:55 compute-0 systemd[1]: Reloading.
Oct 02 11:49:55 compute-0 systemd-rc-local-generator[151509]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:49:55 compute-0 systemd-sysv-generator[151512]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:49:55 compute-0 systemd[1]: Starting libvirt logging daemon socket...
Oct 02 11:49:55 compute-0 systemd[1]: Listening on libvirt logging daemon socket.
Oct 02 11:49:55 compute-0 systemd[1]: Starting libvirt logging daemon admin socket...
Oct 02 11:49:55 compute-0 systemd[1]: Listening on libvirt logging daemon admin socket.
Oct 02 11:49:55 compute-0 systemd[1]: Starting libvirt logging daemon...
Oct 02 11:49:55 compute-0 systemd[1]: Started libvirt logging daemon.
Oct 02 11:49:56 compute-0 sudo[151480]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:56 compute-0 sudo[151674]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sxsdglukrfovqyoxxsmoatplfpsawccr ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405796.148418-3148-44422907024753/AnsiballZ_systemd.py'
Oct 02 11:49:56 compute-0 sudo[151674]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:56 compute-0 python3.9[151676]: ansible-ansible.builtin.systemd Invoked with daemon_reload=True name=virtnodedevd.service state=restarted daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Oct 02 11:49:56 compute-0 systemd[1]: Reloading.
Oct 02 11:49:56 compute-0 systemd-rc-local-generator[151705]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:49:56 compute-0 systemd-sysv-generator[151710]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:49:56 compute-0 systemd[1]: Starting libvirt nodedev daemon socket...
Oct 02 11:49:56 compute-0 systemd[1]: Listening on libvirt nodedev daemon socket.
Oct 02 11:49:56 compute-0 systemd[1]: Starting libvirt nodedev daemon admin socket...
Oct 02 11:49:56 compute-0 systemd[1]: Starting libvirt nodedev daemon read-only socket...
Oct 02 11:49:56 compute-0 systemd[1]: Listening on libvirt nodedev daemon admin socket.
Oct 02 11:49:56 compute-0 systemd[1]: Listening on libvirt nodedev daemon read-only socket.
Oct 02 11:49:56 compute-0 systemd[1]: Starting libvirt nodedev daemon...
Oct 02 11:49:56 compute-0 systemd[1]: Started libvirt nodedev daemon.
Oct 02 11:49:57 compute-0 sudo[151674]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:57 compute-0 systemd[1]: Starting SETroubleshoot daemon for processing new SELinux denial logs...
Oct 02 11:49:57 compute-0 systemd[1]: Started SETroubleshoot daemon for processing new SELinux denial logs.
Oct 02 11:49:57 compute-0 sudo[151890]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cnmmutewxakutgmgdbqjhyavvdlyfkny ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405797.1436913-3148-250315373384433/AnsiballZ_systemd.py'
Oct 02 11:49:57 compute-0 sudo[151890]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:57 compute-0 systemd[1]: Created slice Slice /system/dbus-:1.1-org.fedoraproject.SetroubleshootPrivileged.
Oct 02 11:49:57 compute-0 systemd[1]: Started dbus-:1.1-org.fedoraproject.SetroubleshootPrivileged@0.service.
Oct 02 11:49:57 compute-0 python3.9[151892]: ansible-ansible.builtin.systemd Invoked with daemon_reload=True name=virtproxyd.service state=restarted daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Oct 02 11:49:57 compute-0 systemd[1]: Reloading.
Oct 02 11:49:57 compute-0 systemd-sysv-generator[151933]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:49:57 compute-0 systemd-rc-local-generator[151929]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:49:57 compute-0 systemd[1]: Starting libvirt proxy daemon admin socket...
Oct 02 11:49:58 compute-0 systemd[1]: Starting libvirt proxy daemon read-only socket...
Oct 02 11:49:58 compute-0 systemd[1]: Listening on libvirt proxy daemon admin socket.
Oct 02 11:49:58 compute-0 systemd[1]: Listening on libvirt proxy daemon read-only socket.
Oct 02 11:49:58 compute-0 systemd[1]: Starting libvirt proxy daemon...
Oct 02 11:49:58 compute-0 systemd[1]: Started libvirt proxy daemon.
Oct 02 11:49:58 compute-0 sudo[151890]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:58 compute-0 setroubleshoot[151738]: SELinux is preventing /usr/sbin/virtlogd from using the dac_read_search capability. For complete SELinux messages run: sealert -l b659541d-2c7c-4ef7-b606-444e8b60bce7
Oct 02 11:49:58 compute-0 setroubleshoot[151738]: SELinux is preventing /usr/sbin/virtlogd from using the dac_read_search capability.
                                                  
                                                  *****  Plugin dac_override (91.4 confidence) suggests   **********************
                                                  
                                                  If you want to help identify if domain needs this access or you have a file with the wrong permissions on your system
                                                  Then turn on full auditing to get path information about the offending file and generate the error again.
                                                  Do
                                                  
                                                  Turn on full auditing
                                                  # auditctl -w /etc/shadow -p w
                                                  Try to recreate AVC. Then execute
                                                  # ausearch -m avc -ts recent
                                                  If you see PATH record check ownership/permissions on file, and fix it,
                                                  otherwise report as a bugzilla.
                                                  
                                                  *****  Plugin catchall (9.59 confidence) suggests   **************************
                                                  
                                                  If you believe that virtlogd should have the dac_read_search capability by default.
                                                  Then you should report this as a bug.
                                                  You can generate a local policy module to allow this access.
                                                  Do
                                                  allow this access for now by executing:
                                                  # ausearch -c 'virtlogd' --raw | audit2allow -M my-virtlogd
                                                  # semodule -X 300 -i my-virtlogd.pp
                                                  
Oct 02 11:49:58 compute-0 setroubleshoot[151738]: SELinux is preventing /usr/sbin/virtlogd from using the dac_read_search capability. For complete SELinux messages run: sealert -l b659541d-2c7c-4ef7-b606-444e8b60bce7
Oct 02 11:49:58 compute-0 setroubleshoot[151738]: SELinux is preventing /usr/sbin/virtlogd from using the dac_read_search capability.
                                                  
                                                  *****  Plugin dac_override (91.4 confidence) suggests   **********************
                                                  
                                                  If you want to help identify if domain needs this access or you have a file with the wrong permissions on your system
                                                  Then turn on full auditing to get path information about the offending file and generate the error again.
                                                  Do
                                                  
                                                  Turn on full auditing
                                                  # auditctl -w /etc/shadow -p w
                                                  Try to recreate AVC. Then execute
                                                  # ausearch -m avc -ts recent
                                                  If you see PATH record check ownership/permissions on file, and fix it,
                                                  otherwise report as a bugzilla.
                                                  
                                                  *****  Plugin catchall (9.59 confidence) suggests   **************************
                                                  
                                                  If you believe that virtlogd should have the dac_read_search capability by default.
                                                  Then you should report this as a bug.
                                                  You can generate a local policy module to allow this access.
                                                  Do
                                                  allow this access for now by executing:
                                                  # ausearch -c 'virtlogd' --raw | audit2allow -M my-virtlogd
                                                  # semodule -X 300 -i my-virtlogd.pp
                                                  
Oct 02 11:49:58 compute-0 sudo[152109]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ddkynstiiolzvhwctkepwvpbhwseaflr ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405798.2051797-3148-103466741187941/AnsiballZ_systemd.py'
Oct 02 11:49:58 compute-0 sudo[152109]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:58 compute-0 python3.9[152111]: ansible-ansible.builtin.systemd Invoked with daemon_reload=True name=virtqemud.service state=restarted daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Oct 02 11:49:58 compute-0 systemd[1]: Reloading.
Oct 02 11:49:58 compute-0 systemd-rc-local-generator[152138]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:49:58 compute-0 systemd-sysv-generator[152142]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:49:59 compute-0 systemd[1]: Listening on libvirt locking daemon socket.
Oct 02 11:49:59 compute-0 systemd[1]: Starting libvirt QEMU daemon socket...
Oct 02 11:49:59 compute-0 systemd[1]: Virtual Machine and Container Storage (Compatibility) was skipped because of an unmet condition check (ConditionPathExists=/var/lib/machines.raw).
Oct 02 11:49:59 compute-0 systemd[1]: Starting Virtual Machine and Container Registration Service...
Oct 02 11:49:59 compute-0 systemd[1]: Listening on libvirt QEMU daemon socket.
Oct 02 11:49:59 compute-0 systemd[1]: Starting libvirt QEMU daemon admin socket...
Oct 02 11:49:59 compute-0 systemd[1]: Starting libvirt QEMU daemon read-only socket...
Oct 02 11:49:59 compute-0 systemd[1]: Listening on libvirt QEMU daemon admin socket.
Oct 02 11:49:59 compute-0 systemd[1]: Listening on libvirt QEMU daemon read-only socket.
Oct 02 11:49:59 compute-0 systemd[1]: Started Virtual Machine and Container Registration Service.
Oct 02 11:49:59 compute-0 systemd[1]: Starting libvirt QEMU daemon...
Oct 02 11:49:59 compute-0 systemd[1]: Started libvirt QEMU daemon.
Oct 02 11:49:59 compute-0 sudo[152109]: pam_unix(sudo:session): session closed for user root
Oct 02 11:49:59 compute-0 sudo[152321]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dshbvremklkovjngzeolcgucjnmjhtgp ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405799.2933674-3148-168770914478612/AnsiballZ_systemd.py'
Oct 02 11:49:59 compute-0 sudo[152321]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:49:59 compute-0 python3.9[152323]: ansible-ansible.builtin.systemd Invoked with daemon_reload=True name=virtsecretd.service state=restarted daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Oct 02 11:49:59 compute-0 systemd[1]: Reloading.
Oct 02 11:50:00 compute-0 systemd-rc-local-generator[152348]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:50:00 compute-0 systemd-sysv-generator[152351]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:50:00 compute-0 systemd[1]: Starting libvirt secret daemon socket...
Oct 02 11:50:00 compute-0 systemd[1]: Listening on libvirt secret daemon socket.
Oct 02 11:50:00 compute-0 systemd[1]: Starting libvirt secret daemon admin socket...
Oct 02 11:50:00 compute-0 systemd[1]: Starting libvirt secret daemon read-only socket...
Oct 02 11:50:00 compute-0 systemd[1]: Listening on libvirt secret daemon admin socket.
Oct 02 11:50:00 compute-0 systemd[1]: Listening on libvirt secret daemon read-only socket.
Oct 02 11:50:00 compute-0 systemd[1]: Starting libvirt secret daemon...
Oct 02 11:50:00 compute-0 systemd[1]: Started libvirt secret daemon.
Oct 02 11:50:00 compute-0 sudo[152321]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:01 compute-0 sudo[152531]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jtwgererpiejtlqbuwhompqpoitaigdf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405801.0973136-3259-100086989377320/AnsiballZ_file.py'
Oct 02 11:50:01 compute-0 sudo[152531]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:01 compute-0 python3.9[152533]: ansible-ansible.builtin.file Invoked with mode=0755 path=/var/lib/openstack/config/ceph state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:50:01 compute-0 sudo[152531]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:02 compute-0 sudo[152694]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wtlcivphkpjxactvejackntfucgryubs ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405801.8086214-3283-193279260146667/AnsiballZ_find.py'
Oct 02 11:50:02 compute-0 sudo[152694]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:02 compute-0 podman[152657]: 2025-10-02 11:50:02.115499611 +0000 UTC m=+0.086637937 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_id=ovn_controller, container_name=ovn_controller, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001)
Oct 02 11:50:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:50:02.190 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 11:50:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:50:02.190 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 11:50:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:50:02.190 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 11:50:02 compute-0 python3.9[152703]: ansible-ansible.builtin.find Invoked with paths=['/var/lib/openstack/config/ceph'] patterns=['*.conf'] read_whole_file=False file_type=file age_stamp=mtime recurse=False hidden=False follow=False get_checksum=False checksum_algorithm=sha1 use_regex=False exact_mode=True excludes=None contains=None age=None size=None depth=None mode=None encoding=None limit=None
Oct 02 11:50:02 compute-0 sudo[152694]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:03 compute-0 sudo[152862]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bgmxfrubcztqmtfllnrwuqajuvavctbi ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405802.881529-3325-257213427378824/AnsiballZ_stat.py'
Oct 02 11:50:03 compute-0 sudo[152862]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:03 compute-0 python3.9[152864]: ansible-ansible.legacy.stat Invoked with path=/var/lib/edpm-config/firewall/libvirt.yaml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:50:03 compute-0 sudo[152862]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:03 compute-0 sudo[152985]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dkntnpcuzqnczivrwifkczbrarzzwyhx ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405802.881529-3325-257213427378824/AnsiballZ_copy.py'
Oct 02 11:50:03 compute-0 sudo[152985]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:03 compute-0 python3.9[152987]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/edpm-config/firewall/libvirt.yaml mode=0640 src=/home/zuul/.ansible/tmp/ansible-tmp-1759405802.881529-3325-257213427378824/.source.yaml follow=False _original_basename=firewall.yaml.j2 checksum=5ca83b1310a74c5e48c4c3d4640e1cb8fdac1061 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:50:03 compute-0 sudo[152985]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:04 compute-0 sudo[153137]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-txihkqtpfhyezctceonprutonedxlcvc ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405804.2778087-3373-202055892163489/AnsiballZ_file.py'
Oct 02 11:50:04 compute-0 sudo[153137]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:04 compute-0 python3.9[153139]: ansible-ansible.builtin.file Invoked with group=root mode=0750 owner=root path=/var/lib/edpm-config/firewall state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:50:04 compute-0 sudo[153137]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:05 compute-0 sudo[153289]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bbhmzlxkavcwjoscfecfvbxzhsakxqco ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405805.0698502-3397-3374815850491/AnsiballZ_stat.py'
Oct 02 11:50:05 compute-0 sudo[153289]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:05 compute-0 python3.9[153291]: ansible-ansible.legacy.stat Invoked with path=/var/lib/edpm-config/firewall/edpm-nftables-base.yaml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:50:05 compute-0 sudo[153289]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:05 compute-0 sudo[153377]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nskmbjhblkdnpkomflascnpuatsdncgz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405805.0698502-3397-3374815850491/AnsiballZ_file.py'
Oct 02 11:50:05 compute-0 sudo[153377]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:05 compute-0 podman[153341]: 2025-10-02 11:50:05.846976136 +0000 UTC m=+0.065742201 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, org.label-schema.build-date=20251001, tcib_managed=true, container_name=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, 
org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 11:50:06 compute-0 python3.9[153379]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/var/lib/edpm-config/firewall/edpm-nftables-base.yaml _original_basename=base-rules.yaml.j2 recurse=False state=file path=/var/lib/edpm-config/firewall/edpm-nftables-base.yaml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:50:06 compute-0 sudo[153377]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:06 compute-0 sudo[153535]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qnrnfhgzmnznyektoczyuerkaupatdjx ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405806.2897594-3433-240201384509961/AnsiballZ_stat.py'
Oct 02 11:50:06 compute-0 sudo[153535]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:06 compute-0 python3.9[153537]: ansible-ansible.legacy.stat Invoked with path=/var/lib/edpm-config/firewall/edpm-nftables-user-rules.yaml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:50:06 compute-0 sudo[153535]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:07 compute-0 sudo[153613]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xylffybixtlcwjfkwpeeafrrmbqxkjmj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405806.2897594-3433-240201384509961/AnsiballZ_file.py'
Oct 02 11:50:07 compute-0 sudo[153613]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:07 compute-0 python3.9[153615]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/var/lib/edpm-config/firewall/edpm-nftables-user-rules.yaml _original_basename=.51uaj1el recurse=False state=file path=/var/lib/edpm-config/firewall/edpm-nftables-user-rules.yaml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:50:07 compute-0 sudo[153613]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:07 compute-0 sudo[153765]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rxeucnoeowrgdhisrifqxqesaujfshwq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405807.4971921-3469-179097161625189/AnsiballZ_stat.py'
Oct 02 11:50:07 compute-0 sudo[153765]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:08 compute-0 python3.9[153767]: ansible-ansible.legacy.stat Invoked with path=/etc/nftables/iptables.nft follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:50:08 compute-0 sudo[153765]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:08 compute-0 sudo[153843]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zoejxzajboxkksgitoqlqfphpotmepxr ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405807.4971921-3469-179097161625189/AnsiballZ_file.py'
Oct 02 11:50:08 compute-0 sudo[153843]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:08 compute-0 systemd[1]: dbus-:1.1-org.fedoraproject.SetroubleshootPrivileged@0.service: Deactivated successfully.
Oct 02 11:50:08 compute-0 python3.9[153845]: ansible-ansible.legacy.file Invoked with group=root mode=0600 owner=root dest=/etc/nftables/iptables.nft _original_basename=iptables.nft recurse=False state=file path=/etc/nftables/iptables.nft force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:50:08 compute-0 sudo[153843]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:08 compute-0 systemd[1]: setroubleshootd.service: Deactivated successfully.
Oct 02 11:50:08 compute-0 sudo[153996]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zthjmtfcjjtycznxgojnyxuwrauakczh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405808.7257946-3508-141893552889210/AnsiballZ_command.py'
Oct 02 11:50:08 compute-0 sudo[153996]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:09 compute-0 python3.9[153998]: ansible-ansible.legacy.command Invoked with _raw_params=nft -j list ruleset _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:50:09 compute-0 sudo[153996]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:09 compute-0 sudo[154149]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nexaoonxlbpxxxeykvncswrshefghqwr ; /usr/bin/python3 /home/zuul/.ansible/tmp/ansible-tmp-1759405809.3969185-3532-144732533186335/AnsiballZ_edpm_nftables_from_files.py'
Oct 02 11:50:09 compute-0 sudo[154149]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:10 compute-0 python3[154151]: ansible-edpm_nftables_from_files Invoked with src=/var/lib/edpm-config/firewall
Oct 02 11:50:10 compute-0 sudo[154149]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:10 compute-0 sudo[154302]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-caltbpoxhwgzdpapxxejmjukaszmyags ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405810.251563-3556-197248956116919/AnsiballZ_stat.py'
Oct 02 11:50:10 compute-0 sudo[154302]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:10 compute-0 python3.9[154304]: ansible-ansible.legacy.stat Invoked with path=/etc/nftables/edpm-jumps.nft follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:50:10 compute-0 sudo[154302]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:11 compute-0 sudo[154380]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qgrbzfymaaazqoltarmwnojyadkxermp ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405810.251563-3556-197248956116919/AnsiballZ_file.py'
Oct 02 11:50:11 compute-0 sudo[154380]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:11 compute-0 python3.9[154382]: ansible-ansible.legacy.file Invoked with group=root mode=0600 owner=root dest=/etc/nftables/edpm-jumps.nft _original_basename=jump-chain.j2 recurse=False state=file path=/etc/nftables/edpm-jumps.nft force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:50:11 compute-0 sudo[154380]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:11 compute-0 sudo[154532]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fvzaeutmeyxdifjelkwsjnguxsmcnulr ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405811.467922-3592-277349836093395/AnsiballZ_stat.py'
Oct 02 11:50:11 compute-0 sudo[154532]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:12 compute-0 python3.9[154534]: ansible-ansible.legacy.stat Invoked with path=/etc/nftables/edpm-update-jumps.nft follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:50:12 compute-0 sudo[154532]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:12 compute-0 sudo[154610]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qscherrmcsgksumyidxozcyklpjimnmk ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405811.467922-3592-277349836093395/AnsiballZ_file.py'
Oct 02 11:50:12 compute-0 sudo[154610]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:12 compute-0 python3.9[154612]: ansible-ansible.legacy.file Invoked with group=root mode=0600 owner=root dest=/etc/nftables/edpm-update-jumps.nft _original_basename=jump-chain.j2 recurse=False state=file path=/etc/nftables/edpm-update-jumps.nft force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:50:12 compute-0 sudo[154610]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:12 compute-0 sudo[154762]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-unkgdbjtezznhzuqbpkxrywqpxygtmdl ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405812.7137525-3628-171230602918187/AnsiballZ_stat.py'
Oct 02 11:50:13 compute-0 sudo[154762]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:13 compute-0 python3.9[154764]: ansible-ansible.legacy.stat Invoked with path=/etc/nftables/edpm-flushes.nft follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:50:13 compute-0 sudo[154762]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:13 compute-0 sudo[154840]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bydzibbvmaioqjsqyikeekrzzfwxlqod ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405812.7137525-3628-171230602918187/AnsiballZ_file.py'
Oct 02 11:50:13 compute-0 sudo[154840]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:13 compute-0 python3.9[154842]: ansible-ansible.legacy.file Invoked with group=root mode=0600 owner=root dest=/etc/nftables/edpm-flushes.nft _original_basename=flush-chain.j2 recurse=False state=file path=/etc/nftables/edpm-flushes.nft force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:50:13 compute-0 sudo[154840]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:14 compute-0 sudo[154992]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-oeftosmsdtrrtkhgrerawwehnrmhiomu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405813.876647-3664-49503507138350/AnsiballZ_stat.py'
Oct 02 11:50:14 compute-0 sudo[154992]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:14 compute-0 python3.9[154994]: ansible-ansible.legacy.stat Invoked with path=/etc/nftables/edpm-chains.nft follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:50:14 compute-0 sudo[154992]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:14 compute-0 sudo[155070]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ptfdmoaamlhfiozhoyifwslxgyxobltt ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405813.876647-3664-49503507138350/AnsiballZ_file.py'
Oct 02 11:50:14 compute-0 sudo[155070]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:14 compute-0 python3.9[155072]: ansible-ansible.legacy.file Invoked with group=root mode=0600 owner=root dest=/etc/nftables/edpm-chains.nft _original_basename=chains.j2 recurse=False state=file path=/etc/nftables/edpm-chains.nft force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:50:14 compute-0 sudo[155070]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:15 compute-0 sudo[155222]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nnfbgmqbphpuvwpdzfcilmwvfdmatnig ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405815.166167-3700-99401104910189/AnsiballZ_stat.py'
Oct 02 11:50:15 compute-0 sudo[155222]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:15 compute-0 python3.9[155224]: ansible-ansible.legacy.stat Invoked with path=/etc/nftables/edpm-rules.nft follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:50:15 compute-0 sudo[155222]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:15 compute-0 sudo[155347]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mfcfpalavvsschoqmuosltoplidmvnpx ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405815.166167-3700-99401104910189/AnsiballZ_copy.py'
Oct 02 11:50:15 compute-0 sudo[155347]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:16 compute-0 python3.9[155349]: ansible-ansible.legacy.copy Invoked with dest=/etc/nftables/edpm-rules.nft group=root mode=0600 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759405815.166167-3700-99401104910189/.source.nft follow=False _original_basename=ruleset.j2 checksum=8a12d4eb5149b6e500230381c1359a710881e9b0 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:50:16 compute-0 sudo[155347]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:16 compute-0 sudo[155499]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ordizpkgcspahfmbbyepvzhdyphgwklr ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405816.5783908-3745-44597502136172/AnsiballZ_file.py'
Oct 02 11:50:16 compute-0 sudo[155499]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:17 compute-0 python3.9[155501]: ansible-ansible.builtin.file Invoked with group=root mode=0600 owner=root path=/etc/nftables/edpm-rules.nft.changed state=touch recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:50:17 compute-0 sudo[155499]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:17 compute-0 sudo[155651]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sfxcxtulbmptdnrvoylnxoeznhsqnymm ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405817.3417244-3769-68609794216674/AnsiballZ_command.py'
Oct 02 11:50:17 compute-0 sudo[155651]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:17 compute-0 python3.9[155653]: ansible-ansible.legacy.command Invoked with _raw_params=set -o pipefail; cat /etc/nftables/edpm-chains.nft /etc/nftables/edpm-flushes.nft /etc/nftables/edpm-rules.nft /etc/nftables/edpm-update-jumps.nft /etc/nftables/edpm-jumps.nft | nft -c -f - _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:50:17 compute-0 sudo[155651]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:18 compute-0 sudo[155806]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fnsieiosgwiqybatryssjztqvjkuwuid ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405818.0032153-3793-90892781338989/AnsiballZ_blockinfile.py'
Oct 02 11:50:18 compute-0 sudo[155806]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:18 compute-0 python3.9[155808]: ansible-ansible.builtin.blockinfile Invoked with backup=False block=include "/etc/nftables/iptables.nft"
                                             include "/etc/nftables/edpm-chains.nft"
                                             include "/etc/nftables/edpm-rules.nft"
                                             include "/etc/nftables/edpm-jumps.nft"
                                              path=/etc/sysconfig/nftables.conf validate=nft -c -f %s state=present marker=# {mark} ANSIBLE MANAGED BLOCK create=False marker_begin=BEGIN marker_end=END append_newline=False prepend_newline=False unsafe_writes=False insertafter=None insertbefore=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:50:18 compute-0 sudo[155806]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:19 compute-0 sudo[155958]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mbggkkazohnzqglztsptuftzrcqzosbp ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405818.9154909-3820-6494035943799/AnsiballZ_command.py'
Oct 02 11:50:19 compute-0 sudo[155958]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:19 compute-0 python3.9[155960]: ansible-ansible.legacy.command Invoked with _raw_params=nft -f /etc/nftables/edpm-chains.nft _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:50:19 compute-0 sudo[155958]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:19 compute-0 sudo[156111]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gxaqcsbbsnrcpltsdjrcmdllafezhgul ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405819.6174324-3844-187279452828877/AnsiballZ_stat.py'
Oct 02 11:50:19 compute-0 sudo[156111]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:20 compute-0 python3.9[156113]: ansible-ansible.builtin.stat Invoked with path=/etc/nftables/edpm-rules.nft.changed follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:50:20 compute-0 sudo[156111]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:20 compute-0 sudo[156265]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-juczwsyxosrbytohcgzzkkqsxesdyzlg ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405820.3542693-3868-160177487968704/AnsiballZ_command.py'
Oct 02 11:50:20 compute-0 sudo[156265]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:20 compute-0 python3.9[156267]: ansible-ansible.legacy.command Invoked with _raw_params=set -o pipefail; cat /etc/nftables/edpm-flushes.nft /etc/nftables/edpm-rules.nft /etc/nftables/edpm-update-jumps.nft | nft -f - _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:50:20 compute-0 sudo[156265]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:21 compute-0 sudo[156420]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xlulxzknxytdlqjnhkgzqmrtqzduteyz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405821.134029-3892-265411882658232/AnsiballZ_file.py'
Oct 02 11:50:21 compute-0 sudo[156420]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:21 compute-0 python3.9[156422]: ansible-ansible.builtin.file Invoked with path=/etc/nftables/edpm-rules.nft.changed state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:50:21 compute-0 sudo[156420]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:22 compute-0 sudo[156572]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zgjzekhphkvoocjoujcdywclebyyanul ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405821.8527694-3916-269487792867458/AnsiballZ_stat.py'
Oct 02 11:50:22 compute-0 sudo[156572]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:22 compute-0 python3.9[156574]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/edpm_libvirt.target follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:50:22 compute-0 sudo[156572]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:22 compute-0 sudo[156695]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vmwqvrilbqmswzycnsgjiuvdcazzcwyn ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405821.8527694-3916-269487792867458/AnsiballZ_copy.py'
Oct 02 11:50:22 compute-0 sudo[156695]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:22 compute-0 python3.9[156697]: ansible-ansible.legacy.copy Invoked with dest=/etc/systemd/system/edpm_libvirt.target mode=0644 src=/home/zuul/.ansible/tmp/ansible-tmp-1759405821.8527694-3916-269487792867458/.source.target follow=False _original_basename=edpm_libvirt.target checksum=13035a1aa0f414c677b14be9a5a363b6623d393c backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:50:22 compute-0 sudo[156695]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:23 compute-0 sudo[156847]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-flwdvrqwujknscmkbyemzscfbodpvgqs ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405823.077062-3961-135465102749145/AnsiballZ_stat.py'
Oct 02 11:50:23 compute-0 sudo[156847]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:23 compute-0 python3.9[156849]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/edpm_libvirt_guests.service follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:50:23 compute-0 sudo[156847]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:23 compute-0 sudo[156970]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-imufivezszazcgkpokhfyhjvrikmlbvf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405823.077062-3961-135465102749145/AnsiballZ_copy.py'
Oct 02 11:50:23 compute-0 sudo[156970]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:24 compute-0 python3.9[156972]: ansible-ansible.legacy.copy Invoked with dest=/etc/systemd/system/edpm_libvirt_guests.service mode=0644 src=/home/zuul/.ansible/tmp/ansible-tmp-1759405823.077062-3961-135465102749145/.source.service follow=False _original_basename=edpm_libvirt_guests.service checksum=db83430a42fc2ccfd6ed8b56ebf04f3dff9cd0cf backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:50:24 compute-0 sudo[156970]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:24 compute-0 sudo[157122]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cknatsmnyfynfsjktbwaazkmxhjvhkec ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405824.3659146-4006-135909030046735/AnsiballZ_stat.py'
Oct 02 11:50:24 compute-0 sudo[157122]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:24 compute-0 python3.9[157124]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/virt-guest-shutdown.target follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:50:24 compute-0 sudo[157122]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:25 compute-0 sudo[157245]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-flvvbtuqrwebydhhgtrkjrmhpphtpoge ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405824.3659146-4006-135909030046735/AnsiballZ_copy.py'
Oct 02 11:50:25 compute-0 sudo[157245]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:25 compute-0 python3.9[157247]: ansible-ansible.legacy.copy Invoked with dest=/etc/systemd/system/virt-guest-shutdown.target mode=0644 src=/home/zuul/.ansible/tmp/ansible-tmp-1759405824.3659146-4006-135909030046735/.source.target follow=False _original_basename=virt-guest-shutdown.target checksum=49ca149619c596cbba877418629d2cf8f7b0f5cf backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:50:25 compute-0 sudo[157245]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:26 compute-0 sudo[157397]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-joxwljdirkihgitoophtolpnucjatuit ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405825.7600198-4051-34091130667145/AnsiballZ_systemd.py'
Oct 02 11:50:26 compute-0 sudo[157397]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:26 compute-0 python3.9[157399]: ansible-ansible.builtin.systemd Invoked with daemon_reload=True enabled=True name=edpm_libvirt.target state=restarted daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:50:26 compute-0 systemd[1]: Reloading.
Oct 02 11:50:26 compute-0 systemd-rc-local-generator[157422]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:50:26 compute-0 systemd-sysv-generator[157426]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:50:27 compute-0 systemd[1]: Reached target edpm_libvirt.target.
Oct 02 11:50:27 compute-0 sudo[157397]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:28 compute-0 sudo[157588]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ctdxlpzefyfzcwmekalnhtccoehigmsi ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405827.9033246-4075-103590207177501/AnsiballZ_systemd.py'
Oct 02 11:50:28 compute-0 sudo[157588]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:28 compute-0 python3.9[157590]: ansible-ansible.builtin.systemd Invoked with daemon_reload=True enabled=True name=edpm_libvirt_guests daemon_reexec=False scope=system no_block=False state=None force=None masked=None
Oct 02 11:50:28 compute-0 systemd[1]: Reloading.
Oct 02 11:50:28 compute-0 systemd-sysv-generator[157620]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:50:28 compute-0 systemd-rc-local-generator[157617]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:50:28 compute-0 systemd[1]: Reloading.
Oct 02 11:50:28 compute-0 systemd-rc-local-generator[157656]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:50:28 compute-0 systemd-sysv-generator[157659]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:50:29 compute-0 sudo[157588]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:29 compute-0 sshd-session[103412]: Connection closed by 192.168.122.30 port 53524
Oct 02 11:50:29 compute-0 sshd-session[103409]: pam_unix(sshd:session): session closed for user zuul
Oct 02 11:50:29 compute-0 systemd[1]: session-23.scope: Deactivated successfully.
Oct 02 11:50:29 compute-0 systemd[1]: session-23.scope: Consumed 3min 12.138s CPU time.
Oct 02 11:50:29 compute-0 systemd-logind[827]: Session 23 logged out. Waiting for processes to exit.
Oct 02 11:50:29 compute-0 systemd-logind[827]: Removed session 23.
Oct 02 11:50:33 compute-0 podman[157687]: 2025-10-02 11:50:33.217420704 +0000 UTC m=+0.118174465 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=ovn_controller, config_id=ovn_controller, maintainer=OpenStack Kubernetes Operator team)
Oct 02 11:50:35 compute-0 sshd-session[157712]: Accepted publickey for zuul from 192.168.122.30 port 42126 ssh2: ECDSA SHA256:tAEsFSop2MjuMQQ/UqKU2uCkxqWXGfsM5crdhd6tlI4
Oct 02 11:50:35 compute-0 systemd-logind[827]: New session 24 of user zuul.
Oct 02 11:50:35 compute-0 systemd[1]: Started Session 24 of User zuul.
Oct 02 11:50:35 compute-0 sshd-session[157712]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Oct 02 11:50:36 compute-0 podman[157815]: 2025-10-02 11:50:36.146247047 +0000 UTC m=+0.055084364 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, container_name=ovn_metadata_agent, org.label-schema.vendor=CentOS, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, org.label-schema.license=GPLv2, tcib_managed=true)
Oct 02 11:50:36 compute-0 python3.9[157884]: ansible-ansible.builtin.setup Invoked with gather_subset=['!all', '!min', 'local'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:50:37 compute-0 sudo[158038]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tgsicuihlmwayqpwhtsubqppwjvflfsh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405837.3590362-67-246312271675029/AnsiballZ_file.py'
Oct 02 11:50:37 compute-0 sudo[158038]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:37 compute-0 python3.9[158040]: ansible-ansible.builtin.file Invoked with mode=0755 path=/etc/iscsi setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:50:37 compute-0 sudo[158038]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:38 compute-0 sudo[158190]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-arhxyftzpobpkczrpqdwndpklxncnbob ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405838.1164536-67-208345018861072/AnsiballZ_file.py'
Oct 02 11:50:38 compute-0 sudo[158190]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:38 compute-0 python3.9[158192]: ansible-ansible.builtin.file Invoked with path=/etc/target setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:50:38 compute-0 sudo[158190]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:39 compute-0 sudo[158342]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mvgqeyvfacfowfjzjsladuxvuivroolz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405838.7322545-67-209969110943448/AnsiballZ_file.py'
Oct 02 11:50:39 compute-0 sudo[158342]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:39 compute-0 python3.9[158344]: ansible-ansible.builtin.file Invoked with path=/var/lib/iscsi setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:50:39 compute-0 sudo[158342]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:39 compute-0 sudo[158494]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jokwayytjqlwfikgfltuayvwkgfnnaqw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405839.336023-67-268882364086953/AnsiballZ_file.py'
Oct 02 11:50:39 compute-0 sudo[158494]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:39 compute-0 python3.9[158496]: ansible-ansible.builtin.file Invoked with mode=0755 path=/var/lib/config-data selevel=s0 setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None attributes=None
Oct 02 11:50:39 compute-0 sudo[158494]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:40 compute-0 sudo[158646]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cvjrvksfvscogqpbqjdfhtbomgnhduoi ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405840.0115445-67-153643637651503/AnsiballZ_file.py'
Oct 02 11:50:40 compute-0 sudo[158646]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:40 compute-0 python3.9[158648]: ansible-ansible.builtin.file Invoked with mode=0755 path=/var/lib/config-data/ansible-generated/iscsid setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:50:40 compute-0 sudo[158646]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:41 compute-0 sudo[158798]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zdupcbekdcddarakqatzlvdjoxxgvwbp ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405840.8570218-175-200458892605856/AnsiballZ_stat.py'
Oct 02 11:50:41 compute-0 sudo[158798]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:41 compute-0 python3.9[158800]: ansible-ansible.builtin.stat Invoked with path=/lib/systemd/system/iscsid.socket follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:50:41 compute-0 sudo[158798]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:42 compute-0 sudo[158952]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-iyiapkogqscnhzdhlhtkjqpurvzjjkao ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405841.7666976-199-248474403097626/AnsiballZ_systemd.py'
Oct 02 11:50:42 compute-0 sudo[158952]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:42 compute-0 python3.9[158954]: ansible-ansible.builtin.systemd Invoked with enabled=False name=iscsid.socket state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:50:42 compute-0 systemd[1]: Reloading.
Oct 02 11:50:42 compute-0 systemd-rc-local-generator[158985]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:50:42 compute-0 systemd-sysv-generator[158989]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:50:43 compute-0 sudo[158952]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:43 compute-0 sudo[159141]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yrdfdqjcvtreiyfhqjwrvpwbciykspsz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405843.26087-223-57047024941815/AnsiballZ_service_facts.py'
Oct 02 11:50:43 compute-0 sudo[159141]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:43 compute-0 python3.9[159143]: ansible-ansible.builtin.service_facts Invoked
Oct 02 11:50:43 compute-0 network[159160]: You are using 'network' service provided by 'network-scripts', which are now deprecated.
Oct 02 11:50:43 compute-0 network[159161]: 'network-scripts' will be removed from distribution in near future.
Oct 02 11:50:43 compute-0 network[159162]: It is advised to switch to 'NetworkManager' instead for network management.
Oct 02 11:50:46 compute-0 sudo[159141]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:48 compute-0 sudo[159433]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pfvxyhcbjnkayxwsiawzmtpchtwqtrrt ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405848.2953486-247-122889588909746/AnsiballZ_systemd.py'
Oct 02 11:50:48 compute-0 sudo[159433]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:48 compute-0 python3.9[159435]: ansible-ansible.builtin.systemd Invoked with enabled=False name=iscsi-starter.service state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:50:48 compute-0 systemd[1]: Reloading.
Oct 02 11:50:49 compute-0 systemd-rc-local-generator[159463]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:50:49 compute-0 systemd-sysv-generator[159467]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:50:49 compute-0 sudo[159433]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:49 compute-0 python3.9[159622]: ansible-ansible.builtin.stat Invoked with path=/etc/iscsi/.initiator_reset follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:50:50 compute-0 sudo[159772]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rwxdvlvevgeeiycgwcdgsryqtamqgnco ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405850.1859794-298-212272331880309/AnsiballZ_podman_container.py'
Oct 02 11:50:50 compute-0 sudo[159772]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:50 compute-0 python3.9[159774]: ansible-containers.podman.podman_container Invoked with command=/usr/sbin/iscsi-iname detach=False image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified name=iscsid_config rm=True tty=True executable=podman state=started debug=False force_restart=False force_delete=True generate_systemd={} image_strict=False recreate=False annotation=None arch=None attach=None authfile=None blkio_weight=None blkio_weight_device=None cap_add=None cap_drop=None cgroup_conf=None cgroup_parent=None cgroupns=None cgroups=None chrootdirs=None cidfile=None cmd_args=None conmon_pidfile=None cpu_period=None cpu_quota=None cpu_rt_period=None cpu_rt_runtime=None cpu_shares=None cpus=None cpuset_cpus=None cpuset_mems=None decryption_key=None delete_depend=None delete_time=None delete_volumes=None detach_keys=None device=None device_cgroup_rule=None device_read_bps=None device_read_iops=None device_write_bps=None device_write_iops=None dns=None dns_option=None dns_search=None entrypoint=None env=None env_file=None env_host=None env_merge=None etc_hosts=None expose=None gidmap=None gpus=None group_add=None group_entry=None healthcheck=None healthcheck_interval=None healthcheck_retries=None healthcheck_start_period=None health_startup_cmd=None health_startup_interval=None health_startup_retries=None health_startup_success=None health_startup_timeout=None healthcheck_timeout=None healthcheck_failure_action=None hooks_dir=None hostname=None hostuser=None http_proxy=None image_volume=None init=None init_ctr=None init_path=None interactive=None ip=None ip6=None ipc=None kernel_memory=None label=None label_file=None log_driver=None log_level=None log_opt=None mac_address=None memory=None memory_reservation=None memory_swap=None memory_swappiness=None mount=None network=None network_aliases=None no_healthcheck=None no_hosts=None oom_kill_disable=None oom_score_adj=None os=None passwd=None passwd_entry=None personality=None pid=None 
pid_file=None pids_limit=None platform=None pod=None pod_id_file=None preserve_fd=None preserve_fds=None privileged=None publish=None publish_all=None pull=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None rdt_class=None read_only=None read_only_tmpfs=None requires=None restart_policy=None restart_time=None retry=None retry_delay=None rmi=None rootfs=None seccomp_policy=None secrets=NOT_LOGGING_PARAMETER sdnotify=None security_opt=None shm_size=None shm_size_systemd=None sig_proxy=None stop_signal=None stop_timeout=None stop_time=None subgidname=None subuidname=None sysctl=None systemd=None timeout=None timezone=None tls_verify=None tmpfs=None uidmap=None ulimit=None umask=None unsetenv=None unsetenv_all=None user=None userns=None uts=None variant=None volume=None volumes_from=None workdir=None
Oct 02 11:50:51 compute-0 podman[159806]: 2025-10-02 11:50:51.046690086 +0000 UTC m=+0.051184540 container create d01a058f72ca614921583f782f4a5a9368067507c6b148f73cb77cd51579795b (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid_config, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team)
Oct 02 11:50:51 compute-0 rsyslogd[1013]: imjournal: journal files changed, reloading...  [v8.2506.0-2.el9 try https://www.rsyslog.com/e/0 ]
Oct 02 11:50:51 compute-0 rsyslogd[1013]: imjournal: journal files changed, reloading...  [v8.2506.0-2.el9 try https://www.rsyslog.com/e/0 ]
Oct 02 11:50:51 compute-0 NetworkManager[51160]: <info>  [1759405851.0659] manager: (podman0): new Bridge device (/org/freedesktop/NetworkManager/Devices/21)
Oct 02 11:50:51 compute-0 kernel: podman0: port 1(veth0) entered blocking state
Oct 02 11:50:51 compute-0 kernel: podman0: port 1(veth0) entered disabled state
Oct 02 11:50:51 compute-0 kernel: veth0: entered allmulticast mode
Oct 02 11:50:51 compute-0 kernel: veth0: entered promiscuous mode
Oct 02 11:50:51 compute-0 NetworkManager[51160]: <info>  [1759405851.0779] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/22)
Oct 02 11:50:51 compute-0 kernel: podman0: port 1(veth0) entered blocking state
Oct 02 11:50:51 compute-0 kernel: podman0: port 1(veth0) entered forwarding state
Oct 02 11:50:51 compute-0 NetworkManager[51160]: <info>  [1759405851.0795] device (veth0): carrier: link connected
Oct 02 11:50:51 compute-0 NetworkManager[51160]: <info>  [1759405851.0798] device (podman0): carrier: link connected
Oct 02 11:50:51 compute-0 systemd-udevd[159833]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 11:50:51 compute-0 systemd-udevd[159836]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 11:50:51 compute-0 NetworkManager[51160]: <info>  [1759405851.1021] device (podman0): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 11:50:51 compute-0 NetworkManager[51160]: <info>  [1759405851.1030] device (podman0): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external')
Oct 02 11:50:51 compute-0 NetworkManager[51160]: <info>  [1759405851.1039] device (podman0): Activation: starting connection 'podman0' (bcef6c5f-5e44-4825-9cf3-f694eed1e22b)
Oct 02 11:50:51 compute-0 NetworkManager[51160]: <info>  [1759405851.1040] device (podman0): state change: disconnected -> prepare (reason 'none', managed-type: 'external')
Oct 02 11:50:51 compute-0 NetworkManager[51160]: <info>  [1759405851.1042] device (podman0): state change: prepare -> config (reason 'none', managed-type: 'external')
Oct 02 11:50:51 compute-0 NetworkManager[51160]: <info>  [1759405851.1044] device (podman0): state change: config -> ip-config (reason 'none', managed-type: 'external')
Oct 02 11:50:51 compute-0 NetworkManager[51160]: <info>  [1759405851.1048] device (podman0): state change: ip-config -> ip-check (reason 'none', managed-type: 'external')
Oct 02 11:50:51 compute-0 podman[159806]: 2025-10-02 11:50:51.015045764 +0000 UTC m=+0.019540128 image pull 1b3fd7f2436e5c6f2e28c01b83721476c7b295789c77b3d63e30f49404389ea1 quay.io/podified-antelope-centos9/openstack-iscsid:current-podified
Oct 02 11:50:51 compute-0 systemd[1]: Starting Network Manager Script Dispatcher Service...
Oct 02 11:50:51 compute-0 systemd[1]: Started Network Manager Script Dispatcher Service.
Oct 02 11:50:51 compute-0 NetworkManager[51160]: <info>  [1759405851.1287] device (podman0): state change: ip-check -> secondaries (reason 'none', managed-type: 'external')
Oct 02 11:50:51 compute-0 NetworkManager[51160]: <info>  [1759405851.1290] device (podman0): state change: secondaries -> activated (reason 'none', managed-type: 'external')
Oct 02 11:50:51 compute-0 NetworkManager[51160]: <info>  [1759405851.1296] device (podman0): Activation: successful, device activated.
Oct 02 11:50:51 compute-0 systemd[1]: iscsi.service: Unit cannot be reloaded because it is inactive.
Oct 02 11:50:51 compute-0 systemd[1]: Started libpod-conmon-d01a058f72ca614921583f782f4a5a9368067507c6b148f73cb77cd51579795b.scope.
Oct 02 11:50:51 compute-0 systemd[1]: Started libcrun container.
Oct 02 11:50:51 compute-0 podman[159806]: 2025-10-02 11:50:51.37425021 +0000 UTC m=+0.378744584 container init d01a058f72ca614921583f782f4a5a9368067507c6b148f73cb77cd51579795b (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid_config, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 11:50:51 compute-0 podman[159806]: 2025-10-02 11:50:51.381339812 +0000 UTC m=+0.385834156 container start d01a058f72ca614921583f782f4a5a9368067507c6b148f73cb77cd51579795b (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid_config, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team)
Oct 02 11:50:51 compute-0 iscsid_config[159965]: iqn.1994-05.com.redhat:b4dbcb958747
Oct 02 11:50:51 compute-0 systemd[1]: libpod-d01a058f72ca614921583f782f4a5a9368067507c6b148f73cb77cd51579795b.scope: Deactivated successfully.
Oct 02 11:50:51 compute-0 podman[159806]: 2025-10-02 11:50:51.397937249 +0000 UTC m=+0.402431613 container attach d01a058f72ca614921583f782f4a5a9368067507c6b148f73cb77cd51579795b (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid_config, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001)
Oct 02 11:50:51 compute-0 podman[159806]: 2025-10-02 11:50:51.399022448 +0000 UTC m=+0.403516812 container died d01a058f72ca614921583f782f4a5a9368067507c6b148f73cb77cd51579795b (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid_config, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS)
Oct 02 11:50:51 compute-0 kernel: podman0: port 1(veth0) entered disabled state
Oct 02 11:50:51 compute-0 kernel: veth0 (unregistering): left allmulticast mode
Oct 02 11:50:51 compute-0 kernel: veth0 (unregistering): left promiscuous mode
Oct 02 11:50:51 compute-0 kernel: podman0: port 1(veth0) entered disabled state
Oct 02 11:50:51 compute-0 NetworkManager[51160]: <info>  [1759405851.4632] device (podman0): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 11:50:51 compute-0 systemd[1]: run-netns-netns\x2dfd280535\x2d2cc8\x2d7ee5\x2daf74\x2d6a74b8d11257.mount: Deactivated successfully.
Oct 02 11:50:51 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-d01a058f72ca614921583f782f4a5a9368067507c6b148f73cb77cd51579795b-userdata-shm.mount: Deactivated successfully.
Oct 02 11:50:51 compute-0 podman[159806]: 2025-10-02 11:50:51.814445668 +0000 UTC m=+0.818940012 container remove d01a058f72ca614921583f782f4a5a9368067507c6b148f73cb77cd51579795b (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid_config, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 11:50:51 compute-0 python3.9[159774]: ansible-containers.podman.podman_container PODMAN-CONTAINER-DEBUG: podman run --name iscsid_config --detach=False --rm --tty=True quay.io/podified-antelope-centos9/openstack-iscsid:current-podified /usr/sbin/iscsi-iname
Oct 02 11:50:51 compute-0 systemd[1]: libpod-conmon-d01a058f72ca614921583f782f4a5a9368067507c6b148f73cb77cd51579795b.scope: Deactivated successfully.
Oct 02 11:50:51 compute-0 python3.9[159774]: ansible-containers.podman.podman_container PODMAN-CONTAINER-DEBUG: Error generating systemd: 
                                             DEPRECATED command:
                                             It is recommended to use Quadlets for running containers and pods under systemd.
                                             
                                             Please refer to podman-systemd.unit(5) for details.
                                             Error: iscsid_config does not refer to a container or pod: no pod with name or ID iscsid_config found: no such pod: no container with name or ID "iscsid_config" found: no such container
Oct 02 11:50:51 compute-0 sudo[159772]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:52 compute-0 systemd[1]: var-lib-containers-storage-overlay-39c5d1fae55e2eee724962858cc1f9f532513a29de590173f3d748d2eb7343eb-merged.mount: Deactivated successfully.
Oct 02 11:50:52 compute-0 sudo[160208]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-izmdraeeuqmojsnbzwskhytilgeinocf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405852.212162-322-71205920264642/AnsiballZ_stat.py'
Oct 02 11:50:52 compute-0 sudo[160208]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:52 compute-0 python3.9[160210]: ansible-ansible.legacy.stat Invoked with path=/etc/iscsi/initiatorname.iscsi follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:50:52 compute-0 sudo[160208]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:53 compute-0 sudo[160331]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cffgiiwrotxrqetgbrwrtqomswdcgopr ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405852.212162-322-71205920264642/AnsiballZ_copy.py'
Oct 02 11:50:53 compute-0 sudo[160331]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:53 compute-0 python3.9[160333]: ansible-ansible.legacy.copy Invoked with dest=/etc/iscsi/initiatorname.iscsi mode=0644 src=/home/zuul/.ansible/tmp/ansible-tmp-1759405852.212162-322-71205920264642/.source.iscsi _original_basename=.1n9n4ob7 follow=False checksum=5e02f60f307b966b37def090e2433d84d78f0fa9 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:50:53 compute-0 sudo[160331]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:53 compute-0 sudo[160483]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-afsrizyrwhnzltboxwljdzdpphqecbxe ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405853.641745-367-120828365575913/AnsiballZ_file.py'
Oct 02 11:50:53 compute-0 sudo[160483]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:54 compute-0 python3.9[160485]: ansible-ansible.builtin.file Invoked with mode=0600 path=/etc/iscsi/.initiator_reset state=touch recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:50:54 compute-0 sudo[160483]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:54 compute-0 python3.9[160635]: ansible-ansible.builtin.stat Invoked with path=/etc/iscsi/iscsid.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:50:55 compute-0 sudo[160787]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fsokvhaiqsbpikkxzgzohzaxrklpyqwb ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405855.0697863-418-180968966348109/AnsiballZ_lineinfile.py'
Oct 02 11:50:55 compute-0 sudo[160787]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:55 compute-0 python3.9[160789]: ansible-ansible.builtin.lineinfile Invoked with insertafter=^#node.session.auth.chap.algs line=node.session.auth.chap_algs = SHA3-256,SHA256,SHA1,MD5 path=/etc/iscsi/iscsid.conf regexp=^node.session.auth.chap_algs state=present backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:50:55 compute-0 sudo[160787]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:56 compute-0 sudo[160939]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-udtcyruomvmtohnvwcoatjpajvhrxkqe ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405856.128951-445-6901577009166/AnsiballZ_file.py'
Oct 02 11:50:56 compute-0 sudo[160939]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:56 compute-0 python3.9[160941]: ansible-ansible.builtin.file Invoked with path=/var/local/libexec recurse=True setype=container_file_t state=directory force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:50:56 compute-0 sudo[160939]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:56 compute-0 sshd-session[161039]: Connection closed by 8.134.239.76 port 44752
Oct 02 11:50:57 compute-0 sudo[161092]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rflrjbdxwikfamdhkfuyuswunezdsidu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405856.8089232-469-271519324235681/AnsiballZ_stat.py'
Oct 02 11:50:57 compute-0 sudo[161092]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:57 compute-0 python3.9[161094]: ansible-ansible.legacy.stat Invoked with path=/var/local/libexec/edpm-container-shutdown follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:50:57 compute-0 sudo[161092]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:57 compute-0 sudo[161170]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zfdzohgdmbmwuurpewqiklevndatmicb ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405856.8089232-469-271519324235681/AnsiballZ_file.py'
Oct 02 11:50:57 compute-0 sudo[161170]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:57 compute-0 python3.9[161172]: ansible-ansible.legacy.file Invoked with group=root mode=0700 owner=root setype=container_file_t dest=/var/local/libexec/edpm-container-shutdown _original_basename=edpm-container-shutdown recurse=False state=file path=/var/local/libexec/edpm-container-shutdown force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:50:57 compute-0 sudo[161170]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:58 compute-0 sudo[161322]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dmvyszpnurzhosjblhxibqrffeuwdoqq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405857.8042092-469-146100139873836/AnsiballZ_stat.py'
Oct 02 11:50:58 compute-0 sudo[161322]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:58 compute-0 python3.9[161324]: ansible-ansible.legacy.stat Invoked with path=/var/local/libexec/edpm-start-podman-container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:50:58 compute-0 sudo[161322]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:58 compute-0 sudo[161400]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vbtubxckeofwrbtcipswxnfsootmjjha ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405857.8042092-469-146100139873836/AnsiballZ_file.py'
Oct 02 11:50:58 compute-0 sudo[161400]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:58 compute-0 python3.9[161402]: ansible-ansible.legacy.file Invoked with group=root mode=0700 owner=root setype=container_file_t dest=/var/local/libexec/edpm-start-podman-container _original_basename=edpm-start-podman-container recurse=False state=file path=/var/local/libexec/edpm-start-podman-container force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:50:58 compute-0 sudo[161400]: pam_unix(sudo:session): session closed for user root
Oct 02 11:50:59 compute-0 sudo[161552]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zigerzlckccwnjcuqainvqgwqwgsntup ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405859.1331182-538-115017254948878/AnsiballZ_file.py'
Oct 02 11:50:59 compute-0 sudo[161552]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:50:59 compute-0 python3.9[161554]: ansible-ansible.builtin.file Invoked with mode=420 path=/etc/systemd/system-preset state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:50:59 compute-0 sudo[161552]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:00 compute-0 sudo[161704]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wyhoafiqyversqrbcwgrzpvnkjvtcafu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405859.826868-562-225153270875075/AnsiballZ_stat.py'
Oct 02 11:51:00 compute-0 sudo[161704]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:00 compute-0 python3.9[161706]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/edpm-container-shutdown.service follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:51:00 compute-0 sudo[161704]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:00 compute-0 sudo[161782]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tmyqmgpfdqfdwhyxpwiknsghymuoyiyx ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405859.826868-562-225153270875075/AnsiballZ_file.py'
Oct 02 11:51:00 compute-0 sudo[161782]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:00 compute-0 python3.9[161784]: ansible-ansible.legacy.file Invoked with group=root mode=0644 owner=root dest=/etc/systemd/system/edpm-container-shutdown.service _original_basename=edpm-container-shutdown-service recurse=False state=file path=/etc/systemd/system/edpm-container-shutdown.service force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:51:00 compute-0 sudo[161782]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:01 compute-0 sudo[161934]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zvoygnasqyiuqrmgwcpfhtctgfsgyfag ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405860.9609482-598-35170939317565/AnsiballZ_stat.py'
Oct 02 11:51:01 compute-0 sudo[161934]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:01 compute-0 python3.9[161936]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system-preset/91-edpm-container-shutdown.preset follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:51:01 compute-0 sudo[161934]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:01 compute-0 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
Oct 02 11:51:01 compute-0 sudo[162012]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vkadoqstpartjxjlktnnvvkucnameibx ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405860.9609482-598-35170939317565/AnsiballZ_file.py'
Oct 02 11:51:01 compute-0 sudo[162012]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:01 compute-0 python3.9[162014]: ansible-ansible.legacy.file Invoked with group=root mode=0644 owner=root dest=/etc/systemd/system-preset/91-edpm-container-shutdown.preset _original_basename=91-edpm-container-shutdown-preset recurse=False state=file path=/etc/systemd/system-preset/91-edpm-container-shutdown.preset force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:51:01 compute-0 sudo[162012]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:51:02.191 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 11:51:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:51:02.192 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 11:51:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:51:02.192 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 11:51:02 compute-0 sudo[162164]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ruxadldwmgrcftkvbychsfvgcexnzwml ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405862.0715442-634-256442842983923/AnsiballZ_systemd.py'
Oct 02 11:51:02 compute-0 sudo[162164]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:02 compute-0 python3.9[162166]: ansible-ansible.builtin.systemd Invoked with daemon_reload=True enabled=True name=edpm-container-shutdown state=started daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:51:02 compute-0 systemd[1]: Reloading.
Oct 02 11:51:02 compute-0 systemd-rc-local-generator[162194]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:51:02 compute-0 systemd-sysv-generator[162197]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:51:02 compute-0 sudo[162164]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:03 compute-0 sudo[162367]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-otrlhzerpqqsysdtmkulqleqpssabrzv ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405863.2752638-658-269378205713712/AnsiballZ_stat.py'
Oct 02 11:51:03 compute-0 sudo[162367]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:03 compute-0 podman[162327]: 2025-10-02 11:51:03.557519415 +0000 UTC m=+0.074212240 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=ovn_controller)
Oct 02 11:51:03 compute-0 python3.9[162375]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/netns-placeholder.service follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:51:03 compute-0 sudo[162367]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:03 compute-0 sudo[162459]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jzgtqlcgrtktrzkmmzqvtppkzdhqylmm ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405863.2752638-658-269378205713712/AnsiballZ_file.py'
Oct 02 11:51:03 compute-0 sudo[162459]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:04 compute-0 python3.9[162461]: ansible-ansible.legacy.file Invoked with group=root mode=0644 owner=root dest=/etc/systemd/system/netns-placeholder.service _original_basename=netns-placeholder-service recurse=False state=file path=/etc/systemd/system/netns-placeholder.service force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:51:04 compute-0 sudo[162459]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:04 compute-0 sudo[162611]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-oaelgddzakzzlcumjgbfkmabvnywecoc ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405864.360246-694-94054094549992/AnsiballZ_stat.py'
Oct 02 11:51:04 compute-0 sudo[162611]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:04 compute-0 python3.9[162613]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system-preset/91-netns-placeholder.preset follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:51:04 compute-0 sudo[162611]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:05 compute-0 sudo[162689]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gfcghnwuhvulnhpvoqihaetecqprxixd ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405864.360246-694-94054094549992/AnsiballZ_file.py'
Oct 02 11:51:05 compute-0 sudo[162689]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:05 compute-0 python3.9[162691]: ansible-ansible.legacy.file Invoked with group=root mode=0644 owner=root dest=/etc/systemd/system-preset/91-netns-placeholder.preset _original_basename=91-netns-placeholder-preset recurse=False state=file path=/etc/systemd/system-preset/91-netns-placeholder.preset force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:51:05 compute-0 sudo[162689]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:05 compute-0 sudo[162841]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ihfbzgfelkggjxojxjatuyxyarqfncoe ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405865.4603937-730-116688475403642/AnsiballZ_systemd.py'
Oct 02 11:51:05 compute-0 sudo[162841]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:06 compute-0 python3.9[162843]: ansible-ansible.builtin.systemd Invoked with daemon_reload=True enabled=True name=netns-placeholder state=started daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:51:06 compute-0 systemd[1]: Reloading.
Oct 02 11:51:06 compute-0 systemd-rc-local-generator[162868]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:51:06 compute-0 systemd-sysv-generator[162871]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:51:06 compute-0 systemd[1]: Starting Create netns directory...
Oct 02 11:51:06 compute-0 systemd[1]: run-netns-placeholder.mount: Deactivated successfully.
Oct 02 11:51:06 compute-0 systemd[1]: netns-placeholder.service: Deactivated successfully.
Oct 02 11:51:06 compute-0 systemd[1]: Finished Create netns directory.
Oct 02 11:51:06 compute-0 podman[162879]: 2025-10-02 11:51:06.4410512 +0000 UTC m=+0.082361410 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_managed=true, config_id=ovn_metadata_agent, container_name=ovn_metadata_agent, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team)
Oct 02 11:51:06 compute-0 sudo[162841]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:07 compute-0 sudo[163052]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-aazzomyiahkyrpvpwyfuwvliktezkylp ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405866.9901593-760-77134085827894/AnsiballZ_file.py'
Oct 02 11:51:07 compute-0 sudo[163052]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:07 compute-0 python3.9[163054]: ansible-ansible.builtin.file Invoked with group=zuul mode=0755 owner=zuul path=/var/lib/openstack/healthchecks setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:51:07 compute-0 sudo[163052]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:07 compute-0 sudo[163204]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lwrikbzdpuybczngxaytuzymrdgbstkw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405867.7459164-784-246740395516468/AnsiballZ_stat.py'
Oct 02 11:51:07 compute-0 sudo[163204]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:08 compute-0 python3.9[163206]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/healthchecks/iscsid/healthcheck follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:51:08 compute-0 sudo[163204]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:08 compute-0 sudo[163327]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ychdepwosaobmktqkbaimxujcmnxqkyl ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405867.7459164-784-246740395516468/AnsiballZ_copy.py'
Oct 02 11:51:08 compute-0 sudo[163327]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:08 compute-0 python3.9[163329]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/healthchecks/iscsid/ group=zuul mode=0700 owner=zuul setype=container_file_t src=/home/zuul/.ansible/tmp/ansible-tmp-1759405867.7459164-784-246740395516468/.source _original_basename=healthcheck follow=False checksum=2e1237e7fe015c809b173c52e24cfb87132f4344 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:51:08 compute-0 sudo[163327]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:09 compute-0 sudo[163479]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jvlcwamjuadfgbuwmryzyqhjqwqhctjv ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405869.4973812-835-42863364101306/AnsiballZ_file.py'
Oct 02 11:51:09 compute-0 sudo[163479]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:10 compute-0 python3.9[163481]: ansible-ansible.builtin.file Invoked with path=/var/lib/kolla/config_files recurse=True setype=container_file_t state=directory force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:51:10 compute-0 sudo[163479]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:10 compute-0 sudo[163631]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ardxrtfnlpdlelznoisjfelvcosdkpye ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405870.3211462-859-146148516761674/AnsiballZ_stat.py'
Oct 02 11:51:10 compute-0 sudo[163631]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:10 compute-0 python3.9[163633]: ansible-ansible.legacy.stat Invoked with path=/var/lib/kolla/config_files/iscsid.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:51:10 compute-0 sudo[163631]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:11 compute-0 sudo[163754]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ozxzohvyuqkgtpqbywunuodtfvcjxowz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405870.3211462-859-146148516761674/AnsiballZ_copy.py'
Oct 02 11:51:11 compute-0 sudo[163754]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:11 compute-0 python3.9[163756]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/kolla/config_files/iscsid.json mode=0600 src=/home/zuul/.ansible/tmp/ansible-tmp-1759405870.3211462-859-146148516761674/.source.json _original_basename=.gf9jr0_6 follow=False checksum=80e4f97460718c7e5c66b21ef8b846eba0e0dbc8 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:51:11 compute-0 sudo[163754]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:11 compute-0 sudo[163906]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-uywgiqsvludhtasnhktghmteqyztmpdg ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405871.6196325-904-277190597605941/AnsiballZ_file.py'
Oct 02 11:51:11 compute-0 sudo[163906]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:12 compute-0 python3.9[163908]: ansible-ansible.builtin.file Invoked with mode=0755 path=/var/lib/edpm-config/container-startup-config/iscsid state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:51:12 compute-0 sudo[163906]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:12 compute-0 sudo[164058]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vqcxwygxjuwuifoanfgpmwwiqhoeomrm ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405872.4533193-928-111907833134355/AnsiballZ_stat.py'
Oct 02 11:51:12 compute-0 sudo[164058]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:12 compute-0 sudo[164058]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:13 compute-0 sudo[164181]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fqczvvtexdpckpoonustiqwhsgfekkad ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405872.4533193-928-111907833134355/AnsiballZ_copy.py'
Oct 02 11:51:13 compute-0 sudo[164181]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:13 compute-0 sudo[164181]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:14 compute-0 sudo[164333]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-oqxgiggtjajltmnrtjmbdjbrexfkaamz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405873.8855853-979-259280617512262/AnsiballZ_container_config_data.py'
Oct 02 11:51:14 compute-0 sudo[164333]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:14 compute-0 python3.9[164335]: ansible-container_config_data Invoked with config_overrides={} config_path=/var/lib/edpm-config/container-startup-config/iscsid config_pattern=*.json debug=False
Oct 02 11:51:14 compute-0 sudo[164333]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:15 compute-0 sudo[164485]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wtnthdxywowlknvkseswyrlomhktohws ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405874.857097-1006-246297031959700/AnsiballZ_container_config_hash.py'
Oct 02 11:51:15 compute-0 sudo[164485]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:15 compute-0 python3.9[164487]: ansible-container_config_hash Invoked with check_mode=False config_vol_prefix=/var/lib/config-data
Oct 02 11:51:15 compute-0 sudo[164485]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:16 compute-0 sudo[164637]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ylkfsaoqdxfgejpfsmivqgqmevjvfiti ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405875.846364-1033-157705330719026/AnsiballZ_podman_container_info.py'
Oct 02 11:51:16 compute-0 sudo[164637]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:16 compute-0 python3.9[164639]: ansible-containers.podman.podman_container_info Invoked with executable=podman name=None
Oct 02 11:51:16 compute-0 sudo[164637]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:17 compute-0 sudo[164816]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jpapnmuomgkagtczyficqiloyfjwjlgo ; /usr/bin/python3 /home/zuul/.ansible/tmp/ansible-tmp-1759405877.279913-1072-129728045128740/AnsiballZ_edpm_container_manage.py'
Oct 02 11:51:17 compute-0 sudo[164816]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:18 compute-0 python3[164818]: ansible-edpm_container_manage Invoked with concurrency=1 config_dir=/var/lib/edpm-config/container-startup-config/iscsid config_id=iscsid config_overrides={} config_patterns=*.json log_base_path=/var/log/containers/stdouts debug=False
Oct 02 11:51:18 compute-0 podman[164855]: 2025-10-02 11:51:18.241702316 +0000 UTC m=+0.031781202 image pull 1b3fd7f2436e5c6f2e28c01b83721476c7b295789c77b3d63e30f49404389ea1 quay.io/podified-antelope-centos9/openstack-iscsid:current-podified
Oct 02 11:51:18 compute-0 podman[164855]: 2025-10-02 11:51:18.583974736 +0000 UTC m=+0.374053592 container create d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, container_name=iscsid, org.label-schema.build-date=20251001, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, config_id=iscsid, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, managed_by=edpm_ansible)
Oct 02 11:51:18 compute-0 python3[164818]: ansible-edpm_container_manage PODMAN-CONTAINER-DEBUG: podman create --name iscsid --conmon-pidfile /run/iscsid.pid --env KOLLA_CONFIG_STRATEGY=COPY_ALWAYS --healthcheck-command /openstack/healthcheck --label config_id=iscsid --label container_name=iscsid --label managed_by=edpm_ansible --label config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']} --log-driver journald --log-level info --network host --privileged=True --volume /etc/hosts:/etc/hosts:ro --volume /etc/localtime:/etc/localtime:ro --volume /etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro --volume /etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro --volume /etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro --volume /etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro --volume /etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro --volume /dev/log:/dev/log --volume 
/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro --volume /dev:/dev --volume /run:/run --volume /sys:/sys --volume /lib/modules:/lib/modules:ro --volume /etc/iscsi:/etc/iscsi:z --volume /etc/target:/etc/target:z --volume /var/lib/iscsi:/var/lib/iscsi:z --volume /var/lib/openstack/healthchecks/iscsid:/openstack:ro,z quay.io/podified-antelope-centos9/openstack-iscsid:current-podified
Oct 02 11:51:18 compute-0 sudo[164816]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:19 compute-0 sudo[165043]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wmnemrcnztoyufcxixhmhipojwdexhys ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405878.9107277-1096-91544266077058/AnsiballZ_stat.py'
Oct 02 11:51:19 compute-0 sudo[165043]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:19 compute-0 python3.9[165045]: ansible-ansible.builtin.stat Invoked with path=/etc/sysconfig/podman_drop_in follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:51:19 compute-0 sudo[165043]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:20 compute-0 sudo[165197]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qdfeehnivmzksxqyivrkaqxrkentlxvf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405879.7328637-1123-153813449304559/AnsiballZ_file.py'
Oct 02 11:51:20 compute-0 sudo[165197]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:20 compute-0 python3.9[165199]: ansible-file Invoked with path=/etc/systemd/system/edpm_iscsid.requires state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:51:20 compute-0 sudo[165197]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:20 compute-0 sudo[165273]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nmzqvegqurgdegbbwbzkxxariwijjikx ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405879.7328637-1123-153813449304559/AnsiballZ_stat.py'
Oct 02 11:51:20 compute-0 sudo[165273]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:20 compute-0 python3.9[165275]: ansible-stat Invoked with path=/etc/systemd/system/edpm_iscsid_healthcheck.timer follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:51:20 compute-0 sudo[165273]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:21 compute-0 sudo[165424]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ewcyctrjotoyooftttjxcplbxadgpxjc ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405880.708484-1123-19743087418028/AnsiballZ_copy.py'
Oct 02 11:51:21 compute-0 sudo[165424]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:21 compute-0 python3.9[165426]: ansible-copy Invoked with src=/home/zuul/.ansible/tmp/ansible-tmp-1759405880.708484-1123-19743087418028/source dest=/etc/systemd/system/edpm_iscsid.service mode=0644 owner=root group=root backup=False force=True remote_src=False follow=False unsafe_writes=False _original_basename=None content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None checksum=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:51:21 compute-0 sudo[165424]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:21 compute-0 sudo[165500]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dtjvpenwrxaeapnanuygcvlzgbabcpbp ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405880.708484-1123-19743087418028/AnsiballZ_systemd.py'
Oct 02 11:51:21 compute-0 sudo[165500]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:21 compute-0 python3.9[165502]: ansible-systemd Invoked with daemon_reload=True daemon_reexec=False scope=system no_block=False name=None state=None enabled=None force=None masked=None
Oct 02 11:51:21 compute-0 systemd[1]: Reloading.
Oct 02 11:51:22 compute-0 systemd-rc-local-generator[165530]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:51:22 compute-0 systemd-sysv-generator[165535]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:51:22 compute-0 sudo[165500]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:22 compute-0 sudo[165611]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wqdrqdrdlnlbiydzxreitnelgyqjraqv ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405880.708484-1123-19743087418028/AnsiballZ_systemd.py'
Oct 02 11:51:22 compute-0 sudo[165611]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:22 compute-0 python3.9[165613]: ansible-systemd Invoked with state=restarted name=edpm_iscsid.service enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:51:22 compute-0 systemd[1]: Reloading.
Oct 02 11:51:22 compute-0 systemd-rc-local-generator[165635]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:51:22 compute-0 systemd-sysv-generator[165641]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:51:23 compute-0 systemd[1]: Starting iscsid container...
Oct 02 11:51:23 compute-0 systemd[1]: Started libcrun container.
Oct 02 11:51:23 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/fab59d22430f2ad3a93be367e6642c3829db807b10824103b823b8fde49c10f7/merged/etc/iscsi supports timestamps until 2038 (0x7fffffff)
Oct 02 11:51:23 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/fab59d22430f2ad3a93be367e6642c3829db807b10824103b823b8fde49c10f7/merged/etc/target supports timestamps until 2038 (0x7fffffff)
Oct 02 11:51:23 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/fab59d22430f2ad3a93be367e6642c3829db807b10824103b823b8fde49c10f7/merged/var/lib/iscsi supports timestamps until 2038 (0x7fffffff)
Oct 02 11:51:23 compute-0 systemd[1]: Started /usr/bin/podman healthcheck run d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818.
Oct 02 11:51:23 compute-0 podman[165653]: 2025-10-02 11:51:23.603372201 +0000 UTC m=+0.412834194 container init d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, config_id=iscsid, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_managed=true, container_name=iscsid, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, managed_by=edpm_ansible)
Oct 02 11:51:23 compute-0 iscsid[165669]: + sudo -E kolla_set_configs
Oct 02 11:51:23 compute-0 sudo[165675]:     root : PWD=/ ; USER=root ; COMMAND=/usr/local/bin/kolla_set_configs
Oct 02 11:51:23 compute-0 podman[165653]: 2025-10-02 11:51:23.646496938 +0000 UTC m=+0.455958871 container start d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid, managed_by=edpm_ansible, container_name=iscsid, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_managed=true, io.buildah.version=1.41.3)
Oct 02 11:51:23 compute-0 systemd[1]: Created slice User Slice of UID 0.
Oct 02 11:51:23 compute-0 systemd[1]: Starting User Runtime Directory /run/user/0...
Oct 02 11:51:23 compute-0 systemd[1]: Finished User Runtime Directory /run/user/0.
Oct 02 11:51:23 compute-0 systemd[1]: Starting User Manager for UID 0...
Oct 02 11:51:23 compute-0 podman[165653]: iscsid
Oct 02 11:51:23 compute-0 systemd[1]: Started iscsid container.
Oct 02 11:51:23 compute-0 systemd[165690]: pam_unix(systemd-user:session): session opened for user root(uid=0) by root(uid=0)
Oct 02 11:51:23 compute-0 sudo[165611]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:23 compute-0 podman[165676]: 2025-10-02 11:51:23.757041312 +0000 UTC m=+0.097564873 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=starting, health_failing_streak=1, health_log=, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=iscsid, container_name=iscsid, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, managed_by=edpm_ansible)
Oct 02 11:51:23 compute-0 systemd[1]: d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818-115ed4b416c29b92.service: Main process exited, code=exited, status=1/FAILURE
Oct 02 11:51:23 compute-0 systemd[1]: d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818-115ed4b416c29b92.service: Failed with result 'exit-code'.
Oct 02 11:51:23 compute-0 systemd[165690]: Queued start job for default target Main User Target.
Oct 02 11:51:23 compute-0 systemd[165690]: Created slice User Application Slice.
Oct 02 11:51:23 compute-0 systemd[165690]: Mark boot as successful after the user session has run 2 minutes was skipped because of an unmet condition check (ConditionUser=!@system).
Oct 02 11:51:23 compute-0 systemd[165690]: Started Daily Cleanup of User's Temporary Directories.
Oct 02 11:51:23 compute-0 systemd[165690]: Reached target Paths.
Oct 02 11:51:23 compute-0 systemd[165690]: Reached target Timers.
Oct 02 11:51:23 compute-0 systemd[165690]: Starting D-Bus User Message Bus Socket...
Oct 02 11:51:23 compute-0 systemd[165690]: Starting Create User's Volatile Files and Directories...
Oct 02 11:51:23 compute-0 systemd[165690]: Finished Create User's Volatile Files and Directories.
Oct 02 11:51:23 compute-0 systemd[165690]: Listening on D-Bus User Message Bus Socket.
Oct 02 11:51:23 compute-0 systemd[165690]: Reached target Sockets.
Oct 02 11:51:23 compute-0 systemd[165690]: Reached target Basic System.
Oct 02 11:51:23 compute-0 systemd[165690]: Reached target Main User Target.
Oct 02 11:51:23 compute-0 systemd[165690]: Startup finished in 129ms.
Oct 02 11:51:23 compute-0 systemd[1]: Started User Manager for UID 0.
Oct 02 11:51:23 compute-0 systemd[1]: Started Session c3 of User root.
Oct 02 11:51:23 compute-0 sudo[165675]: pam_unix(sudo:session): session opened for user root(uid=0) by (uid=0)
Oct 02 11:51:23 compute-0 iscsid[165669]: INFO:__main__:Loading config file at /var/lib/kolla/config_files/config.json
Oct 02 11:51:23 compute-0 iscsid[165669]: INFO:__main__:Validating config file
Oct 02 11:51:23 compute-0 iscsid[165669]: INFO:__main__:Kolla config strategy set to: COPY_ALWAYS
Oct 02 11:51:23 compute-0 iscsid[165669]: INFO:__main__:Writing out command to execute
Oct 02 11:51:23 compute-0 sudo[165675]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:23 compute-0 systemd[1]: session-c3.scope: Deactivated successfully.
Oct 02 11:51:23 compute-0 iscsid[165669]: ++ cat /run_command
Oct 02 11:51:23 compute-0 iscsid[165669]: + CMD='/usr/sbin/iscsid -f'
Oct 02 11:51:23 compute-0 iscsid[165669]: + ARGS=
Oct 02 11:51:23 compute-0 iscsid[165669]: + sudo kolla_copy_cacerts
Oct 02 11:51:23 compute-0 sudo[165739]:     root : PWD=/ ; USER=root ; COMMAND=/usr/local/bin/kolla_copy_cacerts
Oct 02 11:51:23 compute-0 systemd[1]: Started Session c4 of User root.
Oct 02 11:51:23 compute-0 sudo[165739]: pam_unix(sudo:session): session opened for user root(uid=0) by (uid=0)
Oct 02 11:51:23 compute-0 sudo[165739]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:23 compute-0 systemd[1]: session-c4.scope: Deactivated successfully.
Oct 02 11:51:23 compute-0 iscsid[165669]: + [[ ! -n '' ]]
Oct 02 11:51:23 compute-0 iscsid[165669]: + . kolla_extend_start
Oct 02 11:51:23 compute-0 iscsid[165669]: ++ [[ ! -f /etc/iscsi/initiatorname.iscsi ]]
Oct 02 11:51:23 compute-0 iscsid[165669]: + echo 'Running command: '\''/usr/sbin/iscsid -f'\'''
Oct 02 11:51:23 compute-0 iscsid[165669]: Running command: '/usr/sbin/iscsid -f'
Oct 02 11:51:23 compute-0 iscsid[165669]: + umask 0022
Oct 02 11:51:23 compute-0 iscsid[165669]: + exec /usr/sbin/iscsid -f
Oct 02 11:51:24 compute-0 kernel: Loading iSCSI transport class v2.0-870.
Oct 02 11:51:24 compute-0 python3.9[165875]: ansible-ansible.builtin.stat Invoked with path=/etc/iscsi/.iscsid_restart_required follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:51:25 compute-0 sudo[166025]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mtolxebbrvyolzqtfwfkppnxzqbhpclb ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405884.9395351-1234-17486136636992/AnsiballZ_file.py'
Oct 02 11:51:25 compute-0 sudo[166025]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:25 compute-0 python3.9[166027]: ansible-ansible.builtin.file Invoked with path=/etc/iscsi/.iscsid_restart_required state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:51:25 compute-0 sudo[166025]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:26 compute-0 sudo[166177]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-seiozgiljnovrxkqogouvsshqmrwjmon ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405886.012514-1267-70804846227666/AnsiballZ_service_facts.py'
Oct 02 11:51:26 compute-0 sudo[166177]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:26 compute-0 python3.9[166179]: ansible-ansible.builtin.service_facts Invoked
Oct 02 11:51:26 compute-0 network[166196]: You are using 'network' service provided by 'network-scripts', which are now deprecated.
Oct 02 11:51:26 compute-0 network[166197]: 'network-scripts' will be removed from distribution in near future.
Oct 02 11:51:26 compute-0 network[166198]: It is advised to switch to 'NetworkManager' instead for network management.
Oct 02 11:51:30 compute-0 sudo[166177]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:30 compute-0 sudo[166470]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bgqkziuklcxznchzzkoahamgdecrimou ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405890.4945996-1297-120824854635688/AnsiballZ_file.py'
Oct 02 11:51:30 compute-0 sudo[166470]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:31 compute-0 python3.9[166472]: ansible-ansible.builtin.file Invoked with mode=0755 path=/etc/modules-load.d selevel=s0 setype=etc_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None attributes=None
Oct 02 11:51:31 compute-0 sudo[166470]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:31 compute-0 sudo[166622]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vqlkgkaudkxnmhjhtekggtdyswnhauap ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405891.2544076-1321-22715369493292/AnsiballZ_modprobe.py'
Oct 02 11:51:31 compute-0 sudo[166622]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:31 compute-0 python3.9[166624]: ansible-community.general.modprobe Invoked with name=dm-multipath state=present params= persistent=disabled
Oct 02 11:51:32 compute-0 sudo[166622]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:32 compute-0 sudo[166778]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hfvhcjvtmhgwhdmapdbnlaicrbasamro ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405892.204732-1345-27982885896840/AnsiballZ_stat.py'
Oct 02 11:51:32 compute-0 sudo[166778]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:32 compute-0 python3.9[166780]: ansible-ansible.legacy.stat Invoked with path=/etc/modules-load.d/dm-multipath.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:51:32 compute-0 sudo[166778]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:33 compute-0 sudo[166901]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jhyrflnvqtkmmqexuoyjcklkyviapzky ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405892.204732-1345-27982885896840/AnsiballZ_copy.py'
Oct 02 11:51:33 compute-0 sudo[166901]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:33 compute-0 python3.9[166903]: ansible-ansible.legacy.copy Invoked with dest=/etc/modules-load.d/dm-multipath.conf mode=0644 src=/home/zuul/.ansible/tmp/ansible-tmp-1759405892.204732-1345-27982885896840/.source.conf follow=False _original_basename=module-load.conf.j2 checksum=065061c60917e4f67cecc70d12ce55e42f9d0b3f backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:51:33 compute-0 sudo[166901]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:34 compute-0 systemd[1]: Stopping User Manager for UID 0...
Oct 02 11:51:34 compute-0 systemd[165690]: Activating special unit Exit the Session...
Oct 02 11:51:34 compute-0 systemd[165690]: Stopped target Main User Target.
Oct 02 11:51:34 compute-0 systemd[165690]: Stopped target Basic System.
Oct 02 11:51:34 compute-0 systemd[165690]: Stopped target Paths.
Oct 02 11:51:34 compute-0 systemd[165690]: Stopped target Sockets.
Oct 02 11:51:34 compute-0 systemd[165690]: Stopped target Timers.
Oct 02 11:51:34 compute-0 systemd[165690]: Stopped Daily Cleanup of User's Temporary Directories.
Oct 02 11:51:34 compute-0 systemd[165690]: Closed D-Bus User Message Bus Socket.
Oct 02 11:51:34 compute-0 systemd[165690]: Stopped Create User's Volatile Files and Directories.
Oct 02 11:51:34 compute-0 systemd[165690]: Removed slice User Application Slice.
Oct 02 11:51:34 compute-0 systemd[165690]: Reached target Shutdown.
Oct 02 11:51:34 compute-0 systemd[165690]: Finished Exit the Session.
Oct 02 11:51:34 compute-0 systemd[165690]: Reached target Exit the Session.
Oct 02 11:51:34 compute-0 systemd[1]: user@0.service: Deactivated successfully.
Oct 02 11:51:34 compute-0 systemd[1]: Stopped User Manager for UID 0.
Oct 02 11:51:34 compute-0 systemd[1]: Stopping User Runtime Directory /run/user/0...
Oct 02 11:51:34 compute-0 systemd[1]: run-user-0.mount: Deactivated successfully.
Oct 02 11:51:34 compute-0 systemd[1]: user-runtime-dir@0.service: Deactivated successfully.
Oct 02 11:51:34 compute-0 systemd[1]: Stopped User Runtime Directory /run/user/0.
Oct 02 11:51:34 compute-0 systemd[1]: Removed slice User Slice of UID 0.
Oct 02 11:51:34 compute-0 sudo[167071]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-epuicxedmcduumfubtbocltzmutaxdhm ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405893.760962-1393-12797954311420/AnsiballZ_lineinfile.py'
Oct 02 11:51:34 compute-0 sudo[167071]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:34 compute-0 podman[167027]: 2025-10-02 11:51:34.098917297 +0000 UTC m=+0.081422827 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 11:51:34 compute-0 python3.9[167076]: ansible-ansible.builtin.lineinfile Invoked with create=True dest=/etc/modules line=dm-multipath  mode=0644 state=present path=/etc/modules backrefs=False backup=False firstmatch=False unsafe_writes=False regexp=None search_string=None insertafter=None insertbefore=None validate=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:51:34 compute-0 sudo[167071]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:34 compute-0 sudo[167232]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vvbttfjwuyzzixugcjixxhrtjtikhegh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405894.4903243-1417-209328424416353/AnsiballZ_systemd.py'
Oct 02 11:51:34 compute-0 sudo[167232]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:35 compute-0 python3.9[167234]: ansible-ansible.builtin.systemd Invoked with name=systemd-modules-load.service state=restarted daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Oct 02 11:51:35 compute-0 systemd[1]: systemd-modules-load.service: Deactivated successfully.
Oct 02 11:51:35 compute-0 systemd[1]: Stopped Load Kernel Modules.
Oct 02 11:51:35 compute-0 systemd[1]: Stopping Load Kernel Modules...
Oct 02 11:51:35 compute-0 systemd[1]: Starting Load Kernel Modules...
Oct 02 11:51:35 compute-0 systemd[1]: Finished Load Kernel Modules.
Oct 02 11:51:35 compute-0 sudo[167232]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:35 compute-0 sudo[167388]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kzdpjazwtguzjaugernyjknzxxjomwbf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405895.5090947-1441-36056278979909/AnsiballZ_file.py'
Oct 02 11:51:35 compute-0 sudo[167388]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:36 compute-0 python3.9[167390]: ansible-ansible.builtin.file Invoked with mode=0755 path=/etc/multipath setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:51:36 compute-0 sudo[167388]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:36 compute-0 sudo[167557]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dhzugsgtxkdfrwkwqvgnsondtvtxkkvk ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405896.3689005-1468-9595852162971/AnsiballZ_stat.py'
Oct 02 11:51:36 compute-0 sudo[167557]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:36 compute-0 podman[167514]: 2025-10-02 11:51:36.698756714 +0000 UTC m=+0.067118449 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_metadata_agent, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, config_id=ovn_metadata_agent, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 11:51:36 compute-0 python3.9[167562]: ansible-ansible.builtin.stat Invoked with path=/etc/multipath.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:51:36 compute-0 sudo[167557]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:37 compute-0 sudo[167712]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zefvoquitjkrzyypuxzrpdexolcbsemf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405897.2631915-1495-159383933800251/AnsiballZ_stat.py'
Oct 02 11:51:37 compute-0 sudo[167712]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:37 compute-0 python3.9[167714]: ansible-ansible.builtin.stat Invoked with path=/etc/multipath.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:51:37 compute-0 sudo[167712]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:38 compute-0 sudo[167864]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-debltenxbcqrbobwilvyohsgjbxzkgry ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405897.9340043-1519-120775315962709/AnsiballZ_stat.py'
Oct 02 11:51:38 compute-0 sudo[167864]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:38 compute-0 python3.9[167866]: ansible-ansible.legacy.stat Invoked with path=/etc/multipath.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:51:38 compute-0 sudo[167864]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:38 compute-0 sudo[167987]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nmgfcgrrkixsrgiianytsxqynnazngda ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405897.9340043-1519-120775315962709/AnsiballZ_copy.py'
Oct 02 11:51:38 compute-0 sudo[167987]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:38 compute-0 python3.9[167989]: ansible-ansible.legacy.copy Invoked with dest=/etc/multipath.conf mode=0644 src=/home/zuul/.ansible/tmp/ansible-tmp-1759405897.9340043-1519-120775315962709/.source.conf _original_basename=multipath.conf follow=False checksum=bf02ab264d3d648048a81f3bacec8bc58db93162 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:51:39 compute-0 sudo[167987]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:39 compute-0 sudo[168139]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-aafsabsfmovxzrwoeowtgkftkregilrf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405899.248864-1564-206615296603821/AnsiballZ_command.py'
Oct 02 11:51:39 compute-0 sudo[168139]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:39 compute-0 python3.9[168141]: ansible-ansible.legacy.command Invoked with _raw_params=grep -q '^blacklist\s*{' /etc/multipath.conf _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:51:39 compute-0 sudo[168139]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:40 compute-0 sudo[168292]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nmmlcskiomafyluzmpezumzkpsrsbbqy ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405900.201272-1588-185823848083714/AnsiballZ_lineinfile.py'
Oct 02 11:51:40 compute-0 sudo[168292]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:40 compute-0 python3.9[168294]: ansible-ansible.builtin.lineinfile Invoked with line=blacklist { path=/etc/multipath.conf state=present backrefs=False create=False backup=False firstmatch=False unsafe_writes=False regexp=None search_string=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:51:40 compute-0 sudo[168292]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:41 compute-0 sudo[168444]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ksiblzioxbtozzmcnhqnfmxmknwzfiuu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405900.8981009-1612-271487637371213/AnsiballZ_replace.py'
Oct 02 11:51:41 compute-0 sudo[168444]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:41 compute-0 python3.9[168446]: ansible-ansible.builtin.replace Invoked with path=/etc/multipath.conf regexp=^(blacklist {) replace=\1\n} backup=False encoding=utf-8 unsafe_writes=False after=None before=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:51:41 compute-0 sudo[168444]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:41 compute-0 sudo[168596]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-eqvicxeqmqofqvskmniqnaeigvufajld ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405901.7266166-1636-85904449835412/AnsiballZ_replace.py'
Oct 02 11:51:41 compute-0 sudo[168596]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:42 compute-0 python3.9[168598]: ansible-ansible.builtin.replace Invoked with path=/etc/multipath.conf regexp=^blacklist\s*{\n[\s]+devnode \"\.\*\" replace=blacklist { backup=False encoding=utf-8 unsafe_writes=False after=None before=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:51:42 compute-0 sudo[168596]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:42 compute-0 sudo[168748]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ozipulhustnzjjaoupvagvgbztmxrzlz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405902.4969516-1663-152338945359564/AnsiballZ_lineinfile.py'
Oct 02 11:51:42 compute-0 sudo[168748]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:42 compute-0 python3.9[168750]: ansible-ansible.builtin.lineinfile Invoked with firstmatch=True insertafter=^defaults line=        find_multipaths yes path=/etc/multipath.conf regexp=^\s+find_multipaths state=present backrefs=False create=False backup=False unsafe_writes=False search_string=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:51:42 compute-0 sudo[168748]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:43 compute-0 sudo[168900]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-uldwwxjecxcvrlllyrjrqcxubjjaqzud ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405903.0975468-1663-189950773895929/AnsiballZ_lineinfile.py'
Oct 02 11:51:43 compute-0 sudo[168900]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:43 compute-0 python3.9[168902]: ansible-ansible.builtin.lineinfile Invoked with firstmatch=True insertafter=^defaults line=        recheck_wwid yes path=/etc/multipath.conf regexp=^\s+recheck_wwid state=present backrefs=False create=False backup=False unsafe_writes=False search_string=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:51:43 compute-0 sudo[168900]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:44 compute-0 sudo[169052]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gmftnwpsbyvdlnqlqrjcfczffozxcmqx ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405903.798069-1663-51129455088616/AnsiballZ_lineinfile.py'
Oct 02 11:51:44 compute-0 sudo[169052]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:44 compute-0 python3.9[169054]: ansible-ansible.builtin.lineinfile Invoked with firstmatch=True insertafter=^defaults line=        skip_kpartx yes path=/etc/multipath.conf regexp=^\s+skip_kpartx state=present backrefs=False create=False backup=False unsafe_writes=False search_string=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:51:44 compute-0 sudo[169052]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:44 compute-0 sudo[169204]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tjvrnowglrfhmaxjiheqianmdtbhvjmo ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405904.5408013-1663-52574661540323/AnsiballZ_lineinfile.py'
Oct 02 11:51:44 compute-0 sudo[169204]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:45 compute-0 python3.9[169206]: ansible-ansible.builtin.lineinfile Invoked with firstmatch=True insertafter=^defaults line=        user_friendly_names no path=/etc/multipath.conf regexp=^\s+user_friendly_names state=present backrefs=False create=False backup=False unsafe_writes=False search_string=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:51:45 compute-0 sudo[169204]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:45 compute-0 sudo[169356]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fvrdlaqltejjonbxqvichftitcenwxsh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405905.2379274-1750-82164871842992/AnsiballZ_stat.py'
Oct 02 11:51:45 compute-0 sudo[169356]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:45 compute-0 python3.9[169358]: ansible-ansible.builtin.stat Invoked with path=/etc/multipath.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:51:45 compute-0 sudo[169356]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:46 compute-0 sudo[169510]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hodzzwegsrwsbmaispsdcmvdiorrskzf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405906.0310712-1774-176650029717777/AnsiballZ_file.py'
Oct 02 11:51:46 compute-0 sudo[169510]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:46 compute-0 python3.9[169512]: ansible-ansible.builtin.file Invoked with mode=0644 path=/etc/multipath/.multipath_restart_required state=touch recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:51:46 compute-0 sudo[169510]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:47 compute-0 sudo[169662]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-alweulorxwusvsjxcomgciohnmqtiaha ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405906.877744-1801-65359060917691/AnsiballZ_file.py'
Oct 02 11:51:47 compute-0 sudo[169662]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:47 compute-0 python3.9[169664]: ansible-ansible.builtin.file Invoked with path=/var/local/libexec recurse=True setype=container_file_t state=directory force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:51:47 compute-0 sudo[169662]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:47 compute-0 sudo[169814]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kxayvtmzrqadukejecmvybslcjxhhniy ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405907.566815-1825-21376335937782/AnsiballZ_stat.py'
Oct 02 11:51:47 compute-0 sudo[169814]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:48 compute-0 python3.9[169816]: ansible-ansible.legacy.stat Invoked with path=/var/local/libexec/edpm-container-shutdown follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:51:48 compute-0 sudo[169814]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:48 compute-0 sudo[169892]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xrfacpabkacydeaseltedvxhihtemrbv ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405907.566815-1825-21376335937782/AnsiballZ_file.py'
Oct 02 11:51:48 compute-0 sudo[169892]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:48 compute-0 python3.9[169894]: ansible-ansible.legacy.file Invoked with group=root mode=0700 owner=root setype=container_file_t dest=/var/local/libexec/edpm-container-shutdown _original_basename=edpm-container-shutdown recurse=False state=file path=/var/local/libexec/edpm-container-shutdown force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:51:48 compute-0 sudo[169892]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:48 compute-0 sudo[170044]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ljtfwbgfosgifyqukclbynynetnyvkse ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405908.6509755-1825-48120503859512/AnsiballZ_stat.py'
Oct 02 11:51:48 compute-0 sudo[170044]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:49 compute-0 python3.9[170046]: ansible-ansible.legacy.stat Invoked with path=/var/local/libexec/edpm-start-podman-container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:51:49 compute-0 sudo[170044]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:49 compute-0 sudo[170122]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-idlwsijovufsochpiyuhgoqfvqheonlt ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405908.6509755-1825-48120503859512/AnsiballZ_file.py'
Oct 02 11:51:49 compute-0 sudo[170122]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:49 compute-0 python3.9[170124]: ansible-ansible.legacy.file Invoked with group=root mode=0700 owner=root setype=container_file_t dest=/var/local/libexec/edpm-start-podman-container _original_basename=edpm-start-podman-container recurse=False state=file path=/var/local/libexec/edpm-start-podman-container force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:51:49 compute-0 sudo[170122]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:50 compute-0 sudo[170274]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-srisatonglldkbelfhmhknqktagucmeh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405909.9149735-1894-212366209933345/AnsiballZ_file.py'
Oct 02 11:51:50 compute-0 sudo[170274]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:50 compute-0 python3.9[170276]: ansible-ansible.builtin.file Invoked with mode=420 path=/etc/systemd/system-preset state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:51:50 compute-0 sudo[170274]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:50 compute-0 sudo[170426]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pqooubdkpsmxonpgcvzonvockbopoxbr ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405910.6469293-1918-247344878501906/AnsiballZ_stat.py'
Oct 02 11:51:50 compute-0 sudo[170426]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:51 compute-0 python3.9[170428]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/edpm-container-shutdown.service follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:51:51 compute-0 sudo[170426]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:51 compute-0 sudo[170504]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-txzfsnnacunmxetzpaqwfyscyvzqizaa ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405910.6469293-1918-247344878501906/AnsiballZ_file.py'
Oct 02 11:51:51 compute-0 sudo[170504]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:51 compute-0 python3.9[170506]: ansible-ansible.legacy.file Invoked with group=root mode=0644 owner=root dest=/etc/systemd/system/edpm-container-shutdown.service _original_basename=edpm-container-shutdown-service recurse=False state=file path=/etc/systemd/system/edpm-container-shutdown.service force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:51:51 compute-0 sudo[170504]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:52 compute-0 sudo[170656]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lqbixowojuhllmkozuxouvjtnjgdibxu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405911.9346666-1954-5752112046343/AnsiballZ_stat.py'
Oct 02 11:51:52 compute-0 sudo[170656]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:52 compute-0 python3.9[170658]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system-preset/91-edpm-container-shutdown.preset follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:51:52 compute-0 sudo[170656]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:52 compute-0 sudo[170734]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qtxiyfywasfqbqsamxtbitvvzgjsunfx ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405911.9346666-1954-5752112046343/AnsiballZ_file.py'
Oct 02 11:51:52 compute-0 sudo[170734]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:52 compute-0 python3.9[170736]: ansible-ansible.legacy.file Invoked with group=root mode=0644 owner=root dest=/etc/systemd/system-preset/91-edpm-container-shutdown.preset _original_basename=91-edpm-container-shutdown-preset recurse=False state=file path=/etc/systemd/system-preset/91-edpm-container-shutdown.preset force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:51:52 compute-0 sudo[170734]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:53 compute-0 sudo[170886]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-scwlwyjvaekgxkloxgrjsbcwlmtlkmye ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405913.2644224-1990-21730977903967/AnsiballZ_systemd.py'
Oct 02 11:51:53 compute-0 sudo[170886]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:53 compute-0 python3.9[170888]: ansible-ansible.builtin.systemd Invoked with daemon_reload=True enabled=True name=edpm-container-shutdown state=started daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:51:53 compute-0 systemd[1]: Reloading.
Oct 02 11:51:53 compute-0 systemd-sysv-generator[170934]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:51:54 compute-0 systemd-rc-local-generator[170930]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:51:54 compute-0 podman[170890]: 2025-10-02 11:51:54.00489545 +0000 UTC m=+0.097372679 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid, container_name=iscsid, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0)
Oct 02 11:51:54 compute-0 sudo[170886]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:54 compute-0 sudo[171094]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rnqgnoynmlvosqdupkgkxrdkgllftnya ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405914.6339757-2014-161729187487660/AnsiballZ_stat.py'
Oct 02 11:51:54 compute-0 sudo[171094]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:55 compute-0 python3.9[171096]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/netns-placeholder.service follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:51:55 compute-0 sudo[171094]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:55 compute-0 sudo[171172]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-itwtvwhzqwhcjlbkarcwhgiwtguwpdjt ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405914.6339757-2014-161729187487660/AnsiballZ_file.py'
Oct 02 11:51:55 compute-0 sudo[171172]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:55 compute-0 python3.9[171174]: ansible-ansible.legacy.file Invoked with group=root mode=0644 owner=root dest=/etc/systemd/system/netns-placeholder.service _original_basename=netns-placeholder-service recurse=False state=file path=/etc/systemd/system/netns-placeholder.service force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:51:55 compute-0 sudo[171172]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:56 compute-0 sudo[171324]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nomrwzzgchllsppwvonzrlkkotzmmvlo ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405915.8137653-2050-235068166407342/AnsiballZ_stat.py'
Oct 02 11:51:56 compute-0 sudo[171324]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:56 compute-0 python3.9[171326]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system-preset/91-netns-placeholder.preset follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:51:56 compute-0 sudo[171324]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:56 compute-0 sudo[171402]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ccwthzltwtwvdkfhlkztohzvdetyqlhc ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405915.8137653-2050-235068166407342/AnsiballZ_file.py'
Oct 02 11:51:56 compute-0 sudo[171402]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:56 compute-0 python3.9[171404]: ansible-ansible.legacy.file Invoked with group=root mode=0644 owner=root dest=/etc/systemd/system-preset/91-netns-placeholder.preset _original_basename=91-netns-placeholder-preset recurse=False state=file path=/etc/systemd/system-preset/91-netns-placeholder.preset force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:51:56 compute-0 sudo[171402]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:57 compute-0 systemd[1]: virtnodedevd.service: Deactivated successfully.
Oct 02 11:51:57 compute-0 sudo[171555]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bunhgbejmbumgcpgxlutmlaursvrasoo ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405916.9141452-2086-151145794980049/AnsiballZ_systemd.py'
Oct 02 11:51:57 compute-0 sudo[171555]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:57 compute-0 python3.9[171557]: ansible-ansible.builtin.systemd Invoked with daemon_reload=True enabled=True name=netns-placeholder state=started daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:51:57 compute-0 systemd[1]: Reloading.
Oct 02 11:51:57 compute-0 systemd-sysv-generator[171587]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:51:57 compute-0 systemd-rc-local-generator[171583]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:51:57 compute-0 systemd[1]: Starting Create netns directory...
Oct 02 11:51:57 compute-0 systemd[1]: run-netns-placeholder.mount: Deactivated successfully.
Oct 02 11:51:57 compute-0 systemd[1]: netns-placeholder.service: Deactivated successfully.
Oct 02 11:51:57 compute-0 systemd[1]: Finished Create netns directory.
Oct 02 11:51:57 compute-0 sudo[171555]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:58 compute-0 systemd[1]: virtproxyd.service: Deactivated successfully.
Oct 02 11:51:58 compute-0 sudo[171748]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ntbkzmgstsbktxiyavsrhaovtvjxowrl ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405918.270206-2116-241715020812856/AnsiballZ_file.py'
Oct 02 11:51:58 compute-0 sudo[171748]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:58 compute-0 python3.9[171750]: ansible-ansible.builtin.file Invoked with group=zuul mode=0755 owner=zuul path=/var/lib/openstack/healthchecks setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:51:58 compute-0 sudo[171748]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:59 compute-0 systemd[1]: virtqemud.service: Deactivated successfully.
Oct 02 11:51:59 compute-0 sudo[171901]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vwuhkahdyxxdwqhgfyxacfjshjknltey ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405918.978982-2140-126199770600574/AnsiballZ_stat.py'
Oct 02 11:51:59 compute-0 sudo[171901]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:59 compute-0 python3.9[171903]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/healthchecks/multipathd/healthcheck follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:51:59 compute-0 sudo[171901]: pam_unix(sudo:session): session closed for user root
Oct 02 11:51:59 compute-0 sudo[172024]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lcdtlwqwktgltobxdrlpfsabqzgcysjd ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405918.978982-2140-126199770600574/AnsiballZ_copy.py'
Oct 02 11:51:59 compute-0 sudo[172024]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:51:59 compute-0 python3.9[172026]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/healthchecks/multipathd/ group=zuul mode=0700 owner=zuul setype=container_file_t src=/home/zuul/.ansible/tmp/ansible-tmp-1759405918.978982-2140-126199770600574/.source _original_basename=healthcheck follow=False checksum=af9d0c1c8f3cb0e30ce9609be9d5b01924d0d23f backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:52:00 compute-0 sudo[172024]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:00 compute-0 systemd[1]: virtsecretd.service: Deactivated successfully.
Oct 02 11:52:00 compute-0 sudo[172177]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-exdsculuqxgnalytbzjgsggmchhhoyvu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405920.4454637-2191-223519279950340/AnsiballZ_file.py'
Oct 02 11:52:00 compute-0 sudo[172177]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:00 compute-0 python3.9[172179]: ansible-ansible.builtin.file Invoked with path=/var/lib/kolla/config_files recurse=True setype=container_file_t state=directory force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:52:00 compute-0 sudo[172177]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:01 compute-0 sudo[172329]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qoaegwtgqhymobpikrpeavojpkpytlyd ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405921.1775558-2215-229173040414695/AnsiballZ_stat.py'
Oct 02 11:52:01 compute-0 sudo[172329]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:01 compute-0 python3.9[172331]: ansible-ansible.legacy.stat Invoked with path=/var/lib/kolla/config_files/multipathd.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:52:01 compute-0 sudo[172329]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:02 compute-0 sudo[172452]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-svmcydotgfjmyradbshvdqfkipnnzsvi ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405921.1775558-2215-229173040414695/AnsiballZ_copy.py'
Oct 02 11:52:02 compute-0 sudo[172452]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:52:02.192 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 11:52:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:52:02.193 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 11:52:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:52:02.193 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 11:52:02 compute-0 python3.9[172454]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/kolla/config_files/multipathd.json mode=0600 src=/home/zuul/.ansible/tmp/ansible-tmp-1759405921.1775558-2215-229173040414695/.source.json _original_basename=.584ema1_ follow=False checksum=3f7959ee8ac9757398adcc451c3b416c957d7c14 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:52:02 compute-0 sudo[172452]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:02 compute-0 sudo[172604]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vqdalztnmvedradzlfckrqlqxntdihqd ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405922.4964921-2260-182564767079237/AnsiballZ_file.py'
Oct 02 11:52:02 compute-0 sudo[172604]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:02 compute-0 python3.9[172606]: ansible-ansible.builtin.file Invoked with mode=0755 path=/var/lib/edpm-config/container-startup-config/multipathd state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:52:02 compute-0 sudo[172604]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:03 compute-0 sudo[172756]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hogzckgoperjqqtbpnpwehkkqnrevbej ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405923.3547032-2284-21195468194745/AnsiballZ_stat.py'
Oct 02 11:52:03 compute-0 sudo[172756]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:03 compute-0 sudo[172756]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:04 compute-0 sudo[172890]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nznjnhvhxggpiujxercmpkktcowasbro ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405923.3547032-2284-21195468194745/AnsiballZ_copy.py'
Oct 02 11:52:04 compute-0 sudo[172890]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:04 compute-0 podman[172853]: 2025-10-02 11:52:04.295120495 +0000 UTC m=+0.153044605 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, config_id=ovn_controller, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, container_name=ovn_controller, org.label-schema.license=GPLv2)
Oct 02 11:52:04 compute-0 sudo[172890]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:05 compute-0 sudo[173058]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-shkzdtjnlaauiuqcqaeiholjnjkdgzfy ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405924.816462-2335-81687872085492/AnsiballZ_container_config_data.py'
Oct 02 11:52:05 compute-0 sudo[173058]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:05 compute-0 python3.9[173060]: ansible-container_config_data Invoked with config_overrides={} config_path=/var/lib/edpm-config/container-startup-config/multipathd config_pattern=*.json debug=False
Oct 02 11:52:05 compute-0 sudo[173058]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:05 compute-0 sudo[173210]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-isfyiuhjdxggjwlwltgotibmmiioyjtw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405925.585837-2362-201779928808854/AnsiballZ_container_config_hash.py'
Oct 02 11:52:05 compute-0 sudo[173210]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:06 compute-0 python3.9[173212]: ansible-container_config_hash Invoked with check_mode=False config_vol_prefix=/var/lib/config-data
Oct 02 11:52:06 compute-0 sudo[173210]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:06 compute-0 sudo[173362]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-knthlploztjafveajahnwfkbqmubzsah ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405926.3934515-2389-232991106393838/AnsiballZ_podman_container_info.py'
Oct 02 11:52:06 compute-0 sudo[173362]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:06 compute-0 podman[173364]: 2025-10-02 11:52:06.864615712 +0000 UTC m=+0.086736151 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_metadata_agent, managed_by=edpm_ansible, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', 
'/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_id=ovn_metadata_agent, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS)
Oct 02 11:52:06 compute-0 python3.9[173365]: ansible-containers.podman.podman_container_info Invoked with executable=podman name=None
Oct 02 11:52:07 compute-0 sudo[173362]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:08 compute-0 sudo[173558]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ixabrxnddzabxugdzcvlqgdenlqznrkg ; /usr/bin/python3 /home/zuul/.ansible/tmp/ansible-tmp-1759405928.0120077-2428-43656240953553/AnsiballZ_edpm_container_manage.py'
Oct 02 11:52:08 compute-0 sudo[173558]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:08 compute-0 python3[173560]: ansible-edpm_container_manage Invoked with concurrency=1 config_dir=/var/lib/edpm-config/container-startup-config/multipathd config_id=multipathd config_overrides={} config_patterns=*.json log_base_path=/var/log/containers/stdouts debug=False
Oct 02 11:52:08 compute-0 podman[173597]: 2025-10-02 11:52:08.798054559 +0000 UTC m=+0.049481201 container create 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, managed_by=edpm_ansible, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, config_id=multipathd, org.label-schema.schema-version=1.0)
Oct 02 11:52:08 compute-0 podman[173597]: 2025-10-02 11:52:08.772908838 +0000 UTC m=+0.024335510 image pull d8d739f82a6fecf9df690e49539b589e74665b54e36448657b874630717d5bd1 quay.io/podified-antelope-centos9/openstack-multipathd:current-podified
Oct 02 11:52:09 compute-0 python3[173560]: ansible-edpm_container_manage PODMAN-CONTAINER-DEBUG: podman create --name multipathd --conmon-pidfile /run/multipathd.pid --env KOLLA_CONFIG_STRATEGY=COPY_ALWAYS --healthcheck-command /openstack/healthcheck --label config_id=multipathd --label container_name=multipathd --label managed_by=edpm_ansible --label config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']} --log-driver journald --log-level info --network host --privileged=True --volume /etc/hosts:/etc/hosts:ro --volume /etc/localtime:/etc/localtime:ro --volume /etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro --volume /etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro --volume /etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro --volume /etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro --volume 
/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro --volume /dev/log:/dev/log --volume /var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro --volume /dev:/dev --volume /run/udev:/run/udev --volume /sys:/sys --volume /lib/modules:/lib/modules:ro --volume /etc/iscsi:/etc/iscsi:ro --volume /var/lib/iscsi:/var/lib/iscsi:z --volume /etc/multipath:/etc/multipath:z --volume /etc/multipath.conf:/etc/multipath.conf:ro --volume /var/lib/openstack/healthchecks/multipathd:/openstack:ro,z quay.io/podified-antelope-centos9/openstack-multipathd:current-podified
Oct 02 11:52:09 compute-0 sudo[173558]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:10 compute-0 sudo[173785]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cyfnmifopguayvbxeilsojxosnjcxyif ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405930.1539898-2452-123018969420862/AnsiballZ_stat.py'
Oct 02 11:52:10 compute-0 sudo[173785]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:10 compute-0 python3.9[173787]: ansible-ansible.builtin.stat Invoked with path=/etc/sysconfig/podman_drop_in follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:52:10 compute-0 sudo[173785]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:11 compute-0 sudo[173939]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dragsgjgwsrdgqpjyxvxkdbhamuxzomj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405931.0347974-2479-118142461393394/AnsiballZ_file.py'
Oct 02 11:52:11 compute-0 sudo[173939]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:11 compute-0 python3.9[173941]: ansible-file Invoked with path=/etc/systemd/system/edpm_multipathd.requires state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:52:11 compute-0 sudo[173939]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:11 compute-0 sudo[174015]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rmwlbdgmquruodovohjwymbrkofqwheh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405931.0347974-2479-118142461393394/AnsiballZ_stat.py'
Oct 02 11:52:11 compute-0 sudo[174015]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:11 compute-0 python3.9[174017]: ansible-stat Invoked with path=/etc/systemd/system/edpm_multipathd_healthcheck.timer follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:52:11 compute-0 sudo[174015]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:12 compute-0 sudo[174166]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-curgxgqogsrlphyvpaevxfvzbdutjrgb ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405932.028695-2479-76793159323651/AnsiballZ_copy.py'
Oct 02 11:52:12 compute-0 sudo[174166]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:12 compute-0 python3.9[174168]: ansible-copy Invoked with src=/home/zuul/.ansible/tmp/ansible-tmp-1759405932.028695-2479-76793159323651/source dest=/etc/systemd/system/edpm_multipathd.service mode=0644 owner=root group=root backup=False force=True remote_src=False follow=False unsafe_writes=False _original_basename=None content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None checksum=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:52:12 compute-0 sudo[174166]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:12 compute-0 sudo[174242]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xirhnagohxfxgdxxaxfcrudytcrcdjir ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405932.028695-2479-76793159323651/AnsiballZ_systemd.py'
Oct 02 11:52:12 compute-0 sudo[174242]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:13 compute-0 python3.9[174244]: ansible-systemd Invoked with daemon_reload=True daemon_reexec=False scope=system no_block=False name=None state=None enabled=None force=None masked=None
Oct 02 11:52:13 compute-0 systemd[1]: Reloading.
Oct 02 11:52:13 compute-0 systemd-rc-local-generator[174271]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:52:13 compute-0 systemd-sysv-generator[174274]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:52:13 compute-0 sudo[174242]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:13 compute-0 sudo[174352]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cqiyhuhbvftctujpiohwqtplcrrlvdgo ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405932.028695-2479-76793159323651/AnsiballZ_systemd.py'
Oct 02 11:52:13 compute-0 sudo[174352]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:14 compute-0 python3.9[174354]: ansible-systemd Invoked with state=restarted name=edpm_multipathd.service enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:52:14 compute-0 systemd[1]: Reloading.
Oct 02 11:52:14 compute-0 systemd-rc-local-generator[174382]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:52:14 compute-0 systemd-sysv-generator[174386]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:52:14 compute-0 systemd[1]: Starting multipathd container...
Oct 02 11:52:14 compute-0 systemd[1]: Started libcrun container.
Oct 02 11:52:14 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/b53eff38bb6a7412f9e2a38dc6391c9fa140ef1db633d18b8b1dea5aaf6016d2/merged/etc/multipath supports timestamps until 2038 (0x7fffffff)
Oct 02 11:52:14 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/b53eff38bb6a7412f9e2a38dc6391c9fa140ef1db633d18b8b1dea5aaf6016d2/merged/var/lib/iscsi supports timestamps until 2038 (0x7fffffff)
Oct 02 11:52:14 compute-0 systemd[1]: Started /usr/bin/podman healthcheck run 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26.
Oct 02 11:52:14 compute-0 podman[174394]: 2025-10-02 11:52:14.75911023 +0000 UTC m=+0.142137311 container init 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, tcib_managed=true, config_id=multipathd, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, io.buildah.version=1.41.3, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS)
Oct 02 11:52:14 compute-0 multipathd[174410]: + sudo -E kolla_set_configs
Oct 02 11:52:14 compute-0 podman[174394]: 2025-10-02 11:52:14.782782953 +0000 UTC m=+0.165810024 container start 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, config_id=multipathd, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_managed=true)
Oct 02 11:52:14 compute-0 podman[174394]: multipathd
Oct 02 11:52:14 compute-0 sudo[174417]:     root : PWD=/ ; USER=root ; COMMAND=/usr/local/bin/kolla_set_configs
Oct 02 11:52:14 compute-0 systemd[1]: Started multipathd container.
Oct 02 11:52:14 compute-0 sudo[174417]: pam_systemd(sudo:session): Failed to connect to system bus: No such file or directory
Oct 02 11:52:14 compute-0 sudo[174417]: pam_unix(sudo:session): session opened for user root(uid=0) by (uid=0)
Oct 02 11:52:14 compute-0 sudo[174352]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:14 compute-0 multipathd[174410]: INFO:__main__:Loading config file at /var/lib/kolla/config_files/config.json
Oct 02 11:52:14 compute-0 multipathd[174410]: INFO:__main__:Validating config file
Oct 02 11:52:14 compute-0 multipathd[174410]: INFO:__main__:Kolla config strategy set to: COPY_ALWAYS
Oct 02 11:52:14 compute-0 multipathd[174410]: INFO:__main__:Writing out command to execute
Oct 02 11:52:14 compute-0 sudo[174417]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:14 compute-0 podman[174416]: 2025-10-02 11:52:14.871580675 +0000 UTC m=+0.072128490 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=starting, health_failing_streak=1, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_id=multipathd, container_name=multipathd, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0)
Oct 02 11:52:14 compute-0 multipathd[174410]: ++ cat /run_command
Oct 02 11:52:14 compute-0 multipathd[174410]: + CMD='/usr/sbin/multipathd -d'
Oct 02 11:52:14 compute-0 multipathd[174410]: + ARGS=
Oct 02 11:52:14 compute-0 multipathd[174410]: + sudo kolla_copy_cacerts
Oct 02 11:52:14 compute-0 systemd[1]: 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26-3de8ee90a1ee7a6e.service: Main process exited, code=exited, status=1/FAILURE
Oct 02 11:52:14 compute-0 systemd[1]: 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26-3de8ee90a1ee7a6e.service: Failed with result 'exit-code'.
Oct 02 11:52:14 compute-0 sudo[174448]:     root : PWD=/ ; USER=root ; COMMAND=/usr/local/bin/kolla_copy_cacerts
Oct 02 11:52:14 compute-0 sudo[174448]: pam_systemd(sudo:session): Failed to connect to system bus: No such file or directory
Oct 02 11:52:14 compute-0 sudo[174448]: pam_unix(sudo:session): session opened for user root(uid=0) by (uid=0)
Oct 02 11:52:14 compute-0 sudo[174448]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:14 compute-0 multipathd[174410]: + [[ ! -n '' ]]
Oct 02 11:52:14 compute-0 multipathd[174410]: + . kolla_extend_start
Oct 02 11:52:14 compute-0 multipathd[174410]: Running command: '/usr/sbin/multipathd -d'
Oct 02 11:52:14 compute-0 multipathd[174410]: + echo 'Running command: '\''/usr/sbin/multipathd -d'\'''
Oct 02 11:52:14 compute-0 multipathd[174410]: + umask 0022
Oct 02 11:52:14 compute-0 multipathd[174410]: + exec /usr/sbin/multipathd -d
Oct 02 11:52:14 compute-0 multipathd[174410]: 3932.602571 | --------start up--------
Oct 02 11:52:14 compute-0 multipathd[174410]: 3932.602592 | read /etc/multipath.conf
Oct 02 11:52:14 compute-0 multipathd[174410]: 3932.608334 | path checkers start up
Oct 02 11:52:15 compute-0 python3.9[174599]: ansible-ansible.builtin.stat Invoked with path=/etc/multipath/.multipath_restart_required follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:52:16 compute-0 sudo[174751]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qnypammukhpibusxgjywbkhmppnihspg ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405935.8689623-2587-202185721140797/AnsiballZ_command.py'
Oct 02 11:52:16 compute-0 sudo[174751]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:16 compute-0 python3.9[174753]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --filter volume=/etc/multipath.conf --format {{.Names}} _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:52:16 compute-0 sudo[174751]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:16 compute-0 sudo[174916]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cihwnednhzfirtqhmbithbnxtizhmden ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405936.639455-2611-175223998795482/AnsiballZ_systemd.py'
Oct 02 11:52:16 compute-0 sudo[174916]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:17 compute-0 python3.9[174918]: ansible-ansible.builtin.systemd Invoked with name=edpm_multipathd state=restarted daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Oct 02 11:52:17 compute-0 systemd[1]: Stopping multipathd container...
Oct 02 11:52:17 compute-0 multipathd[174410]: 3935.070154 | exit (signal)
Oct 02 11:52:17 compute-0 multipathd[174410]: 3935.070703 | --------shut down-------
Oct 02 11:52:17 compute-0 systemd[1]: libpod-8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26.scope: Deactivated successfully.
Oct 02 11:52:17 compute-0 podman[174922]: 2025-10-02 11:52:17.415581036 +0000 UTC m=+0.082605335 container died 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, container_name=multipathd, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 11:52:17 compute-0 systemd[1]: 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26-3de8ee90a1ee7a6e.timer: Deactivated successfully.
Oct 02 11:52:17 compute-0 systemd[1]: Stopped /usr/bin/podman healthcheck run 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26.
Oct 02 11:52:17 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26-userdata-shm.mount: Deactivated successfully.
Oct 02 11:52:17 compute-0 systemd[1]: var-lib-containers-storage-overlay-b53eff38bb6a7412f9e2a38dc6391c9fa140ef1db633d18b8b1dea5aaf6016d2-merged.mount: Deactivated successfully.
Oct 02 11:52:17 compute-0 podman[174922]: 2025-10-02 11:52:17.552240308 +0000 UTC m=+0.219264567 container cleanup 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, container_name=multipathd, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, managed_by=edpm_ansible, org.label-schema.build-date=20251001, config_id=multipathd, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']})
Oct 02 11:52:17 compute-0 podman[174922]: multipathd
Oct 02 11:52:17 compute-0 podman[174951]: multipathd
Oct 02 11:52:17 compute-0 systemd[1]: edpm_multipathd.service: Deactivated successfully.
Oct 02 11:52:17 compute-0 systemd[1]: Stopped multipathd container.
Oct 02 11:52:17 compute-0 systemd[1]: Starting multipathd container...
Oct 02 11:52:17 compute-0 systemd[1]: Started libcrun container.
Oct 02 11:52:17 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/b53eff38bb6a7412f9e2a38dc6391c9fa140ef1db633d18b8b1dea5aaf6016d2/merged/etc/multipath supports timestamps until 2038 (0x7fffffff)
Oct 02 11:52:17 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/b53eff38bb6a7412f9e2a38dc6391c9fa140ef1db633d18b8b1dea5aaf6016d2/merged/var/lib/iscsi supports timestamps until 2038 (0x7fffffff)
Oct 02 11:52:17 compute-0 systemd[1]: Started /usr/bin/podman healthcheck run 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26.
Oct 02 11:52:17 compute-0 podman[174964]: 2025-10-02 11:52:17.757918543 +0000 UTC m=+0.120744140 container init 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, io.buildah.version=1.41.3, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=multipathd, managed_by=edpm_ansible, org.label-schema.build-date=20251001, container_name=multipathd, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS)
Oct 02 11:52:17 compute-0 multipathd[174979]: + sudo -E kolla_set_configs
Oct 02 11:52:17 compute-0 sudo[174985]:     root : PWD=/ ; USER=root ; COMMAND=/usr/local/bin/kolla_set_configs
Oct 02 11:52:17 compute-0 sudo[174985]: pam_systemd(sudo:session): Failed to connect to system bus: No such file or directory
Oct 02 11:52:17 compute-0 sudo[174985]: pam_unix(sudo:session): session opened for user root(uid=0) by (uid=0)
Oct 02 11:52:17 compute-0 podman[174964]: 2025-10-02 11:52:17.785475892 +0000 UTC m=+0.148301389 container start 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, io.buildah.version=1.41.3, managed_by=edpm_ansible, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=multipathd)
Oct 02 11:52:17 compute-0 podman[174964]: multipathd
Oct 02 11:52:17 compute-0 systemd[1]: Started multipathd container.
Oct 02 11:52:17 compute-0 multipathd[174979]: INFO:__main__:Loading config file at /var/lib/kolla/config_files/config.json
Oct 02 11:52:17 compute-0 multipathd[174979]: INFO:__main__:Validating config file
Oct 02 11:52:17 compute-0 multipathd[174979]: INFO:__main__:Kolla config strategy set to: COPY_ALWAYS
Oct 02 11:52:17 compute-0 multipathd[174979]: INFO:__main__:Writing out command to execute
Oct 02 11:52:17 compute-0 sudo[174916]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:17 compute-0 sudo[174985]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:17 compute-0 multipathd[174979]: ++ cat /run_command
Oct 02 11:52:17 compute-0 multipathd[174979]: + CMD='/usr/sbin/multipathd -d'
Oct 02 11:52:17 compute-0 multipathd[174979]: + ARGS=
Oct 02 11:52:17 compute-0 multipathd[174979]: + sudo kolla_copy_cacerts
Oct 02 11:52:17 compute-0 sudo[175010]:     root : PWD=/ ; USER=root ; COMMAND=/usr/local/bin/kolla_copy_cacerts
Oct 02 11:52:17 compute-0 sudo[175010]: pam_systemd(sudo:session): Failed to connect to system bus: No such file or directory
Oct 02 11:52:17 compute-0 sudo[175010]: pam_unix(sudo:session): session opened for user root(uid=0) by (uid=0)
Oct 02 11:52:17 compute-0 sudo[175010]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:17 compute-0 multipathd[174979]: + [[ ! -n '' ]]
Oct 02 11:52:17 compute-0 multipathd[174979]: + . kolla_extend_start
Oct 02 11:52:17 compute-0 multipathd[174979]: Running command: '/usr/sbin/multipathd -d'
Oct 02 11:52:17 compute-0 multipathd[174979]: + echo 'Running command: '\''/usr/sbin/multipathd -d'\'''
Oct 02 11:52:17 compute-0 multipathd[174979]: + umask 0022
Oct 02 11:52:17 compute-0 multipathd[174979]: + exec /usr/sbin/multipathd -d
Oct 02 11:52:17 compute-0 podman[174986]: 2025-10-02 11:52:17.884226534 +0000 UTC m=+0.081765892 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=starting, health_failing_streak=1, health_log=, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, container_name=multipathd, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible, org.label-schema.build-date=20251001, config_id=multipathd)
Oct 02 11:52:17 compute-0 systemd[1]: 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26-6575eb6543834654.service: Main process exited, code=exited, status=1/FAILURE
Oct 02 11:52:17 compute-0 systemd[1]: 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26-6575eb6543834654.service: Failed with result 'exit-code'.
Oct 02 11:52:17 compute-0 multipathd[174979]: 3935.580091 | --------start up--------
Oct 02 11:52:17 compute-0 multipathd[174979]: 3935.580108 | read /etc/multipath.conf
Oct 02 11:52:17 compute-0 multipathd[174979]: 3935.585625 | path checkers start up
Oct 02 11:52:18 compute-0 sudo[175169]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tukfksxnwkwpahjfvyicuwumyfyoemac ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405938.066249-2635-16395700158360/AnsiballZ_file.py'
Oct 02 11:52:18 compute-0 sudo[175169]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:18 compute-0 python3.9[175171]: ansible-ansible.builtin.file Invoked with path=/etc/multipath/.multipath_restart_required state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:52:18 compute-0 sudo[175169]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:19 compute-0 sudo[175321]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fgthibiddypvkymwdrajsdkqhenhxwis ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405939.3625572-2671-186034237732394/AnsiballZ_file.py'
Oct 02 11:52:19 compute-0 sudo[175321]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:19 compute-0 python3.9[175323]: ansible-ansible.builtin.file Invoked with mode=0755 path=/etc/modules-load.d selevel=s0 setype=etc_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None attributes=None
Oct 02 11:52:19 compute-0 sudo[175321]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:20 compute-0 sudo[175473]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xybzivjqcakvhvijieowxetsalcczkjs ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405940.0856261-2695-153342446606708/AnsiballZ_modprobe.py'
Oct 02 11:52:20 compute-0 sudo[175473]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:20 compute-0 python3.9[175475]: ansible-community.general.modprobe Invoked with name=nvme-fabrics state=present params= persistent=disabled
Oct 02 11:52:20 compute-0 kernel: Key type psk registered
Oct 02 11:52:20 compute-0 sudo[175473]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:21 compute-0 sudo[175636]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zqdkunluxoxflecxpdtornnlrutbnuoa ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405940.8290672-2719-40596603816403/AnsiballZ_stat.py'
Oct 02 11:52:21 compute-0 sudo[175636]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:21 compute-0 python3.9[175638]: ansible-ansible.legacy.stat Invoked with path=/etc/modules-load.d/nvme-fabrics.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:52:21 compute-0 sudo[175636]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:21 compute-0 sudo[175759]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mvqhoiitszbsvsvzazwqpnjpdrhjgzcx ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405940.8290672-2719-40596603816403/AnsiballZ_copy.py'
Oct 02 11:52:21 compute-0 sudo[175759]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:21 compute-0 python3.9[175761]: ansible-ansible.legacy.copy Invoked with dest=/etc/modules-load.d/nvme-fabrics.conf mode=0644 src=/home/zuul/.ansible/tmp/ansible-tmp-1759405940.8290672-2719-40596603816403/.source.conf follow=False _original_basename=module-load.conf.j2 checksum=783c778f0c68cc414f35486f234cbb1cf3f9bbff backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:52:21 compute-0 sudo[175759]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:22 compute-0 sudo[175911]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kbyffnclkdujvxovchygpmdapodscajp ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405942.458679-2767-272352977212662/AnsiballZ_lineinfile.py'
Oct 02 11:52:22 compute-0 sudo[175911]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:23 compute-0 python3.9[175913]: ansible-ansible.builtin.lineinfile Invoked with create=True dest=/etc/modules line=nvme-fabrics  mode=0644 state=present path=/etc/modules backrefs=False backup=False firstmatch=False unsafe_writes=False regexp=None search_string=None insertafter=None insertbefore=None validate=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:52:23 compute-0 sudo[175911]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:23 compute-0 sudo[176063]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-izpnpydosmzamiegtagfmpfsdpsjashq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405943.2742295-2791-89561532219131/AnsiballZ_systemd.py'
Oct 02 11:52:23 compute-0 sudo[176063]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:23 compute-0 python3.9[176065]: ansible-ansible.builtin.systemd Invoked with name=systemd-modules-load.service state=restarted daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Oct 02 11:52:23 compute-0 systemd[1]: systemd-modules-load.service: Deactivated successfully.
Oct 02 11:52:23 compute-0 systemd[1]: Stopped Load Kernel Modules.
Oct 02 11:52:23 compute-0 systemd[1]: Stopping Load Kernel Modules...
Oct 02 11:52:23 compute-0 systemd[1]: Starting Load Kernel Modules...
Oct 02 11:52:23 compute-0 systemd[1]: Finished Load Kernel Modules.
Oct 02 11:52:23 compute-0 sudo[176063]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:24 compute-0 sudo[176230]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pqgwqyylzcihefleulglwsqkvgfsbcge ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405944.3906922-2815-192515021734518/AnsiballZ_setup.py'
Oct 02 11:52:24 compute-0 sudo[176230]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:24 compute-0 podman[176193]: 2025-10-02 11:52:24.738768593 +0000 UTC m=+0.067211536 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible, config_id=iscsid, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 11:52:24 compute-0 python3.9[176240]: ansible-ansible.legacy.setup Invoked with filter=['ansible_pkg_mgr'] gather_subset=['!all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Oct 02 11:52:25 compute-0 sudo[176230]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:25 compute-0 sudo[176323]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ouknlolrmaifydatigawvgmpbmqbgsyf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405944.3906922-2815-192515021734518/AnsiballZ_dnf.py'
Oct 02 11:52:25 compute-0 sudo[176323]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:25 compute-0 python3.9[176325]: ansible-ansible.legacy.dnf Invoked with name=['nvme-cli'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 02 11:52:31 compute-0 systemd[1]: Reloading.
Oct 02 11:52:31 compute-0 systemd-rc-local-generator[176356]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:52:31 compute-0 systemd-sysv-generator[176361]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:52:31 compute-0 systemd[1]: Reloading.
Oct 02 11:52:32 compute-0 systemd-sysv-generator[176395]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:52:32 compute-0 systemd-rc-local-generator[176391]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:52:32 compute-0 systemd-logind[827]: Watching system buttons on /dev/input/event0 (Power Button)
Oct 02 11:52:32 compute-0 systemd-logind[827]: Watching system buttons on /dev/input/event1 (AT Translated Set 2 keyboard)
Oct 02 11:52:32 compute-0 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update.
Oct 02 11:52:32 compute-0 systemd[1]: Starting man-db-cache-update.service...
Oct 02 11:52:32 compute-0 systemd[1]: Reloading.
Oct 02 11:52:32 compute-0 systemd-rc-local-generator[176485]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:52:32 compute-0 systemd-sysv-generator[176489]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:52:32 compute-0 systemd[1]: Queuing reload/restart jobs for marked units…
Oct 02 11:52:33 compute-0 sudo[176323]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:34 compute-0 systemd[1]: man-db-cache-update.service: Deactivated successfully.
Oct 02 11:52:34 compute-0 systemd[1]: Finished man-db-cache-update.service.
Oct 02 11:52:34 compute-0 systemd[1]: man-db-cache-update.service: Consumed 1.737s CPU time.
Oct 02 11:52:34 compute-0 systemd[1]: run-ref98b4c5763e449e81c791140d5db40a.service: Deactivated successfully.
Oct 02 11:52:34 compute-0 sudo[177785]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ypavmghpiqjyrbsufzenzmumowjksyho ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405954.6295497-2851-114728275481600/AnsiballZ_file.py'
Oct 02 11:52:34 compute-0 sudo[177785]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:34 compute-0 podman[177748]: 2025-10-02 11:52:34.933837257 +0000 UTC m=+0.091679571 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, container_name=ovn_controller, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_id=ovn_controller, maintainer=OpenStack Kubernetes Operator team)
Oct 02 11:52:35 compute-0 python3.9[177793]: ansible-ansible.builtin.file Invoked with mode=0600 path=/etc/iscsi/.iscsid_restart_required state=touch recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:52:35 compute-0 sudo[177785]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:35 compute-0 sshd-session[177832]: Connection closed by 77.110.113.94 port 53436 [preauth]
Oct 02 11:52:35 compute-0 python3.9[177954]: ansible-ansible.builtin.setup Invoked with gather_subset=['!all', '!min', 'local'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:52:36 compute-0 sudo[178108]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ypvntakailrfdpubyprngmcdgdzksnsf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405956.427499-2903-188517234369047/AnsiballZ_file.py'
Oct 02 11:52:36 compute-0 sudo[178108]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:36 compute-0 python3.9[178110]: ansible-ansible.builtin.file Invoked with mode=0644 path=/etc/ssh/ssh_known_hosts state=touch recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:52:36 compute-0 sudo[178108]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:37 compute-0 podman[178135]: 2025-10-02 11:52:37.130840974 +0000 UTC m=+0.046853445 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS 
Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2)
Oct 02 11:52:38 compute-0 sudo[178281]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fkwqcuepuevcbbmegoqofjgzswuxkytw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405957.358236-2936-173142982377702/AnsiballZ_systemd_service.py'
Oct 02 11:52:38 compute-0 sudo[178281]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:38 compute-0 python3.9[178283]: ansible-ansible.builtin.systemd_service Invoked with daemon_reload=True daemon_reexec=False scope=system no_block=False name=None state=None enabled=None force=None masked=None
Oct 02 11:52:38 compute-0 systemd[1]: Reloading.
Oct 02 11:52:38 compute-0 systemd-rc-local-generator[178310]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:52:38 compute-0 systemd-sysv-generator[178314]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:52:38 compute-0 sudo[178281]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:39 compute-0 python3.9[178468]: ansible-ansible.builtin.service_facts Invoked
Oct 02 11:52:39 compute-0 network[178485]: You are using 'network' service provided by 'network-scripts', which are now deprecated.
Oct 02 11:52:39 compute-0 network[178486]: 'network-scripts' will be removed from distribution in near future.
Oct 02 11:52:39 compute-0 network[178487]: It is advised to switch to 'NetworkManager' instead for network management.
Oct 02 11:52:43 compute-0 sudo[178762]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ayuisiadmilfapmxeraskthsploljfwx ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405963.7107112-2993-63922229003248/AnsiballZ_systemd_service.py'
Oct 02 11:52:44 compute-0 sudo[178762]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:44 compute-0 python3.9[178764]: ansible-ansible.builtin.systemd_service Invoked with enabled=False name=tripleo_nova_compute.service state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:52:44 compute-0 sudo[178762]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:44 compute-0 sudo[178915]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ryevpauqckuulbtorkplnghyycrdyvfh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405964.4817438-2993-32441895278467/AnsiballZ_systemd_service.py'
Oct 02 11:52:44 compute-0 sudo[178915]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:45 compute-0 python3.9[178917]: ansible-ansible.builtin.systemd_service Invoked with enabled=False name=tripleo_nova_migration_target.service state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:52:45 compute-0 sudo[178915]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:45 compute-0 sudo[179068]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tggotkgkwfpianugpjdylnepbkznryoe ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405965.2090023-2993-221679861825128/AnsiballZ_systemd_service.py'
Oct 02 11:52:45 compute-0 sudo[179068]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:45 compute-0 python3.9[179070]: ansible-ansible.builtin.systemd_service Invoked with enabled=False name=tripleo_nova_api_cron.service state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:52:45 compute-0 sudo[179068]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:46 compute-0 sudo[179221]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zsuzmeondfmhwtieyincfltrygnfyqtl ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405965.8763502-2993-272422443258501/AnsiballZ_systemd_service.py'
Oct 02 11:52:46 compute-0 sudo[179221]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:46 compute-0 python3.9[179223]: ansible-ansible.builtin.systemd_service Invoked with enabled=False name=tripleo_nova_api.service state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:52:46 compute-0 sudo[179221]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:46 compute-0 sudo[179374]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xiivsrlnntwonhozudmekyzrafaywjtl ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405966.650171-2993-204650249190611/AnsiballZ_systemd_service.py'
Oct 02 11:52:46 compute-0 sudo[179374]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:47 compute-0 python3.9[179376]: ansible-ansible.builtin.systemd_service Invoked with enabled=False name=tripleo_nova_conductor.service state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:52:47 compute-0 sudo[179374]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:47 compute-0 sudo[179527]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xsdgccjqktvalkkawpxhfdfbmyilzgdn ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405967.393186-2993-62457734645436/AnsiballZ_systemd_service.py'
Oct 02 11:52:47 compute-0 sudo[179527]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:48 compute-0 python3.9[179529]: ansible-ansible.builtin.systemd_service Invoked with enabled=False name=tripleo_nova_metadata.service state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:52:48 compute-0 sudo[179527]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:48 compute-0 podman[179531]: 2025-10-02 11:52:48.137401836 +0000 UTC m=+0.067608728 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, config_id=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_managed=true, container_name=multipathd, managed_by=edpm_ansible, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']})
Oct 02 11:52:48 compute-0 sudo[179700]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tzbdzdaubjemepjtxztoektlrsuypvkv ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405968.2196796-2993-63206980847328/AnsiballZ_systemd_service.py'
Oct 02 11:52:48 compute-0 sudo[179700]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:48 compute-0 python3.9[179702]: ansible-ansible.builtin.systemd_service Invoked with enabled=False name=tripleo_nova_scheduler.service state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:52:48 compute-0 sudo[179700]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:49 compute-0 sudo[179853]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lmiplzayofphkkxcxjdiwgilsbvirnzb ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405968.9713564-2993-79663399362195/AnsiballZ_systemd_service.py'
Oct 02 11:52:49 compute-0 sudo[179853]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:49 compute-0 python3.9[179855]: ansible-ansible.builtin.systemd_service Invoked with enabled=False name=tripleo_nova_vnc_proxy.service state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:52:49 compute-0 sudo[179853]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:51 compute-0 sudo[180006]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ixbxqkmbkhcgakjayhqurimfivvagdci ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405971.0888166-3170-22552453539588/AnsiballZ_file.py'
Oct 02 11:52:51 compute-0 sudo[180006]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:51 compute-0 python3.9[180008]: ansible-ansible.builtin.file Invoked with path=/usr/lib/systemd/system/tripleo_nova_compute.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:52:51 compute-0 sudo[180006]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:51 compute-0 sudo[180158]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fdkoooorgvcubjrxrxiiosmxbyyrmlod ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405971.6929471-3170-159190854770763/AnsiballZ_file.py'
Oct 02 11:52:51 compute-0 sudo[180158]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:52 compute-0 python3.9[180160]: ansible-ansible.builtin.file Invoked with path=/usr/lib/systemd/system/tripleo_nova_migration_target.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:52:52 compute-0 sudo[180158]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:52 compute-0 sudo[180310]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-djqvmhlaujtullmljptubjtroyktycck ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405972.2666488-3170-260632408749768/AnsiballZ_file.py'
Oct 02 11:52:52 compute-0 sudo[180310]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:52 compute-0 python3.9[180312]: ansible-ansible.builtin.file Invoked with path=/usr/lib/systemd/system/tripleo_nova_api_cron.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:52:52 compute-0 sudo[180310]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:53 compute-0 sudo[180462]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cojlwzlhdjgptazvedadamtdoeybtoxg ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405972.951972-3170-223685441113010/AnsiballZ_file.py'
Oct 02 11:52:53 compute-0 sudo[180462]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:53 compute-0 python3.9[180464]: ansible-ansible.builtin.file Invoked with path=/usr/lib/systemd/system/tripleo_nova_api.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:52:53 compute-0 sudo[180462]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:53 compute-0 sudo[180614]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wysjhwryjheakuwkztgtjtxbqhazjocd ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405973.5997689-3170-21830826056849/AnsiballZ_file.py'
Oct 02 11:52:53 compute-0 sudo[180614]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:54 compute-0 python3.9[180616]: ansible-ansible.builtin.file Invoked with path=/usr/lib/systemd/system/tripleo_nova_conductor.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:52:54 compute-0 sudo[180614]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:54 compute-0 sudo[180766]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pvqmilddoahfvnecaugabeucbrgmyfiz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405974.4145615-3170-39839110431870/AnsiballZ_file.py'
Oct 02 11:52:54 compute-0 sudo[180766]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:54 compute-0 python3.9[180768]: ansible-ansible.builtin.file Invoked with path=/usr/lib/systemd/system/tripleo_nova_metadata.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:52:54 compute-0 sudo[180766]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:55 compute-0 podman[180798]: 2025-10-02 11:52:55.146494323 +0000 UTC m=+0.063003593 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, container_name=iscsid, io.buildah.version=1.41.3)
Oct 02 11:52:55 compute-0 sudo[180938]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bspeidgkestqnizwywpmkwqkrtfqjibf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405975.0951688-3170-8667433080012/AnsiballZ_file.py'
Oct 02 11:52:55 compute-0 sudo[180938]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:55 compute-0 python3.9[180940]: ansible-ansible.builtin.file Invoked with path=/usr/lib/systemd/system/tripleo_nova_scheduler.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:52:55 compute-0 sudo[180938]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:56 compute-0 sudo[181090]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-iycvmcmgrbfispdnkxiltuqsghvxlqek ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405975.8113873-3170-162157295950705/AnsiballZ_file.py'
Oct 02 11:52:56 compute-0 sudo[181090]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:56 compute-0 python3.9[181092]: ansible-ansible.builtin.file Invoked with path=/usr/lib/systemd/system/tripleo_nova_vnc_proxy.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:52:56 compute-0 sudo[181090]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:56 compute-0 sudo[181242]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vdmyunszbduiquhodkvheymeltaofepf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405976.6372898-3341-265578190848155/AnsiballZ_file.py'
Oct 02 11:52:56 compute-0 sudo[181242]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:57 compute-0 python3.9[181244]: ansible-ansible.builtin.file Invoked with path=/etc/systemd/system/tripleo_nova_compute.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:52:57 compute-0 sudo[181242]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:57 compute-0 sudo[181394]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vfqtfpvnkzujofyojwsyuvrokouspxyu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405977.4137573-3341-45609479592376/AnsiballZ_file.py'
Oct 02 11:52:57 compute-0 sudo[181394]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:58 compute-0 python3.9[181396]: ansible-ansible.builtin.file Invoked with path=/etc/systemd/system/tripleo_nova_migration_target.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:52:58 compute-0 sudo[181394]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:58 compute-0 sudo[181546]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bqjxyhyttnyrmliejkpntjvlvggxjsdm ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405978.1814265-3341-167156649649191/AnsiballZ_file.py'
Oct 02 11:52:58 compute-0 sudo[181546]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:58 compute-0 python3.9[181548]: ansible-ansible.builtin.file Invoked with path=/etc/systemd/system/tripleo_nova_api_cron.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:52:58 compute-0 sudo[181546]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:59 compute-0 sudo[181698]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rkpkultsjabhdnfpccrsyqckoxnunxtn ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405978.8560221-3341-161692374231978/AnsiballZ_file.py'
Oct 02 11:52:59 compute-0 sudo[181698]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:52:59 compute-0 python3.9[181700]: ansible-ansible.builtin.file Invoked with path=/etc/systemd/system/tripleo_nova_api.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:52:59 compute-0 sudo[181698]: pam_unix(sudo:session): session closed for user root
Oct 02 11:52:59 compute-0 sudo[181850]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bksvvrvsuqogqkdatmoqxiiqxbknnroh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405979.5576365-3341-184295380928488/AnsiballZ_file.py'
Oct 02 11:52:59 compute-0 sudo[181850]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:00 compute-0 python3.9[181852]: ansible-ansible.builtin.file Invoked with path=/etc/systemd/system/tripleo_nova_conductor.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:53:00 compute-0 sudo[181850]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:00 compute-0 sudo[182002]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-apflhhtdoylhrrhowsuyardttiqzzwmz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405980.233657-3341-131786615162482/AnsiballZ_file.py'
Oct 02 11:53:00 compute-0 sudo[182002]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:00 compute-0 python3.9[182004]: ansible-ansible.builtin.file Invoked with path=/etc/systemd/system/tripleo_nova_metadata.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:53:00 compute-0 sudo[182002]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:01 compute-0 sudo[182154]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qrsyiwkkrrflzbfoalaymifdsjmhwuvx ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405980.911594-3341-76544956841923/AnsiballZ_file.py'
Oct 02 11:53:01 compute-0 sudo[182154]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:01 compute-0 python3.9[182156]: ansible-ansible.builtin.file Invoked with path=/etc/systemd/system/tripleo_nova_scheduler.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:53:01 compute-0 sudo[182154]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:01 compute-0 sudo[182306]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sfbhumoakgtdwxdkwyskwqtxzegotuso ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405981.5648901-3341-114911753517708/AnsiballZ_file.py'
Oct 02 11:53:01 compute-0 sudo[182306]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:01 compute-0 python3.9[182308]: ansible-ansible.builtin.file Invoked with path=/etc/systemd/system/tripleo_nova_vnc_proxy.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:53:02 compute-0 sudo[182306]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:53:02.193 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 11:53:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:53:02.193 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 11:53:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:53:02.194 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 11:53:02 compute-0 sudo[182458]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zyyivuqtvxfikdqrylwivagmkpeebvix ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405982.3462954-3515-99118896007867/AnsiballZ_command.py'
Oct 02 11:53:02 compute-0 sudo[182458]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:02 compute-0 python3.9[182460]: ansible-ansible.legacy.command Invoked with _raw_params=if systemctl is-active certmonger.service; then
                                               systemctl disable --now certmonger.service
                                               test -f /etc/systemd/system/certmonger.service || systemctl mask certmonger.service
                                             fi
                                              _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:53:02 compute-0 sudo[182458]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:03 compute-0 python3.9[182612]: ansible-ansible.builtin.find Invoked with file_type=any hidden=True paths=['/var/lib/certmonger/requests'] patterns=[] read_whole_file=False age_stamp=mtime recurse=False follow=False get_checksum=False checksum_algorithm=sha1 use_regex=False exact_mode=True excludes=None contains=None age=None size=None depth=None mode=None encoding=None limit=None
Oct 02 11:53:04 compute-0 sudo[182762]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cygmftjbkfhrzudsfbusmtqyvvsqzehd ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405984.262943-3569-62572682760711/AnsiballZ_systemd_service.py'
Oct 02 11:53:04 compute-0 sudo[182762]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:04 compute-0 python3.9[182764]: ansible-ansible.builtin.systemd_service Invoked with daemon_reload=True daemon_reexec=False scope=system no_block=False name=None state=None enabled=None force=None masked=None
Oct 02 11:53:05 compute-0 systemd[1]: Reloading.
Oct 02 11:53:05 compute-0 systemd-rc-local-generator[182815]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:53:05 compute-0 systemd-sysv-generator[182818]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:53:05 compute-0 podman[182766]: 2025-10-02 11:53:05.133137946 +0000 UTC m=+0.112783685 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, tcib_managed=true, container_name=ovn_controller, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001)
Oct 02 11:53:05 compute-0 sudo[182762]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:05 compute-0 sudo[182975]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sckwjsesgtluutzprfrzjdsaqqjvsqbc ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405985.5302432-3593-119441335840532/AnsiballZ_command.py'
Oct 02 11:53:05 compute-0 sudo[182975]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:05 compute-0 python3.9[182977]: ansible-ansible.legacy.command Invoked with cmd=/usr/bin/systemctl reset-failed tripleo_nova_compute.service _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True _raw_params=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:53:06 compute-0 sudo[182975]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:06 compute-0 sudo[183128]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cnnsosummvxrkxreykxxedrlmeyecblg ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405986.192045-3593-173051614346509/AnsiballZ_command.py'
Oct 02 11:53:06 compute-0 sudo[183128]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:06 compute-0 python3.9[183130]: ansible-ansible.legacy.command Invoked with cmd=/usr/bin/systemctl reset-failed tripleo_nova_migration_target.service _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True _raw_params=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:53:06 compute-0 sudo[183128]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:06 compute-0 sudo[183281]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fpashaywutjmbnscrjkkyvljspwpkrux ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405986.7562225-3593-161084909964231/AnsiballZ_command.py'
Oct 02 11:53:06 compute-0 sudo[183281]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:07 compute-0 python3.9[183283]: ansible-ansible.legacy.command Invoked with cmd=/usr/bin/systemctl reset-failed tripleo_nova_api_cron.service _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True _raw_params=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:53:07 compute-0 sudo[183281]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:07 compute-0 podman[183285]: 2025-10-02 11:53:07.298160324 +0000 UTC m=+0.079573701 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, 
managed_by=edpm_ansible, config_id=ovn_metadata_agent, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_metadata_agent, org.label-schema.license=GPLv2)
Oct 02 11:53:07 compute-0 sudo[183453]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ohyuraoifqxnnshsxswgrrdfaburqpxl ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405987.5476363-3593-234351274802595/AnsiballZ_command.py'
Oct 02 11:53:07 compute-0 sudo[183453]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:07 compute-0 python3.9[183455]: ansible-ansible.legacy.command Invoked with cmd=/usr/bin/systemctl reset-failed tripleo_nova_api.service _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True _raw_params=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:53:08 compute-0 sudo[183453]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:08 compute-0 sudo[183606]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-uddoxspamqectjmfrjtgcbdicdvgjrsu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405988.1558414-3593-1987549611953/AnsiballZ_command.py'
Oct 02 11:53:08 compute-0 sudo[183606]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:08 compute-0 python3.9[183608]: ansible-ansible.legacy.command Invoked with cmd=/usr/bin/systemctl reset-failed tripleo_nova_conductor.service _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True _raw_params=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:53:08 compute-0 sudo[183606]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:09 compute-0 sudo[183759]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cdmkunwucocarnvmxbqtxbhbvbzplfci ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405988.793487-3593-7067415433930/AnsiballZ_command.py'
Oct 02 11:53:09 compute-0 sudo[183759]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:09 compute-0 python3.9[183761]: ansible-ansible.legacy.command Invoked with cmd=/usr/bin/systemctl reset-failed tripleo_nova_metadata.service _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True _raw_params=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:53:09 compute-0 sudo[183759]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:09 compute-0 sudo[183912]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dnbazvxebyarztkgkcitblktcgsbumyj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405989.4309392-3593-204228465963739/AnsiballZ_command.py'
Oct 02 11:53:09 compute-0 sudo[183912]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:09 compute-0 python3.9[183914]: ansible-ansible.legacy.command Invoked with cmd=/usr/bin/systemctl reset-failed tripleo_nova_scheduler.service _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True _raw_params=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:53:09 compute-0 sudo[183912]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:10 compute-0 sudo[184065]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zieugjfjcprupfjaiiljwdetceqzvroy ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405990.0829084-3593-84333587227217/AnsiballZ_command.py'
Oct 02 11:53:10 compute-0 sudo[184065]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:10 compute-0 python3.9[184067]: ansible-ansible.legacy.command Invoked with cmd=/usr/bin/systemctl reset-failed tripleo_nova_vnc_proxy.service _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True _raw_params=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:53:10 compute-0 sudo[184065]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:12 compute-0 sudo[184218]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pgfpadadpwqdtsxwpqitfjzmxodneojk ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405991.9784095-3800-154722884082241/AnsiballZ_file.py'
Oct 02 11:53:12 compute-0 sudo[184218]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:12 compute-0 python3.9[184220]: ansible-ansible.builtin.file Invoked with group=zuul mode=0755 owner=zuul path=/var/lib/openstack/config/nova setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:53:12 compute-0 sudo[184218]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:12 compute-0 sudo[184370]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bwhuacuwcbwukkgexftjksczcznmsgjq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405992.7084062-3800-121843375239172/AnsiballZ_file.py'
Oct 02 11:53:12 compute-0 sudo[184370]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:13 compute-0 python3.9[184372]: ansible-ansible.builtin.file Invoked with group=zuul mode=0755 owner=zuul path=/var/lib/openstack/config/containers setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:53:13 compute-0 sudo[184370]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:13 compute-0 sudo[184522]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jykeokglhsvdcevzxtmcjmwtfmcirxhr ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405993.3363826-3800-66643266328363/AnsiballZ_file.py'
Oct 02 11:53:13 compute-0 sudo[184522]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:13 compute-0 python3.9[184524]: ansible-ansible.builtin.file Invoked with group=zuul mode=0755 owner=zuul path=/var/lib/openstack/config/nova_nvme_cleaner setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:53:13 compute-0 sudo[184522]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:14 compute-0 sudo[184674]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vbofdhekprbzjtbbnoazqldsyiikwked ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405994.0853245-3866-104153452630663/AnsiballZ_file.py'
Oct 02 11:53:14 compute-0 sudo[184674]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:14 compute-0 python3.9[184676]: ansible-ansible.builtin.file Invoked with group=zuul mode=0755 owner=zuul path=/var/lib/nova setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:53:14 compute-0 sudo[184674]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:15 compute-0 sudo[184826]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-iopobephedeqsdffaqmurjoflwflcnru ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405994.7544708-3866-174580374272206/AnsiballZ_file.py'
Oct 02 11:53:15 compute-0 sudo[184826]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:15 compute-0 python3.9[184828]: ansible-ansible.builtin.file Invoked with group=zuul mode=0755 owner=zuul path=/var/lib/_nova_secontext setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:53:15 compute-0 sudo[184826]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:15 compute-0 sudo[184978]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xjfggirnlujamovkkbrdbdksgemjbjtn ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405995.3805711-3866-134849332601345/AnsiballZ_file.py'
Oct 02 11:53:15 compute-0 sudo[184978]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:15 compute-0 python3.9[184980]: ansible-ansible.builtin.file Invoked with group=zuul mode=0755 owner=zuul path=/var/lib/nova/instances setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:53:15 compute-0 sudo[184978]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:16 compute-0 sudo[185130]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lroemmmlrkvwozojulubplyzxwzgfrpl ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405995.9899614-3866-244725745212898/AnsiballZ_file.py'
Oct 02 11:53:16 compute-0 sudo[185130]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:16 compute-0 python3.9[185132]: ansible-ansible.builtin.file Invoked with group=root mode=0750 owner=root path=/etc/ceph setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:53:16 compute-0 sudo[185130]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:17 compute-0 sudo[185282]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yibwumljlzzjjwdhnukyyxnqryfzadmo ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405996.7906349-3866-158650467803893/AnsiballZ_file.py'
Oct 02 11:53:17 compute-0 sudo[185282]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:17 compute-0 python3.9[185284]: ansible-ansible.builtin.file Invoked with group=zuul owner=zuul path=/etc/multipath setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:53:17 compute-0 sudo[185282]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:17 compute-0 sudo[185434]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yhxkdoldwbhagcadsguhvhxnigjnkmfh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405997.495076-3866-23197489790680/AnsiballZ_file.py'
Oct 02 11:53:17 compute-0 sudo[185434]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:17 compute-0 python3.9[185436]: ansible-ansible.builtin.file Invoked with group=zuul owner=zuul path=/etc/iscsi setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:53:17 compute-0 sudo[185434]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:18 compute-0 sudo[185599]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qpsmkgeajizpcsmdqdpymcorijrjyfag ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405998.0807245-3866-199772205760127/AnsiballZ_file.py'
Oct 02 11:53:18 compute-0 sudo[185599]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:18 compute-0 podman[185560]: 2025-10-02 11:53:18.343029838 +0000 UTC m=+0.050379101 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=multipathd, config_id=multipathd, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.build-date=20251001)
Oct 02 11:53:18 compute-0 python3.9[185607]: ansible-ansible.builtin.file Invoked with group=zuul owner=zuul path=/var/lib/iscsi setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:53:18 compute-0 sudo[185599]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:18 compute-0 sudo[185758]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-glgzvxstzpcexmatclpxyvecvhywyqjq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405998.683592-3866-163181409842860/AnsiballZ_file.py'
Oct 02 11:53:18 compute-0 sudo[185758]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:19 compute-0 python3.9[185760]: ansible-ansible.builtin.file Invoked with group=zuul owner=zuul path=/etc/nvme setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:53:19 compute-0 sudo[185758]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:19 compute-0 sudo[185910]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qbimfyutwszyurhexnrjgyalqjerwezf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759405999.2614043-3866-18858164835160/AnsiballZ_file.py'
Oct 02 11:53:19 compute-0 sudo[185910]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:19 compute-0 python3.9[185912]: ansible-ansible.builtin.file Invoked with group=zuul owner=zuul path=/run/openvswitch setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:53:19 compute-0 sudo[185910]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:24 compute-0 sudo[186062]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zcmmfkncavwisqtqhhxbkxegikgwolam ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406004.4397476-4213-135906350549956/AnsiballZ_getent.py'
Oct 02 11:53:24 compute-0 sudo[186062]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:25 compute-0 python3.9[186064]: ansible-ansible.builtin.getent Invoked with database=passwd key=nova fail_key=True service=None split=None
Oct 02 11:53:25 compute-0 sudo[186062]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:25 compute-0 sudo[186230]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yoryeefojxxpyeklarxyzhyozgalwrac ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406005.3180122-4237-180520034218479/AnsiballZ_group.py'
Oct 02 11:53:25 compute-0 podman[186189]: 2025-10-02 11:53:25.776709749 +0000 UTC m=+0.052496141 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, managed_by=edpm_ansible, org.label-schema.build-date=20251001, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_managed=true, config_id=iscsid, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=iscsid)
Oct 02 11:53:25 compute-0 sudo[186230]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:25 compute-0 python3.9[186237]: ansible-ansible.builtin.group Invoked with gid=42436 name=nova state=present force=False system=False local=False non_unique=False gid_min=None gid_max=None
Oct 02 11:53:25 compute-0 groupadd[186238]: group added to /etc/group: name=nova, GID=42436
Oct 02 11:53:25 compute-0 groupadd[186238]: group added to /etc/gshadow: name=nova
Oct 02 11:53:25 compute-0 groupadd[186238]: new group: name=nova, GID=42436
Oct 02 11:53:26 compute-0 sudo[186230]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:26 compute-0 sudo[186393]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kjwozbqnkmyadzaewgivfnoiheropkdf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406006.2089503-4261-225932667291254/AnsiballZ_user.py'
Oct 02 11:53:26 compute-0 sudo[186393]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:27 compute-0 python3.9[186395]: ansible-ansible.builtin.user Invoked with comment=nova user group=nova groups=['libvirt'] name=nova shell=/bin/sh state=present uid=42436 non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on compute-0 update_password=always home=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None password_expire_account_disable=None uid_min=None uid_max=None
Oct 02 11:53:27 compute-0 useradd[186397]: new user: name=nova, UID=42436, GID=42436, home=/home/nova, shell=/bin/sh, from=/dev/pts/0
Oct 02 11:53:27 compute-0 useradd[186397]: add 'nova' to group 'libvirt'
Oct 02 11:53:27 compute-0 useradd[186397]: add 'nova' to shadow group 'libvirt'
Oct 02 11:53:27 compute-0 sudo[186393]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:28 compute-0 sshd-session[186428]: Accepted publickey for zuul from 192.168.122.30 port 33696 ssh2: ECDSA SHA256:tAEsFSop2MjuMQQ/UqKU2uCkxqWXGfsM5crdhd6tlI4
Oct 02 11:53:28 compute-0 systemd-logind[827]: New session 26 of user zuul.
Oct 02 11:53:28 compute-0 systemd[1]: Started Session 26 of User zuul.
Oct 02 11:53:28 compute-0 sshd-session[186428]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Oct 02 11:53:28 compute-0 sshd-session[186431]: Received disconnect from 192.168.122.30 port 33696:11: disconnected by user
Oct 02 11:53:28 compute-0 sshd-session[186431]: Disconnected from user zuul 192.168.122.30 port 33696
Oct 02 11:53:28 compute-0 sshd-session[186428]: pam_unix(sshd:session): session closed for user zuul
Oct 02 11:53:28 compute-0 systemd[1]: session-26.scope: Deactivated successfully.
Oct 02 11:53:28 compute-0 systemd-logind[827]: Session 26 logged out. Waiting for processes to exit.
Oct 02 11:53:28 compute-0 systemd-logind[827]: Removed session 26.
Oct 02 11:53:29 compute-0 python3.9[186581]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/config/nova/config.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:53:29 compute-0 python3.9[186702]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/config/nova/config.json mode=0644 setype=container_file_t src=/home/zuul/.ansible/tmp/ansible-tmp-1759406008.6687844-4336-157140385722140/.source.json follow=False _original_basename=config.json.j2 checksum=2c2474b5f24ef7c9ed37f49680082593e0d1100b backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:53:30 compute-0 python3.9[186852]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/config/nova/nova-blank.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:53:30 compute-0 python3.9[186928]: ansible-ansible.legacy.file Invoked with mode=0644 setype=container_file_t dest=/var/lib/openstack/config/nova/nova-blank.conf _original_basename=nova-blank.conf recurse=False state=file path=/var/lib/openstack/config/nova/nova-blank.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:53:31 compute-0 python3.9[187078]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/config/nova/ssh-config follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:53:32 compute-0 python3.9[187199]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/config/nova/ssh-config mode=0644 setype=container_file_t src=/home/zuul/.ansible/tmp/ansible-tmp-1759406010.851271-4336-35210651238572/.source follow=False _original_basename=ssh-config checksum=4297f735c41bdc1ff52d72e6f623a02242f37958 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:53:32 compute-0 python3.9[187349]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/config/nova/02-nova-host-specific.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:53:33 compute-0 python3.9[187470]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/config/nova/02-nova-host-specific.conf mode=0644 setype=container_file_t src=/home/zuul/.ansible/tmp/ansible-tmp-1759406012.324746-4336-102106561318807/.source.conf follow=False _original_basename=02-nova-host-specific.conf.j2 checksum=1feba546d0beacad9258164ab79b8a747685ccc8 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:53:34 compute-0 python3.9[187620]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/config/nova/nova_statedir_ownership.py follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:53:34 compute-0 python3.9[187741]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/config/nova/nova_statedir_ownership.py mode=0644 setype=container_file_t src=/home/zuul/.ansible/tmp/ansible-tmp-1759406013.5534623-4336-101826857489347/.source.py follow=False _original_basename=nova_statedir_ownership.py checksum=c6c8a3cfefa5efd60ceb1408c4e977becedb71e2 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:53:35 compute-0 sudo[187891]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wulwbxonoycqkkmeqjrqvvpdjdfddlpr ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406014.90154-4543-189440200308979/AnsiballZ_file.py'
Oct 02 11:53:35 compute-0 sudo[187891]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:35 compute-0 python3.9[187893]: ansible-ansible.builtin.file Invoked with group=nova mode=0700 owner=nova path=/home/nova/.ssh state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:53:35 compute-0 sudo[187891]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:35 compute-0 podman[187894]: 2025-10-02 11:53:35.569820716 +0000 UTC m=+0.089786231 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, tcib_managed=true, config_id=ovn_controller, io.buildah.version=1.41.3, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 11:53:35 compute-0 sudo[188066]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-iwaclbtzcfdxyjicvcodtzrefkaykrru ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406015.6630096-4567-163232441154039/AnsiballZ_copy.py'
Oct 02 11:53:35 compute-0 sudo[188066]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:36 compute-0 python3.9[188068]: ansible-ansible.legacy.copy Invoked with dest=/home/nova/.ssh/authorized_keys group=nova mode=0600 owner=nova remote_src=True src=/var/lib/openstack/config/nova/ssh-publickey backup=False force=True follow=False unsafe_writes=False _original_basename=None content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None checksum=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:53:36 compute-0 sudo[188066]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:36 compute-0 sudo[188218]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dctmtvbhfgqoadghmwtzhmctfkizwzuq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406016.4593396-4591-67774925664531/AnsiballZ_stat.py'
Oct 02 11:53:36 compute-0 sudo[188218]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:37 compute-0 python3.9[188220]: ansible-ansible.builtin.stat Invoked with path=/var/lib/nova/compute_id follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:53:37 compute-0 sudo[188218]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:37 compute-0 sudo[188383]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xmvxhbmsygpwtbpgltyniqlspqmdvjec ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406017.3616977-4615-235424490873261/AnsiballZ_stat.py'
Oct 02 11:53:37 compute-0 sudo[188383]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:37 compute-0 podman[188344]: 2025-10-02 11:53:37.738053025 +0000 UTC m=+0.104805524 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_metadata_agent, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', 
'/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, org.label-schema.license=GPLv2, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 11:53:37 compute-0 python3.9[188389]: ansible-ansible.legacy.stat Invoked with path=/var/lib/nova/compute_id follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:53:37 compute-0 sudo[188383]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:38 compute-0 sudo[188510]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-oxebltfhjsdlxscgkvvqpeljalgtypsj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406017.3616977-4615-235424490873261/AnsiballZ_copy.py'
Oct 02 11:53:38 compute-0 sudo[188510]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:38 compute-0 python3.9[188512]: ansible-ansible.legacy.copy Invoked with attributes=+i dest=/var/lib/nova/compute_id group=nova mode=0400 owner=nova src=/home/zuul/.ansible/tmp/ansible-tmp-1759406017.3616977-4615-235424490873261/.source _original_basename=.cw_yjd4r follow=False checksum=3dee3c464ac3e7f92a2f7a0d2a7af808fed5f2df backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None
Oct 02 11:53:38 compute-0 sudo[188510]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:39 compute-0 python3.9[188664]: ansible-ansible.builtin.stat Invoked with path=/var/lib/openstack/cacerts/nova/tls-ca-bundle.pem follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:53:40 compute-0 python3.9[188816]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/config/containers/nova_compute.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:53:40 compute-0 python3.9[188937]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/config/containers/nova_compute.json mode=0644 setype=container_file_t src=/home/zuul/.ansible/tmp/ansible-tmp-1759406019.5543103-4693-180106096765159/.source.json follow=False _original_basename=nova_compute.json.j2 checksum=f022386746472553146d29f689b545df70fa8a60 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:53:41 compute-0 python3.9[189087]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/config/containers/nova_compute_init.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:53:42 compute-0 python3.9[189208]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/config/containers/nova_compute_init.json mode=0700 setype=container_file_t src=/home/zuul/.ansible/tmp/ansible-tmp-1759406021.0039587-4738-278739043632225/.source.json follow=False _original_basename=nova_compute_init.json.j2 checksum=60b024e6db49dc6e700fc0d50263944d98d4c034 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:53:42 compute-0 sudo[189358]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ddprsqxqvzcblohseamcbkcuzxlqfama ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406022.5988688-4789-254262069747963/AnsiballZ_container_config_data.py'
Oct 02 11:53:42 compute-0 sudo[189358]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:43 compute-0 python3.9[189360]: ansible-container_config_data Invoked with config_overrides={} config_path=/var/lib/openstack/config/containers config_pattern=nova_compute_init.json debug=False
Oct 02 11:53:43 compute-0 sudo[189358]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:43 compute-0 sudo[189510]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qtrzhilanzvmlditljhefkigpikcdbxz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406023.5178032-4816-140187330438102/AnsiballZ_container_config_hash.py'
Oct 02 11:53:43 compute-0 sudo[189510]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:44 compute-0 python3.9[189512]: ansible-container_config_hash Invoked with check_mode=False config_vol_prefix=/var/lib/config-data
Oct 02 11:53:44 compute-0 sudo[189510]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:44 compute-0 sudo[189662]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zkthovmdnoihqllwyefdcjaxgnrquvlk ; /usr/bin/python3 /home/zuul/.ansible/tmp/ansible-tmp-1759406024.3994782-4846-137639380536112/AnsiballZ_edpm_container_manage.py'
Oct 02 11:53:44 compute-0 sudo[189662]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:44 compute-0 python3[189664]: ansible-edpm_container_manage Invoked with concurrency=1 config_dir=/var/lib/openstack/config/containers config_id=edpm config_overrides={} config_patterns=nova_compute_init.json log_base_path=/var/log/containers/stdouts debug=False
Oct 02 11:53:45 compute-0 podman[189698]: 2025-10-02 11:53:45.200375023 +0000 UTC m=+0.077947828 container create a84b5326f13e77dc75c2f0bf3a5df719b9ba6279ef3e62de4c7ef314984f720b (image=quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified, name=nova_compute_init, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, container_name=nova_compute_init, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified', 'privileged': False, 'user': 'root', 'restart': 'never', 'command': 'bash -c $* -- eval python3 /sbin/nova_statedir_ownership.py | logger -t nova_compute_init', 'net': 'none', 'security_opt': ['label=disable'], 'detach': False, 'environment': {'NOVA_STATEDIR_OWNERSHIP_SKIP': '/var/lib/nova/compute_id', '__OS_DEBUG': False}, 'volumes': ['/dev/log:/dev/log', '/var/lib/nova:/var/lib/nova:shared', '/var/lib/_nova_secontext:/var/lib/_nova_secontext:shared,z', '/var/lib/openstack/config/nova/nova_statedir_ownership.py:/sbin/nova_statedir_ownership.py:z']}, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, config_id=edpm)
Oct 02 11:53:45 compute-0 podman[189698]: 2025-10-02 11:53:45.145312511 +0000 UTC m=+0.022885336 image pull e36f31143f26011980def9337d375f895bea59b742a3a2b372b996aa8ad58eba quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified
Oct 02 11:53:45 compute-0 python3[189664]: ansible-edpm_container_manage PODMAN-CONTAINER-DEBUG: podman create --name nova_compute_init --conmon-pidfile /run/nova_compute_init.pid --env NOVA_STATEDIR_OWNERSHIP_SKIP=/var/lib/nova/compute_id --env __OS_DEBUG=False --label config_id=edpm --label container_name=nova_compute_init --label managed_by=edpm_ansible --label config_data={'image': 'quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified', 'privileged': False, 'user': 'root', 'restart': 'never', 'command': 'bash -c $* -- eval python3 /sbin/nova_statedir_ownership.py | logger -t nova_compute_init', 'net': 'none', 'security_opt': ['label=disable'], 'detach': False, 'environment': {'NOVA_STATEDIR_OWNERSHIP_SKIP': '/var/lib/nova/compute_id', '__OS_DEBUG': False}, 'volumes': ['/dev/log:/dev/log', '/var/lib/nova:/var/lib/nova:shared', '/var/lib/_nova_secontext:/var/lib/_nova_secontext:shared,z', '/var/lib/openstack/config/nova/nova_statedir_ownership.py:/sbin/nova_statedir_ownership.py:z']} --log-driver journald --log-level info --network none --privileged=False --security-opt label=disable --user root --volume /dev/log:/dev/log --volume /var/lib/nova:/var/lib/nova:shared --volume /var/lib/_nova_secontext:/var/lib/_nova_secontext:shared,z --volume /var/lib/openstack/config/nova/nova_statedir_ownership.py:/sbin/nova_statedir_ownership.py:z quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified bash -c $* -- eval python3 /sbin/nova_statedir_ownership.py | logger -t nova_compute_init
Oct 02 11:53:45 compute-0 sudo[189662]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:45 compute-0 sudo[189885]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kzsnbnjhbjpgmniyfrgudcgtrrtjlwsd ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406025.4896052-4870-163111916955854/AnsiballZ_stat.py'
Oct 02 11:53:45 compute-0 sudo[189885]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:45 compute-0 python3.9[189887]: ansible-ansible.builtin.stat Invoked with path=/etc/sysconfig/podman_drop_in follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:53:45 compute-0 sudo[189885]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:46 compute-0 sudo[190039]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dmljqlpcgaekhibctlyfsrdsznwyjwmb ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406026.66467-4906-31148967468935/AnsiballZ_container_config_data.py'
Oct 02 11:53:46 compute-0 sudo[190039]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:47 compute-0 python3.9[190041]: ansible-container_config_data Invoked with config_overrides={} config_path=/var/lib/openstack/config/containers config_pattern=nova_compute.json debug=False
Oct 02 11:53:47 compute-0 sudo[190039]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:47 compute-0 sudo[190191]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gsvdobvxtwhrkquuwkttzxapjjvzjckg ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406027.5035458-4933-163088804579759/AnsiballZ_container_config_hash.py'
Oct 02 11:53:47 compute-0 sudo[190191]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:48 compute-0 python3.9[190193]: ansible-container_config_hash Invoked with check_mode=False config_vol_prefix=/var/lib/config-data
Oct 02 11:53:48 compute-0 sudo[190191]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:48 compute-0 sudo[190356]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tyoubdntsrjddtrrhpaqepbbvikvqayn ; /usr/bin/python3 /home/zuul/.ansible/tmp/ansible-tmp-1759406028.4009244-4963-101027331298644/AnsiballZ_edpm_container_manage.py'
Oct 02 11:53:48 compute-0 sudo[190356]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:48 compute-0 podman[190317]: 2025-10-02 11:53:48.68880191 +0000 UTC m=+0.047155019 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, config_id=multipathd, org.label-schema.license=GPLv2, tcib_managed=true, container_name=multipathd, io.buildah.version=1.41.3)
Oct 02 11:53:48 compute-0 python3[190364]: ansible-edpm_container_manage Invoked with concurrency=1 config_dir=/var/lib/openstack/config/containers config_id=edpm config_overrides={} config_patterns=nova_compute.json log_base_path=/var/log/containers/stdouts debug=False
Oct 02 11:53:49 compute-0 podman[190405]: 2025-10-02 11:53:49.147773965 +0000 UTC m=+0.056013480 container create 6787aaf4386267c01b59730f16adbda66cb221f8ada3d52065fad1aa02c96e7b (image=quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified, name=nova_compute, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_id=edpm, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, container_name=nova_compute, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified', 'privileged': True, 'user': 'nova', 'restart': 'always', 'command': 'kolla_start', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'volumes': ['/var/lib/openstack/config/nova:/var/lib/kolla/config_files:ro', '/var/lib/openstack/cacerts/nova/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/etc/localtime:/etc/localtime:ro', '/lib/modules:/lib/modules:ro', '/dev:/dev', '/var/lib/libvirt:/var/lib/libvirt', '/run/libvirt:/run/libvirt:shared', '/var/lib/nova:/var/lib/nova:shared', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/etc/iscsi:/etc/iscsi:ro', '/etc/nvme:/etc/nvme', '/var/lib/openstack/config/ceph:/var/lib/kolla/config_files/ceph:ro', '/etc/ssh/ssh_known_hosts:/etc/ssh/ssh_known_hosts:ro']}, org.label-schema.license=GPLv2)
Oct 02 11:53:49 compute-0 podman[190405]: 2025-10-02 11:53:49.113639733 +0000 UTC m=+0.021879238 image pull e36f31143f26011980def9337d375f895bea59b742a3a2b372b996aa8ad58eba quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified
Oct 02 11:53:49 compute-0 python3[190364]: ansible-edpm_container_manage PODMAN-CONTAINER-DEBUG: podman create --name nova_compute --conmon-pidfile /run/nova_compute.pid --env KOLLA_CONFIG_STRATEGY=COPY_ALWAYS --label config_id=edpm --label container_name=nova_compute --label managed_by=edpm_ansible --label config_data={'image': 'quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified', 'privileged': True, 'user': 'nova', 'restart': 'always', 'command': 'kolla_start', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'volumes': ['/var/lib/openstack/config/nova:/var/lib/kolla/config_files:ro', '/var/lib/openstack/cacerts/nova/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/etc/localtime:/etc/localtime:ro', '/lib/modules:/lib/modules:ro', '/dev:/dev', '/var/lib/libvirt:/var/lib/libvirt', '/run/libvirt:/run/libvirt:shared', '/var/lib/nova:/var/lib/nova:shared', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/etc/iscsi:/etc/iscsi:ro', '/etc/nvme:/etc/nvme', '/var/lib/openstack/config/ceph:/var/lib/kolla/config_files/ceph:ro', '/etc/ssh/ssh_known_hosts:/etc/ssh/ssh_known_hosts:ro']} --log-driver journald --log-level info --network host --privileged=True --user nova --volume /var/lib/openstack/config/nova:/var/lib/kolla/config_files:ro --volume /var/lib/openstack/cacerts/nova/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z --volume /etc/localtime:/etc/localtime:ro --volume /lib/modules:/lib/modules:ro --volume /dev:/dev --volume /var/lib/libvirt:/var/lib/libvirt --volume /run/libvirt:/run/libvirt:shared --volume /var/lib/nova:/var/lib/nova:shared --volume /var/lib/iscsi:/var/lib/iscsi:z --volume /etc/multipath:/etc/multipath:z --volume /etc/multipath.conf:/etc/multipath.conf:ro --volume /etc/iscsi:/etc/iscsi:ro --volume /etc/nvme:/etc/nvme --volume 
/var/lib/openstack/config/ceph:/var/lib/kolla/config_files/ceph:ro --volume /etc/ssh/ssh_known_hosts:/etc/ssh/ssh_known_hosts:ro quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified kolla_start
Oct 02 11:53:49 compute-0 sudo[190356]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:49 compute-0 sudo[190593]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-afrmnagkgxiofddsrwcfxdpztkmoujhw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406029.5129805-4987-17229653273801/AnsiballZ_stat.py'
Oct 02 11:53:49 compute-0 sudo[190593]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:50 compute-0 python3.9[190595]: ansible-ansible.builtin.stat Invoked with path=/etc/sysconfig/podman_drop_in follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:53:50 compute-0 sudo[190593]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:50 compute-0 sudo[190747]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-owkebitfowmgzoenfjewlynbcknicncq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406030.4322026-5014-118704116414544/AnsiballZ_file.py'
Oct 02 11:53:50 compute-0 sudo[190747]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:50 compute-0 python3.9[190749]: ansible-file Invoked with path=/etc/systemd/system/edpm_nova_compute.requires state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:53:50 compute-0 sudo[190747]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:51 compute-0 sudo[190898]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ftyehmbdolhneutktyqvyuzfaklznecr ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406030.9638233-5014-195106530262596/AnsiballZ_copy.py'
Oct 02 11:53:51 compute-0 sudo[190898]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:51 compute-0 python3.9[190900]: ansible-copy Invoked with src=/home/zuul/.ansible/tmp/ansible-tmp-1759406030.9638233-5014-195106530262596/source dest=/etc/systemd/system/edpm_nova_compute.service mode=0644 owner=root group=root backup=False force=True remote_src=False follow=False unsafe_writes=False _original_basename=None content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None checksum=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:53:51 compute-0 sudo[190898]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:51 compute-0 sudo[190974]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jqnvtblqxtxiuqtuigyzplvhlzbuduil ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406030.9638233-5014-195106530262596/AnsiballZ_systemd.py'
Oct 02 11:53:51 compute-0 sudo[190974]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:52 compute-0 python3.9[190976]: ansible-systemd Invoked with daemon_reload=True daemon_reexec=False scope=system no_block=False name=None state=None enabled=None force=None masked=None
Oct 02 11:53:52 compute-0 systemd[1]: Reloading.
Oct 02 11:53:52 compute-0 systemd-sysv-generator[191005]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:53:52 compute-0 systemd-rc-local-generator[191001]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:53:52 compute-0 sudo[190974]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:52 compute-0 sudo[191085]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pohksmydeljgtoqbxotyooyyeqobgien ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406030.9638233-5014-195106530262596/AnsiballZ_systemd.py'
Oct 02 11:53:52 compute-0 sudo[191085]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:53 compute-0 python3.9[191087]: ansible-systemd Invoked with state=restarted name=edpm_nova_compute.service enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:53:53 compute-0 systemd[1]: Reloading.
Oct 02 11:53:53 compute-0 systemd-rc-local-generator[191117]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:53:53 compute-0 systemd-sysv-generator[191120]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:53:53 compute-0 systemd[1]: Starting nova_compute container...
Oct 02 11:53:53 compute-0 systemd[1]: Started libcrun container.
Oct 02 11:53:53 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/67556d8b117c7cc5b237517a7333adae8c4f4a88b1bac9af1bb3f96ef15a577b/merged/etc/nvme supports timestamps until 2038 (0x7fffffff)
Oct 02 11:53:53 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/67556d8b117c7cc5b237517a7333adae8c4f4a88b1bac9af1bb3f96ef15a577b/merged/etc/multipath supports timestamps until 2038 (0x7fffffff)
Oct 02 11:53:53 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/67556d8b117c7cc5b237517a7333adae8c4f4a88b1bac9af1bb3f96ef15a577b/merged/var/lib/iscsi supports timestamps until 2038 (0x7fffffff)
Oct 02 11:53:53 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/67556d8b117c7cc5b237517a7333adae8c4f4a88b1bac9af1bb3f96ef15a577b/merged/var/lib/libvirt supports timestamps until 2038 (0x7fffffff)
Oct 02 11:53:53 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/67556d8b117c7cc5b237517a7333adae8c4f4a88b1bac9af1bb3f96ef15a577b/merged/var/lib/nova supports timestamps until 2038 (0x7fffffff)
Oct 02 11:53:53 compute-0 podman[191130]: 2025-10-02 11:53:53.801768083 +0000 UTC m=+0.158252541 container init 6787aaf4386267c01b59730f16adbda66cb221f8ada3d52065fad1aa02c96e7b (image=quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified, name=nova_compute, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified', 'privileged': True, 'user': 'nova', 'restart': 'always', 'command': 'kolla_start', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'volumes': ['/var/lib/openstack/config/nova:/var/lib/kolla/config_files:ro', '/var/lib/openstack/cacerts/nova/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/etc/localtime:/etc/localtime:ro', '/lib/modules:/lib/modules:ro', '/dev:/dev', '/var/lib/libvirt:/var/lib/libvirt', '/run/libvirt:/run/libvirt:shared', '/var/lib/nova:/var/lib/nova:shared', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/etc/iscsi:/etc/iscsi:ro', '/etc/nvme:/etc/nvme', '/var/lib/openstack/config/ceph:/var/lib/kolla/config_files/ceph:ro', '/etc/ssh/ssh_known_hosts:/etc/ssh/ssh_known_hosts:ro']}, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=edpm, container_name=nova_compute, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team)
Oct 02 11:53:53 compute-0 podman[191130]: 2025-10-02 11:53:53.809724467 +0000 UTC m=+0.166208855 container start 6787aaf4386267c01b59730f16adbda66cb221f8ada3d52065fad1aa02c96e7b (image=quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified, name=nova_compute, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_managed=true, container_name=nova_compute, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified', 'privileged': True, 'user': 'nova', 'restart': 'always', 'command': 'kolla_start', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'volumes': ['/var/lib/openstack/config/nova:/var/lib/kolla/config_files:ro', '/var/lib/openstack/cacerts/nova/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/etc/localtime:/etc/localtime:ro', '/lib/modules:/lib/modules:ro', '/dev:/dev', '/var/lib/libvirt:/var/lib/libvirt', '/run/libvirt:/run/libvirt:shared', '/var/lib/nova:/var/lib/nova:shared', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/etc/iscsi:/etc/iscsi:ro', '/etc/nvme:/etc/nvme', '/var/lib/openstack/config/ceph:/var/lib/kolla/config_files/ceph:ro', '/etc/ssh/ssh_known_hosts:/etc/ssh/ssh_known_hosts:ro']}, config_id=edpm, io.buildah.version=1.41.3, org.label-schema.build-date=20251001)
Oct 02 11:53:53 compute-0 nova_compute[191146]: + sudo -E kolla_set_configs
Oct 02 11:53:53 compute-0 podman[191130]: nova_compute
Oct 02 11:53:53 compute-0 systemd[1]: Started nova_compute container.
Oct 02 11:53:53 compute-0 nova_compute[191146]: INFO:__main__:Loading config file at /var/lib/kolla/config_files/config.json
Oct 02 11:53:53 compute-0 nova_compute[191146]: INFO:__main__:Validating config file
Oct 02 11:53:53 compute-0 nova_compute[191146]: INFO:__main__:Kolla config strategy set to: COPY_ALWAYS
Oct 02 11:53:53 compute-0 nova_compute[191146]: INFO:__main__:Copying service configuration files
Oct 02 11:53:53 compute-0 nova_compute[191146]: INFO:__main__:Deleting /etc/nova/nova.conf
Oct 02 11:53:53 compute-0 nova_compute[191146]: INFO:__main__:Copying /var/lib/kolla/config_files/nova-blank.conf to /etc/nova/nova.conf
Oct 02 11:53:53 compute-0 nova_compute[191146]: INFO:__main__:Setting permission for /etc/nova/nova.conf
Oct 02 11:53:53 compute-0 nova_compute[191146]: INFO:__main__:Copying /var/lib/kolla/config_files/01-nova.conf to /etc/nova/nova.conf.d/01-nova.conf
Oct 02 11:53:53 compute-0 nova_compute[191146]: INFO:__main__:Setting permission for /etc/nova/nova.conf.d/01-nova.conf
Oct 02 11:53:53 compute-0 nova_compute[191146]: INFO:__main__:Copying /var/lib/kolla/config_files/25-nova-extra.conf to /etc/nova/nova.conf.d/25-nova-extra.conf
Oct 02 11:53:53 compute-0 nova_compute[191146]: INFO:__main__:Setting permission for /etc/nova/nova.conf.d/25-nova-extra.conf
Oct 02 11:53:53 compute-0 nova_compute[191146]: INFO:__main__:Copying /var/lib/kolla/config_files/nova-blank.conf to /etc/nova/nova.conf.d/nova-blank.conf
Oct 02 11:53:53 compute-0 nova_compute[191146]: INFO:__main__:Setting permission for /etc/nova/nova.conf.d/nova-blank.conf
Oct 02 11:53:53 compute-0 nova_compute[191146]: INFO:__main__:Copying /var/lib/kolla/config_files/02-nova-host-specific.conf to /etc/nova/nova.conf.d/02-nova-host-specific.conf
Oct 02 11:53:53 compute-0 nova_compute[191146]: INFO:__main__:Setting permission for /etc/nova/nova.conf.d/02-nova-host-specific.conf
Oct 02 11:53:53 compute-0 nova_compute[191146]: INFO:__main__:Deleting /etc/ceph
Oct 02 11:53:53 compute-0 nova_compute[191146]: INFO:__main__:Creating directory /etc/ceph
Oct 02 11:53:53 compute-0 nova_compute[191146]: INFO:__main__:Setting permission for /etc/ceph
Oct 02 11:53:53 compute-0 nova_compute[191146]: INFO:__main__:Copying /var/lib/kolla/config_files/ssh-privatekey to /var/lib/nova/.ssh/ssh-privatekey
Oct 02 11:53:53 compute-0 nova_compute[191146]: INFO:__main__:Setting permission for /var/lib/nova/.ssh/ssh-privatekey
Oct 02 11:53:53 compute-0 nova_compute[191146]: INFO:__main__:Copying /var/lib/kolla/config_files/ssh-config to /var/lib/nova/.ssh/config
Oct 02 11:53:53 compute-0 nova_compute[191146]: INFO:__main__:Setting permission for /var/lib/nova/.ssh/config
Oct 02 11:53:53 compute-0 nova_compute[191146]: INFO:__main__:Writing out command to execute
Oct 02 11:53:53 compute-0 nova_compute[191146]: INFO:__main__:Setting permission for /var/lib/nova/.ssh/
Oct 02 11:53:53 compute-0 nova_compute[191146]: INFO:__main__:Setting permission for /var/lib/nova/.ssh/ssh-privatekey
Oct 02 11:53:53 compute-0 nova_compute[191146]: INFO:__main__:Setting permission for /var/lib/nova/.ssh/config
Oct 02 11:53:53 compute-0 nova_compute[191146]: ++ cat /run_command
Oct 02 11:53:53 compute-0 nova_compute[191146]: + CMD=nova-compute
Oct 02 11:53:53 compute-0 nova_compute[191146]: + ARGS=
Oct 02 11:53:53 compute-0 nova_compute[191146]: + sudo kolla_copy_cacerts
Oct 02 11:53:53 compute-0 sudo[191085]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:53 compute-0 nova_compute[191146]: + [[ ! -n '' ]]
Oct 02 11:53:53 compute-0 nova_compute[191146]: + . kolla_extend_start
Oct 02 11:53:53 compute-0 nova_compute[191146]: + echo 'Running command: '\''nova-compute'\'''
Oct 02 11:53:53 compute-0 nova_compute[191146]: + umask 0022
Oct 02 11:53:53 compute-0 nova_compute[191146]: + exec nova-compute
Oct 02 11:53:53 compute-0 nova_compute[191146]: Running command: 'nova-compute'
Oct 02 11:53:55 compute-0 python3.9[191308]: ansible-ansible.builtin.stat Invoked with path=/etc/systemd/system/edpm_nova_nvme_cleaner_healthcheck.service follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:53:55 compute-0 nova_compute[191146]: 2025-10-02 11:53:55.809 2 DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_linux_bridge.linux_bridge.LinuxBridgePlugin'>' with name 'linux_bridge' initialize /usr/lib/python3.9/site-packages/os_vif/__init__.py:44
Oct 02 11:53:55 compute-0 nova_compute[191146]: 2025-10-02 11:53:55.810 2 DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_noop.noop.NoOpPlugin'>' with name 'noop' initialize /usr/lib/python3.9/site-packages/os_vif/__init__.py:44
Oct 02 11:53:55 compute-0 nova_compute[191146]: 2025-10-02 11:53:55.810 2 DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_ovs.ovs.OvsPlugin'>' with name 'ovs' initialize /usr/lib/python3.9/site-packages/os_vif/__init__.py:44
Oct 02 11:53:55 compute-0 nova_compute[191146]: 2025-10-02 11:53:55.810 2 INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
Oct 02 11:53:55 compute-0 podman[191434]: 2025-10-02 11:53:55.909817286 +0000 UTC m=+0.052600524 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, container_name=iscsid, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=iscsid, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS)
Oct 02 11:53:55 compute-0 nova_compute[191146]: 2025-10-02 11:53:55.944 2 DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 11:53:55 compute-0 nova_compute[191146]: 2025-10-02 11:53:55.969 2 DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.024s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 11:53:56 compute-0 python3.9[191473]: ansible-ansible.builtin.stat Invoked with path=/etc/systemd/system/edpm_nova_nvme_cleaner.service follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:53:56 compute-0 python3.9[191633]: ansible-ansible.builtin.stat Invoked with path=/etc/systemd/system/edpm_nova_nvme_cleaner.service.requires follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:53:57 compute-0 nova_compute[191146]: 2025-10-02 11:53:57.892 2 INFO nova.virt.driver [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] Loading compute driver 'libvirt.LibvirtDriver'
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.035 2 INFO nova.compute.provider_config [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.252 2 DEBUG oslo_concurrency.lockutils [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] Acquiring lock "singleton_lock" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.253 2 DEBUG oslo_concurrency.lockutils [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] Acquired lock "singleton_lock" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.253 2 DEBUG oslo_concurrency.lockutils [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] Releasing lock "singleton_lock" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.254 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] Full set of CONF: _wait_for_exit_or_signal /usr/lib/python3.9/site-packages/oslo_service/service.py:362
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.254 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ******************************************************************************** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2589
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.254 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] Configuration options gathered from: log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2590
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.254 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] command line args: [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2591
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.254 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-compute.conf'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2592
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.255 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ================================================================================ log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2594
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.255 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] allow_resize_to_same_host      = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.255 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] arq_binding_timeout            = 300 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.255 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] backdoor_port                  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.255 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] backdoor_socket                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.255 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] block_device_allocate_retries  = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.256 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] block_device_allocate_retries_interval = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.256 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cert                           = self.pem log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.256 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] compute_driver                 = libvirt.LibvirtDriver log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.256 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] compute_monitors               = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.256 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] config_dir                     = ['/etc/nova/nova.conf.d'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.257 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] config_drive_format            = iso9660 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.257 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] config_file                    = ['/etc/nova/nova.conf', '/etc/nova/nova-compute.conf'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.257 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] config_source                  = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.257 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] console_host                   = compute-0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.257 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] control_exchange               = nova log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.258 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cpu_allocation_ratio           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.258 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] daemon                         = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.258 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] debug                          = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.258 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] default_access_ip_network_name = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.258 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] default_availability_zone      = nova log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.258 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] default_ephemeral_format       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.259 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] default_log_levels             = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.259 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] default_schedule_zone          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.259 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] disk_allocation_ratio          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.259 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] enable_new_services            = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.259 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] enabled_apis                   = ['osapi_compute', 'metadata'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.260 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] enabled_ssl_apis               = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.260 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] flat_injected                  = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.260 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] force_config_drive             = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.260 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] force_raw_images               = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.260 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] graceful_shutdown_timeout      = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.260 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] heal_instance_info_cache_interval = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.261 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] host                           = compute-0.ctlplane.example.com log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.261 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] initial_cpu_allocation_ratio   = 4.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.261 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] initial_disk_allocation_ratio  = 0.9 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.261 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] initial_ram_allocation_ratio   = 1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.262 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] injected_network_template      = /usr/lib/python3.9/site-packages/nova/virt/interfaces.template log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.262 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] instance_build_timeout         = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.262 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] instance_delete_interval       = 300 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.262 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] instance_format                = [instance: %(uuid)s]  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.262 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] instance_name_template         = instance-%08x log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.263 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] instance_usage_audit           = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.263 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] instance_usage_audit_period    = month log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.263 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] instance_uuid_format           = [instance: %(uuid)s]  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.263 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] instances_path                 = /var/lib/nova/instances log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.263 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] internal_service_availability_zone = internal log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.263 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] key                            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.263 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] live_migration_retry_count     = 30 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.264 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] log_config_append              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.264 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] log_date_format                = %Y-%m-%d %H:%M:%S log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.264 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] log_dir                        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.264 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] log_file                       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.264 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] log_options                    = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.264 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] log_rotate_interval            = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.265 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] log_rotate_interval_type       = days log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.265 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] log_rotation_type              = size log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.265 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] logging_context_format_string  = %(asctime)s.%(msecs)03d %(process)d %(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(user_identity)s] %(instance)s%(message)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.265 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] logging_debug_format_suffix    = %(funcName)s %(pathname)s:%(lineno)d log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.265 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] logging_default_format_string  = %(asctime)s.%(msecs)03d %(process)d %(levelname)s %(name)s [-] %(instance)s%(message)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.265 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] logging_exception_prefix       = %(asctime)s.%(msecs)03d %(process)d ERROR %(name)s %(instance)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.266 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] logging_user_identity_format   = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.266 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] long_rpc_timeout               = 1800 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.266 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] max_concurrent_builds          = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.266 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] max_concurrent_live_migrations = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.266 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] max_concurrent_snapshots       = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.266 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] max_local_block_devices        = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.266 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] max_logfile_count              = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.267 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] max_logfile_size_mb            = 20 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.267 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] maximum_instance_delete_attempts = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.267 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] metadata_listen                = 0.0.0.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.267 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] metadata_listen_port           = 8775 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.267 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] metadata_workers               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.267 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] migrate_max_retries            = -1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.268 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] mkisofs_cmd                    = /usr/bin/mkisofs log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.268 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] my_block_storage_ip            = 192.168.122.100 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.268 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] my_ip                          = 192.168.122.100 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.268 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] network_allocate_retries       = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.268 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.268 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] osapi_compute_listen           = 0.0.0.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.269 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] osapi_compute_listen_port      = 8774 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.269 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] osapi_compute_unique_server_name_scope =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.269 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] osapi_compute_workers          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.269 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] password_length                = 12 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.269 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] periodic_enable                = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.269 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] periodic_fuzzy_delay           = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.269 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] pointer_model                  = usbtablet log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.270 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] preallocate_images             = none log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.270 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] publish_errors                 = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.270 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] pybasedir                      = /usr/lib/python3.9/site-packages log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.270 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ram_allocation_ratio           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.270 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] rate_limit_burst               = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.270 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] rate_limit_except_level        = CRITICAL log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.270 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] rate_limit_interval            = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.271 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] reboot_timeout                 = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.271 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] reclaim_instance_interval      = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.271 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] record                         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.271 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] reimage_timeout_per_gb         = 20 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.271 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] report_interval                = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.271 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] rescue_timeout                 = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.271 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] reserved_host_cpus             = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.272 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] reserved_host_disk_mb          = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.272 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] reserved_host_memory_mb        = 512 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.272 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] reserved_huge_pages            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.272 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] resize_confirm_window          = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.272 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] resize_fs_using_block_device   = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.272 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] resume_guests_state_on_host_boot = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.272 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] rootwrap_config                = /etc/nova/rootwrap.conf log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.273 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] rpc_response_timeout           = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.273 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] run_external_periodic_tasks    = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.273 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] running_deleted_instance_action = reap log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.273 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] running_deleted_instance_poll_interval = 1800 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.273 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] running_deleted_instance_timeout = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.273 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] scheduler_instance_sync_interval = 120 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.273 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] service_down_time              = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.274 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] servicegroup_driver            = db log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.274 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] shelved_offload_time           = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.274 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] shelved_poll_interval          = 3600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.274 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] shutdown_timeout               = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.274 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] source_is_ipv6                 = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.274 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ssl_only                       = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.274 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] state_path                     = /var/lib/nova log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.275 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] sync_power_state_interval      = 600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.275 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] sync_power_state_pool_size     = 1000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.275 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] syslog_log_facility            = LOG_USER log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.275 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] tempdir                        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.275 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] timeout_nbd                    = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.275 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] transport_url                  = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.275 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] update_resources_interval      = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.276 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] use_cow_images                 = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.276 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] use_eventlog                   = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.276 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] use_journal                    = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.276 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] use_json                       = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.276 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] use_rootwrap_daemon            = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.276 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] use_stderr                     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.276 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] use_syslog                     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.277 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vcpu_pin_set                   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.277 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vif_plugging_is_fatal          = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.277 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vif_plugging_timeout           = 300 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.277 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] virt_mkfs                      = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.277 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] volume_usage_poll_interval     = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.277 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] watch_log_file                 = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.277 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] web                            = /usr/share/spice-html5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.278 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_concurrency.disable_process_locking = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.278 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_concurrency.lock_path     = /var/lib/nova/tmp log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.278 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_metrics.metrics_buffer_size = 1000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.278 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_metrics.metrics_enabled = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.278 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_metrics.metrics_process_name =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.278 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.279 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.279 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api.auth_strategy              = keystone log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.279 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api.compute_link_prefix        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.279 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.279 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api.dhcp_domain                =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.279 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api.enable_instance_password   = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.279 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api.glance_link_prefix         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.280 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api.instance_list_cells_batch_fixed_size = 100 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.280 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api.instance_list_cells_batch_strategy = distributed log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.280 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api.instance_list_per_project_cells = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.280 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api.list_records_by_skipping_down_cells = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.280 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api.local_metadata_per_cell    = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.280 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api.max_limit                  = 1000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.280 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api.metadata_cache_expiration  = 15 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.281 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api.neutron_default_tenant_id  = default log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.281 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api.use_forwarded_for          = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.281 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api.use_neutron_default_nets   = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.281 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api.vendordata_dynamic_connect_timeout = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.281 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api.vendordata_dynamic_failure_fatal = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.281 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api.vendordata_dynamic_read_timeout = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.282 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api.vendordata_dynamic_ssl_certfile =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.282 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api.vendordata_dynamic_targets = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.282 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api.vendordata_jsonfile_path   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.282 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api.vendordata_providers       = ['StaticJSON'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.282 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.backend                  = oslo_cache.dict log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.282 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.backend_argument         = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.283 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.config_prefix            = cache.oslo log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.283 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.dead_timeout             = 60.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.283 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.debug_cache_backend      = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.283 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.enable_retry_client      = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.283 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.enable_socket_keepalive  = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.283 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.enabled                  = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.284 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.expiration_time          = 600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.284 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.hashclient_retry_attempts = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 sudo[191783]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xvaijgksqifbejdfghdthhyorkvtrtop ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406037.923657-5194-210524953254339/AnsiballZ_podman_container.py'
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.284 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.hashclient_retry_delay   = 1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.284 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.memcache_dead_retry      = 300 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.284 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.memcache_password        =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.285 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.memcache_pool_connection_get_timeout = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.285 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.memcache_pool_flush_on_reconnect = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.285 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.memcache_pool_maxsize    = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.285 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.memcache_pool_unused_timeout = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.285 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.memcache_sasl_enabled    = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.286 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.memcache_servers         = ['localhost:11211'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.286 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.memcache_socket_timeout  = 1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.286 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.memcache_username        =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.286 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.proxies                  = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.286 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.retry_attempts           = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.287 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.retry_delay              = 0.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 sudo[191783]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.287 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.socket_keepalive_count   = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.288 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.socket_keepalive_idle    = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.288 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.socket_keepalive_interval = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.288 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.tls_allowed_ciphers      = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.288 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.tls_cafile               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.288 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.tls_certfile             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.288 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.tls_enabled              = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.288 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cache.tls_keyfile              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.289 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cinder.auth_section            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.289 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cinder.auth_type               = password log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.289 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cinder.cafile                  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.289 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cinder.catalog_info            = volumev3:cinderv3:internalURL log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.289 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cinder.certfile                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.289 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cinder.collect_timing          = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.289 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cinder.cross_az_attach         = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.290 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cinder.debug                   = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.290 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cinder.endpoint_template       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.290 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cinder.http_retries            = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.290 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cinder.insecure                = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.290 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cinder.keyfile                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.290 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cinder.os_region_name          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.290 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cinder.split_loggers           = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.290 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cinder.timeout                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.291 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] compute.consecutive_build_service_disable_threshold = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.291 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] compute.cpu_dedicated_set      = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.291 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] compute.cpu_shared_set         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.291 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] compute.image_type_exclude_list = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.291 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] compute.live_migration_wait_for_vif_plug = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.291 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] compute.max_concurrent_disk_ops = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.292 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] compute.max_disk_devices_to_attach = -1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.292 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] compute.packing_host_numa_cells_allocation_strategy = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.292 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] compute.provider_config_location = /etc/nova/provider_config/ log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.292 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] compute.resource_provider_association_refresh = 300 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.292 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] compute.shutdown_retry_interval = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.292 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] compute.vmdk_allowed_types     = ['streamOptimized', 'monolithicSparse'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.292 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] conductor.workers              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.292 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] console.allowed_origins        = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.293 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] console.ssl_ciphers            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.293 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] console.ssl_minimum_version    = default log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.293 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] consoleauth.token_ttl          = 600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.293 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cyborg.cafile                  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.293 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cyborg.certfile                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.293 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cyborg.collect_timing          = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.294 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cyborg.connect_retries         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.294 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cyborg.connect_retry_delay     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.294 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cyborg.endpoint_override       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.294 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cyborg.insecure                = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.294 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cyborg.keyfile                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.294 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cyborg.max_version             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.294 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cyborg.min_version             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.294 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cyborg.region_name             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.295 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cyborg.service_name            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.295 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cyborg.service_type            = accelerator log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.295 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cyborg.split_loggers           = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.295 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cyborg.status_code_retries     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.295 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cyborg.status_code_retry_delay = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.295 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cyborg.timeout                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.296 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cyborg.valid_interfaces        = ['internal', 'public'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.296 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] cyborg.version                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.296 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] database.backend               = sqlalchemy log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.296 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] database.connection            = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.296 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] database.connection_debug      = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.296 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] database.connection_parameters =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.297 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] database.connection_recycle_time = 3600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.297 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] database.connection_trace      = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.297 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] database.db_inc_retry_interval = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.297 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] database.db_max_retries        = 20 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.297 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] database.db_max_retry_interval = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.297 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] database.db_retry_interval     = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.298 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] database.max_overflow          = 50 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.298 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] database.max_pool_size         = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.298 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] database.max_retries           = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.298 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] database.mysql_enable_ndb      = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.298 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] database.mysql_sql_mode        = TRADITIONAL log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.298 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] database.mysql_wsrep_sync_wait = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.298 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] database.pool_timeout          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.299 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] database.retry_interval        = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.299 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] database.slave_connection      = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.299 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] database.sqlite_synchronous    = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.299 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api_database.backend           = sqlalchemy log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.299 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api_database.connection        = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.299 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api_database.connection_debug  = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.300 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api_database.connection_parameters =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.300 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api_database.connection_recycle_time = 3600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.300 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api_database.connection_trace  = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.300 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api_database.db_inc_retry_interval = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.300 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api_database.db_max_retries    = 20 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.300 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api_database.db_max_retry_interval = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.301 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api_database.db_retry_interval = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.301 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api_database.max_overflow      = 50 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.301 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api_database.max_pool_size     = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.301 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api_database.max_retries       = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.301 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api_database.mysql_enable_ndb  = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.301 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api_database.mysql_sql_mode    = TRADITIONAL log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.301 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api_database.mysql_wsrep_sync_wait = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.302 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api_database.pool_timeout      = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.302 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api_database.retry_interval    = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.302 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api_database.slave_connection  = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.302 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] api_database.sqlite_synchronous = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.302 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] devices.enabled_mdev_types     = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.302 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ephemeral_storage_encryption.cipher = aes-xts-plain64 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.303 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ephemeral_storage_encryption.enabled = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.303 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ephemeral_storage_encryption.key_size = 512 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.303 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.api_servers             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.303 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.cafile                  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.303 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.certfile                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.303 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.collect_timing          = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.303 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.connect_retries         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.304 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.connect_retry_delay     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.304 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.debug                   = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.304 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.default_trusted_certificate_ids = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.304 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.enable_certificate_validation = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.304 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.enable_rbd_download     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.304 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.endpoint_override       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.304 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.insecure                = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.305 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.keyfile                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.305 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.max_version             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.305 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.min_version             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.305 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.num_retries             = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.305 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.rbd_ceph_conf           =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.305 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.rbd_connect_timeout     = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.305 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.rbd_pool                =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.306 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.rbd_user                =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.306 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.region_name             = regionOne log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.306 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.service_name            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.306 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.service_type            = image log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.306 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.split_loggers           = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.306 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.status_code_retries     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.306 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.status_code_retry_delay = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.307 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.timeout                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.307 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.valid_interfaces        = ['internal'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.307 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.verify_glance_signatures = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.307 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] glance.version                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.307 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] guestfs.debug                  = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.307 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] hyperv.config_drive_cdrom      = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.307 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] hyperv.config_drive_inject_password = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.308 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] hyperv.dynamic_memory_ratio    = 1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.308 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] hyperv.enable_instance_metrics_collection = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.308 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] hyperv.enable_remotefx         = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.308 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] hyperv.instances_path_share    =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.308 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] hyperv.iscsi_initiator_list    = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.308 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] hyperv.limit_cpu_features      = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.308 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] hyperv.mounted_disk_query_retry_count = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.309 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] hyperv.mounted_disk_query_retry_interval = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.309 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] hyperv.power_state_check_timeframe = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.309 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] hyperv.power_state_event_polling_interval = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.309 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] hyperv.qemu_img_cmd            = qemu-img.exe log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.309 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] hyperv.use_multipath_io        = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.309 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] hyperv.volume_attach_retry_count = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.309 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] hyperv.volume_attach_retry_interval = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.310 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] hyperv.vswitch_name            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.310 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] hyperv.wait_soft_reboot_seconds = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.310 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] mks.enabled                    = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.310 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] mks.mksproxy_base_url          = http://127.0.0.1:6090/ log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.310 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] image_cache.manager_interval   = 2400 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.311 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] image_cache.precache_concurrency = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.311 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] image_cache.remove_unused_base_images = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.311 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] image_cache.remove_unused_original_minimum_age_seconds = 86400 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.311 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] image_cache.remove_unused_resized_minimum_age_seconds = 3600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.311 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] image_cache.subdirectory_name  = _base log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.311 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ironic.api_max_retries         = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.311 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ironic.api_retry_interval      = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.312 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ironic.auth_section            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.312 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ironic.auth_type               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.312 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ironic.cafile                  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.312 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ironic.certfile                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.312 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ironic.collect_timing          = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.312 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ironic.connect_retries         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.312 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ironic.connect_retry_delay     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.312 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ironic.endpoint_override       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.313 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ironic.insecure                = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.313 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ironic.keyfile                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.313 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ironic.max_version             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.313 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ironic.min_version             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.313 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ironic.partition_key           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.313 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ironic.peer_list               = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.313 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ironic.region_name             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.314 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ironic.serial_console_state_timeout = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.314 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ironic.service_name            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.314 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ironic.service_type            = baremetal log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.314 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ironic.split_loggers           = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.314 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ironic.status_code_retries     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.314 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ironic.status_code_retry_delay = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.314 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ironic.timeout                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.315 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ironic.valid_interfaces        = ['internal', 'public'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.315 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ironic.version                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.315 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] key_manager.backend            = barbican log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.315 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] key_manager.fixed_key          = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.315 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] barbican.auth_endpoint         = http://localhost/identity/v3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.315 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] barbican.barbican_api_version  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.316 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] barbican.barbican_endpoint     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.316 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] barbican.barbican_endpoint_type = internal log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.316 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] barbican.barbican_region_name  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.316 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] barbican.cafile                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.316 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] barbican.certfile              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.316 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] barbican.collect_timing        = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.317 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] barbican.insecure              = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.317 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] barbican.keyfile               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.317 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] barbican.number_of_retries     = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.317 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] barbican.retry_delay           = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.318 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] barbican.send_service_user_token = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.318 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] barbican.split_loggers         = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.318 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] barbican.timeout               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.318 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] barbican.verify_ssl            = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.318 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] barbican.verify_ssl_path       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.319 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] barbican_service_user.auth_section = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.319 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] barbican_service_user.auth_type = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.319 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] barbican_service_user.cafile   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.319 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] barbican_service_user.certfile = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.319 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] barbican_service_user.collect_timing = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.319 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] barbican_service_user.insecure = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.320 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] barbican_service_user.keyfile  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.320 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] barbican_service_user.split_loggers = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.320 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] barbican_service_user.timeout  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.320 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vault.approle_role_id          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.320 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vault.approle_secret_id        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.320 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vault.cafile                   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.321 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vault.certfile                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.321 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vault.collect_timing           = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.321 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vault.insecure                 = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.321 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vault.keyfile                  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.321 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vault.kv_mountpoint            = secret log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.321 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vault.kv_version               = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.321 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vault.namespace                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.322 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vault.root_token_id            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.322 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vault.split_loggers            = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.322 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vault.ssl_ca_crt_file          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.322 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vault.timeout                  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.322 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vault.use_ssl                  = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.322 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vault.vault_url                = http://127.0.0.1:8200 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.323 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] keystone.cafile                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.323 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] keystone.certfile              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.323 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] keystone.collect_timing        = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.323 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] keystone.connect_retries       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.323 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] keystone.connect_retry_delay   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.323 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] keystone.endpoint_override     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.323 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] keystone.insecure              = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.324 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] keystone.keyfile               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.324 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] keystone.max_version           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.324 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] keystone.min_version           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.324 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] keystone.region_name           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.324 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] keystone.service_name          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.324 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] keystone.service_type          = identity log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.324 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] keystone.split_loggers         = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.325 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] keystone.status_code_retries   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.325 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] keystone.status_code_retry_delay = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.325 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] keystone.timeout               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.325 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] keystone.valid_interfaces      = ['internal', 'public'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.325 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] keystone.version               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.325 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.connection_uri         =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.326 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.cpu_mode               = custom log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.326 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.cpu_model_extra_flags  = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.326 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.cpu_models             = ['Nehalem'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.326 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.cpu_power_governor_high = performance log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.326 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.cpu_power_governor_low = powersave log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.326 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.cpu_power_management   = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.327 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.cpu_power_management_strategy = cpu_state log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.327 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.device_detach_attempts = 8 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.327 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.device_detach_timeout  = 20 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.327 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.disk_cachemodes        = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.327 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.disk_prefix            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.327 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.enabled_perf_events    = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.327 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.file_backed_memory     = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.328 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.gid_maps               = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.328 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.hw_disk_discard        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.328 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.hw_machine_type        = ['x86_64=q35'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.328 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.images_rbd_ceph_conf   =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.328 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.images_rbd_glance_copy_poll_interval = 15 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.328 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.images_rbd_glance_copy_timeout = 600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.328 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.images_rbd_glance_store_name =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.329 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.images_rbd_pool        = rbd log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.329 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.images_type            = qcow2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.329 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.images_volume_group    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.329 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.inject_key             = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.329 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.inject_partition       = -2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.329 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.inject_password        = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.329 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.iscsi_iface            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.330 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.iser_use_multipath     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.330 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.live_migration_bandwidth = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.330 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.live_migration_completion_timeout = 800 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.330 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.live_migration_downtime = 500 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.330 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.live_migration_downtime_delay = 75 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.331 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.live_migration_downtime_steps = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.331 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.live_migration_inbound_addr = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.331 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.live_migration_permit_auto_converge = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.331 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.live_migration_permit_post_copy = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.331 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.live_migration_scheme  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.331 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.live_migration_timeout_action = force_complete log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.331 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.live_migration_tunnelled = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.332 2 WARNING oslo_config.cfg [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] Deprecated: Option "live_migration_uri" from group "libvirt" is deprecated for removal (
Oct 02 11:53:58 compute-0 nova_compute[191146]: live_migration_uri is deprecated for removal in favor of two other options that
Oct 02 11:53:58 compute-0 nova_compute[191146]: allow to change live migration scheme and target URI: ``live_migration_scheme``
Oct 02 11:53:58 compute-0 nova_compute[191146]: and ``live_migration_inbound_addr`` respectively.
Oct 02 11:53:58 compute-0 nova_compute[191146]: ).  Its value may be silently ignored in the future.
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.332 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.live_migration_uri     = qemu+tls://%s/system log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.332 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.live_migration_with_native_tls = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.332 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.max_queues             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.332 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.mem_stats_period_seconds = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.332 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.nfs_mount_options      = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.333 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.nfs_mount_point_base   = /var/lib/nova/mnt log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.333 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.num_aoe_discover_tries = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.333 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.num_iser_scan_tries    = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.333 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.num_memory_encrypted_guests = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.333 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.num_nvme_discover_tries = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.333 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.num_pcie_ports         = 24 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.334 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.num_volume_scan_tries  = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.334 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.pmem_namespaces        = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.334 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.quobyte_client_cfg     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.334 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.quobyte_mount_point_base = /var/lib/nova/mnt log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.334 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.rbd_connect_timeout    = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.334 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.rbd_destroy_volume_retries = 12 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.335 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.rbd_destroy_volume_retry_interval = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.335 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.rbd_secret_uuid        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.335 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.rbd_user               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.335 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.realtime_scheduler_priority = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.335 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.remote_filesystem_transport = ssh log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.335 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.rescue_image_id        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.335 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.rescue_kernel_id       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.336 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.rescue_ramdisk_id      = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.336 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.rng_dev_path           = /dev/urandom log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.336 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.rx_queue_size          = 512 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.336 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.smbfs_mount_options    =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.336 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.smbfs_mount_point_base = /var/lib/nova/mnt log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.336 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.snapshot_compression   = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.337 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.snapshot_image_format  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.337 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.snapshots_directory    = /var/lib/nova/instances/snapshots log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.337 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.sparse_logical_volumes = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.337 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.swtpm_enabled          = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.338 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.swtpm_group            = tss log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.338 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.swtpm_user             = tss log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.338 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.sysinfo_serial         = unique log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.338 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.tx_queue_size          = 512 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.338 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.uid_maps               = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.338 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.use_virtio_for_bridges = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.339 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.virt_type              = kvm log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.339 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.volume_clear           = zero log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.339 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.volume_clear_size      = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.339 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.volume_use_multipath   = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.339 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.vzstorage_cache_path   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.339 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.vzstorage_log_path     = /var/log/vstorage/%(cluster_name)s/nova.log.gz log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.340 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.vzstorage_mount_group  = qemu log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.340 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.vzstorage_mount_opts   = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.340 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.vzstorage_mount_perms  = 0770 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.340 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.vzstorage_mount_point_base = /var/lib/nova/mnt log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.340 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.vzstorage_mount_user   = stack log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.340 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] libvirt.wait_soft_reboot_seconds = 120 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.341 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] neutron.auth_section           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.341 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] neutron.auth_type              = password log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.341 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] neutron.cafile                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.341 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] neutron.certfile               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.341 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] neutron.collect_timing         = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.341 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] neutron.connect_retries        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.342 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] neutron.connect_retry_delay    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.342 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] neutron.default_floating_pool  = nova log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.342 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] neutron.endpoint_override      = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.342 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] neutron.extension_sync_interval = 600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.342 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] neutron.http_retries           = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.342 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] neutron.insecure               = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.342 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] neutron.keyfile                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.343 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] neutron.max_version            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.343 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] neutron.metadata_proxy_shared_secret = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.343 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] neutron.min_version            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.343 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] neutron.ovs_bridge             = br-int log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.343 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] neutron.physnets               = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.343 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] neutron.region_name            = regionOne log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.343 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] neutron.service_metadata_proxy = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.344 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] neutron.service_name           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.344 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] neutron.service_type           = network log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.344 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] neutron.split_loggers          = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.344 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] neutron.status_code_retries    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.344 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] neutron.status_code_retry_delay = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.344 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] neutron.timeout                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.344 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] neutron.valid_interfaces       = ['internal'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.345 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] neutron.version                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.345 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] notifications.bdms_in_notifications = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.345 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] notifications.default_level    = INFO log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.345 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] notifications.notification_format = both log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.345 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] notifications.notify_on_state_change = vm_and_task_state log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.345 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] notifications.versioned_notifications_topics = ['versioned_notifications'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.346 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] pci.alias                      = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.346 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] pci.device_spec                = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.346 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] pci.report_in_placement        = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.346 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.auth_section         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.346 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.auth_type            = password log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.346 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.auth_url             = https://keystone-internal.openstack.svc:5000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.346 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.cafile               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.347 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.certfile             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.347 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.collect_timing       = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.347 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.connect_retries      = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.347 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.connect_retry_delay  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.347 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.default_domain_id    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.347 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.default_domain_name  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.348 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.domain_id            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.348 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.domain_name          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.348 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.endpoint_override    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.348 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.insecure             = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.348 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.keyfile              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.348 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.max_version          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.349 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.min_version          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.349 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.password             = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.349 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.project_domain_id    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.349 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.project_domain_name  = Default log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.349 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.project_id           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.349 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.project_name         = service log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.349 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.region_name          = regionOne log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.350 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.service_name         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.350 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.service_type         = placement log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.350 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.split_loggers        = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.350 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.status_code_retries  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.350 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.status_code_retry_delay = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.350 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.system_scope         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.350 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.timeout              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.351 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.trust_id             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.351 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.user_domain_id       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.351 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.user_domain_name     = Default log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.351 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.user_id              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.351 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.username             = nova log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.351 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.valid_interfaces     = ['internal'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.352 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] placement.version              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.352 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] quota.cores                    = 20 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.352 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] quota.count_usage_from_placement = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.352 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] quota.driver                   = nova.quota.DbQuotaDriver log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.352 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] quota.injected_file_content_bytes = 10240 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.352 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] quota.injected_file_path_length = 255 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.353 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] quota.injected_files           = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.353 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] quota.instances                = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.353 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] quota.key_pairs                = 100 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.353 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] quota.metadata_items           = 128 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.353 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] quota.ram                      = 51200 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.353 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] quota.recheck_quota            = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.353 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] quota.server_group_members     = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.354 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] quota.server_groups            = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.354 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] rdp.enabled                    = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.354 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] rdp.html5_proxy_base_url       = http://127.0.0.1:6083/ log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.354 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] scheduler.discover_hosts_in_cells_interval = -1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.354 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] scheduler.enable_isolated_aggregate_filtering = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.355 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] scheduler.image_metadata_prefilter = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.355 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] scheduler.limit_tenants_to_placement_aggregate = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.355 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] scheduler.max_attempts         = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.355 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] scheduler.max_placement_results = 1000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.355 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] scheduler.placement_aggregate_required_for_tenants = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.355 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] scheduler.query_placement_for_availability_zone = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.355 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] scheduler.query_placement_for_image_type_support = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.356 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] scheduler.query_placement_for_routed_network_aggregates = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.356 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] scheduler.workers              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.356 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] filter_scheduler.aggregate_image_properties_isolation_namespace = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.356 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] filter_scheduler.aggregate_image_properties_isolation_separator = . log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.356 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.356 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] filter_scheduler.build_failure_weight_multiplier = 1000000.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.356 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] filter_scheduler.cpu_weight_multiplier = 1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.357 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.357 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] filter_scheduler.disk_weight_multiplier = 1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.357 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.357 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] filter_scheduler.host_subset_size = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.357 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] filter_scheduler.image_properties_default_architecture = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.357 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] filter_scheduler.io_ops_weight_multiplier = -1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.357 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] filter_scheduler.isolated_hosts = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.358 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] filter_scheduler.isolated_images = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.358 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] filter_scheduler.max_instances_per_host = 50 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.358 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] filter_scheduler.max_io_ops_per_host = 8 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.358 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] filter_scheduler.pci_in_placement = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.358 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] filter_scheduler.pci_weight_multiplier = 1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.358 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] filter_scheduler.ram_weight_multiplier = 1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.358 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.359 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] filter_scheduler.shuffle_best_same_weighed_hosts = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.359 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] filter_scheduler.soft_affinity_weight_multiplier = 1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.359 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.359 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] filter_scheduler.track_instance_changes = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.359 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.359 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] metrics.required               = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.359 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] metrics.weight_multiplier      = 1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.360 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] metrics.weight_of_unavailable  = -10000.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.360 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] metrics.weight_setting         = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.360 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] serial_console.base_url        = ws://127.0.0.1:6083/ log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.360 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] serial_console.enabled         = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.360 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] serial_console.port_range      = 10000:20000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.360 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] serial_console.proxyclient_address = 127.0.0.1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.360 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] serial_console.serialproxy_host = 0.0.0.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.361 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] serial_console.serialproxy_port = 6083 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.361 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] service_user.auth_section      = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.361 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] service_user.auth_type         = password log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.361 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] service_user.cafile            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.361 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] service_user.certfile          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.361 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] service_user.collect_timing    = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.361 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] service_user.insecure          = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.362 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] service_user.keyfile           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.362 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] service_user.send_service_user_token = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.362 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] service_user.split_loggers     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.362 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] service_user.timeout           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.362 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] spice.agent_enabled            = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.362 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] spice.enabled                  = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.363 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] spice.html5proxy_base_url      = http://127.0.0.1:6082/spice_auto.html log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.363 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] spice.html5proxy_host          = 0.0.0.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.363 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] spice.html5proxy_port          = 6082 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.363 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] spice.image_compression        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.363 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] spice.jpeg_compression         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.363 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] spice.playback_compression     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.363 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] spice.server_listen            = 127.0.0.1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.364 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] spice.server_proxyclient_address = 127.0.0.1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.364 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] spice.streaming_mode           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.364 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] spice.zlib_compression         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.364 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] upgrade_levels.baseapi         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.364 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] upgrade_levels.cert            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.364 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] upgrade_levels.compute         = auto log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.364 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] upgrade_levels.conductor       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.365 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] upgrade_levels.scheduler       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.365 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vendordata_dynamic_auth.auth_section = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.365 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vendordata_dynamic_auth.auth_type = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.365 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vendordata_dynamic_auth.cafile = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.365 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vendordata_dynamic_auth.certfile = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.366 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vendordata_dynamic_auth.collect_timing = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.366 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vendordata_dynamic_auth.insecure = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.366 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vendordata_dynamic_auth.keyfile = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.366 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vendordata_dynamic_auth.split_loggers = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.366 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vendordata_dynamic_auth.timeout = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.367 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vmware.api_retry_count         = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.367 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vmware.ca_file                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.367 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vmware.cache_prefix            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.367 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vmware.cluster_name            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.367 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vmware.connection_pool_size    = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.367 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vmware.console_delay_seconds   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.368 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vmware.datastore_regex         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.368 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vmware.host_ip                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.368 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vmware.host_password           = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.368 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vmware.host_port               = 443 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.368 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vmware.host_username           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.369 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vmware.insecure                = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.369 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vmware.integration_bridge      = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.369 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vmware.maximum_objects         = 100 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.369 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vmware.pbm_default_policy      = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.369 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vmware.pbm_enabled             = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.370 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vmware.pbm_wsdl_location       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.370 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vmware.serial_log_dir          = /opt/vmware/vspc log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.370 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vmware.serial_port_proxy_uri   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.370 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vmware.serial_port_service_uri = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.370 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vmware.task_poll_interval      = 0.5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.370 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vmware.use_linked_clone        = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.371 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vmware.vnc_keymap              = en-us log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.371 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vmware.vnc_port                = 5900 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.371 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vmware.vnc_port_total          = 10000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.371 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vnc.auth_schemes               = ['none'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.371 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vnc.enabled                    = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.372 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vnc.novncproxy_base_url        = https://nova-novncproxy-cell1-public-openstack.apps-crc.testing/vnc_lite.html log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.372 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vnc.novncproxy_host            = 0.0.0.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.372 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vnc.novncproxy_port            = 6080 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.372 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vnc.server_listen              = ::0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.373 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vnc.server_proxyclient_address = 192.168.122.100 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.373 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vnc.vencrypt_ca_certs          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.373 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vnc.vencrypt_client_cert       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.373 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vnc.vencrypt_client_key        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.373 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] workarounds.disable_compute_service_check_for_ffu = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.374 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] workarounds.disable_deep_image_inspection = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.374 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] workarounds.disable_fallback_pcpu_query = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.374 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] workarounds.disable_group_policy_check_upcall = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.374 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] workarounds.disable_libvirt_livesnapshot = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.374 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] workarounds.disable_rootwrap   = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.375 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] workarounds.enable_numa_live_migration = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.375 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] workarounds.enable_qemu_monitor_announce_self = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.375 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.375 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] workarounds.handle_virt_lifecycle_events = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.375 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] workarounds.libvirt_disable_apic = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.376 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] workarounds.never_download_image_if_on_rbd = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.376 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] workarounds.qemu_monitor_announce_self_count = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.376 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] workarounds.qemu_monitor_announce_self_interval = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.376 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] workarounds.reserve_disk_resource_for_image_cache = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.376 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] workarounds.skip_cpu_compare_at_startup = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.377 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] workarounds.skip_cpu_compare_on_dest = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.377 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] workarounds.skip_hypervisor_version_check_on_lm = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.377 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] workarounds.skip_reserve_in_use_ironic_nodes = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.377 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] workarounds.unified_limits_count_pcpu_as_vcpu = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.378 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.378 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] wsgi.api_paste_config          = api-paste.ini log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.378 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] wsgi.client_socket_timeout     = 900 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.378 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] wsgi.default_pool_size         = 1000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.379 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] wsgi.keep_alive                = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.379 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] wsgi.max_header_line           = 16384 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.379 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] wsgi.secure_proxy_ssl_header   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.379 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] wsgi.ssl_ca_file               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.379 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] wsgi.ssl_cert_file             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.380 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] wsgi.ssl_key_file              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.380 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] wsgi.tcp_keepidle              = 600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.380 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] wsgi.wsgi_log_format           = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.380 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] zvm.ca_file                    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.380 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] zvm.cloud_connector_url        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.381 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] zvm.image_tmp_path             = /var/lib/nova/images log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.381 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] zvm.reachable_timeout          = 300 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.381 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_policy.enforce_new_defaults = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.381 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_policy.enforce_scope      = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.382 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_policy.policy_default_rule = default log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.382 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_policy.policy_dirs        = ['policy.d'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.382 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_policy.policy_file        = policy.yaml log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.382 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_policy.remote_content_type = application/x-www-form-urlencoded log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.382 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_policy.remote_ssl_ca_crt_file = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.383 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_policy.remote_ssl_client_crt_file = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.383 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_policy.remote_ssl_client_key_file = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.383 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_policy.remote_ssl_verify_server_crt = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.383 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_versionedobjects.fatal_exception_format_errors = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.383 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.384 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] remote_debug.host              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.384 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] remote_debug.port              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.384 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.amqp_auto_delete = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.384 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.amqp_durable_queues = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.384 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.conn_pool_min_size = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.385 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.conn_pool_ttl = 1200 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.385 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.direct_mandatory_flag = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.385 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.enable_cancel_on_failover = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.385 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.heartbeat_in_pthread = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.385 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.heartbeat_rate = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.386 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.386 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.kombu_compression = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.386 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.kombu_failover_strategy = round-robin log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.386 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.386 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.387 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.rabbit_ha_queues = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.387 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.rabbit_interval_max = 30 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.387 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.387 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.387 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.388 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.388 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.388 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.rabbit_quorum_queue = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.388 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.rabbit_retry_backoff = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.388 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.rabbit_retry_interval = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.389 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.389 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.rpc_conn_pool_size = 30 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.389 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.ssl      = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.389 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.ssl_ca_file =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.390 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.ssl_cert_file =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.390 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.ssl_enforce_fips_mode = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.390 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.ssl_key_file =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.390 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_rabbit.ssl_version =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.390 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_notifications.driver = ['messagingv2'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.391 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_notifications.retry = -1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.391 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_notifications.topics = ['notifications'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.391 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_messaging_notifications.transport_url = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.391 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.auth_section        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.392 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.auth_type           = password log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.392 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.auth_url            = https://keystone-internal.openstack.svc:5000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.392 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.cafile              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.392 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.certfile            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.392 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.collect_timing      = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.393 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.connect_retries     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.393 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.connect_retry_delay = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.393 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.default_domain_id   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.393 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.default_domain_name = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.393 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.domain_id           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.394 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.domain_name         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.394 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.endpoint_id         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.394 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.endpoint_override   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.394 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.insecure            = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.394 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.keyfile             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.395 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.max_version         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.395 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.min_version         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.395 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.password            = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.395 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.project_domain_id   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.395 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.project_domain_name = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.396 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.project_id          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.396 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.project_name        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.396 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.region_name         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.396 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.service_name        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.396 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.service_type        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.397 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.split_loggers       = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.397 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.status_code_retries = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.397 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.status_code_retry_delay = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.397 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.system_scope        = all log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.397 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.timeout             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.397 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.trust_id            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.398 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.user_domain_id      = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.398 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.user_domain_name    = Default log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.398 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.user_id             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.398 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.username            = nova log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.398 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.valid_interfaces    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.398 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_limit.version             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.399 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_reports.file_event_handler = /var/lib/nova log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.399 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_reports.file_event_handler_interval = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.399 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] oslo_reports.log_dir           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.399 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vif_plug_linux_bridge_privileged.capabilities = [12] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.399 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vif_plug_linux_bridge_privileged.group = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.400 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vif_plug_linux_bridge_privileged.helper_command = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.400 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.400 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vif_plug_linux_bridge_privileged.thread_pool_size = 8 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.400 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vif_plug_linux_bridge_privileged.user = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.400 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vif_plug_ovs_privileged.capabilities = [12, 1] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.400 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vif_plug_ovs_privileged.group  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.401 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vif_plug_ovs_privileged.helper_command = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.401 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.401 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vif_plug_ovs_privileged.thread_pool_size = 8 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.401 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] vif_plug_ovs_privileged.user   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.401 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] os_vif_linux_bridge.flat_interface = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.401 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] os_vif_linux_bridge.forward_bridge_interface = ['all'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.402 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] os_vif_linux_bridge.iptables_bottom_regex =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.402 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] os_vif_linux_bridge.iptables_drop_action = DROP log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.402 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] os_vif_linux_bridge.iptables_top_regex =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.402 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] os_vif_linux_bridge.network_device_mtu = 1500 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.402 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] os_vif_linux_bridge.use_ipv6   = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.402 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] os_vif_linux_bridge.vlan_interface = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.403 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] os_vif_ovs.isolate_vif         = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.403 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] os_vif_ovs.network_device_mtu  = 1500 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.403 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] os_vif_ovs.ovs_vsctl_timeout   = 120 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.403 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] os_vif_ovs.ovsdb_connection    = tcp:127.0.0.1:6640 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.403 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] os_vif_ovs.ovsdb_interface     = native log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.403 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] os_vif_ovs.per_port_bridge     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.403 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] os_brick.lock_path             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.404 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] os_brick.wait_mpath_device_attempts = 4 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.404 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] os_brick.wait_mpath_device_interval = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.404 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] privsep_osbrick.capabilities   = [21] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.404 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] privsep_osbrick.group          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.404 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] privsep_osbrick.helper_command = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.404 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] privsep_osbrick.logger_name    = os_brick.privileged log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.404 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] privsep_osbrick.thread_pool_size = 8 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.405 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] privsep_osbrick.user           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.405 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] nova_sys_admin.capabilities    = [0, 1, 2, 3, 12, 21] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.405 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] nova_sys_admin.group           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.405 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] nova_sys_admin.helper_command  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.405 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] nova_sys_admin.logger_name     = oslo_privsep.daemon log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.405 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] nova_sys_admin.thread_pool_size = 8 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.405 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] nova_sys_admin.user            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.406 2 DEBUG oslo_service.service [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] ******************************************************************************** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2613
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.407 2 INFO nova.service [-] Starting compute node (version 27.5.2-0.20250829104910.6f8decf.el9)
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.475 2 DEBUG nova.virt.libvirt.host [None req-50bbc704-95d9-4560-894b-26657473f6d0 - - - - - -] Starting native event thread _init_events /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:492
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.476 2 DEBUG nova.virt.libvirt.host [None req-50bbc704-95d9-4560-894b-26657473f6d0 - - - - - -] Starting green dispatch thread _init_events /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:498
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.476 2 DEBUG nova.virt.libvirt.host [None req-50bbc704-95d9-4560-894b-26657473f6d0 - - - - - -] Starting connection event dispatch thread initialize /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:620
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.476 2 DEBUG nova.virt.libvirt.host [None req-50bbc704-95d9-4560-894b-26657473f6d0 - - - - - -] Connecting to libvirt: qemu:///system _get_new_connection /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:503
Oct 02 11:53:58 compute-0 systemd[1]: Starting libvirt QEMU daemon...
Oct 02 11:53:58 compute-0 systemd[1]: Started libvirt QEMU daemon.
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.536 2 DEBUG nova.virt.libvirt.host [None req-50bbc704-95d9-4560-894b-26657473f6d0 - - - - - -] Registering for lifecycle events <nova.virt.libvirt.host.Host object at 0x7fcb0232f2b0> _get_new_connection /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:509
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.538 2 DEBUG nova.virt.libvirt.host [None req-50bbc704-95d9-4560-894b-26657473f6d0 - - - - - -] Registering for connection events: <nova.virt.libvirt.host.Host object at 0x7fcb0232f2b0> _get_new_connection /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:530
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.539 2 INFO nova.virt.libvirt.driver [None req-50bbc704-95d9-4560-894b-26657473f6d0 - - - - - -] Connection event '1' reason 'None'
Oct 02 11:53:58 compute-0 python3.9[191785]: ansible-containers.podman.podman_container Invoked with name=nova_nvme_cleaner state=absent executable=podman detach=True debug=False force_restart=False force_delete=True generate_systemd={} image_strict=False recreate=False image=None annotation=None arch=None attach=None authfile=None blkio_weight=None blkio_weight_device=None cap_add=None cap_drop=None cgroup_conf=None cgroup_parent=None cgroupns=None cgroups=None chrootdirs=None cidfile=None cmd_args=None conmon_pidfile=None command=None cpu_period=None cpu_quota=None cpu_rt_period=None cpu_rt_runtime=None cpu_shares=None cpus=None cpuset_cpus=None cpuset_mems=None decryption_key=None delete_depend=None delete_time=None delete_volumes=None detach_keys=None device=None device_cgroup_rule=None device_read_bps=None device_read_iops=None device_write_bps=None device_write_iops=None dns=None dns_option=None dns_search=None entrypoint=None env=None env_file=None env_host=None env_merge=None etc_hosts=None expose=None gidmap=None gpus=None group_add=None group_entry=None healthcheck=None healthcheck_interval=None healthcheck_retries=None healthcheck_start_period=None health_startup_cmd=None health_startup_interval=None health_startup_retries=None health_startup_success=None health_startup_timeout=None healthcheck_timeout=None healthcheck_failure_action=None hooks_dir=None hostname=None hostuser=None http_proxy=None image_volume=None init=None init_ctr=None init_path=None interactive=None ip=None ip6=None ipc=None kernel_memory=None label=None label_file=None log_driver=None log_level=None log_opt=None mac_address=None memory=None memory_reservation=None memory_swap=None memory_swappiness=None mount=None network=None network_aliases=None no_healthcheck=None no_hosts=None oom_kill_disable=None oom_score_adj=None os=None passwd=None passwd_entry=None personality=None pid=None pid_file=None pids_limit=None platform=None pod=None pod_id_file=None preserve_fd=None 
preserve_fds=None privileged=None publish=None publish_all=None pull=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None rdt_class=None read_only=None read_only_tmpfs=None requires=None restart_policy=None restart_time=None retry=None retry_delay=None rm=None rmi=None rootfs=None seccomp_policy=None secrets=NOT_LOGGING_PARAMETER sdnotify=None security_opt=None shm_size=None shm_size_systemd=None sig_proxy=None stop_signal=None stop_timeout=None stop_time=None subgidname=None subuidname=None sysctl=None systemd=None timeout=None timezone=None tls_verify=None tmpfs=None tty=None uidmap=None ulimit=None umask=None unsetenv=None unsetenv_all=None user=None userns=None uts=None variant=None volume=None volumes_from=None workdir=None
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.585 2 WARNING nova.virt.libvirt.driver [None req-50bbc704-95d9-4560-894b-26657473f6d0 - - - - - -] Cannot update service status on host "compute-0.ctlplane.example.com" since it is not registered.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host compute-0.ctlplane.example.com could not be found.
Oct 02 11:53:58 compute-0 nova_compute[191146]: 2025-10-02 11:53:58.585 2 DEBUG nova.virt.libvirt.volume.mount [None req-50bbc704-95d9-4560-894b-26657473f6d0 - - - - - -] Initialising _HostMountState generation 0 host_up /usr/lib/python3.9/site-packages/nova/virt/libvirt/volume/mount.py:130
Oct 02 11:53:58 compute-0 sudo[191783]: pam_unix(sudo:session): session closed for user root
Oct 02 11:53:58 compute-0 rsyslogd[1013]: imjournal: journal files changed, reloading...  [v8.2506.0-2.el9 try https://www.rsyslog.com/e/0 ]
Oct 02 11:53:59 compute-0 nova_compute[191146]: 2025-10-02 11:53:59.336 2 INFO nova.virt.libvirt.host [None req-50bbc704-95d9-4560-894b-26657473f6d0 - - - - - -] Libvirt host capabilities <capabilities>
Oct 02 11:53:59 compute-0 nova_compute[191146]: 
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <host>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <uuid>a6ea5ec0-bd37-4735-94f0-b41eba3dd400</uuid>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <cpu>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <arch>x86_64</arch>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model>EPYC-Rome-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <vendor>AMD</vendor>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <microcode version='16777317'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <signature family='23' model='49' stepping='0'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <topology sockets='8' dies='1' clusters='1' cores='1' threads='1'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <maxphysaddr mode='emulate' bits='40'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature name='x2apic'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature name='tsc-deadline'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature name='osxsave'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature name='hypervisor'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature name='tsc_adjust'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature name='spec-ctrl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature name='stibp'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature name='arch-capabilities'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature name='ssbd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature name='cmp_legacy'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature name='topoext'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature name='virt-ssbd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature name='lbrv'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature name='tsc-scale'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature name='vmcb-clean'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature name='pause-filter'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature name='pfthreshold'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature name='svme-addr-chk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature name='rdctl-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature name='skip-l1dfl-vmentry'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature name='mds-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature name='pschange-mc-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <pages unit='KiB' size='4'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <pages unit='KiB' size='2048'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <pages unit='KiB' size='1048576'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </cpu>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <power_management>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <suspend_mem/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <suspend_disk/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <suspend_hybrid/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </power_management>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <iommu support='no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <migration_features>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <live/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <uri_transports>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <uri_transport>tcp</uri_transport>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <uri_transport>rdma</uri_transport>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </uri_transports>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </migration_features>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <topology>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <cells num='1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <cell id='0'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:           <memory unit='KiB'>7864092</memory>
Oct 02 11:53:59 compute-0 nova_compute[191146]:           <pages unit='KiB' size='4'>1966023</pages>
Oct 02 11:53:59 compute-0 nova_compute[191146]:           <pages unit='KiB' size='2048'>0</pages>
Oct 02 11:53:59 compute-0 nova_compute[191146]:           <pages unit='KiB' size='1048576'>0</pages>
Oct 02 11:53:59 compute-0 nova_compute[191146]:           <distances>
Oct 02 11:53:59 compute-0 nova_compute[191146]:             <sibling id='0' value='10'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:           </distances>
Oct 02 11:53:59 compute-0 nova_compute[191146]:           <cpus num='8'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:             <cpu id='0' socket_id='0' die_id='0' cluster_id='65535' core_id='0' siblings='0'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:             <cpu id='1' socket_id='1' die_id='1' cluster_id='65535' core_id='0' siblings='1'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:             <cpu id='2' socket_id='2' die_id='2' cluster_id='65535' core_id='0' siblings='2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:             <cpu id='3' socket_id='3' die_id='3' cluster_id='65535' core_id='0' siblings='3'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:             <cpu id='4' socket_id='4' die_id='4' cluster_id='65535' core_id='0' siblings='4'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:             <cpu id='5' socket_id='5' die_id='5' cluster_id='65535' core_id='0' siblings='5'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:             <cpu id='6' socket_id='6' die_id='6' cluster_id='65535' core_id='0' siblings='6'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:             <cpu id='7' socket_id='7' die_id='7' cluster_id='65535' core_id='0' siblings='7'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:           </cpus>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         </cell>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </cells>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </topology>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <cache>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <bank id='0' level='2' type='both' size='512' unit='KiB' cpus='0'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <bank id='1' level='2' type='both' size='512' unit='KiB' cpus='1'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <bank id='2' level='2' type='both' size='512' unit='KiB' cpus='2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <bank id='3' level='2' type='both' size='512' unit='KiB' cpus='3'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <bank id='4' level='2' type='both' size='512' unit='KiB' cpus='4'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <bank id='5' level='2' type='both' size='512' unit='KiB' cpus='5'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <bank id='6' level='2' type='both' size='512' unit='KiB' cpus='6'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <bank id='7' level='2' type='both' size='512' unit='KiB' cpus='7'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <bank id='0' level='3' type='both' size='16' unit='MiB' cpus='0'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <bank id='1' level='3' type='both' size='16' unit='MiB' cpus='1'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <bank id='2' level='3' type='both' size='16' unit='MiB' cpus='2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <bank id='3' level='3' type='both' size='16' unit='MiB' cpus='3'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <bank id='4' level='3' type='both' size='16' unit='MiB' cpus='4'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <bank id='5' level='3' type='both' size='16' unit='MiB' cpus='5'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <bank id='6' level='3' type='both' size='16' unit='MiB' cpus='6'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <bank id='7' level='3' type='both' size='16' unit='MiB' cpus='7'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </cache>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <secmodel>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model>selinux</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <doi>0</doi>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <baselabel type='kvm'>system_u:system_r:svirt_t:s0</baselabel>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <baselabel type='qemu'>system_u:system_r:svirt_tcg_t:s0</baselabel>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </secmodel>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <secmodel>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model>dac</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <doi>0</doi>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <baselabel type='kvm'>+107:+107</baselabel>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <baselabel type='qemu'>+107:+107</baselabel>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </secmodel>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   </host>
Oct 02 11:53:59 compute-0 nova_compute[191146]: 
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <guest>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <os_type>hvm</os_type>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <arch name='i686'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <wordsize>32</wordsize>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <emulator>/usr/libexec/qemu-kvm</emulator>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine maxCpus='240' deprecated='yes'>pc-i440fx-rhel7.6.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine canonical='pc-i440fx-rhel7.6.0' maxCpus='240' deprecated='yes'>pc</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine maxCpus='4096'>pc-q35-rhel9.6.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine canonical='pc-q35-rhel9.6.0' maxCpus='4096'>q35</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel8.6.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine maxCpus='710'>pc-q35-rhel9.4.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel8.5.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel8.3.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel7.6.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel8.4.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine maxCpus='710'>pc-q35-rhel9.2.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel8.2.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine maxCpus='710'>pc-q35-rhel9.0.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel8.0.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel8.1.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <domain type='qemu'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <domain type='kvm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </arch>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <features>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <pae/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <nonpae/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <acpi default='on' toggle='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <apic default='on' toggle='no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <cpuselection/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <deviceboot/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <disksnapshot default='on' toggle='no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <externalSnapshot/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </features>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   </guest>
Oct 02 11:53:59 compute-0 nova_compute[191146]: 
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <guest>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <os_type>hvm</os_type>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <arch name='x86_64'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <wordsize>64</wordsize>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <emulator>/usr/libexec/qemu-kvm</emulator>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine maxCpus='240' deprecated='yes'>pc-i440fx-rhel7.6.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine canonical='pc-i440fx-rhel7.6.0' maxCpus='240' deprecated='yes'>pc</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine maxCpus='4096'>pc-q35-rhel9.6.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine canonical='pc-q35-rhel9.6.0' maxCpus='4096'>q35</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel8.6.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine maxCpus='710'>pc-q35-rhel9.4.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel8.5.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel8.3.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel7.6.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel8.4.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine maxCpus='710'>pc-q35-rhel9.2.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel8.2.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine maxCpus='710'>pc-q35-rhel9.0.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel8.0.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel8.1.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <domain type='qemu'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <domain type='kvm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </arch>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <features>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <acpi default='on' toggle='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <apic default='on' toggle='no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <cpuselection/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <deviceboot/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <disksnapshot default='on' toggle='no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <externalSnapshot/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </features>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   </guest>
Oct 02 11:53:59 compute-0 nova_compute[191146]: 
Oct 02 11:53:59 compute-0 nova_compute[191146]: </capabilities>
Oct 02 11:53:59 compute-0 nova_compute[191146]: 
Oct 02 11:53:59 compute-0 nova_compute[191146]: 2025-10-02 11:53:59.342 2 DEBUG nova.virt.libvirt.host [None req-50bbc704-95d9-4560-894b-26657473f6d0 - - - - - -] Getting domain capabilities for i686 via machine types: {'q35', 'pc'} _get_machine_types /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:952
Oct 02 11:53:59 compute-0 nova_compute[191146]: 2025-10-02 11:53:59.359 2 DEBUG nova.virt.libvirt.host [None req-50bbc704-95d9-4560-894b-26657473f6d0 - - - - - -] Libvirt host hypervisor capabilities for arch=i686 and machine_type=q35:
Oct 02 11:53:59 compute-0 nova_compute[191146]: <domainCapabilities>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <path>/usr/libexec/qemu-kvm</path>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <domain>kvm</domain>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <machine>pc-q35-rhel9.6.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <arch>i686</arch>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <vcpu max='4096'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <iothreads supported='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <os supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <enum name='firmware'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <loader supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <value>/usr/share/OVMF/OVMF_CODE.secboot.fd</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='type'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>rom</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>pflash</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='readonly'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>yes</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>no</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='secure'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>no</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </loader>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   </os>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <cpu>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <mode name='host-passthrough' supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='hostPassthroughMigratable'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>on</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>off</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </mode>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <mode name='maximum' supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='maximumMigratable'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>on</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>off</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </mode>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <mode name='host-model' supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model fallback='forbid'>EPYC-Rome</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <vendor>AMD</vendor>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <maxphysaddr mode='passthrough' limit='40'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='x2apic'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='tsc-deadline'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='hypervisor'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='tsc_adjust'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='spec-ctrl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='stibp'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='arch-capabilities'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='ssbd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='cmp_legacy'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='overflow-recov'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='succor'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='ibrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='amd-ssbd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='virt-ssbd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='lbrv'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='tsc-scale'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='vmcb-clean'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='flushbyasid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='pause-filter'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='pfthreshold'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='svme-addr-chk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='lfence-always-serializing'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='rdctl-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='skip-l1dfl-vmentry'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='mds-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='pschange-mc-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='gds-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='rfds-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='disable' name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </mode>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <mode name='custom' supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='486-v1'>486</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>486-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Broadwell-v1'>Broadwell</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Broadwell-v3'>Broadwell-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Broadwell-v2'>Broadwell-noTSX</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell-noTSX'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Broadwell-v4'>Broadwell-noTSX-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell-noTSX-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Broadwell-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Broadwell-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Broadwell-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Broadwell-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Cascadelake-Server-v1'>Cascadelake-Server</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cascadelake-Server'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Cascadelake-Server-v3'>Cascadelake-Server-noTSX</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cascadelake-Server-noTSX'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cascadelake-Server-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cascadelake-Server-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cascadelake-Server-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cascadelake-Server-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v5</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cascadelake-Server-v5'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='Intel' canonical='Conroe-v1'>Conroe</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='Intel'>Conroe-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Cooperlake-v1'>Cooperlake</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cooperlake'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Cooperlake-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cooperlake-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Cooperlake-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cooperlake-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Denverton-v1'>Denverton</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Denverton'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mpx'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Denverton-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Denverton-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mpx'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Denverton-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Denverton-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Denverton-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Denverton-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Hygon' canonical='Dhyana-v1'>Dhyana</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Hygon'>Dhyana-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Hygon'>Dhyana-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Dhyana-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='AMD' canonical='EPYC-v1'>EPYC</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD' canonical='EPYC-Genoa-v1'>EPYC-Genoa</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Genoa'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amd-psfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='auto-ibrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='no-nested-data-bp'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='null-sel-clr-base'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='stibp-always-on'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-Genoa-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Genoa-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amd-psfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='auto-ibrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='no-nested-data-bp'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='null-sel-clr-base'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='stibp-always-on'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='AMD' canonical='EPYC-v2'>EPYC-IBPB</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD' canonical='EPYC-Milan-v1'>EPYC-Milan</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Milan'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-Milan-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Milan-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-Milan-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Milan-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amd-psfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='no-nested-data-bp'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='null-sel-clr-base'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='stibp-always-on'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD' canonical='EPYC-Rome-v1'>EPYC-Rome</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Rome'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-Rome-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Rome-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-Rome-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Rome-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-Rome-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Rome-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='AMD'>EPYC-Rome-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='AMD'>EPYC-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='AMD'>EPYC-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='GraniteRapids-v1'>GraniteRapids</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='GraniteRapids'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-tile'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fbsdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrc'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fzrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mcdt-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pbrsb-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='prefetchiti'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='psdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tsx-ldtrk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>GraniteRapids-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='GraniteRapids-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-tile'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fbsdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrc'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fzrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mcdt-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pbrsb-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='prefetchiti'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='psdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tsx-ldtrk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>GraniteRapids-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='GraniteRapids-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-tile'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx10'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx10-128'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx10-256'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx10-512'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cldemote'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fbsdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrc'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fzrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mcdt-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdir64b'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdiri'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pbrsb-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='prefetchiti'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='psdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tsx-ldtrk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Haswell-v1'>Haswell</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Haswell-v3'>Haswell-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Haswell-v2'>Haswell-noTSX</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell-noTSX'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Haswell-v4'>Haswell-noTSX-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell-noTSX-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Haswell-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Haswell-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Haswell-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Haswell-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Icelake-Server-v1'>Icelake-Server</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Icelake-Server-v2'>Icelake-Server-noTSX</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-noTSX'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Icelake-Server-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Icelake-Server-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Icelake-Server-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Icelake-Server-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Icelake-Server-v5</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-v5'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Icelake-Server-v6</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-v6'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Icelake-Server-v7</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-v7'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='IvyBridge-v1'>IvyBridge</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='IvyBridge'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='IvyBridge-v2'>IvyBridge-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='IvyBridge-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>IvyBridge-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='IvyBridge-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>IvyBridge-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='IvyBridge-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='KnightsMill-v1'>KnightsMill</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='KnightsMill'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-4fmaps'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-4vnniw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512er'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512pf'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>KnightsMill-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='KnightsMill-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-4fmaps'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-4vnniw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512er'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512pf'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel' canonical='Nehalem-v1'>Nehalem</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel' canonical='Nehalem-v2'>Nehalem-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel'>Nehalem-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel'>Nehalem-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='AMD' canonical='Opteron_G1-v1'>Opteron_G1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='AMD'>Opteron_G1-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='AMD' canonical='Opteron_G2-v1'>Opteron_G2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='AMD'>Opteron_G2-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='AMD' canonical='Opteron_G3-v1'>Opteron_G3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='AMD'>Opteron_G3-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD' canonical='Opteron_G4-v1'>Opteron_G4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Opteron_G4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fma4'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xop'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>Opteron_G4-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Opteron_G4-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fma4'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xop'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD' canonical='Opteron_G5-v1'>Opteron_G5</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Opteron_G5'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fma4'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tbm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xop'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>Opteron_G5-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Opteron_G5-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fma4'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tbm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xop'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='Intel' canonical='Penryn-v1'>Penryn</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='Intel'>Penryn-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel' canonical='SandyBridge-v1'>SandyBridge</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel' canonical='SandyBridge-v2'>SandyBridge-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel'>SandyBridge-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel'>SandyBridge-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='SapphireRapids-v1'>SapphireRapids</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='SapphireRapids'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-tile'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrc'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fzrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tsx-ldtrk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>SapphireRapids-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='SapphireRapids-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-tile'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrc'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fzrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tsx-ldtrk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>SapphireRapids-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='SapphireRapids-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-tile'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fbsdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrc'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fzrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='psdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tsx-ldtrk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>SapphireRapids-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='SapphireRapids-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-tile'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cldemote'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fbsdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrc'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fzrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdir64b'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdiri'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='psdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tsx-ldtrk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='SierraForest-v1'>SierraForest</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='SierraForest'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-ne-convert'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cmpccxadd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fbsdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mcdt-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pbrsb-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='psdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>SierraForest-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='SierraForest-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-ne-convert'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cmpccxadd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fbsdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mcdt-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pbrsb-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='psdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Skylake-Client-v1'>Skylake-Client</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Client'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Skylake-Client-v2'>Skylake-Client-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Client-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Skylake-Client-v3'>Skylake-Client-noTSX-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Client-noTSX-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Client-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Client-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Client-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Client-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Client-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Client-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Client-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Client-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Skylake-Server-v1'>Skylake-Server</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 sudo[192019]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xjgmmveugdleodvwfbhamwxbagaipmpn ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406039.0150611-5218-181584622006273/AnsiballZ_systemd.py'
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Skylake-Server-v2'>Skylake-Server-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Skylake-Server-v3'>Skylake-Server-noTSX-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server-noTSX-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Server-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Server-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Server-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Server-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Server-v5</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server-v5'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Snowridge-v1'>Snowridge</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Snowridge'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cldemote'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='core-capability'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdir64b'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdiri'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mpx'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='split-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Snowridge-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Snowridge-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cldemote'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='core-capability'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdir64b'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdiri'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mpx'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='split-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Snowridge-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Snowridge-v2'>
Oct 02 11:53:59 compute-0 sudo[192019]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cldemote'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='core-capability'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdir64b'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdiri'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='split-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Snowridge-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Snowridge-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cldemote'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='core-capability'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdir64b'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdiri'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='split-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Snowridge-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Snowridge-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cldemote'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdir64b'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdiri'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel' canonical='Westmere-v1'>Westmere</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel' canonical='Westmere-v2'>Westmere-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel'>Westmere-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel'>Westmere-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='AMD' canonical='athlon-v1'>athlon</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='athlon'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnow'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnowext'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='AMD'>athlon-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='athlon-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnow'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnowext'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='Intel' canonical='core2duo-v1'>core2duo</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='core2duo'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='Intel'>core2duo-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='core2duo-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='Intel' canonical='coreduo-v1'>coreduo</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='coreduo'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='Intel'>coreduo-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='coreduo-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='kvm32-v1'>kvm32</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>kvm32-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='kvm64-v1'>kvm64</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>kvm64-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='Intel' canonical='n270-v1'>n270</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='n270'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='Intel'>n270-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='n270-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='pentium-v1'>pentium</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>pentium-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='pentium2-v1'>pentium2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>pentium2-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='pentium3-v1'>pentium3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>pentium3-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='AMD' canonical='phenom-v1'>phenom</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='phenom'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnow'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnowext'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='AMD'>phenom-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='phenom-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnow'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnowext'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='qemu32-v1'>qemu32</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>qemu32-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='qemu64-v1'>qemu64</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>qemu64-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </mode>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   </cpu>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <memoryBacking supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <enum name='sourceType'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <value>file</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <value>anonymous</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <value>memfd</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   </memoryBacking>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <devices>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <disk supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='diskDevice'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>disk</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>cdrom</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>floppy</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>lun</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='bus'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>fdc</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>scsi</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>usb</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>sata</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='model'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio-transitional</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio-non-transitional</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </disk>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <graphics supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='type'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>vnc</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>egl-headless</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>dbus</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </graphics>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <video supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='modelType'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>vga</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>cirrus</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>none</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>bochs</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>ramfb</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </video>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <hostdev supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='mode'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>subsystem</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='startupPolicy'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>default</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>mandatory</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>requisite</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>optional</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='subsysType'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>usb</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>pci</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>scsi</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='capsType'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='pciBackend'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </hostdev>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <rng supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='model'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio-transitional</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio-non-transitional</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='backendModel'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>random</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>egd</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>builtin</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </rng>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <filesystem supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='driverType'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>path</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>handle</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtiofs</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </filesystem>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <tpm supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='model'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>tpm-tis</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>tpm-crb</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='backendModel'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>emulator</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>external</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='backendVersion'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>2.0</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </tpm>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <redirdev supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='bus'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>usb</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </redirdev>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <channel supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='type'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>pty</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>unix</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </channel>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <crypto supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='model'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='type'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>qemu</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='backendModel'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>builtin</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </crypto>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <interface supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='backendType'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>default</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>passt</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </interface>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <panic supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='model'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>isa</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>hyperv</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </panic>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   </devices>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <features>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <gic supported='no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <vmcoreinfo supported='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <genid supported='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <backingStoreInput supported='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <backup supported='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <async-teardown supported='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <ps2 supported='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <sev supported='no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <sgx supported='no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <hyperv supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='features'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>relaxed</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>vapic</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>spinlocks</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>vpindex</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>runtime</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>synic</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>stimer</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>reset</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>vendor_id</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>frequencies</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>reenlightenment</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>tlbflush</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>ipi</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>avic</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>emsr_bitmap</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>xmm_input</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </hyperv>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <launchSecurity supported='no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   </features>
Oct 02 11:53:59 compute-0 nova_compute[191146]: </domainCapabilities>
Oct 02 11:53:59 compute-0 nova_compute[191146]:  _get_domain_capabilities /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1037
Oct 02 11:53:59 compute-0 nova_compute[191146]: 2025-10-02 11:53:59.367 2 DEBUG nova.virt.libvirt.host [None req-50bbc704-95d9-4560-894b-26657473f6d0 - - - - - -] Libvirt host hypervisor capabilities for arch=i686 and machine_type=pc:
Oct 02 11:53:59 compute-0 nova_compute[191146]: <domainCapabilities>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <path>/usr/libexec/qemu-kvm</path>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <domain>kvm</domain>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <machine>pc-i440fx-rhel7.6.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <arch>i686</arch>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <vcpu max='240'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <iothreads supported='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <os supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <enum name='firmware'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <loader supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <value>/usr/share/OVMF/OVMF_CODE.secboot.fd</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='type'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>rom</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>pflash</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='readonly'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>yes</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>no</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='secure'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>no</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </loader>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   </os>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <cpu>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <mode name='host-passthrough' supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='hostPassthroughMigratable'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>on</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>off</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </mode>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <mode name='maximum' supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='maximumMigratable'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>on</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>off</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </mode>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <mode name='host-model' supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model fallback='forbid'>EPYC-Rome</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <vendor>AMD</vendor>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <maxphysaddr mode='passthrough' limit='40'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='x2apic'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='tsc-deadline'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='hypervisor'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='tsc_adjust'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='spec-ctrl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='stibp'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='arch-capabilities'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='ssbd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='cmp_legacy'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='overflow-recov'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='succor'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='ibrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='amd-ssbd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='virt-ssbd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='lbrv'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='tsc-scale'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='vmcb-clean'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='flushbyasid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='pause-filter'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='pfthreshold'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='svme-addr-chk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='lfence-always-serializing'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='rdctl-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='skip-l1dfl-vmentry'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='mds-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='pschange-mc-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='gds-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='rfds-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='disable' name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </mode>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <mode name='custom' supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='486-v1'>486</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>486-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Broadwell-v1'>Broadwell</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Broadwell-v3'>Broadwell-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Broadwell-v2'>Broadwell-noTSX</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell-noTSX'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Broadwell-v4'>Broadwell-noTSX-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell-noTSX-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Broadwell-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Broadwell-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Broadwell-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Broadwell-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Cascadelake-Server-v1'>Cascadelake-Server</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cascadelake-Server'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Cascadelake-Server-v3'>Cascadelake-Server-noTSX</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cascadelake-Server-noTSX'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cascadelake-Server-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cascadelake-Server-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cascadelake-Server-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cascadelake-Server-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v5</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cascadelake-Server-v5'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='Intel' canonical='Conroe-v1'>Conroe</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='Intel'>Conroe-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Cooperlake-v1'>Cooperlake</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cooperlake'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Cooperlake-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cooperlake-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Cooperlake-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cooperlake-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Denverton-v1'>Denverton</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Denverton'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mpx'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Denverton-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Denverton-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mpx'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Denverton-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Denverton-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Denverton-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Denverton-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Hygon' canonical='Dhyana-v1'>Dhyana</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Hygon'>Dhyana-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Hygon'>Dhyana-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Dhyana-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='AMD' canonical='EPYC-v1'>EPYC</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD' canonical='EPYC-Genoa-v1'>EPYC-Genoa</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Genoa'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amd-psfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='auto-ibrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='no-nested-data-bp'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='null-sel-clr-base'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='stibp-always-on'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-Genoa-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Genoa-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amd-psfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='auto-ibrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='no-nested-data-bp'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='null-sel-clr-base'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='stibp-always-on'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='AMD' canonical='EPYC-v2'>EPYC-IBPB</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD' canonical='EPYC-Milan-v1'>EPYC-Milan</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Milan'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-Milan-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Milan-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-Milan-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Milan-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amd-psfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='no-nested-data-bp'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='null-sel-clr-base'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='stibp-always-on'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD' canonical='EPYC-Rome-v1'>EPYC-Rome</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Rome'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-Rome-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Rome-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-Rome-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Rome-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-Rome-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Rome-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='AMD'>EPYC-Rome-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='AMD'>EPYC-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='AMD'>EPYC-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='GraniteRapids-v1'>GraniteRapids</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='GraniteRapids'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-tile'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fbsdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrc'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fzrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mcdt-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pbrsb-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='prefetchiti'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='psdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tsx-ldtrk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>GraniteRapids-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='GraniteRapids-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-tile'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fbsdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrc'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fzrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mcdt-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pbrsb-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='prefetchiti'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='psdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tsx-ldtrk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>GraniteRapids-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='GraniteRapids-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-tile'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx10'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx10-128'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx10-256'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx10-512'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cldemote'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fbsdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrc'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fzrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mcdt-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdir64b'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdiri'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pbrsb-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='prefetchiti'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='psdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tsx-ldtrk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Haswell-v1'>Haswell</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Haswell-v3'>Haswell-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Haswell-v2'>Haswell-noTSX</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell-noTSX'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Haswell-v4'>Haswell-noTSX-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell-noTSX-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Haswell-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Haswell-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Haswell-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Haswell-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Icelake-Server-v1'>Icelake-Server</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Icelake-Server-v2'>Icelake-Server-noTSX</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-noTSX'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Icelake-Server-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Icelake-Server-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Icelake-Server-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Icelake-Server-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Icelake-Server-v5</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-v5'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Icelake-Server-v6</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-v6'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Icelake-Server-v7</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-v7'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='IvyBridge-v1'>IvyBridge</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='IvyBridge'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='IvyBridge-v2'>IvyBridge-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='IvyBridge-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>IvyBridge-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='IvyBridge-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>IvyBridge-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='IvyBridge-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='KnightsMill-v1'>KnightsMill</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='KnightsMill'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-4fmaps'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-4vnniw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512er'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512pf'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>KnightsMill-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='KnightsMill-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-4fmaps'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-4vnniw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512er'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512pf'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel' canonical='Nehalem-v1'>Nehalem</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel' canonical='Nehalem-v2'>Nehalem-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel'>Nehalem-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel'>Nehalem-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='AMD' canonical='Opteron_G1-v1'>Opteron_G1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='AMD'>Opteron_G1-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='AMD' canonical='Opteron_G2-v1'>Opteron_G2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='AMD'>Opteron_G2-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='AMD' canonical='Opteron_G3-v1'>Opteron_G3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='AMD'>Opteron_G3-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD' canonical='Opteron_G4-v1'>Opteron_G4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Opteron_G4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fma4'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xop'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>Opteron_G4-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Opteron_G4-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fma4'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xop'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD' canonical='Opteron_G5-v1'>Opteron_G5</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Opteron_G5'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fma4'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tbm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xop'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>Opteron_G5-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Opteron_G5-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fma4'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tbm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xop'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='Intel' canonical='Penryn-v1'>Penryn</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='Intel'>Penryn-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel' canonical='SandyBridge-v1'>SandyBridge</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel' canonical='SandyBridge-v2'>SandyBridge-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel'>SandyBridge-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel'>SandyBridge-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='SapphireRapids-v1'>SapphireRapids</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='SapphireRapids'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-tile'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrc'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fzrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tsx-ldtrk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>SapphireRapids-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='SapphireRapids-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-tile'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrc'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fzrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tsx-ldtrk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>SapphireRapids-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='SapphireRapids-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-tile'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fbsdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrc'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fzrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='psdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tsx-ldtrk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>SapphireRapids-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='SapphireRapids-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-tile'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cldemote'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fbsdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrc'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fzrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdir64b'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdiri'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='psdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tsx-ldtrk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='SierraForest-v1'>SierraForest</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='SierraForest'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-ne-convert'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cmpccxadd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fbsdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mcdt-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pbrsb-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='psdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>SierraForest-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='SierraForest-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-ne-convert'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cmpccxadd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fbsdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mcdt-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pbrsb-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='psdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Skylake-Client-v1'>Skylake-Client</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Client'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Skylake-Client-v2'>Skylake-Client-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Client-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Skylake-Client-v3'>Skylake-Client-noTSX-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Client-noTSX-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Client-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Client-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Client-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Client-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Client-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Client-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Client-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Client-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Skylake-Server-v1'>Skylake-Server</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Skylake-Server-v2'>Skylake-Server-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Skylake-Server-v3'>Skylake-Server-noTSX-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server-noTSX-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Server-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Server-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Server-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Server-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Server-v5</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server-v5'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Snowridge-v1'>Snowridge</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Snowridge'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cldemote'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='core-capability'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdir64b'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdiri'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mpx'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='split-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Snowridge-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Snowridge-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cldemote'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='core-capability'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdir64b'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdiri'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mpx'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='split-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Snowridge-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Snowridge-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cldemote'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='core-capability'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdir64b'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdiri'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='split-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Snowridge-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Snowridge-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cldemote'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='core-capability'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdir64b'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdiri'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='split-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Snowridge-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Snowridge-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cldemote'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdir64b'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdiri'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel' canonical='Westmere-v1'>Westmere</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel' canonical='Westmere-v2'>Westmere-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel'>Westmere-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel'>Westmere-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='AMD' canonical='athlon-v1'>athlon</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='athlon'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnow'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnowext'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='AMD'>athlon-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='athlon-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnow'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnowext'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='Intel' canonical='core2duo-v1'>core2duo</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='core2duo'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='Intel'>core2duo-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='core2duo-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='Intel' canonical='coreduo-v1'>coreduo</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='coreduo'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='Intel'>coreduo-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='coreduo-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='kvm32-v1'>kvm32</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>kvm32-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='kvm64-v1'>kvm64</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>kvm64-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='Intel' canonical='n270-v1'>n270</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='n270'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='Intel'>n270-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='n270-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='pentium-v1'>pentium</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>pentium-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='pentium2-v1'>pentium2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>pentium2-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='pentium3-v1'>pentium3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>pentium3-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='AMD' canonical='phenom-v1'>phenom</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='phenom'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnow'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnowext'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='AMD'>phenom-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='phenom-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnow'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnowext'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='qemu32-v1'>qemu32</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>qemu32-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='qemu64-v1'>qemu64</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>qemu64-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </mode>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   </cpu>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <memoryBacking supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <enum name='sourceType'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <value>file</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <value>anonymous</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <value>memfd</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   </memoryBacking>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <devices>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <disk supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='diskDevice'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>disk</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>cdrom</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>floppy</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>lun</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='bus'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>ide</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>fdc</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>scsi</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>usb</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>sata</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='model'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio-transitional</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio-non-transitional</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </disk>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <graphics supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='type'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>vnc</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>egl-headless</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>dbus</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </graphics>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <video supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='modelType'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>vga</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>cirrus</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>none</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>bochs</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>ramfb</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </video>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <hostdev supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='mode'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>subsystem</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='startupPolicy'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>default</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>mandatory</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>requisite</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>optional</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='subsysType'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>usb</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>pci</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>scsi</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='capsType'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='pciBackend'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </hostdev>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <rng supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='model'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio-transitional</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio-non-transitional</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='backendModel'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>random</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>egd</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>builtin</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </rng>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <filesystem supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='driverType'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>path</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>handle</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtiofs</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </filesystem>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <tpm supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='model'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>tpm-tis</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>tpm-crb</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='backendModel'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>emulator</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>external</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='backendVersion'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>2.0</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </tpm>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <redirdev supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='bus'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>usb</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </redirdev>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <channel supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='type'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>pty</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>unix</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </channel>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <crypto supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='model'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='type'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>qemu</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='backendModel'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>builtin</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </crypto>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <interface supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='backendType'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>default</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>passt</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </interface>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <panic supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='model'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>isa</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>hyperv</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </panic>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   </devices>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <features>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <gic supported='no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <vmcoreinfo supported='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <genid supported='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <backingStoreInput supported='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <backup supported='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <async-teardown supported='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <ps2 supported='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <sev supported='no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <sgx supported='no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <hyperv supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='features'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>relaxed</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>vapic</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>spinlocks</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>vpindex</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>runtime</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>synic</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>stimer</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>reset</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>vendor_id</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>frequencies</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>reenlightenment</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>tlbflush</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>ipi</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>avic</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>emsr_bitmap</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>xmm_input</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </hyperv>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <launchSecurity supported='no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   </features>
Oct 02 11:53:59 compute-0 nova_compute[191146]: </domainCapabilities>
Oct 02 11:53:59 compute-0 nova_compute[191146]:  _get_domain_capabilities /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1037
Oct 02 11:53:59 compute-0 nova_compute[191146]: 2025-10-02 11:53:59.408 2 DEBUG nova.virt.libvirt.host [None req-50bbc704-95d9-4560-894b-26657473f6d0 - - - - - -] Getting domain capabilities for x86_64 via machine types: {'q35', 'pc'} _get_machine_types /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:952
Oct 02 11:53:59 compute-0 nova_compute[191146]: 2025-10-02 11:53:59.411 2 DEBUG nova.virt.libvirt.host [None req-50bbc704-95d9-4560-894b-26657473f6d0 - - - - - -] Libvirt host hypervisor capabilities for arch=x86_64 and machine_type=q35:
Oct 02 11:53:59 compute-0 nova_compute[191146]: <domainCapabilities>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <path>/usr/libexec/qemu-kvm</path>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <domain>kvm</domain>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <machine>pc-q35-rhel9.6.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <arch>x86_64</arch>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <vcpu max='4096'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <iothreads supported='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <os supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <enum name='firmware'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <value>efi</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <loader supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <value>/usr/share/edk2/ovmf/OVMF_CODE.secboot.fd</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <value>/usr/share/edk2/ovmf/OVMF_CODE.fd</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <value>/usr/share/edk2/ovmf/OVMF.amdsev.fd</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <value>/usr/share/edk2/ovmf/OVMF.inteltdx.secboot.fd</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='type'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>rom</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>pflash</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='readonly'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>yes</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>no</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='secure'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>yes</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>no</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </loader>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   </os>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <cpu>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <mode name='host-passthrough' supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='hostPassthroughMigratable'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>on</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>off</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </mode>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <mode name='maximum' supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='maximumMigratable'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>on</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>off</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </mode>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <mode name='host-model' supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model fallback='forbid'>EPYC-Rome</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <vendor>AMD</vendor>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <maxphysaddr mode='passthrough' limit='40'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='x2apic'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='tsc-deadline'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='hypervisor'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='tsc_adjust'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='spec-ctrl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='stibp'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='arch-capabilities'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='ssbd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='cmp_legacy'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='overflow-recov'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='succor'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='ibrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='amd-ssbd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='virt-ssbd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='lbrv'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='tsc-scale'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='vmcb-clean'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='flushbyasid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='pause-filter'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='pfthreshold'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='svme-addr-chk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='lfence-always-serializing'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='rdctl-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='skip-l1dfl-vmentry'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='mds-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='pschange-mc-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='gds-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='rfds-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='disable' name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </mode>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <mode name='custom' supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='486-v1'>486</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>486-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Broadwell-v1'>Broadwell</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Broadwell-v3'>Broadwell-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Broadwell-v2'>Broadwell-noTSX</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell-noTSX'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Broadwell-v4'>Broadwell-noTSX-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell-noTSX-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Broadwell-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Broadwell-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Broadwell-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Broadwell-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Cascadelake-Server-v1'>Cascadelake-Server</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cascadelake-Server'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Cascadelake-Server-v3'>Cascadelake-Server-noTSX</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cascadelake-Server-noTSX'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cascadelake-Server-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cascadelake-Server-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cascadelake-Server-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cascadelake-Server-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v5</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cascadelake-Server-v5'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='Intel' canonical='Conroe-v1'>Conroe</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='Intel'>Conroe-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Cooperlake-v1'>Cooperlake</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cooperlake'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Cooperlake-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cooperlake-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Cooperlake-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cooperlake-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Denverton-v1'>Denverton</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Denverton'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mpx'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Denverton-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Denverton-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mpx'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Denverton-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Denverton-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Denverton-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Denverton-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Hygon' canonical='Dhyana-v1'>Dhyana</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Hygon'>Dhyana-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Hygon'>Dhyana-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Dhyana-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='AMD' canonical='EPYC-v1'>EPYC</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD' canonical='EPYC-Genoa-v1'>EPYC-Genoa</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Genoa'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amd-psfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='auto-ibrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='no-nested-data-bp'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='null-sel-clr-base'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='stibp-always-on'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-Genoa-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Genoa-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amd-psfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='auto-ibrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='no-nested-data-bp'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='null-sel-clr-base'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='stibp-always-on'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='AMD' canonical='EPYC-v2'>EPYC-IBPB</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD' canonical='EPYC-Milan-v1'>EPYC-Milan</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Milan'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-Milan-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Milan-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-Milan-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Milan-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amd-psfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='no-nested-data-bp'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='null-sel-clr-base'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='stibp-always-on'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD' canonical='EPYC-Rome-v1'>EPYC-Rome</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Rome'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-Rome-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Rome-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-Rome-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Rome-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-Rome-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Rome-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='AMD'>EPYC-Rome-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='AMD'>EPYC-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='AMD'>EPYC-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='GraniteRapids-v1'>GraniteRapids</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='GraniteRapids'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-tile'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fbsdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrc'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fzrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mcdt-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pbrsb-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='prefetchiti'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='psdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tsx-ldtrk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>GraniteRapids-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='GraniteRapids-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-tile'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fbsdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrc'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fzrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mcdt-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pbrsb-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='prefetchiti'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='psdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tsx-ldtrk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>GraniteRapids-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='GraniteRapids-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-tile'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx10'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx10-128'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx10-256'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx10-512'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cldemote'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fbsdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrc'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fzrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mcdt-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdir64b'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdiri'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pbrsb-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='prefetchiti'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='psdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tsx-ldtrk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Haswell-v1'>Haswell</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Haswell-v3'>Haswell-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Haswell-v2'>Haswell-noTSX</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell-noTSX'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Haswell-v4'>Haswell-noTSX-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell-noTSX-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Haswell-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Haswell-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Haswell-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Haswell-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Icelake-Server-v1'>Icelake-Server</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Icelake-Server-v2'>Icelake-Server-noTSX</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-noTSX'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Icelake-Server-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Icelake-Server-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Icelake-Server-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Icelake-Server-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Icelake-Server-v5</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-v5'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Icelake-Server-v6</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-v6'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Icelake-Server-v7</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-v7'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='IvyBridge-v1'>IvyBridge</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='IvyBridge'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='IvyBridge-v2'>IvyBridge-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='IvyBridge-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>IvyBridge-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='IvyBridge-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>IvyBridge-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='IvyBridge-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='KnightsMill-v1'>KnightsMill</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='KnightsMill'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-4fmaps'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-4vnniw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512er'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512pf'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>KnightsMill-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='KnightsMill-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-4fmaps'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-4vnniw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512er'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512pf'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel' canonical='Nehalem-v1'>Nehalem</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel' canonical='Nehalem-v2'>Nehalem-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel'>Nehalem-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel'>Nehalem-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='AMD' canonical='Opteron_G1-v1'>Opteron_G1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='AMD'>Opteron_G1-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='AMD' canonical='Opteron_G2-v1'>Opteron_G2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='AMD'>Opteron_G2-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='AMD' canonical='Opteron_G3-v1'>Opteron_G3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='AMD'>Opteron_G3-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD' canonical='Opteron_G4-v1'>Opteron_G4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Opteron_G4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fma4'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xop'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>Opteron_G4-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Opteron_G4-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fma4'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xop'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD' canonical='Opteron_G5-v1'>Opteron_G5</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Opteron_G5'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fma4'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tbm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xop'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>Opteron_G5-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Opteron_G5-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fma4'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tbm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xop'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='Intel' canonical='Penryn-v1'>Penryn</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='Intel'>Penryn-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel' canonical='SandyBridge-v1'>SandyBridge</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel' canonical='SandyBridge-v2'>SandyBridge-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel'>SandyBridge-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel'>SandyBridge-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='SapphireRapids-v1'>SapphireRapids</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='SapphireRapids'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-tile'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrc'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fzrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tsx-ldtrk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>SapphireRapids-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='SapphireRapids-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-tile'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrc'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fzrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tsx-ldtrk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>SapphireRapids-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='SapphireRapids-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-tile'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fbsdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrc'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fzrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='psdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tsx-ldtrk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>SapphireRapids-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='SapphireRapids-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-tile'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cldemote'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fbsdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrc'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fzrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdir64b'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdiri'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='psdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tsx-ldtrk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='SierraForest-v1'>SierraForest</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='SierraForest'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-ne-convert'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cmpccxadd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fbsdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mcdt-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pbrsb-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='psdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>SierraForest-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='SierraForest-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-ne-convert'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cmpccxadd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fbsdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mcdt-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pbrsb-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='psdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Skylake-Client-v1'>Skylake-Client</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Client'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Skylake-Client-v2'>Skylake-Client-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Client-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Skylake-Client-v3'>Skylake-Client-noTSX-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Client-noTSX-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Client-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Client-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Client-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Client-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Client-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Client-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Client-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Client-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Skylake-Server-v1'>Skylake-Server</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Skylake-Server-v2'>Skylake-Server-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Skylake-Server-v3'>Skylake-Server-noTSX-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server-noTSX-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Server-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Server-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Server-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Server-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Server-v5</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server-v5'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Snowridge-v1'>Snowridge</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Snowridge'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cldemote'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='core-capability'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdir64b'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdiri'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mpx'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='split-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Snowridge-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Snowridge-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cldemote'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='core-capability'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdir64b'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdiri'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mpx'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='split-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Snowridge-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Snowridge-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cldemote'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='core-capability'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdir64b'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdiri'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='split-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Snowridge-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Snowridge-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cldemote'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='core-capability'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdir64b'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdiri'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='split-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Snowridge-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Snowridge-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cldemote'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdir64b'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdiri'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel' canonical='Westmere-v1'>Westmere</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel' canonical='Westmere-v2'>Westmere-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel'>Westmere-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel'>Westmere-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='AMD' canonical='athlon-v1'>athlon</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='athlon'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnow'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnowext'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='AMD'>athlon-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='athlon-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnow'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnowext'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='Intel' canonical='core2duo-v1'>core2duo</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='core2duo'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='Intel'>core2duo-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='core2duo-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='Intel' canonical='coreduo-v1'>coreduo</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='coreduo'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='Intel'>coreduo-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='coreduo-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='kvm32-v1'>kvm32</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>kvm32-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='kvm64-v1'>kvm64</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>kvm64-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='Intel' canonical='n270-v1'>n270</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='n270'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='Intel'>n270-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='n270-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='pentium-v1'>pentium</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>pentium-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='pentium2-v1'>pentium2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>pentium2-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='pentium3-v1'>pentium3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>pentium3-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='AMD' canonical='phenom-v1'>phenom</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='phenom'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnow'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnowext'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='AMD'>phenom-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='phenom-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnow'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnowext'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='qemu32-v1'>qemu32</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>qemu32-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='qemu64-v1'>qemu64</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>qemu64-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </mode>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   </cpu>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <memoryBacking supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <enum name='sourceType'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <value>file</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <value>anonymous</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <value>memfd</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   </memoryBacking>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <devices>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <disk supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='diskDevice'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>disk</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>cdrom</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>floppy</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>lun</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='bus'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>fdc</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>scsi</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>usb</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>sata</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='model'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio-transitional</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio-non-transitional</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </disk>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <graphics supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='type'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>vnc</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>egl-headless</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>dbus</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </graphics>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <video supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='modelType'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>vga</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>cirrus</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>none</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>bochs</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>ramfb</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </video>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <hostdev supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='mode'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>subsystem</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='startupPolicy'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>default</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>mandatory</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>requisite</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>optional</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='subsysType'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>usb</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>pci</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>scsi</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='capsType'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='pciBackend'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </hostdev>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <rng supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='model'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio-transitional</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio-non-transitional</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='backendModel'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>random</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>egd</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>builtin</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </rng>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <filesystem supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='driverType'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>path</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>handle</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtiofs</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </filesystem>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <tpm supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='model'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>tpm-tis</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>tpm-crb</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='backendModel'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>emulator</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>external</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='backendVersion'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>2.0</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </tpm>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <redirdev supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='bus'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>usb</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </redirdev>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <channel supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='type'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>pty</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>unix</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </channel>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <crypto supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='model'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='type'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>qemu</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='backendModel'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>builtin</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </crypto>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <interface supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='backendType'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>default</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>passt</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </interface>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <panic supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='model'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>isa</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>hyperv</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </panic>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   </devices>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <features>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <gic supported='no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <vmcoreinfo supported='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <genid supported='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <backingStoreInput supported='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <backup supported='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <async-teardown supported='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <ps2 supported='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <sev supported='no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <sgx supported='no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <hyperv supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='features'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>relaxed</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>vapic</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>spinlocks</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>vpindex</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>runtime</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>synic</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>stimer</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>reset</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>vendor_id</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>frequencies</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>reenlightenment</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>tlbflush</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>ipi</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>avic</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>emsr_bitmap</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>xmm_input</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </hyperv>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <launchSecurity supported='no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   </features>
Oct 02 11:53:59 compute-0 nova_compute[191146]: </domainCapabilities>
Oct 02 11:53:59 compute-0 nova_compute[191146]:  _get_domain_capabilities /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1037
Oct 02 11:53:59 compute-0 nova_compute[191146]: 2025-10-02 11:53:59.477 2 DEBUG nova.virt.libvirt.host [None req-50bbc704-95d9-4560-894b-26657473f6d0 - - - - - -] Libvirt host hypervisor capabilities for arch=x86_64 and machine_type=pc:
Oct 02 11:53:59 compute-0 nova_compute[191146]: <domainCapabilities>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <path>/usr/libexec/qemu-kvm</path>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <domain>kvm</domain>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <machine>pc-i440fx-rhel7.6.0</machine>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <arch>x86_64</arch>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <vcpu max='240'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <iothreads supported='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <os supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <enum name='firmware'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <loader supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <value>/usr/share/OVMF/OVMF_CODE.secboot.fd</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='type'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>rom</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>pflash</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='readonly'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>yes</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>no</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='secure'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>no</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </loader>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   </os>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <cpu>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <mode name='host-passthrough' supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='hostPassthroughMigratable'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>on</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>off</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </mode>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <mode name='maximum' supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='maximumMigratable'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>on</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>off</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </mode>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <mode name='host-model' supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model fallback='forbid'>EPYC-Rome</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <vendor>AMD</vendor>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <maxphysaddr mode='passthrough' limit='40'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='x2apic'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='tsc-deadline'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='hypervisor'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='tsc_adjust'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='spec-ctrl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='stibp'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='arch-capabilities'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='ssbd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='cmp_legacy'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='overflow-recov'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='succor'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='ibrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='amd-ssbd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='virt-ssbd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='lbrv'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='tsc-scale'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='vmcb-clean'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='flushbyasid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='pause-filter'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='pfthreshold'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='svme-addr-chk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='lfence-always-serializing'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='rdctl-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='skip-l1dfl-vmentry'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='mds-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='pschange-mc-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='gds-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='require' name='rfds-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <feature policy='disable' name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </mode>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <mode name='custom' supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='486-v1'>486</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>486-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Broadwell-v1'>Broadwell</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Broadwell-v3'>Broadwell-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Broadwell-v2'>Broadwell-noTSX</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell-noTSX'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Broadwell-v4'>Broadwell-noTSX-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell-noTSX-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Broadwell-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Broadwell-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Broadwell-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Broadwell-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Broadwell-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Cascadelake-Server-v1'>Cascadelake-Server</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cascadelake-Server'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Cascadelake-Server-v3'>Cascadelake-Server-noTSX</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cascadelake-Server-noTSX'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cascadelake-Server-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cascadelake-Server-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cascadelake-Server-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cascadelake-Server-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v5</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cascadelake-Server-v5'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='Intel' canonical='Conroe-v1'>Conroe</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='Intel'>Conroe-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Cooperlake-v1'>Cooperlake</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cooperlake'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Cooperlake-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cooperlake-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Cooperlake-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Cooperlake-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Denverton-v1'>Denverton</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Denverton'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mpx'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Denverton-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Denverton-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mpx'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Denverton-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Denverton-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Denverton-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Denverton-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Hygon' canonical='Dhyana-v1'>Dhyana</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Hygon'>Dhyana-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Hygon'>Dhyana-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Dhyana-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='AMD' canonical='EPYC-v1'>EPYC</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD' canonical='EPYC-Genoa-v1'>EPYC-Genoa</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Genoa'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amd-psfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='auto-ibrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='no-nested-data-bp'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='null-sel-clr-base'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='stibp-always-on'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-Genoa-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Genoa-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amd-psfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='auto-ibrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='no-nested-data-bp'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='null-sel-clr-base'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='stibp-always-on'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='AMD' canonical='EPYC-v2'>EPYC-IBPB</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD' canonical='EPYC-Milan-v1'>EPYC-Milan</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Milan'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-Milan-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Milan-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-Milan-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Milan-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amd-psfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='no-nested-data-bp'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='null-sel-clr-base'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='stibp-always-on'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD' canonical='EPYC-Rome-v1'>EPYC-Rome</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Rome'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-Rome-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Rome-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-Rome-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Rome-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-Rome-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-Rome-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='AMD'>EPYC-Rome-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='AMD'>EPYC-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='AMD'>EPYC-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>EPYC-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='EPYC-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='GraniteRapids-v1'>GraniteRapids</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='GraniteRapids'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-tile'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fbsdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrc'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fzrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mcdt-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pbrsb-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='prefetchiti'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='psdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tsx-ldtrk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>GraniteRapids-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='GraniteRapids-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-tile'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fbsdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrc'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fzrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mcdt-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pbrsb-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='prefetchiti'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='psdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tsx-ldtrk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>GraniteRapids-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='GraniteRapids-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-tile'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx10'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx10-128'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx10-256'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx10-512'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cldemote'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fbsdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrc'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fzrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mcdt-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdir64b'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdiri'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pbrsb-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='prefetchiti'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='psdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tsx-ldtrk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Haswell-v1'>Haswell</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Haswell-v3'>Haswell-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Haswell-v2'>Haswell-noTSX</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell-noTSX'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Haswell-v4'>Haswell-noTSX-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell-noTSX-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Haswell-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Haswell-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Haswell-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Haswell-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Haswell-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Icelake-Server-v1'>Icelake-Server</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Icelake-Server-v2'>Icelake-Server-noTSX</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-noTSX'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Icelake-Server-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Icelake-Server-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Icelake-Server-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Icelake-Server-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Icelake-Server-v5</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-v5'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Icelake-Server-v6</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-v6'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Icelake-Server-v7</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Icelake-Server-v7'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='IvyBridge-v1'>IvyBridge</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='IvyBridge'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='IvyBridge-v2'>IvyBridge-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='IvyBridge-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>IvyBridge-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='IvyBridge-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>IvyBridge-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='IvyBridge-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='KnightsMill-v1'>KnightsMill</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='KnightsMill'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-4fmaps'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-4vnniw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512er'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512pf'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>KnightsMill-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='KnightsMill-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-4fmaps'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-4vnniw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512er'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512pf'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel' canonical='Nehalem-v1'>Nehalem</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel' canonical='Nehalem-v2'>Nehalem-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel'>Nehalem-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel'>Nehalem-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='AMD' canonical='Opteron_G1-v1'>Opteron_G1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='AMD'>Opteron_G1-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='AMD' canonical='Opteron_G2-v1'>Opteron_G2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='AMD'>Opteron_G2-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='AMD' canonical='Opteron_G3-v1'>Opteron_G3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='AMD'>Opteron_G3-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD' canonical='Opteron_G4-v1'>Opteron_G4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Opteron_G4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fma4'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xop'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>Opteron_G4-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Opteron_G4-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fma4'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xop'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD' canonical='Opteron_G5-v1'>Opteron_G5</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Opteron_G5'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fma4'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tbm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xop'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='AMD'>Opteron_G5-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Opteron_G5-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fma4'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tbm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xop'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='Intel' canonical='Penryn-v1'>Penryn</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='Intel'>Penryn-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel' canonical='SandyBridge-v1'>SandyBridge</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel' canonical='SandyBridge-v2'>SandyBridge-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel'>SandyBridge-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel'>SandyBridge-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='SapphireRapids-v1'>SapphireRapids</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='SapphireRapids'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-tile'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrc'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fzrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tsx-ldtrk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>SapphireRapids-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='SapphireRapids-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-tile'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrc'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fzrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tsx-ldtrk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>SapphireRapids-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='SapphireRapids-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-tile'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fbsdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrc'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fzrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='psdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tsx-ldtrk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>SapphireRapids-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='SapphireRapids-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='amx-tile'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-bf16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-fp16'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bitalg'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vbmi2'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cldemote'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fbsdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrc'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fzrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='la57'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdir64b'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdiri'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='psdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='taa-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='tsx-ldtrk'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xfd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='SierraForest-v1'>SierraForest</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='SierraForest'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-ne-convert'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cmpccxadd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fbsdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mcdt-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pbrsb-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='psdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>SierraForest-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='SierraForest-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-ifma'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-ne-convert'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx-vnni-int8'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='bus-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cmpccxadd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fbsdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='fsrs'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ibrs-all'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mcdt-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pbrsb-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='psdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='serialize'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vaes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='vpclmulqdq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Skylake-Client-v1'>Skylake-Client</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Client'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Skylake-Client-v2'>Skylake-Client-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Client-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Skylake-Client-v3'>Skylake-Client-noTSX-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Client-noTSX-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Client-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Client-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Client-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Client-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Client-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Client-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Client-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Client-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Skylake-Server-v1'>Skylake-Server</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Skylake-Server-v2'>Skylake-Server-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Skylake-Server-v3'>Skylake-Server-noTSX-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server-noTSX-IBRS'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Server-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Server-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='hle'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='rtm'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Server-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Server-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Skylake-Server-v5</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Skylake-Server-v5'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512bw'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512cd'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512dq'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512f'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='avx512vl'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='invpcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pcid'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='pku'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel' canonical='Snowridge-v1'>Snowridge</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Snowridge'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cldemote'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='core-capability'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdir64b'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdiri'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mpx'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='split-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Snowridge-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Snowridge-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cldemote'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='core-capability'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdir64b'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdiri'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='mpx'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='split-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Snowridge-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Snowridge-v2'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cldemote'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='core-capability'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdir64b'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdiri'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='split-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Snowridge-v3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Snowridge-v3'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cldemote'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='core-capability'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdir64b'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdiri'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='split-lock-detect'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' vendor='Intel'>Snowridge-v4</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='Snowridge-v4'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='cldemote'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='erms'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='gfni'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdir64b'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='movdiri'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='xsaves'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel' canonical='Westmere-v1'>Westmere</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel' canonical='Westmere-v2'>Westmere-IBRS</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel'>Westmere-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' vendor='Intel'>Westmere-v2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='AMD' canonical='athlon-v1'>athlon</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='athlon'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnow'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnowext'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='AMD'>athlon-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='athlon-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnow'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnowext'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='Intel' canonical='core2duo-v1'>core2duo</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='core2duo'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='Intel'>core2duo-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='core2duo-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='Intel' canonical='coreduo-v1'>coreduo</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='coreduo'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='Intel'>coreduo-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='coreduo-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='kvm32-v1'>kvm32</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>kvm32-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='kvm64-v1'>kvm64</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>kvm64-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='Intel' canonical='n270-v1'>n270</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='n270'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='Intel'>n270-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='n270-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='ss'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='pentium-v1'>pentium</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>pentium-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='pentium2-v1'>pentium2</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>pentium2-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='pentium3-v1'>pentium3</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>pentium3-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='AMD' canonical='phenom-v1'>phenom</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='phenom'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnow'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnowext'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='no' deprecated='yes' vendor='AMD'>phenom-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <blockers model='phenom-v1'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnow'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <feature name='3dnowext'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </blockers>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='qemu32-v1'>qemu32</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>qemu32-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='qemu64-v1'>qemu64</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <model usable='yes' deprecated='yes' vendor='unknown'>qemu64-v1</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </mode>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   </cpu>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <memoryBacking supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <enum name='sourceType'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <value>file</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <value>anonymous</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <value>memfd</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   </memoryBacking>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <devices>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <disk supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='diskDevice'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>disk</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>cdrom</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>floppy</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>lun</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='bus'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>ide</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>fdc</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>scsi</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>usb</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>sata</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='model'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio-transitional</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio-non-transitional</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </disk>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <graphics supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='type'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>vnc</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>egl-headless</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>dbus</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </graphics>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <video supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='modelType'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>vga</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>cirrus</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>none</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>bochs</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>ramfb</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </video>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <hostdev supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='mode'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>subsystem</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='startupPolicy'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>default</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>mandatory</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>requisite</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>optional</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='subsysType'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>usb</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>pci</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>scsi</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='capsType'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='pciBackend'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </hostdev>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <rng supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='model'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio-transitional</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtio-non-transitional</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='backendModel'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>random</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>egd</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>builtin</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </rng>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <filesystem supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='driverType'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>path</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>handle</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>virtiofs</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </filesystem>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <tpm supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='model'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>tpm-tis</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>tpm-crb</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='backendModel'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>emulator</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>external</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='backendVersion'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>2.0</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </tpm>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <redirdev supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='bus'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>usb</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </redirdev>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <channel supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='type'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>pty</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>unix</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </channel>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <crypto supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='model'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='type'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>qemu</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='backendModel'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>builtin</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </crypto>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <interface supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='backendType'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>default</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>passt</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </interface>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <panic supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='model'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>isa</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>hyperv</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </panic>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   </devices>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <features>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <gic supported='no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <vmcoreinfo supported='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <genid supported='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <backingStoreInput supported='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <backup supported='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <async-teardown supported='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <ps2 supported='yes'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <sev supported='no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <sgx supported='no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <hyperv supported='yes'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       <enum name='features'>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>relaxed</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>vapic</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>spinlocks</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>vpindex</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>runtime</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>synic</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>stimer</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>reset</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>vendor_id</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>frequencies</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>reenlightenment</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>tlbflush</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>ipi</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>avic</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>emsr_bitmap</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:         <value>xmm_input</value>
Oct 02 11:53:59 compute-0 nova_compute[191146]:       </enum>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     </hyperv>
Oct 02 11:53:59 compute-0 nova_compute[191146]:     <launchSecurity supported='no'/>
Oct 02 11:53:59 compute-0 nova_compute[191146]:   </features>
Oct 02 11:53:59 compute-0 nova_compute[191146]: </domainCapabilities>
Oct 02 11:53:59 compute-0 nova_compute[191146]:  _get_domain_capabilities /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1037
Oct 02 11:53:59 compute-0 nova_compute[191146]: 2025-10-02 11:53:59.531 2 DEBUG nova.virt.libvirt.host [None req-50bbc704-95d9-4560-894b-26657473f6d0 - - - - - -] Checking secure boot support for host arch (x86_64) supports_secure_boot /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1782
Oct 02 11:53:59 compute-0 nova_compute[191146]: 2025-10-02 11:53:59.531 2 INFO nova.virt.libvirt.host [None req-50bbc704-95d9-4560-894b-26657473f6d0 - - - - - -] Secure Boot support detected
Oct 02 11:53:59 compute-0 nova_compute[191146]: 2025-10-02 11:53:59.533 2 INFO nova.virt.libvirt.driver [None req-50bbc704-95d9-4560-894b-26657473f6d0 - - - - - -] The live_migration_permit_post_copy is set to True and post copy live migration is available so auto-converge will not be in use.
Oct 02 11:53:59 compute-0 nova_compute[191146]: 2025-10-02 11:53:59.533 2 INFO nova.virt.libvirt.driver [None req-50bbc704-95d9-4560-894b-26657473f6d0 - - - - - -] The live_migration_permit_post_copy is set to True and post copy live migration is available so auto-converge will not be in use.
Oct 02 11:53:59 compute-0 nova_compute[191146]: 2025-10-02 11:53:59.541 2 DEBUG nova.virt.libvirt.driver [None req-50bbc704-95d9-4560-894b-26657473f6d0 - - - - - -] cpu compare xml: <cpu match="exact">
Oct 02 11:53:59 compute-0 nova_compute[191146]:   <model>Nehalem</model>
Oct 02 11:53:59 compute-0 nova_compute[191146]: </cpu>
Oct 02 11:53:59 compute-0 nova_compute[191146]:  _compare_cpu /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10019
Oct 02 11:53:59 compute-0 nova_compute[191146]: 2025-10-02 11:53:59.544 2 DEBUG nova.virt.libvirt.driver [None req-50bbc704-95d9-4560-894b-26657473f6d0 - - - - - -] Enabling emulated TPM support _check_vtpm_support /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:1097
Oct 02 11:53:59 compute-0 python3.9[192021]: ansible-ansible.builtin.systemd Invoked with name=edpm_nova_compute.service state=restarted daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Oct 02 11:53:59 compute-0 systemd[1]: Stopping nova_compute container...
Oct 02 11:53:59 compute-0 nova_compute[191146]: 2025-10-02 11:53:59.857 2 DEBUG oslo_concurrency.lockutils [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] Acquiring lock "singleton_lock" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 11:53:59 compute-0 nova_compute[191146]: 2025-10-02 11:53:59.857 2 DEBUG oslo_concurrency.lockutils [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] Acquired lock "singleton_lock" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 11:53:59 compute-0 nova_compute[191146]: 2025-10-02 11:53:59.857 2 DEBUG oslo_concurrency.lockutils [None req-8632487d-5115-4a08-aa51-ec3e323cda6f - - - - - -] Releasing lock "singleton_lock" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 11:54:00 compute-0 virtqemud[191807]: libvirt version: 10.10.0, package: 15.el9 (builder@centos.org, 2025-08-18-13:22:20, )
Oct 02 11:54:00 compute-0 virtqemud[191807]: hostname: compute-0
Oct 02 11:54:00 compute-0 virtqemud[191807]: End of file while reading data: Input/output error
Oct 02 11:54:00 compute-0 systemd[1]: libpod-6787aaf4386267c01b59730f16adbda66cb221f8ada3d52065fad1aa02c96e7b.scope: Deactivated successfully.
Oct 02 11:54:00 compute-0 systemd[1]: libpod-6787aaf4386267c01b59730f16adbda66cb221f8ada3d52065fad1aa02c96e7b.scope: Consumed 2.938s CPU time.
Oct 02 11:54:00 compute-0 podman[192025]: 2025-10-02 11:54:00.258267162 +0000 UTC m=+0.442167773 container died 6787aaf4386267c01b59730f16adbda66cb221f8ada3d52065fad1aa02c96e7b (image=quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified, name=nova_compute, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified', 'privileged': True, 'user': 'nova', 'restart': 'always', 'command': 'kolla_start', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'volumes': ['/var/lib/openstack/config/nova:/var/lib/kolla/config_files:ro', '/var/lib/openstack/cacerts/nova/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/etc/localtime:/etc/localtime:ro', '/lib/modules:/lib/modules:ro', '/dev:/dev', '/var/lib/libvirt:/var/lib/libvirt', '/run/libvirt:/run/libvirt:shared', '/var/lib/nova:/var/lib/nova:shared', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/etc/iscsi:/etc/iscsi:ro', '/etc/nvme:/etc/nvme', '/var/lib/openstack/config/ceph:/var/lib/kolla/config_files/ceph:ro', '/etc/ssh/ssh_known_hosts:/etc/ssh/ssh_known_hosts:ro']}, config_id=edpm, container_name=nova_compute)
Oct 02 11:54:00 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-6787aaf4386267c01b59730f16adbda66cb221f8ada3d52065fad1aa02c96e7b-userdata-shm.mount: Deactivated successfully.
Oct 02 11:54:00 compute-0 systemd[1]: var-lib-containers-storage-overlay-67556d8b117c7cc5b237517a7333adae8c4f4a88b1bac9af1bb3f96ef15a577b-merged.mount: Deactivated successfully.
Oct 02 11:54:00 compute-0 podman[192025]: 2025-10-02 11:54:00.308847288 +0000 UTC m=+0.492747899 container cleanup 6787aaf4386267c01b59730f16adbda66cb221f8ada3d52065fad1aa02c96e7b (image=quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified, name=nova_compute, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified', 'privileged': True, 'user': 'nova', 'restart': 'always', 'command': 'kolla_start', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'volumes': ['/var/lib/openstack/config/nova:/var/lib/kolla/config_files:ro', '/var/lib/openstack/cacerts/nova/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/etc/localtime:/etc/localtime:ro', '/lib/modules:/lib/modules:ro', '/dev:/dev', '/var/lib/libvirt:/var/lib/libvirt', '/run/libvirt:/run/libvirt:shared', '/var/lib/nova:/var/lib/nova:shared', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/etc/iscsi:/etc/iscsi:ro', '/etc/nvme:/etc/nvme', '/var/lib/openstack/config/ceph:/var/lib/kolla/config_files/ceph:ro', '/etc/ssh/ssh_known_hosts:/etc/ssh/ssh_known_hosts:ro']}, config_id=edpm, container_name=nova_compute, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 11:54:00 compute-0 podman[192025]: nova_compute
Oct 02 11:54:00 compute-0 podman[192051]: nova_compute
Oct 02 11:54:00 compute-0 systemd[1]: edpm_nova_compute.service: Deactivated successfully.
Oct 02 11:54:00 compute-0 systemd[1]: Stopped nova_compute container.
Oct 02 11:54:00 compute-0 systemd[1]: Starting nova_compute container...
Oct 02 11:54:00 compute-0 systemd[1]: Started libcrun container.
Oct 02 11:54:00 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/67556d8b117c7cc5b237517a7333adae8c4f4a88b1bac9af1bb3f96ef15a577b/merged/etc/nvme supports timestamps until 2038 (0x7fffffff)
Oct 02 11:54:00 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/67556d8b117c7cc5b237517a7333adae8c4f4a88b1bac9af1bb3f96ef15a577b/merged/etc/multipath supports timestamps until 2038 (0x7fffffff)
Oct 02 11:54:00 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/67556d8b117c7cc5b237517a7333adae8c4f4a88b1bac9af1bb3f96ef15a577b/merged/var/lib/iscsi supports timestamps until 2038 (0x7fffffff)
Oct 02 11:54:00 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/67556d8b117c7cc5b237517a7333adae8c4f4a88b1bac9af1bb3f96ef15a577b/merged/var/lib/libvirt supports timestamps until 2038 (0x7fffffff)
Oct 02 11:54:00 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/67556d8b117c7cc5b237517a7333adae8c4f4a88b1bac9af1bb3f96ef15a577b/merged/var/lib/nova supports timestamps until 2038 (0x7fffffff)
Oct 02 11:54:00 compute-0 podman[192064]: 2025-10-02 11:54:00.471719458 +0000 UTC m=+0.081943030 container init 6787aaf4386267c01b59730f16adbda66cb221f8ada3d52065fad1aa02c96e7b (image=quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified, name=nova_compute, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=edpm, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified', 'privileged': True, 'user': 'nova', 'restart': 'always', 'command': 'kolla_start', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'volumes': ['/var/lib/openstack/config/nova:/var/lib/kolla/config_files:ro', '/var/lib/openstack/cacerts/nova/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/etc/localtime:/etc/localtime:ro', '/lib/modules:/lib/modules:ro', '/dev:/dev', '/var/lib/libvirt:/var/lib/libvirt', '/run/libvirt:/run/libvirt:shared', '/var/lib/nova:/var/lib/nova:shared', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/etc/iscsi:/etc/iscsi:ro', '/etc/nvme:/etc/nvme', '/var/lib/openstack/config/ceph:/var/lib/kolla/config_files/ceph:ro', '/etc/ssh/ssh_known_hosts:/etc/ssh/ssh_known_hosts:ro']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, container_name=nova_compute)
Oct 02 11:54:00 compute-0 podman[192064]: 2025-10-02 11:54:00.480858956 +0000 UTC m=+0.091082518 container start 6787aaf4386267c01b59730f16adbda66cb221f8ada3d52065fad1aa02c96e7b (image=quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified, name=nova_compute, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=edpm, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, container_name=nova_compute, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified', 'privileged': True, 'user': 'nova', 'restart': 'always', 'command': 'kolla_start', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'volumes': ['/var/lib/openstack/config/nova:/var/lib/kolla/config_files:ro', '/var/lib/openstack/cacerts/nova/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/etc/localtime:/etc/localtime:ro', '/lib/modules:/lib/modules:ro', '/dev:/dev', '/var/lib/libvirt:/var/lib/libvirt', '/run/libvirt:/run/libvirt:shared', '/var/lib/nova:/var/lib/nova:shared', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/etc/iscsi:/etc/iscsi:ro', '/etc/nvme:/etc/nvme', '/var/lib/openstack/config/ceph:/var/lib/kolla/config_files/ceph:ro', '/etc/ssh/ssh_known_hosts:/etc/ssh/ssh_known_hosts:ro']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2)
Oct 02 11:54:00 compute-0 podman[192064]: nova_compute
Oct 02 11:54:00 compute-0 nova_compute[192079]: + sudo -E kolla_set_configs
Oct 02 11:54:00 compute-0 systemd[1]: Started nova_compute container.
Oct 02 11:54:00 compute-0 sudo[192019]: pam_unix(sudo:session): session closed for user root
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Loading config file at /var/lib/kolla/config_files/config.json
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Validating config file
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Kolla config strategy set to: COPY_ALWAYS
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Copying service configuration files
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Deleting /etc/nova/nova.conf
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Copying /var/lib/kolla/config_files/nova-blank.conf to /etc/nova/nova.conf
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Setting permission for /etc/nova/nova.conf
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Deleting /etc/nova/nova.conf.d/01-nova.conf
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Copying /var/lib/kolla/config_files/01-nova.conf to /etc/nova/nova.conf.d/01-nova.conf
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Setting permission for /etc/nova/nova.conf.d/01-nova.conf
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Deleting /etc/nova/nova.conf.d/25-nova-extra.conf
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Copying /var/lib/kolla/config_files/25-nova-extra.conf to /etc/nova/nova.conf.d/25-nova-extra.conf
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Setting permission for /etc/nova/nova.conf.d/25-nova-extra.conf
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Deleting /etc/nova/nova.conf.d/nova-blank.conf
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Copying /var/lib/kolla/config_files/nova-blank.conf to /etc/nova/nova.conf.d/nova-blank.conf
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Setting permission for /etc/nova/nova.conf.d/nova-blank.conf
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Deleting /etc/nova/nova.conf.d/02-nova-host-specific.conf
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Copying /var/lib/kolla/config_files/02-nova-host-specific.conf to /etc/nova/nova.conf.d/02-nova-host-specific.conf
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Setting permission for /etc/nova/nova.conf.d/02-nova-host-specific.conf
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Deleting /etc/ceph
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Creating directory /etc/ceph
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Setting permission for /etc/ceph
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Deleting /var/lib/nova/.ssh/ssh-privatekey
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Copying /var/lib/kolla/config_files/ssh-privatekey to /var/lib/nova/.ssh/ssh-privatekey
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Setting permission for /var/lib/nova/.ssh/ssh-privatekey
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Deleting /var/lib/nova/.ssh/config
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Copying /var/lib/kolla/config_files/ssh-config to /var/lib/nova/.ssh/config
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Setting permission for /var/lib/nova/.ssh/config
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Writing out command to execute
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Setting permission for /var/lib/nova/.ssh/
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Setting permission for /var/lib/nova/.ssh/ssh-privatekey
Oct 02 11:54:00 compute-0 nova_compute[192079]: INFO:__main__:Setting permission for /var/lib/nova/.ssh/config
Oct 02 11:54:00 compute-0 nova_compute[192079]: ++ cat /run_command
Oct 02 11:54:00 compute-0 nova_compute[192079]: + CMD=nova-compute
Oct 02 11:54:00 compute-0 nova_compute[192079]: + ARGS=
Oct 02 11:54:00 compute-0 nova_compute[192079]: + sudo kolla_copy_cacerts
Oct 02 11:54:00 compute-0 nova_compute[192079]: + [[ ! -n '' ]]
Oct 02 11:54:00 compute-0 nova_compute[192079]: + . kolla_extend_start
Oct 02 11:54:00 compute-0 nova_compute[192079]: Running command: 'nova-compute'
Oct 02 11:54:00 compute-0 nova_compute[192079]: + echo 'Running command: '\''nova-compute'\'''
Oct 02 11:54:00 compute-0 nova_compute[192079]: + umask 0022
Oct 02 11:54:00 compute-0 nova_compute[192079]: + exec nova-compute
Oct 02 11:54:01 compute-0 sudo[192241]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-asxjrgmulzddltgjucysldxrihaorhka ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406040.993547-5245-211136799117834/AnsiballZ_podman_container.py'
Oct 02 11:54:01 compute-0 sudo[192241]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:54:01 compute-0 python3.9[192243]: ansible-containers.podman.podman_container Invoked with name=nova_compute_init state=started executable=podman detach=True debug=False force_restart=False force_delete=True generate_systemd={} image_strict=False recreate=False image=None annotation=None arch=None attach=None authfile=None blkio_weight=None blkio_weight_device=None cap_add=None cap_drop=None cgroup_conf=None cgroup_parent=None cgroupns=None cgroups=None chrootdirs=None cidfile=None cmd_args=None conmon_pidfile=None command=None cpu_period=None cpu_quota=None cpu_rt_period=None cpu_rt_runtime=None cpu_shares=None cpus=None cpuset_cpus=None cpuset_mems=None decryption_key=None delete_depend=None delete_time=None delete_volumes=None detach_keys=None device=None device_cgroup_rule=None device_read_bps=None device_read_iops=None device_write_bps=None device_write_iops=None dns=None dns_option=None dns_search=None entrypoint=None env=None env_file=None env_host=None env_merge=None etc_hosts=None expose=None gidmap=None gpus=None group_add=None group_entry=None healthcheck=None healthcheck_interval=None healthcheck_retries=None healthcheck_start_period=None health_startup_cmd=None health_startup_interval=None health_startup_retries=None health_startup_success=None health_startup_timeout=None healthcheck_timeout=None healthcheck_failure_action=None hooks_dir=None hostname=None hostuser=None http_proxy=None image_volume=None init=None init_ctr=None init_path=None interactive=None ip=None ip6=None ipc=None kernel_memory=None label=None label_file=None log_driver=None log_level=None log_opt=None mac_address=None memory=None memory_reservation=None memory_swap=None memory_swappiness=None mount=None network=None network_aliases=None no_healthcheck=None no_hosts=None oom_kill_disable=None oom_score_adj=None os=None passwd=None passwd_entry=None personality=None pid=None pid_file=None pids_limit=None platform=None pod=None pod_id_file=None preserve_fd=None 
preserve_fds=None privileged=None publish=None publish_all=None pull=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None rdt_class=None read_only=None read_only_tmpfs=None requires=None restart_policy=None restart_time=None retry=None retry_delay=None rm=None rmi=None rootfs=None seccomp_policy=None secrets=NOT_LOGGING_PARAMETER sdnotify=None security_opt=None shm_size=None shm_size_systemd=None sig_proxy=None stop_signal=None stop_timeout=None stop_time=None subgidname=None subuidname=None sysctl=None systemd=None timeout=None timezone=None tls_verify=None tmpfs=None tty=None uidmap=None ulimit=None umask=None unsetenv=None unsetenv_all=None user=None userns=None uts=None variant=None volume=None volumes_from=None workdir=None
Oct 02 11:54:01 compute-0 systemd[1]: Started libpod-conmon-a84b5326f13e77dc75c2f0bf3a5df719b9ba6279ef3e62de4c7ef314984f720b.scope.
Oct 02 11:54:01 compute-0 systemd[1]: Started libcrun container.
Oct 02 11:54:01 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/cd5e8f9e283dac1e112ec52e7f1a223e1a59deaa3e93132ce0d912f3ed081426/merged/usr/sbin/nova_statedir_ownership.py supports timestamps until 2038 (0x7fffffff)
Oct 02 11:54:01 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/cd5e8f9e283dac1e112ec52e7f1a223e1a59deaa3e93132ce0d912f3ed081426/merged/var/lib/_nova_secontext supports timestamps until 2038 (0x7fffffff)
Oct 02 11:54:01 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/cd5e8f9e283dac1e112ec52e7f1a223e1a59deaa3e93132ce0d912f3ed081426/merged/var/lib/nova supports timestamps until 2038 (0x7fffffff)
Oct 02 11:54:01 compute-0 podman[192268]: 2025-10-02 11:54:01.723067546 +0000 UTC m=+0.100404321 container init a84b5326f13e77dc75c2f0bf3a5df719b9ba6279ef3e62de4c7ef314984f720b (image=quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified, name=nova_compute_init, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified', 'privileged': False, 'user': 'root', 'restart': 'never', 'command': 'bash -c $* -- eval python3 /sbin/nova_statedir_ownership.py | logger -t nova_compute_init', 'net': 'none', 'security_opt': ['label=disable'], 'detach': False, 'environment': {'NOVA_STATEDIR_OWNERSHIP_SKIP': '/var/lib/nova/compute_id', '__OS_DEBUG': False}, 'volumes': ['/dev/log:/dev/log', '/var/lib/nova:/var/lib/nova:shared', '/var/lib/_nova_secontext:/var/lib/_nova_secontext:shared,z', '/var/lib/openstack/config/nova/nova_statedir_ownership.py:/sbin/nova_statedir_ownership.py:z']}, config_id=edpm, container_name=nova_compute_init, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 11:54:01 compute-0 podman[192268]: 2025-10-02 11:54:01.72958519 +0000 UTC m=+0.106921945 container start a84b5326f13e77dc75c2f0bf3a5df719b9ba6279ef3e62de4c7ef314984f720b (image=quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified, name=nova_compute_init, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=nova_compute_init, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified', 'privileged': False, 'user': 'root', 'restart': 'never', 'command': 'bash -c $* -- eval python3 /sbin/nova_statedir_ownership.py | logger -t nova_compute_init', 'net': 'none', 'security_opt': ['label=disable'], 'detach': False, 'environment': {'NOVA_STATEDIR_OWNERSHIP_SKIP': '/var/lib/nova/compute_id', '__OS_DEBUG': False}, 'volumes': ['/dev/log:/dev/log', '/var/lib/nova:/var/lib/nova:shared', '/var/lib/_nova_secontext:/var/lib/_nova_secontext:shared,z', '/var/lib/openstack/config/nova/nova_statedir_ownership.py:/sbin/nova_statedir_ownership.py:z']}, config_id=edpm)
Oct 02 11:54:01 compute-0 python3.9[192243]: ansible-containers.podman.podman_container PODMAN-CONTAINER-DEBUG: podman start nova_compute_init
Oct 02 11:54:01 compute-0 nova_compute_init[192288]: INFO:nova_statedir:Applying nova statedir ownership
Oct 02 11:54:01 compute-0 nova_compute_init[192288]: INFO:nova_statedir:Target ownership for /var/lib/nova: 42436:42436
Oct 02 11:54:01 compute-0 nova_compute_init[192288]: INFO:nova_statedir:Checking uid: 1000 gid: 1000 path: /var/lib/nova/
Oct 02 11:54:01 compute-0 nova_compute_init[192288]: INFO:nova_statedir:Changing ownership of /var/lib/nova from 1000:1000 to 42436:42436
Oct 02 11:54:01 compute-0 nova_compute_init[192288]: INFO:nova_statedir:Setting selinux context of /var/lib/nova to system_u:object_r:container_file_t:s0
Oct 02 11:54:01 compute-0 nova_compute_init[192288]: INFO:nova_statedir:Checking uid: 1000 gid: 1000 path: /var/lib/nova/instances/
Oct 02 11:54:01 compute-0 nova_compute_init[192288]: INFO:nova_statedir:Changing ownership of /var/lib/nova/instances from 1000:1000 to 42436:42436
Oct 02 11:54:01 compute-0 nova_compute_init[192288]: INFO:nova_statedir:Setting selinux context of /var/lib/nova/instances to system_u:object_r:container_file_t:s0
Oct 02 11:54:01 compute-0 nova_compute_init[192288]: INFO:nova_statedir:Checking uid: 42436 gid: 42436 path: /var/lib/nova/.ssh/
Oct 02 11:54:01 compute-0 nova_compute_init[192288]: INFO:nova_statedir:Ownership of /var/lib/nova/.ssh already 42436:42436
Oct 02 11:54:01 compute-0 nova_compute_init[192288]: INFO:nova_statedir:Setting selinux context of /var/lib/nova/.ssh to system_u:object_r:container_file_t:s0
Oct 02 11:54:01 compute-0 nova_compute_init[192288]: INFO:nova_statedir:Checking uid: 42436 gid: 42436 path: /var/lib/nova/.ssh/ssh-privatekey
Oct 02 11:54:01 compute-0 nova_compute_init[192288]: INFO:nova_statedir:Checking uid: 42436 gid: 42436 path: /var/lib/nova/.ssh/config
Oct 02 11:54:01 compute-0 nova_compute_init[192288]: INFO:nova_statedir:Nova statedir ownership complete
Oct 02 11:54:01 compute-0 systemd[1]: libpod-a84b5326f13e77dc75c2f0bf3a5df719b9ba6279ef3e62de4c7ef314984f720b.scope: Deactivated successfully.
Oct 02 11:54:01 compute-0 podman[192289]: 2025-10-02 11:54:01.778872399 +0000 UTC m=+0.023440551 container died a84b5326f13e77dc75c2f0bf3a5df719b9ba6279ef3e62de4c7ef314984f720b (image=quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified, name=nova_compute_init, io.buildah.version=1.41.3, managed_by=edpm_ansible, config_id=edpm, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified', 'privileged': False, 'user': 'root', 'restart': 'never', 'command': 'bash -c $* -- eval python3 /sbin/nova_statedir_ownership.py | logger -t nova_compute_init', 'net': 'none', 'security_opt': ['label=disable'], 'detach': False, 'environment': {'NOVA_STATEDIR_OWNERSHIP_SKIP': '/var/lib/nova/compute_id', '__OS_DEBUG': False}, 'volumes': ['/dev/log:/dev/log', '/var/lib/nova:/var/lib/nova:shared', '/var/lib/_nova_secontext:/var/lib/_nova_secontext:shared,z', '/var/lib/openstack/config/nova/nova_statedir_ownership.py:/sbin/nova_statedir_ownership.py:z']}, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_managed=true, container_name=nova_compute_init)
Oct 02 11:54:01 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-a84b5326f13e77dc75c2f0bf3a5df719b9ba6279ef3e62de4c7ef314984f720b-userdata-shm.mount: Deactivated successfully.
Oct 02 11:54:01 compute-0 systemd[1]: var-lib-containers-storage-overlay-cd5e8f9e283dac1e112ec52e7f1a223e1a59deaa3e93132ce0d912f3ed081426-merged.mount: Deactivated successfully.
Oct 02 11:54:01 compute-0 podman[192302]: 2025-10-02 11:54:01.835829984 +0000 UTC m=+0.047307214 container cleanup a84b5326f13e77dc75c2f0bf3a5df719b9ba6279ef3e62de4c7ef314984f720b (image=quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified, name=nova_compute_init, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified', 'privileged': False, 'user': 'root', 'restart': 'never', 'command': 'bash -c $* -- eval python3 /sbin/nova_statedir_ownership.py | logger -t nova_compute_init', 'net': 'none', 'security_opt': ['label=disable'], 'detach': False, 'environment': {'NOVA_STATEDIR_OWNERSHIP_SKIP': '/var/lib/nova/compute_id', '__OS_DEBUG': False}, 'volumes': ['/dev/log:/dev/log', '/var/lib/nova:/var/lib/nova:shared', '/var/lib/_nova_secontext:/var/lib/_nova_secontext:shared,z', '/var/lib/openstack/config/nova/nova_statedir_ownership.py:/sbin/nova_statedir_ownership.py:z']}, container_name=nova_compute_init, io.buildah.version=1.41.3, config_id=edpm, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 11:54:01 compute-0 systemd[1]: libpod-conmon-a84b5326f13e77dc75c2f0bf3a5df719b9ba6279ef3e62de4c7ef314984f720b.scope: Deactivated successfully.
Oct 02 11:54:01 compute-0 sudo[192241]: pam_unix(sudo:session): session closed for user root
Oct 02 11:54:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:54:02.194 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 11:54:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:54:02.196 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 11:54:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:54:02.196 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 11:54:02 compute-0 nova_compute[192079]: 2025-10-02 11:54:02.505 2 DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_linux_bridge.linux_bridge.LinuxBridgePlugin'>' with name 'linux_bridge' initialize /usr/lib/python3.9/site-packages/os_vif/__init__.py:44
Oct 02 11:54:02 compute-0 nova_compute[192079]: 2025-10-02 11:54:02.506 2 DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_noop.noop.NoOpPlugin'>' with name 'noop' initialize /usr/lib/python3.9/site-packages/os_vif/__init__.py:44
Oct 02 11:54:02 compute-0 nova_compute[192079]: 2025-10-02 11:54:02.506 2 DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_ovs.ovs.OvsPlugin'>' with name 'ovs' initialize /usr/lib/python3.9/site-packages/os_vif/__init__.py:44
Oct 02 11:54:02 compute-0 nova_compute[192079]: 2025-10-02 11:54:02.506 2 INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
Oct 02 11:54:02 compute-0 nova_compute[192079]: 2025-10-02 11:54:02.632 2 DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 11:54:02 compute-0 nova_compute[192079]: 2025-10-02 11:54:02.655 2 DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.023s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 11:54:02 compute-0 sshd-session[157715]: Connection closed by 192.168.122.30 port 42126
Oct 02 11:54:02 compute-0 sshd-session[157712]: pam_unix(sshd:session): session closed for user zuul
Oct 02 11:54:02 compute-0 systemd[1]: session-24.scope: Deactivated successfully.
Oct 02 11:54:02 compute-0 systemd[1]: session-24.scope: Consumed 2min 14.194s CPU time.
Oct 02 11:54:02 compute-0 systemd-logind[827]: Session 24 logged out. Waiting for processes to exit.
Oct 02 11:54:02 compute-0 systemd-logind[827]: Removed session 24.
Oct 02 11:54:03 compute-0 nova_compute[192079]: 2025-10-02 11:54:03.806 2 INFO nova.virt.driver [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] Loading compute driver 'libvirt.LibvirtDriver'
Oct 02 11:54:03 compute-0 nova_compute[192079]: 2025-10-02 11:54:03.921 2 INFO nova.compute.provider_config [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.187 2 DEBUG oslo_concurrency.lockutils [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] Acquiring lock "singleton_lock" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.187 2 DEBUG oslo_concurrency.lockutils [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] Acquired lock "singleton_lock" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.188 2 DEBUG oslo_concurrency.lockutils [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] Releasing lock "singleton_lock" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.188 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] Full set of CONF: _wait_for_exit_or_signal /usr/lib/python3.9/site-packages/oslo_service/service.py:362
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.188 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ******************************************************************************** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2589
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.189 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] Configuration options gathered from: log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2590
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.189 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] command line args: [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2591
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.189 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-compute.conf'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2592
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.189 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ================================================================================ log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2594
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.190 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] allow_resize_to_same_host      = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.190 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] arq_binding_timeout            = 300 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.190 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] backdoor_port                  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.190 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] backdoor_socket                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.190 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] block_device_allocate_retries  = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.190 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] block_device_allocate_retries_interval = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.191 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cert                           = self.pem log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.191 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] compute_driver                 = libvirt.LibvirtDriver log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.191 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] compute_monitors               = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.191 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] config_dir                     = ['/etc/nova/nova.conf.d'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.191 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] config_drive_format            = iso9660 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.192 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] config_file                    = ['/etc/nova/nova.conf', '/etc/nova/nova-compute.conf'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.192 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] config_source                  = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.192 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] console_host                   = compute-0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.192 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] control_exchange               = nova log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.192 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cpu_allocation_ratio           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.193 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] daemon                         = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.193 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] debug                          = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.193 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] default_access_ip_network_name = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.193 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] default_availability_zone      = nova log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.193 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] default_ephemeral_format       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.194 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] default_log_levels             = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.194 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] default_schedule_zone          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.194 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] disk_allocation_ratio          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.194 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] enable_new_services            = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.194 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] enabled_apis                   = ['osapi_compute', 'metadata'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.195 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] enabled_ssl_apis               = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.195 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] flat_injected                  = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.195 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] force_config_drive             = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.195 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] force_raw_images               = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.195 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] graceful_shutdown_timeout      = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.196 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] heal_instance_info_cache_interval = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.196 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] host                           = compute-0.ctlplane.example.com log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.196 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] initial_cpu_allocation_ratio   = 4.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.196 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] initial_disk_allocation_ratio  = 0.9 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.196 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] initial_ram_allocation_ratio   = 1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.197 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] injected_network_template      = /usr/lib/python3.9/site-packages/nova/virt/interfaces.template log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.197 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] instance_build_timeout         = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.197 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] instance_delete_interval       = 300 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.197 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] instance_format                = [instance: %(uuid)s]  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.197 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] instance_name_template         = instance-%08x log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.198 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] instance_usage_audit           = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.198 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] instance_usage_audit_period    = month log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.198 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] instance_uuid_format           = [instance: %(uuid)s]  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.198 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] instances_path                 = /var/lib/nova/instances log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.198 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] internal_service_availability_zone = internal log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.199 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] key                            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.199 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] live_migration_retry_count     = 30 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.199 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] log_config_append              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.199 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] log_date_format                = %Y-%m-%d %H:%M:%S log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.199 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] log_dir                        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.200 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] log_file                       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.200 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] log_options                    = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.200 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] log_rotate_interval            = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.200 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] log_rotate_interval_type       = days log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.200 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] log_rotation_type              = size log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.201 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] logging_context_format_string  = %(asctime)s.%(msecs)03d %(process)d %(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(user_identity)s] %(instance)s%(message)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.201 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] logging_debug_format_suffix    = %(funcName)s %(pathname)s:%(lineno)d log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.201 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] logging_default_format_string  = %(asctime)s.%(msecs)03d %(process)d %(levelname)s %(name)s [-] %(instance)s%(message)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.201 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] logging_exception_prefix       = %(asctime)s.%(msecs)03d %(process)d ERROR %(name)s %(instance)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.201 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] logging_user_identity_format   = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.202 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] long_rpc_timeout               = 1800 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.202 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] max_concurrent_builds          = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.202 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] max_concurrent_live_migrations = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.202 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] max_concurrent_snapshots       = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.203 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] max_local_block_devices        = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.203 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] max_logfile_count              = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.203 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] max_logfile_size_mb            = 20 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.203 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] maximum_instance_delete_attempts = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.203 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] metadata_listen                = 0.0.0.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.204 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] metadata_listen_port           = 8775 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.204 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] metadata_workers               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.204 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] migrate_max_retries            = -1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.204 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] mkisofs_cmd                    = /usr/bin/mkisofs log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.205 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] my_block_storage_ip            = 192.168.122.100 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.205 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] my_ip                          = 192.168.122.100 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.205 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] network_allocate_retries       = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.205 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.205 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] osapi_compute_listen           = 0.0.0.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.205 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] osapi_compute_listen_port      = 8774 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.206 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] osapi_compute_unique_server_name_scope =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.206 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] osapi_compute_workers          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.206 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] password_length                = 12 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.206 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] periodic_enable                = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.206 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] periodic_fuzzy_delay           = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.207 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] pointer_model                  = usbtablet log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.207 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] preallocate_images             = none log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.207 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] publish_errors                 = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.207 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] pybasedir                      = /usr/lib/python3.9/site-packages log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.207 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ram_allocation_ratio           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.208 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] rate_limit_burst               = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.208 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] rate_limit_except_level        = CRITICAL log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.208 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] rate_limit_interval            = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.208 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] reboot_timeout                 = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.208 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] reclaim_instance_interval      = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.209 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] record                         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.209 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] reimage_timeout_per_gb         = 20 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.209 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] report_interval                = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.209 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] rescue_timeout                 = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.209 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] reserved_host_cpus             = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.210 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] reserved_host_disk_mb          = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.210 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] reserved_host_memory_mb        = 512 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.210 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] reserved_huge_pages            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.210 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] resize_confirm_window          = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.210 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] resize_fs_using_block_device   = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.211 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] resume_guests_state_on_host_boot = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.211 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] rootwrap_config                = /etc/nova/rootwrap.conf log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.211 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] rpc_response_timeout           = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.211 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] run_external_periodic_tasks    = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.212 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] running_deleted_instance_action = reap log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.212 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] running_deleted_instance_poll_interval = 1800 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.212 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] running_deleted_instance_timeout = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.212 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] scheduler_instance_sync_interval = 120 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.213 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] service_down_time              = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.213 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] servicegroup_driver            = db log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.213 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] shelved_offload_time           = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.213 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] shelved_poll_interval          = 3600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.213 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] shutdown_timeout               = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.213 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] source_is_ipv6                 = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.214 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ssl_only                       = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.214 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] state_path                     = /var/lib/nova log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.214 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] sync_power_state_interval      = 600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.214 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] sync_power_state_pool_size     = 1000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.214 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] syslog_log_facility            = LOG_USER log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.215 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] tempdir                        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.215 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] timeout_nbd                    = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.215 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] transport_url                  = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.215 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] update_resources_interval      = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.215 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] use_cow_images                 = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.215 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] use_eventlog                   = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.216 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] use_journal                    = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.216 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] use_json                       = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.216 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] use_rootwrap_daemon            = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.216 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] use_stderr                     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.216 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] use_syslog                     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.217 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vcpu_pin_set                   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.217 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vif_plugging_is_fatal          = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.217 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vif_plugging_timeout           = 300 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.217 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] virt_mkfs                      = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.217 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] volume_usage_poll_interval     = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.218 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] watch_log_file                 = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.218 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] web                            = /usr/share/spice-html5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.218 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_concurrency.disable_process_locking = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.218 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_concurrency.lock_path     = /var/lib/nova/tmp log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.218 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_metrics.metrics_buffer_size = 1000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.218 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_metrics.metrics_enabled = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.219 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_metrics.metrics_process_name =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.219 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.219 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.219 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api.auth_strategy              = keystone log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.219 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api.compute_link_prefix        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.220 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.220 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api.dhcp_domain                =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.220 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api.enable_instance_password   = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.220 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api.glance_link_prefix         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.220 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api.instance_list_cells_batch_fixed_size = 100 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.221 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api.instance_list_cells_batch_strategy = distributed log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.221 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api.instance_list_per_project_cells = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.221 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api.list_records_by_skipping_down_cells = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.221 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api.local_metadata_per_cell    = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.221 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api.max_limit                  = 1000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.222 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api.metadata_cache_expiration  = 15 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.222 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api.neutron_default_tenant_id  = default log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.222 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api.use_forwarded_for          = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.222 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api.use_neutron_default_nets   = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.222 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api.vendordata_dynamic_connect_timeout = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.223 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api.vendordata_dynamic_failure_fatal = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.223 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api.vendordata_dynamic_read_timeout = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.223 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api.vendordata_dynamic_ssl_certfile =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.223 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api.vendordata_dynamic_targets = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.224 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api.vendordata_jsonfile_path   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.224 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api.vendordata_providers       = ['StaticJSON'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.224 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.backend                  = oslo_cache.dict log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.224 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.backend_argument         = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.224 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.config_prefix            = cache.oslo log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.225 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.dead_timeout             = 60.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.225 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.debug_cache_backend      = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.225 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.enable_retry_client      = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.225 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.enable_socket_keepalive  = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.225 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.enabled                  = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.226 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.expiration_time          = 600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.226 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.hashclient_retry_attempts = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.226 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.hashclient_retry_delay   = 1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.226 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.memcache_dead_retry      = 300 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.226 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.memcache_password        =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.227 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.memcache_pool_connection_get_timeout = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.227 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.memcache_pool_flush_on_reconnect = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.227 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.memcache_pool_maxsize    = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.227 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.memcache_pool_unused_timeout = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.227 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.memcache_sasl_enabled    = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.228 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.memcache_servers         = ['localhost:11211'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.228 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.memcache_socket_timeout  = 1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.228 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.memcache_username        =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.228 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.proxies                  = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.228 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.retry_attempts           = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.229 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.retry_delay              = 0.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.229 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.socket_keepalive_count   = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.229 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.socket_keepalive_idle    = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.229 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.socket_keepalive_interval = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.229 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.tls_allowed_ciphers      = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.230 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.tls_cafile               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.230 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.tls_certfile             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.230 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.tls_enabled              = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.230 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cache.tls_keyfile              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.231 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cinder.auth_section            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.231 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cinder.auth_type               = password log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.231 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cinder.cafile                  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.231 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cinder.catalog_info            = volumev3:cinderv3:internalURL log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.231 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cinder.certfile                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.231 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cinder.collect_timing          = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.232 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cinder.cross_az_attach         = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.232 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cinder.debug                   = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.232 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cinder.endpoint_template       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.232 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cinder.http_retries            = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.232 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cinder.insecure                = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.233 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cinder.keyfile                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.233 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cinder.os_region_name          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.233 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cinder.split_loggers           = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.233 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cinder.timeout                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.233 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] compute.consecutive_build_service_disable_threshold = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.234 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] compute.cpu_dedicated_set      = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.234 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] compute.cpu_shared_set         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.234 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] compute.image_type_exclude_list = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.234 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] compute.live_migration_wait_for_vif_plug = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.234 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] compute.max_concurrent_disk_ops = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.235 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] compute.max_disk_devices_to_attach = -1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.235 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] compute.packing_host_numa_cells_allocation_strategy = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.235 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] compute.provider_config_location = /etc/nova/provider_config/ log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.235 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] compute.resource_provider_association_refresh = 300 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.235 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] compute.shutdown_retry_interval = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.236 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] compute.vmdk_allowed_types     = ['streamOptimized', 'monolithicSparse'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.236 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] conductor.workers              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.236 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] console.allowed_origins        = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.236 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] console.ssl_ciphers            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.236 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] console.ssl_minimum_version    = default log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.237 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] consoleauth.token_ttl          = 600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.237 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cyborg.cafile                  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.237 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cyborg.certfile                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.237 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cyborg.collect_timing          = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.238 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cyborg.connect_retries         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.238 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cyborg.connect_retry_delay     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.238 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cyborg.endpoint_override       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.238 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cyborg.insecure                = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.238 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cyborg.keyfile                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.239 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cyborg.max_version             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.239 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cyborg.min_version             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.239 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cyborg.region_name             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.239 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cyborg.service_name            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.239 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cyborg.service_type            = accelerator log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.240 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cyborg.split_loggers           = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.240 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cyborg.status_code_retries     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.240 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cyborg.status_code_retry_delay = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.240 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cyborg.timeout                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.240 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cyborg.valid_interfaces        = ['internal', 'public'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.240 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] cyborg.version                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.241 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] database.backend               = sqlalchemy log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.241 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] database.connection            = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.241 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] database.connection_debug      = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.241 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] database.connection_parameters =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.242 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] database.connection_recycle_time = 3600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.242 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] database.connection_trace      = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.242 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] database.db_inc_retry_interval = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.242 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] database.db_max_retries        = 20 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.242 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] database.db_max_retry_interval = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.242 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] database.db_retry_interval     = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.243 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] database.max_overflow          = 50 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.243 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] database.max_pool_size         = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.243 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] database.max_retries           = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.243 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] database.mysql_enable_ndb      = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.243 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] database.mysql_sql_mode        = TRADITIONAL log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.244 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] database.mysql_wsrep_sync_wait = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.244 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] database.pool_timeout          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.244 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] database.retry_interval        = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.244 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] database.slave_connection      = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.245 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] database.sqlite_synchronous    = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.245 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api_database.backend           = sqlalchemy log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.245 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api_database.connection        = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.245 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api_database.connection_debug  = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.246 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api_database.connection_parameters =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.246 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api_database.connection_recycle_time = 3600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.246 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api_database.connection_trace  = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.246 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api_database.db_inc_retry_interval = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.246 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api_database.db_max_retries    = 20 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.247 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api_database.db_max_retry_interval = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.247 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api_database.db_retry_interval = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.247 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api_database.max_overflow      = 50 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.247 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api_database.max_pool_size     = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.247 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api_database.max_retries       = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.247 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api_database.mysql_enable_ndb  = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.248 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api_database.mysql_sql_mode    = TRADITIONAL log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.248 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api_database.mysql_wsrep_sync_wait = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.248 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api_database.pool_timeout      = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.248 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api_database.retry_interval    = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.249 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api_database.slave_connection  = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.249 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] api_database.sqlite_synchronous = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.249 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] devices.enabled_mdev_types     = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.249 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ephemeral_storage_encryption.cipher = aes-xts-plain64 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.249 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ephemeral_storage_encryption.enabled = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.250 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ephemeral_storage_encryption.key_size = 512 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.250 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.api_servers             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.250 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.cafile                  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.250 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.certfile                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.251 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.collect_timing          = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.251 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.connect_retries         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.251 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.connect_retry_delay     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.251 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.debug                   = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.251 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.default_trusted_certificate_ids = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.252 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.enable_certificate_validation = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.252 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.enable_rbd_download     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.252 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.endpoint_override       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.252 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.insecure                = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.252 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.keyfile                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.253 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.max_version             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.253 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.min_version             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.253 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.num_retries             = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.253 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.rbd_ceph_conf           =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.253 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.rbd_connect_timeout     = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.254 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.rbd_pool                =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.254 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.rbd_user                =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.254 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.region_name             = regionOne log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.254 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.service_name            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.254 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.service_type            = image log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.255 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.split_loggers           = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.255 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.status_code_retries     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.255 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.status_code_retry_delay = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.255 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.timeout                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.256 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.valid_interfaces        = ['internal'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.256 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.verify_glance_signatures = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.256 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] glance.version                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.256 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] guestfs.debug                  = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.256 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] hyperv.config_drive_cdrom      = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.257 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] hyperv.config_drive_inject_password = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.257 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] hyperv.dynamic_memory_ratio    = 1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.257 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] hyperv.enable_instance_metrics_collection = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.258 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] hyperv.enable_remotefx         = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.258 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] hyperv.instances_path_share    =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.258 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] hyperv.iscsi_initiator_list    = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.258 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] hyperv.limit_cpu_features      = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.258 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] hyperv.mounted_disk_query_retry_count = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.259 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] hyperv.mounted_disk_query_retry_interval = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.259 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] hyperv.power_state_check_timeframe = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.259 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] hyperv.power_state_event_polling_interval = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.259 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] hyperv.qemu_img_cmd            = qemu-img.exe log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.259 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] hyperv.use_multipath_io        = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.260 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] hyperv.volume_attach_retry_count = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.260 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] hyperv.volume_attach_retry_interval = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.260 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] hyperv.vswitch_name            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.260 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] hyperv.wait_soft_reboot_seconds = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.260 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] mks.enabled                    = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.261 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] mks.mksproxy_base_url          = http://127.0.0.1:6090/ log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.261 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] image_cache.manager_interval   = 2400 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.261 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] image_cache.precache_concurrency = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.261 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] image_cache.remove_unused_base_images = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.262 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] image_cache.remove_unused_original_minimum_age_seconds = 86400 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.262 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] image_cache.remove_unused_resized_minimum_age_seconds = 3600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.262 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] image_cache.subdirectory_name  = _base log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.262 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ironic.api_max_retries         = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.262 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ironic.api_retry_interval      = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.263 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ironic.auth_section            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.263 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ironic.auth_type               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.263 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ironic.cafile                  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.263 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ironic.certfile                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.263 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ironic.collect_timing          = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.264 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ironic.connect_retries         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.264 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ironic.connect_retry_delay     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.264 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ironic.endpoint_override       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.264 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ironic.insecure                = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.264 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ironic.keyfile                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.265 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ironic.max_version             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.265 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ironic.min_version             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.265 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ironic.partition_key           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.265 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ironic.peer_list               = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.265 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ironic.region_name             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.266 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ironic.serial_console_state_timeout = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.266 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ironic.service_name            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.266 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ironic.service_type            = baremetal log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.266 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ironic.split_loggers           = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.266 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ironic.status_code_retries     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.267 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ironic.status_code_retry_delay = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.267 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ironic.timeout                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.267 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ironic.valid_interfaces        = ['internal', 'public'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.267 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ironic.version                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.267 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] key_manager.backend            = barbican log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.268 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] key_manager.fixed_key          = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.268 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] barbican.auth_endpoint         = http://localhost/identity/v3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.268 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] barbican.barbican_api_version  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.268 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] barbican.barbican_endpoint     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.268 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] barbican.barbican_endpoint_type = internal log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.269 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] barbican.barbican_region_name  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.269 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] barbican.cafile                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.269 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] barbican.certfile              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.269 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] barbican.collect_timing        = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.269 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] barbican.insecure              = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.270 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] barbican.keyfile               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.270 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] barbican.number_of_retries     = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.270 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] barbican.retry_delay           = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.270 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] barbican.send_service_user_token = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.270 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] barbican.split_loggers         = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.270 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] barbican.timeout               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.271 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] barbican.verify_ssl            = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.271 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] barbican.verify_ssl_path       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.271 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] barbican_service_user.auth_section = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.271 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] barbican_service_user.auth_type = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.271 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] barbican_service_user.cafile   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.272 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] barbican_service_user.certfile = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.272 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] barbican_service_user.collect_timing = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.272 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] barbican_service_user.insecure = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.272 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] barbican_service_user.keyfile  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.272 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] barbican_service_user.split_loggers = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.273 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] barbican_service_user.timeout  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.273 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vault.approle_role_id          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.273 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vault.approle_secret_id        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.273 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vault.cafile                   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.273 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vault.certfile                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.274 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vault.collect_timing           = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.274 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vault.insecure                 = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.274 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vault.keyfile                  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.274 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vault.kv_mountpoint            = secret log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.274 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vault.kv_version               = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.274 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vault.namespace                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.275 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vault.root_token_id            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.275 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vault.split_loggers            = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.275 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vault.ssl_ca_crt_file          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.275 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vault.timeout                  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.275 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vault.use_ssl                  = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.275 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vault.vault_url                = http://127.0.0.1:8200 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.276 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] keystone.cafile                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.276 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] keystone.certfile              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.276 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] keystone.collect_timing        = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.276 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] keystone.connect_retries       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.276 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] keystone.connect_retry_delay   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.277 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] keystone.endpoint_override     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.277 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] keystone.insecure              = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.277 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] keystone.keyfile               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.277 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] keystone.max_version           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.277 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] keystone.min_version           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.278 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] keystone.region_name           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.278 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] keystone.service_name          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.278 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] keystone.service_type          = identity log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.278 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] keystone.split_loggers         = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.278 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] keystone.status_code_retries   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.278 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] keystone.status_code_retry_delay = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.279 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] keystone.timeout               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.279 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] keystone.valid_interfaces      = ['internal', 'public'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.279 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] keystone.version               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.279 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.connection_uri         =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.280 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.cpu_mode               = custom log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.280 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.cpu_model_extra_flags  = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.280 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.cpu_models             = ['Nehalem'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.280 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.cpu_power_governor_high = performance log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.280 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.cpu_power_governor_low = powersave log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.281 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.cpu_power_management   = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.281 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.cpu_power_management_strategy = cpu_state log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.281 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.device_detach_attempts = 8 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.281 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.device_detach_timeout  = 20 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.281 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.disk_cachemodes        = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.282 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.disk_prefix            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.282 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.enabled_perf_events    = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.282 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.file_backed_memory     = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.282 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.gid_maps               = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.282 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.hw_disk_discard        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.283 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.hw_machine_type        = ['x86_64=q35'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.283 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.images_rbd_ceph_conf   =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.283 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.images_rbd_glance_copy_poll_interval = 15 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.283 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.images_rbd_glance_copy_timeout = 600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.283 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.images_rbd_glance_store_name =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.283 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.images_rbd_pool        = rbd log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.284 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.images_type            = qcow2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.284 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.images_volume_group    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.284 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.inject_key             = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.284 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.inject_partition       = -2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.285 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.inject_password        = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.285 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.iscsi_iface            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.285 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.iser_use_multipath     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.285 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.live_migration_bandwidth = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.285 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.live_migration_completion_timeout = 800 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.285 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.live_migration_downtime = 500 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.286 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.live_migration_downtime_delay = 75 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.286 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.live_migration_downtime_steps = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.286 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.live_migration_inbound_addr = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.286 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.live_migration_permit_auto_converge = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.286 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.live_migration_permit_post_copy = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.287 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.live_migration_scheme  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.287 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.live_migration_timeout_action = force_complete log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.287 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.live_migration_tunnelled = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.287 2 WARNING oslo_config.cfg [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] Deprecated: Option "live_migration_uri" from group "libvirt" is deprecated for removal (
Oct 02 11:54:05 compute-0 nova_compute[192079]: live_migration_uri is deprecated for removal in favor of two other options that
Oct 02 11:54:05 compute-0 nova_compute[192079]: allow to change live migration scheme and target URI: ``live_migration_scheme``
Oct 02 11:54:05 compute-0 nova_compute[192079]: and ``live_migration_inbound_addr`` respectively.
Oct 02 11:54:05 compute-0 nova_compute[192079]: ).  Its value may be silently ignored in the future.
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.287 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.live_migration_uri     = qemu+tls://%s/system log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.288 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.live_migration_with_native_tls = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.288 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.max_queues             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.288 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.mem_stats_period_seconds = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.288 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.nfs_mount_options      = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.289 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.nfs_mount_point_base   = /var/lib/nova/mnt log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.289 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.num_aoe_discover_tries = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.289 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.num_iser_scan_tries    = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.289 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.num_memory_encrypted_guests = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.289 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.num_nvme_discover_tries = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.290 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.num_pcie_ports         = 24 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.290 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.num_volume_scan_tries  = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.290 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.pmem_namespaces        = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.290 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.quobyte_client_cfg     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.290 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.quobyte_mount_point_base = /var/lib/nova/mnt log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.291 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.rbd_connect_timeout    = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.291 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.rbd_destroy_volume_retries = 12 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.291 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.rbd_destroy_volume_retry_interval = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.291 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.rbd_secret_uuid        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.291 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.rbd_user               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.291 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.realtime_scheduler_priority = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.292 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.remote_filesystem_transport = ssh log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.292 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.rescue_image_id        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.292 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.rescue_kernel_id       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.292 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.rescue_ramdisk_id      = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.292 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.rng_dev_path           = /dev/urandom log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.293 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.rx_queue_size          = 512 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.293 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.smbfs_mount_options    =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.293 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.smbfs_mount_point_base = /var/lib/nova/mnt log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.293 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.snapshot_compression   = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.293 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.snapshot_image_format  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.294 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.snapshots_directory    = /var/lib/nova/instances/snapshots log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.294 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.sparse_logical_volumes = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.294 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.swtpm_enabled          = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.294 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.swtpm_group            = tss log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.294 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.swtpm_user             = tss log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.295 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.sysinfo_serial         = unique log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.295 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.tx_queue_size          = 512 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.295 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.uid_maps               = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.295 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.use_virtio_for_bridges = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.295 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.virt_type              = kvm log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.296 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.volume_clear           = zero log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.296 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.volume_clear_size      = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.296 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.volume_use_multipath   = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.296 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.vzstorage_cache_path   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.296 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.vzstorage_log_path     = /var/log/vstorage/%(cluster_name)s/nova.log.gz log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.296 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.vzstorage_mount_group  = qemu log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.297 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.vzstorage_mount_opts   = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.297 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.vzstorage_mount_perms  = 0770 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.297 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.vzstorage_mount_point_base = /var/lib/nova/mnt log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.297 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.vzstorage_mount_user   = stack log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.297 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] libvirt.wait_soft_reboot_seconds = 120 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.298 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] neutron.auth_section           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.298 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] neutron.auth_type              = password log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.298 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] neutron.cafile                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.298 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] neutron.certfile               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.298 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] neutron.collect_timing         = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.298 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] neutron.connect_retries        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.299 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] neutron.connect_retry_delay    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.299 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] neutron.default_floating_pool  = nova log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.299 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] neutron.endpoint_override      = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.299 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] neutron.extension_sync_interval = 600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.299 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] neutron.http_retries           = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.300 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] neutron.insecure               = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.300 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] neutron.keyfile                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.300 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] neutron.max_version            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.300 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] neutron.metadata_proxy_shared_secret = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.300 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] neutron.min_version            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.301 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] neutron.ovs_bridge             = br-int log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.301 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] neutron.physnets               = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.301 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] neutron.region_name            = regionOne log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.301 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] neutron.service_metadata_proxy = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.301 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] neutron.service_name           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.301 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] neutron.service_type           = network log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.302 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] neutron.split_loggers          = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.302 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] neutron.status_code_retries    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.302 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] neutron.status_code_retry_delay = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.302 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] neutron.timeout                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.303 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] neutron.valid_interfaces       = ['internal'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.303 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] neutron.version                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.303 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] notifications.bdms_in_notifications = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.303 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] notifications.default_level    = INFO log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.303 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] notifications.notification_format = both log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.304 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] notifications.notify_on_state_change = vm_and_task_state log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.304 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] notifications.versioned_notifications_topics = ['versioned_notifications'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.304 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] pci.alias                      = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.304 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] pci.device_spec                = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.304 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] pci.report_in_placement        = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.305 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.auth_section         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.305 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.auth_type            = password log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.305 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.auth_url             = https://keystone-internal.openstack.svc:5000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.305 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.cafile               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.305 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.certfile             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.306 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.collect_timing       = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.306 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.connect_retries      = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.306 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.connect_retry_delay  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.306 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.default_domain_id    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.306 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.default_domain_name  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.307 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.domain_id            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.307 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.domain_name          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.307 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.endpoint_override    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.307 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.insecure             = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.307 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.keyfile              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.307 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.max_version          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.308 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.min_version          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.308 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.password             = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.308 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.project_domain_id    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.308 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.project_domain_name  = Default log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.308 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.project_id           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.309 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.project_name         = service log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.309 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.region_name          = regionOne log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.309 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.service_name         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.309 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.service_type         = placement log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.309 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.split_loggers        = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.309 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.status_code_retries  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.310 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.status_code_retry_delay = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.310 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.system_scope         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.310 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.timeout              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.310 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.trust_id             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.310 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.user_domain_id       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.311 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.user_domain_name     = Default log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.311 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.user_id              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.311 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.username             = nova log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.311 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.valid_interfaces     = ['internal'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.311 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] placement.version              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.312 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] quota.cores                    = 20 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.312 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] quota.count_usage_from_placement = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.312 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] quota.driver                   = nova.quota.DbQuotaDriver log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.312 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] quota.injected_file_content_bytes = 10240 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.312 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] quota.injected_file_path_length = 255 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.313 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] quota.injected_files           = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.313 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] quota.instances                = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.313 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] quota.key_pairs                = 100 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.313 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] quota.metadata_items           = 128 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.314 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] quota.ram                      = 51200 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.314 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] quota.recheck_quota            = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.314 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] quota.server_group_members     = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.314 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] quota.server_groups            = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.314 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] rdp.enabled                    = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.315 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] rdp.html5_proxy_base_url       = http://127.0.0.1:6083/ log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.315 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] scheduler.discover_hosts_in_cells_interval = -1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.315 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] scheduler.enable_isolated_aggregate_filtering = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.315 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] scheduler.image_metadata_prefilter = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.315 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] scheduler.limit_tenants_to_placement_aggregate = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.316 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] scheduler.max_attempts         = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.316 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] scheduler.max_placement_results = 1000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.316 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] scheduler.placement_aggregate_required_for_tenants = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.316 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] scheduler.query_placement_for_availability_zone = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.316 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] scheduler.query_placement_for_image_type_support = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.317 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] scheduler.query_placement_for_routed_network_aggregates = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.317 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] scheduler.workers              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.317 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] filter_scheduler.aggregate_image_properties_isolation_namespace = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.317 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] filter_scheduler.aggregate_image_properties_isolation_separator = . log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.317 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.317 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] filter_scheduler.build_failure_weight_multiplier = 1000000.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.318 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] filter_scheduler.cpu_weight_multiplier = 1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.318 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.318 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] filter_scheduler.disk_weight_multiplier = 1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.318 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.318 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] filter_scheduler.host_subset_size = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.319 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] filter_scheduler.image_properties_default_architecture = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.319 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] filter_scheduler.io_ops_weight_multiplier = -1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.319 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] filter_scheduler.isolated_hosts = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.319 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] filter_scheduler.isolated_images = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.319 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] filter_scheduler.max_instances_per_host = 50 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.320 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] filter_scheduler.max_io_ops_per_host = 8 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.320 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] filter_scheduler.pci_in_placement = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.320 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] filter_scheduler.pci_weight_multiplier = 1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.320 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] filter_scheduler.ram_weight_multiplier = 1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.320 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.320 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] filter_scheduler.shuffle_best_same_weighed_hosts = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.321 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] filter_scheduler.soft_affinity_weight_multiplier = 1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.321 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.321 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] filter_scheduler.track_instance_changes = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.321 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.321 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] metrics.required               = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.322 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] metrics.weight_multiplier      = 1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.322 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] metrics.weight_of_unavailable  = -10000.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.322 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] metrics.weight_setting         = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.322 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] serial_console.base_url        = ws://127.0.0.1:6083/ log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.322 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] serial_console.enabled         = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.323 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] serial_console.port_range      = 10000:20000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.323 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] serial_console.proxyclient_address = 127.0.0.1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.323 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] serial_console.serialproxy_host = 0.0.0.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.323 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] serial_console.serialproxy_port = 6083 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.323 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] service_user.auth_section      = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.324 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] service_user.auth_type         = password log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.324 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] service_user.cafile            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.324 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] service_user.certfile          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.324 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] service_user.collect_timing    = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.325 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] service_user.insecure          = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.325 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] service_user.keyfile           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.325 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] service_user.send_service_user_token = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.325 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] service_user.split_loggers     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.325 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] service_user.timeout           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.325 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] spice.agent_enabled            = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.326 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] spice.enabled                  = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.326 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] spice.html5proxy_base_url      = http://127.0.0.1:6082/spice_auto.html log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.326 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] spice.html5proxy_host          = 0.0.0.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.326 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] spice.html5proxy_port          = 6082 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.327 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] spice.image_compression        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.327 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] spice.jpeg_compression         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.327 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] spice.playback_compression     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.327 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] spice.server_listen            = 127.0.0.1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.327 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] spice.server_proxyclient_address = 127.0.0.1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.327 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] spice.streaming_mode           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.328 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] spice.zlib_compression         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.328 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] upgrade_levels.baseapi         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.328 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] upgrade_levels.cert            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.328 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] upgrade_levels.compute         = auto log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.328 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] upgrade_levels.conductor       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.329 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] upgrade_levels.scheduler       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.329 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vendordata_dynamic_auth.auth_section = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.329 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vendordata_dynamic_auth.auth_type = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.329 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vendordata_dynamic_auth.cafile = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.329 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vendordata_dynamic_auth.certfile = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.330 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vendordata_dynamic_auth.collect_timing = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.330 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vendordata_dynamic_auth.insecure = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.330 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vendordata_dynamic_auth.keyfile = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.330 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vendordata_dynamic_auth.split_loggers = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.330 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vendordata_dynamic_auth.timeout = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.331 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vmware.api_retry_count         = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.331 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vmware.ca_file                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.331 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vmware.cache_prefix            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.331 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vmware.cluster_name            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.331 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vmware.connection_pool_size    = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.331 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vmware.console_delay_seconds   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.332 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vmware.datastore_regex         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.332 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vmware.host_ip                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.332 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vmware.host_password           = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.332 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vmware.host_port               = 443 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.332 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vmware.host_username           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.333 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vmware.insecure                = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.333 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vmware.integration_bridge      = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.333 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vmware.maximum_objects         = 100 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.333 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vmware.pbm_default_policy      = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.333 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vmware.pbm_enabled             = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.334 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vmware.pbm_wsdl_location       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.334 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vmware.serial_log_dir          = /opt/vmware/vspc log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.334 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vmware.serial_port_proxy_uri   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.334 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vmware.serial_port_service_uri = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.334 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vmware.task_poll_interval      = 0.5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.334 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vmware.use_linked_clone        = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.335 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vmware.vnc_keymap              = en-us log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.335 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vmware.vnc_port                = 5900 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.335 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vmware.vnc_port_total          = 10000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.335 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vnc.auth_schemes               = ['none'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.336 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vnc.enabled                    = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.336 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vnc.novncproxy_base_url        = https://nova-novncproxy-cell1-public-openstack.apps-crc.testing/vnc_lite.html log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.336 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vnc.novncproxy_host            = 0.0.0.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.337 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vnc.novncproxy_port            = 6080 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.337 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vnc.server_listen              = ::0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.337 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vnc.server_proxyclient_address = 192.168.122.100 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.337 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vnc.vencrypt_ca_certs          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.338 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vnc.vencrypt_client_cert       = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.338 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vnc.vencrypt_client_key        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.338 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] workarounds.disable_compute_service_check_for_ffu = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.338 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] workarounds.disable_deep_image_inspection = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.339 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] workarounds.disable_fallback_pcpu_query = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.339 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] workarounds.disable_group_policy_check_upcall = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.339 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] workarounds.disable_libvirt_livesnapshot = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.339 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] workarounds.disable_rootwrap   = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.340 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] workarounds.enable_numa_live_migration = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.340 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] workarounds.enable_qemu_monitor_announce_self = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.340 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.340 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] workarounds.handle_virt_lifecycle_events = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.341 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] workarounds.libvirt_disable_apic = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.341 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] workarounds.never_download_image_if_on_rbd = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.341 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] workarounds.qemu_monitor_announce_self_count = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.341 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] workarounds.qemu_monitor_announce_self_interval = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.341 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] workarounds.reserve_disk_resource_for_image_cache = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.342 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] workarounds.skip_cpu_compare_at_startup = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.342 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] workarounds.skip_cpu_compare_on_dest = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.342 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] workarounds.skip_hypervisor_version_check_on_lm = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.342 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] workarounds.skip_reserve_in_use_ironic_nodes = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.342 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] workarounds.unified_limits_count_pcpu_as_vcpu = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.342 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.343 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] wsgi.api_paste_config          = api-paste.ini log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.343 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] wsgi.client_socket_timeout     = 900 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.343 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] wsgi.default_pool_size         = 1000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.343 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] wsgi.keep_alive                = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.343 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] wsgi.max_header_line           = 16384 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.344 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] wsgi.secure_proxy_ssl_header   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.344 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] wsgi.ssl_ca_file               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.344 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] wsgi.ssl_cert_file             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.344 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] wsgi.ssl_key_file              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.344 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] wsgi.tcp_keepidle              = 600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.345 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] wsgi.wsgi_log_format           = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.345 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] zvm.ca_file                    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.345 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] zvm.cloud_connector_url        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.345 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] zvm.image_tmp_path             = /var/lib/nova/images log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.345 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] zvm.reachable_timeout          = 300 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.346 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_policy.enforce_new_defaults = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.346 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_policy.enforce_scope      = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.346 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_policy.policy_default_rule = default log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.346 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_policy.policy_dirs        = ['policy.d'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.346 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_policy.policy_file        = policy.yaml log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.347 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_policy.remote_content_type = application/x-www-form-urlencoded log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.347 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_policy.remote_ssl_ca_crt_file = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.347 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_policy.remote_ssl_client_crt_file = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.347 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_policy.remote_ssl_client_key_file = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.347 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_policy.remote_ssl_verify_server_crt = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.348 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_versionedobjects.fatal_exception_format_errors = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.348 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.348 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] remote_debug.host              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.348 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] remote_debug.port              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.348 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.amqp_auto_delete = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.349 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.amqp_durable_queues = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.349 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.conn_pool_min_size = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.349 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.conn_pool_ttl = 1200 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.349 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.direct_mandatory_flag = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.350 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.enable_cancel_on_failover = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.350 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.heartbeat_in_pthread = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.350 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.heartbeat_rate = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.350 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.350 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.kombu_compression = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.351 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.kombu_failover_strategy = round-robin log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.351 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.351 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.351 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.rabbit_ha_queues = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.351 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.rabbit_interval_max = 30 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.352 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.352 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.352 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.352 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.352 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.353 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.rabbit_quorum_queue = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.353 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.rabbit_retry_backoff = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.353 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.rabbit_retry_interval = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.353 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.353 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.rpc_conn_pool_size = 30 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.354 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.ssl      = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.354 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.ssl_ca_file =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.354 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.ssl_cert_file =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.354 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.ssl_enforce_fips_mode = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.354 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.ssl_key_file =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.355 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_rabbit.ssl_version =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.355 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_notifications.driver = ['messagingv2'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.355 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_notifications.retry = -1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.355 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_notifications.topics = ['notifications'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.355 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_messaging_notifications.transport_url = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.356 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.auth_section        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.356 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.auth_type           = password log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.356 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.auth_url            = https://keystone-internal.openstack.svc:5000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.356 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.cafile              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.356 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.certfile            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.357 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.collect_timing      = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.357 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.connect_retries     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.357 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.connect_retry_delay = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.357 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.default_domain_id   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.357 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.default_domain_name = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.357 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.domain_id           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.358 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.domain_name         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.358 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.endpoint_id         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.358 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.endpoint_override   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.358 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.insecure            = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.358 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.keyfile             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.359 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.max_version         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.359 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.min_version         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.359 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.password            = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.359 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.project_domain_id   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.359 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.project_domain_name = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.360 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.project_id          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.360 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.project_name        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.360 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.region_name         = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.360 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.service_name        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.360 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.service_type        = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.361 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.split_loggers       = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.361 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.status_code_retries = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.361 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.status_code_retry_delay = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.361 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.system_scope        = all log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.361 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.timeout             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.362 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.trust_id            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.362 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.user_domain_id      = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.362 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.user_domain_name    = Default log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.362 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.user_id             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.362 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.username            = nova log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.363 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.valid_interfaces    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.363 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_limit.version             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.363 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_reports.file_event_handler = /var/lib/nova log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.363 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_reports.file_event_handler_interval = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.363 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] oslo_reports.log_dir           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.364 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vif_plug_linux_bridge_privileged.capabilities = [12] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.364 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vif_plug_linux_bridge_privileged.group = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.364 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vif_plug_linux_bridge_privileged.helper_command = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.364 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.364 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vif_plug_linux_bridge_privileged.thread_pool_size = 8 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.364 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vif_plug_linux_bridge_privileged.user = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.365 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vif_plug_ovs_privileged.capabilities = [12, 1] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.365 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vif_plug_ovs_privileged.group  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.365 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vif_plug_ovs_privileged.helper_command = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.365 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.365 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vif_plug_ovs_privileged.thread_pool_size = 8 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.366 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] vif_plug_ovs_privileged.user   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.366 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] os_vif_linux_bridge.flat_interface = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.366 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] os_vif_linux_bridge.forward_bridge_interface = ['all'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.366 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] os_vif_linux_bridge.iptables_bottom_regex =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.366 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] os_vif_linux_bridge.iptables_drop_action = DROP log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.367 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] os_vif_linux_bridge.iptables_top_regex =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.367 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] os_vif_linux_bridge.network_device_mtu = 1500 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.367 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] os_vif_linux_bridge.use_ipv6   = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.367 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] os_vif_linux_bridge.vlan_interface = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.367 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] os_vif_ovs.isolate_vif         = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.367 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] os_vif_ovs.network_device_mtu  = 1500 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.368 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] os_vif_ovs.ovs_vsctl_timeout   = 120 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.368 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] os_vif_ovs.ovsdb_connection    = tcp:127.0.0.1:6640 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.368 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] os_vif_ovs.ovsdb_interface     = native log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.368 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] os_vif_ovs.per_port_bridge     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.368 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] os_brick.lock_path             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.369 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] os_brick.wait_mpath_device_attempts = 4 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.369 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] os_brick.wait_mpath_device_interval = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.369 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] privsep_osbrick.capabilities   = [21] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.369 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] privsep_osbrick.group          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.369 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] privsep_osbrick.helper_command = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.369 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] privsep_osbrick.logger_name    = os_brick.privileged log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.370 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] privsep_osbrick.thread_pool_size = 8 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.370 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] privsep_osbrick.user           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.370 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] nova_sys_admin.capabilities    = [0, 1, 2, 3, 12, 21] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.370 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] nova_sys_admin.group           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.370 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] nova_sys_admin.helper_command  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.371 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] nova_sys_admin.logger_name     = oslo_privsep.daemon log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.371 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] nova_sys_admin.thread_pool_size = 8 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.371 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] nova_sys_admin.user            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.371 2 DEBUG oslo_service.service [None req-47adea09-c197-4d1f-8da8-03f05680f0b7 - - - - - -] ******************************************************************************** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2613
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.372 2 INFO nova.service [-] Starting compute node (version 27.5.2-0.20250829104910.6f8decf.el9)
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.393 2 DEBUG nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Starting native event thread _init_events /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:492
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.394 2 DEBUG nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Starting green dispatch thread _init_events /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:498
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.394 2 DEBUG nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Starting connection event dispatch thread initialize /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:620
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.395 2 DEBUG nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Connecting to libvirt: qemu:///system _get_new_connection /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:503
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.406 2 DEBUG nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Registering for lifecycle events <nova.virt.libvirt.host.Host object at 0x7f2ed983b160> _get_new_connection /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:509
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.409 2 DEBUG nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Registering for connection events: <nova.virt.libvirt.host.Host object at 0x7f2ed983b160> _get_new_connection /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:530
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.410 2 INFO nova.virt.libvirt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Connection event '1' reason 'None'
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.415 2 INFO nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Libvirt host capabilities <capabilities>
Oct 02 11:54:05 compute-0 nova_compute[192079]: 
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <host>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <uuid>a6ea5ec0-bd37-4735-94f0-b41eba3dd400</uuid>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <cpu>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <arch>x86_64</arch>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model>EPYC-Rome-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <vendor>AMD</vendor>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <microcode version='16777317'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <signature family='23' model='49' stepping='0'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <topology sockets='8' dies='1' clusters='1' cores='1' threads='1'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <maxphysaddr mode='emulate' bits='40'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature name='x2apic'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature name='tsc-deadline'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature name='osxsave'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature name='hypervisor'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature name='tsc_adjust'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature name='spec-ctrl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature name='stibp'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature name='arch-capabilities'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature name='ssbd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature name='cmp_legacy'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature name='topoext'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature name='virt-ssbd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature name='lbrv'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature name='tsc-scale'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature name='vmcb-clean'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature name='pause-filter'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature name='pfthreshold'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature name='svme-addr-chk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature name='rdctl-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature name='skip-l1dfl-vmentry'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature name='mds-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature name='pschange-mc-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <pages unit='KiB' size='4'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <pages unit='KiB' size='2048'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <pages unit='KiB' size='1048576'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </cpu>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <power_management>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <suspend_mem/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <suspend_disk/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <suspend_hybrid/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </power_management>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <iommu support='no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <migration_features>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <live/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <uri_transports>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <uri_transport>tcp</uri_transport>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <uri_transport>rdma</uri_transport>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </uri_transports>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </migration_features>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <topology>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <cells num='1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <cell id='0'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:           <memory unit='KiB'>7864092</memory>
Oct 02 11:54:05 compute-0 nova_compute[192079]:           <pages unit='KiB' size='4'>1966023</pages>
Oct 02 11:54:05 compute-0 nova_compute[192079]:           <pages unit='KiB' size='2048'>0</pages>
Oct 02 11:54:05 compute-0 nova_compute[192079]:           <pages unit='KiB' size='1048576'>0</pages>
Oct 02 11:54:05 compute-0 nova_compute[192079]:           <distances>
Oct 02 11:54:05 compute-0 nova_compute[192079]:             <sibling id='0' value='10'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:           </distances>
Oct 02 11:54:05 compute-0 nova_compute[192079]:           <cpus num='8'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:             <cpu id='0' socket_id='0' die_id='0' cluster_id='65535' core_id='0' siblings='0'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:             <cpu id='1' socket_id='1' die_id='1' cluster_id='65535' core_id='0' siblings='1'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:             <cpu id='2' socket_id='2' die_id='2' cluster_id='65535' core_id='0' siblings='2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:             <cpu id='3' socket_id='3' die_id='3' cluster_id='65535' core_id='0' siblings='3'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:             <cpu id='4' socket_id='4' die_id='4' cluster_id='65535' core_id='0' siblings='4'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:             <cpu id='5' socket_id='5' die_id='5' cluster_id='65535' core_id='0' siblings='5'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:             <cpu id='6' socket_id='6' die_id='6' cluster_id='65535' core_id='0' siblings='6'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:             <cpu id='7' socket_id='7' die_id='7' cluster_id='65535' core_id='0' siblings='7'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:           </cpus>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         </cell>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </cells>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </topology>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <cache>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <bank id='0' level='2' type='both' size='512' unit='KiB' cpus='0'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <bank id='1' level='2' type='both' size='512' unit='KiB' cpus='1'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <bank id='2' level='2' type='both' size='512' unit='KiB' cpus='2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <bank id='3' level='2' type='both' size='512' unit='KiB' cpus='3'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <bank id='4' level='2' type='both' size='512' unit='KiB' cpus='4'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <bank id='5' level='2' type='both' size='512' unit='KiB' cpus='5'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <bank id='6' level='2' type='both' size='512' unit='KiB' cpus='6'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <bank id='7' level='2' type='both' size='512' unit='KiB' cpus='7'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <bank id='0' level='3' type='both' size='16' unit='MiB' cpus='0'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <bank id='1' level='3' type='both' size='16' unit='MiB' cpus='1'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <bank id='2' level='3' type='both' size='16' unit='MiB' cpus='2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <bank id='3' level='3' type='both' size='16' unit='MiB' cpus='3'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <bank id='4' level='3' type='both' size='16' unit='MiB' cpus='4'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <bank id='5' level='3' type='both' size='16' unit='MiB' cpus='5'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <bank id='6' level='3' type='both' size='16' unit='MiB' cpus='6'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <bank id='7' level='3' type='both' size='16' unit='MiB' cpus='7'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </cache>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <secmodel>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model>selinux</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <doi>0</doi>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <baselabel type='kvm'>system_u:system_r:svirt_t:s0</baselabel>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <baselabel type='qemu'>system_u:system_r:svirt_tcg_t:s0</baselabel>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </secmodel>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <secmodel>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model>dac</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <doi>0</doi>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <baselabel type='kvm'>+107:+107</baselabel>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <baselabel type='qemu'>+107:+107</baselabel>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </secmodel>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   </host>
Oct 02 11:54:05 compute-0 nova_compute[192079]: 
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <guest>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <os_type>hvm</os_type>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <arch name='i686'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <wordsize>32</wordsize>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <emulator>/usr/libexec/qemu-kvm</emulator>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine maxCpus='240' deprecated='yes'>pc-i440fx-rhel7.6.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine canonical='pc-i440fx-rhel7.6.0' maxCpus='240' deprecated='yes'>pc</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine maxCpus='4096'>pc-q35-rhel9.6.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine canonical='pc-q35-rhel9.6.0' maxCpus='4096'>q35</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel8.6.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine maxCpus='710'>pc-q35-rhel9.4.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel8.5.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel8.3.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel7.6.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel8.4.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine maxCpus='710'>pc-q35-rhel9.2.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel8.2.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine maxCpus='710'>pc-q35-rhel9.0.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel8.0.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel8.1.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <domain type='qemu'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <domain type='kvm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </arch>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <features>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <pae/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <nonpae/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <acpi default='on' toggle='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <apic default='on' toggle='no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <cpuselection/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <deviceboot/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <disksnapshot default='on' toggle='no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <externalSnapshot/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </features>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   </guest>
Oct 02 11:54:05 compute-0 nova_compute[192079]: 
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <guest>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <os_type>hvm</os_type>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <arch name='x86_64'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <wordsize>64</wordsize>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <emulator>/usr/libexec/qemu-kvm</emulator>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine maxCpus='240' deprecated='yes'>pc-i440fx-rhel7.6.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine canonical='pc-i440fx-rhel7.6.0' maxCpus='240' deprecated='yes'>pc</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine maxCpus='4096'>pc-q35-rhel9.6.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine canonical='pc-q35-rhel9.6.0' maxCpus='4096'>q35</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel8.6.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine maxCpus='710'>pc-q35-rhel9.4.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel8.5.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel8.3.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel7.6.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel8.4.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine maxCpus='710'>pc-q35-rhel9.2.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel8.2.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine maxCpus='710'>pc-q35-rhel9.0.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel8.0.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <machine maxCpus='710' deprecated='yes'>pc-q35-rhel8.1.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <domain type='qemu'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <domain type='kvm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </arch>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <features>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <acpi default='on' toggle='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <apic default='on' toggle='no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <cpuselection/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <deviceboot/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <disksnapshot default='on' toggle='no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <externalSnapshot/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </features>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   </guest>
Oct 02 11:54:05 compute-0 nova_compute[192079]: 
Oct 02 11:54:05 compute-0 nova_compute[192079]: </capabilities>
Oct 02 11:54:05 compute-0 nova_compute[192079]: 
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.423 2 DEBUG nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Getting domain capabilities for i686 via machine types: {'pc', 'q35'} _get_machine_types /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:952
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.427 2 DEBUG nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Libvirt host hypervisor capabilities for arch=i686 and machine_type=pc:
Oct 02 11:54:05 compute-0 nova_compute[192079]: <domainCapabilities>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <path>/usr/libexec/qemu-kvm</path>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <domain>kvm</domain>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <machine>pc-i440fx-rhel7.6.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <arch>i686</arch>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <vcpu max='240'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <iothreads supported='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <os supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <enum name='firmware'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <loader supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <value>/usr/share/OVMF/OVMF_CODE.secboot.fd</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='type'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>rom</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>pflash</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='readonly'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>yes</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>no</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='secure'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>no</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </loader>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   </os>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <cpu>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <mode name='host-passthrough' supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='hostPassthroughMigratable'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>on</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>off</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </mode>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <mode name='maximum' supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='maximumMigratable'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>on</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>off</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </mode>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <mode name='host-model' supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model fallback='forbid'>EPYC-Rome</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <vendor>AMD</vendor>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <maxphysaddr mode='passthrough' limit='40'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='x2apic'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='tsc-deadline'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='hypervisor'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='tsc_adjust'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='spec-ctrl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='stibp'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='arch-capabilities'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='ssbd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='cmp_legacy'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='overflow-recov'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='succor'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='ibrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='amd-ssbd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='virt-ssbd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='lbrv'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='tsc-scale'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='vmcb-clean'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='flushbyasid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='pause-filter'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='pfthreshold'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='svme-addr-chk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='lfence-always-serializing'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='rdctl-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='skip-l1dfl-vmentry'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='mds-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='pschange-mc-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='gds-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='rfds-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='disable' name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </mode>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <mode name='custom' supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='486-v1'>486</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>486-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Broadwell-v1'>Broadwell</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Broadwell-v3'>Broadwell-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Broadwell-v2'>Broadwell-noTSX</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell-noTSX'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Broadwell-v4'>Broadwell-noTSX-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell-noTSX-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Broadwell-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Broadwell-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Broadwell-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Broadwell-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Cascadelake-Server-v1'>Cascadelake-Server</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cascadelake-Server'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Cascadelake-Server-v3'>Cascadelake-Server-noTSX</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cascadelake-Server-noTSX'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cascadelake-Server-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cascadelake-Server-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cascadelake-Server-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cascadelake-Server-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v5</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cascadelake-Server-v5'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='Intel' canonical='Conroe-v1'>Conroe</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='Intel'>Conroe-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Cooperlake-v1'>Cooperlake</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cooperlake'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Cooperlake-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cooperlake-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Cooperlake-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cooperlake-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Denverton-v1'>Denverton</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Denverton'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mpx'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Denverton-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Denverton-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mpx'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Denverton-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Denverton-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Denverton-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Denverton-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Hygon' canonical='Dhyana-v1'>Dhyana</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Hygon'>Dhyana-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Hygon'>Dhyana-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Dhyana-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='AMD' canonical='EPYC-v1'>EPYC</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD' canonical='EPYC-Genoa-v1'>EPYC-Genoa</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Genoa'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amd-psfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='auto-ibrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='no-nested-data-bp'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='null-sel-clr-base'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='stibp-always-on'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-Genoa-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Genoa-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amd-psfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='auto-ibrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='no-nested-data-bp'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='null-sel-clr-base'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='stibp-always-on'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='AMD' canonical='EPYC-v2'>EPYC-IBPB</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD' canonical='EPYC-Milan-v1'>EPYC-Milan</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Milan'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-Milan-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Milan-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-Milan-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Milan-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amd-psfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='no-nested-data-bp'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='null-sel-clr-base'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='stibp-always-on'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD' canonical='EPYC-Rome-v1'>EPYC-Rome</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Rome'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-Rome-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Rome-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-Rome-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Rome-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-Rome-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Rome-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='AMD'>EPYC-Rome-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='AMD'>EPYC-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='AMD'>EPYC-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='GraniteRapids-v1'>GraniteRapids</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='GraniteRapids'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-tile'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fbsdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrc'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fzrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mcdt-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pbrsb-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='prefetchiti'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='psdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tsx-ldtrk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>GraniteRapids-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='GraniteRapids-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-tile'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fbsdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrc'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fzrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mcdt-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pbrsb-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='prefetchiti'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='psdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tsx-ldtrk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>GraniteRapids-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='GraniteRapids-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-tile'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx10'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx10-128'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx10-256'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx10-512'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cldemote'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fbsdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrc'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fzrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mcdt-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdir64b'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdiri'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pbrsb-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='prefetchiti'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='psdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tsx-ldtrk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Haswell-v1'>Haswell</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Haswell-v3'>Haswell-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Haswell-v2'>Haswell-noTSX</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell-noTSX'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Haswell-v4'>Haswell-noTSX-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell-noTSX-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Haswell-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Haswell-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Haswell-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Haswell-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Icelake-Server-v1'>Icelake-Server</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Icelake-Server-v2'>Icelake-Server-noTSX</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-noTSX'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Icelake-Server-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Icelake-Server-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Icelake-Server-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Icelake-Server-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Icelake-Server-v5</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-v5'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Icelake-Server-v6</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-v6'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Icelake-Server-v7</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-v7'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='IvyBridge-v1'>IvyBridge</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='IvyBridge'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='IvyBridge-v2'>IvyBridge-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='IvyBridge-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>IvyBridge-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='IvyBridge-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>IvyBridge-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='IvyBridge-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='KnightsMill-v1'>KnightsMill</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='KnightsMill'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-4fmaps'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-4vnniw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512er'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512pf'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>KnightsMill-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='KnightsMill-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-4fmaps'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-4vnniw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512er'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512pf'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel' canonical='Nehalem-v1'>Nehalem</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel' canonical='Nehalem-v2'>Nehalem-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel'>Nehalem-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel'>Nehalem-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='AMD' canonical='Opteron_G1-v1'>Opteron_G1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='AMD'>Opteron_G1-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='AMD' canonical='Opteron_G2-v1'>Opteron_G2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='AMD'>Opteron_G2-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='AMD' canonical='Opteron_G3-v1'>Opteron_G3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='AMD'>Opteron_G3-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD' canonical='Opteron_G4-v1'>Opteron_G4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Opteron_G4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fma4'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xop'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>Opteron_G4-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Opteron_G4-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fma4'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xop'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD' canonical='Opteron_G5-v1'>Opteron_G5</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Opteron_G5'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fma4'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tbm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xop'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>Opteron_G5-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Opteron_G5-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fma4'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tbm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xop'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='Intel' canonical='Penryn-v1'>Penryn</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='Intel'>Penryn-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel' canonical='SandyBridge-v1'>SandyBridge</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel' canonical='SandyBridge-v2'>SandyBridge-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel'>SandyBridge-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel'>SandyBridge-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='SapphireRapids-v1'>SapphireRapids</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='SapphireRapids'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-tile'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrc'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fzrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tsx-ldtrk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>SapphireRapids-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='SapphireRapids-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-tile'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrc'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fzrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tsx-ldtrk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>SapphireRapids-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='SapphireRapids-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-tile'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fbsdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrc'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fzrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='psdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tsx-ldtrk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>SapphireRapids-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='SapphireRapids-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-tile'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cldemote'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fbsdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrc'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fzrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdir64b'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdiri'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='psdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tsx-ldtrk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='SierraForest-v1'>SierraForest</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='SierraForest'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-ne-convert'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cmpccxadd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fbsdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mcdt-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pbrsb-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='psdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>SierraForest-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='SierraForest-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-ne-convert'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cmpccxadd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fbsdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mcdt-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pbrsb-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='psdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Skylake-Client-v1'>Skylake-Client</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Client'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Skylake-Client-v2'>Skylake-Client-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Client-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Skylake-Client-v3'>Skylake-Client-noTSX-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Client-noTSX-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Client-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Client-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Client-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Client-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Client-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Client-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Client-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Client-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Skylake-Server-v1'>Skylake-Server</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Skylake-Server-v2'>Skylake-Server-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Skylake-Server-v3'>Skylake-Server-noTSX-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server-noTSX-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Server-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Server-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Server-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Server-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Server-v5</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server-v5'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Snowridge-v1'>Snowridge</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Snowridge'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cldemote'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='core-capability'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdir64b'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdiri'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mpx'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='split-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Snowridge-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Snowridge-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cldemote'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='core-capability'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdir64b'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdiri'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mpx'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='split-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Snowridge-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Snowridge-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cldemote'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='core-capability'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdir64b'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdiri'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='split-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Snowridge-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Snowridge-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cldemote'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='core-capability'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdir64b'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdiri'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='split-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Snowridge-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Snowridge-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cldemote'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdir64b'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdiri'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel' canonical='Westmere-v1'>Westmere</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel' canonical='Westmere-v2'>Westmere-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel'>Westmere-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel'>Westmere-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='AMD' canonical='athlon-v1'>athlon</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='athlon'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnow'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnowext'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='AMD'>athlon-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='athlon-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnow'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnowext'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='Intel' canonical='core2duo-v1'>core2duo</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='core2duo'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='Intel'>core2duo-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='core2duo-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='Intel' canonical='coreduo-v1'>coreduo</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='coreduo'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='Intel'>coreduo-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='coreduo-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='kvm32-v1'>kvm32</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>kvm32-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='kvm64-v1'>kvm64</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>kvm64-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='Intel' canonical='n270-v1'>n270</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='n270'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='Intel'>n270-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='n270-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='pentium-v1'>pentium</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>pentium-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='pentium2-v1'>pentium2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>pentium2-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='pentium3-v1'>pentium3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>pentium3-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='AMD' canonical='phenom-v1'>phenom</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='phenom'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnow'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnowext'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='AMD'>phenom-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='phenom-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnow'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnowext'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='qemu32-v1'>qemu32</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>qemu32-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='qemu64-v1'>qemu64</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>qemu64-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </mode>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   </cpu>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <memoryBacking supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <enum name='sourceType'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <value>file</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <value>anonymous</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <value>memfd</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   </memoryBacking>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <devices>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <disk supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='diskDevice'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>disk</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>cdrom</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>floppy</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>lun</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='bus'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>ide</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>fdc</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>scsi</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>usb</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>sata</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='model'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio-transitional</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio-non-transitional</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </disk>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <graphics supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='type'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>vnc</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>egl-headless</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>dbus</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </graphics>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <video supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='modelType'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>vga</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>cirrus</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>none</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>bochs</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>ramfb</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </video>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <hostdev supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='mode'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>subsystem</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='startupPolicy'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>default</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>mandatory</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>requisite</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>optional</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='subsysType'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>usb</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>pci</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>scsi</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='capsType'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='pciBackend'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </hostdev>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <rng supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='model'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio-transitional</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio-non-transitional</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='backendModel'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>random</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>egd</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>builtin</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </rng>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <filesystem supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='driverType'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>path</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>handle</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtiofs</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </filesystem>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <tpm supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='model'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>tpm-tis</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>tpm-crb</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='backendModel'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>emulator</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>external</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='backendVersion'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>2.0</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </tpm>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <redirdev supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='bus'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>usb</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </redirdev>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <channel supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='type'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>pty</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>unix</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </channel>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <crypto supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='model'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='type'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>qemu</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='backendModel'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>builtin</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </crypto>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <interface supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='backendType'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>default</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>passt</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </interface>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <panic supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='model'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>isa</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>hyperv</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </panic>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   </devices>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <features>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <gic supported='no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <vmcoreinfo supported='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <genid supported='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <backingStoreInput supported='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <backup supported='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <async-teardown supported='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <ps2 supported='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <sev supported='no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <sgx supported='no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <hyperv supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='features'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>relaxed</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>vapic</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>spinlocks</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>vpindex</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>runtime</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>synic</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>stimer</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>reset</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>vendor_id</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>frequencies</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>reenlightenment</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>tlbflush</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>ipi</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>avic</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>emsr_bitmap</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>xmm_input</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </hyperv>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <launchSecurity supported='no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   </features>
Oct 02 11:54:05 compute-0 nova_compute[192079]: </domainCapabilities>
Oct 02 11:54:05 compute-0 nova_compute[192079]:  _get_domain_capabilities /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1037
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.433 2 DEBUG nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Libvirt host hypervisor capabilities for arch=i686 and machine_type=q35:
Oct 02 11:54:05 compute-0 nova_compute[192079]: <domainCapabilities>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <path>/usr/libexec/qemu-kvm</path>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <domain>kvm</domain>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <machine>pc-q35-rhel9.6.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <arch>i686</arch>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <vcpu max='4096'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <iothreads supported='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <os supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <enum name='firmware'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <loader supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <value>/usr/share/OVMF/OVMF_CODE.secboot.fd</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='type'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>rom</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>pflash</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='readonly'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>yes</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>no</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='secure'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>no</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </loader>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   </os>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <cpu>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <mode name='host-passthrough' supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='hostPassthroughMigratable'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>on</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>off</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </mode>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <mode name='maximum' supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='maximumMigratable'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>on</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>off</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </mode>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <mode name='host-model' supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model fallback='forbid'>EPYC-Rome</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <vendor>AMD</vendor>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <maxphysaddr mode='passthrough' limit='40'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='x2apic'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='tsc-deadline'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='hypervisor'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='tsc_adjust'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='spec-ctrl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='stibp'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='arch-capabilities'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='ssbd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='cmp_legacy'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='overflow-recov'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='succor'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='ibrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='amd-ssbd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='virt-ssbd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='lbrv'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='tsc-scale'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='vmcb-clean'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='flushbyasid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='pause-filter'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='pfthreshold'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='svme-addr-chk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='lfence-always-serializing'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='rdctl-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='skip-l1dfl-vmentry'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='mds-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='pschange-mc-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='gds-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='rfds-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='disable' name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </mode>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <mode name='custom' supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='486-v1'>486</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>486-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Broadwell-v1'>Broadwell</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Broadwell-v3'>Broadwell-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Broadwell-v2'>Broadwell-noTSX</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell-noTSX'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Broadwell-v4'>Broadwell-noTSX-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell-noTSX-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Broadwell-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Broadwell-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Broadwell-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Broadwell-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Cascadelake-Server-v1'>Cascadelake-Server</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cascadelake-Server'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Cascadelake-Server-v3'>Cascadelake-Server-noTSX</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cascadelake-Server-noTSX'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cascadelake-Server-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cascadelake-Server-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cascadelake-Server-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cascadelake-Server-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v5</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cascadelake-Server-v5'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='Intel' canonical='Conroe-v1'>Conroe</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='Intel'>Conroe-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Cooperlake-v1'>Cooperlake</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cooperlake'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Cooperlake-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cooperlake-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Cooperlake-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cooperlake-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Denverton-v1'>Denverton</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Denverton'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mpx'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Denverton-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Denverton-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mpx'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Denverton-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Denverton-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Denverton-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Denverton-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Hygon' canonical='Dhyana-v1'>Dhyana</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Hygon'>Dhyana-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Hygon'>Dhyana-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Dhyana-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='AMD' canonical='EPYC-v1'>EPYC</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD' canonical='EPYC-Genoa-v1'>EPYC-Genoa</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Genoa'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amd-psfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='auto-ibrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='no-nested-data-bp'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='null-sel-clr-base'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='stibp-always-on'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-Genoa-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Genoa-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amd-psfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='auto-ibrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='no-nested-data-bp'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='null-sel-clr-base'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='stibp-always-on'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='AMD' canonical='EPYC-v2'>EPYC-IBPB</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD' canonical='EPYC-Milan-v1'>EPYC-Milan</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Milan'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-Milan-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Milan-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-Milan-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Milan-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amd-psfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='no-nested-data-bp'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='null-sel-clr-base'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='stibp-always-on'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD' canonical='EPYC-Rome-v1'>EPYC-Rome</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Rome'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-Rome-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Rome-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-Rome-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Rome-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-Rome-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Rome-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='AMD'>EPYC-Rome-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='AMD'>EPYC-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='AMD'>EPYC-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='GraniteRapids-v1'>GraniteRapids</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='GraniteRapids'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-tile'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fbsdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrc'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fzrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mcdt-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pbrsb-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='prefetchiti'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='psdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tsx-ldtrk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>GraniteRapids-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='GraniteRapids-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-tile'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fbsdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrc'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fzrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mcdt-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pbrsb-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='prefetchiti'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='psdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tsx-ldtrk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>GraniteRapids-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='GraniteRapids-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-tile'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx10'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx10-128'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx10-256'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx10-512'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cldemote'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fbsdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrc'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fzrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mcdt-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdir64b'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdiri'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pbrsb-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='prefetchiti'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='psdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tsx-ldtrk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Haswell-v1'>Haswell</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Haswell-v3'>Haswell-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Haswell-v2'>Haswell-noTSX</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell-noTSX'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Haswell-v4'>Haswell-noTSX-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell-noTSX-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Haswell-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Haswell-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Haswell-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Haswell-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Icelake-Server-v1'>Icelake-Server</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Icelake-Server-v2'>Icelake-Server-noTSX</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-noTSX'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Icelake-Server-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Icelake-Server-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Icelake-Server-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Icelake-Server-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Icelake-Server-v5</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-v5'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Icelake-Server-v6</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-v6'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Icelake-Server-v7</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-v7'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='IvyBridge-v1'>IvyBridge</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='IvyBridge'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='IvyBridge-v2'>IvyBridge-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='IvyBridge-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>IvyBridge-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='IvyBridge-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>IvyBridge-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='IvyBridge-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='KnightsMill-v1'>KnightsMill</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='KnightsMill'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-4fmaps'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-4vnniw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512er'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512pf'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>KnightsMill-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='KnightsMill-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-4fmaps'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-4vnniw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512er'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512pf'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel' canonical='Nehalem-v1'>Nehalem</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel' canonical='Nehalem-v2'>Nehalem-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel'>Nehalem-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel'>Nehalem-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='AMD' canonical='Opteron_G1-v1'>Opteron_G1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='AMD'>Opteron_G1-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='AMD' canonical='Opteron_G2-v1'>Opteron_G2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='AMD'>Opteron_G2-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='AMD' canonical='Opteron_G3-v1'>Opteron_G3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='AMD'>Opteron_G3-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD' canonical='Opteron_G4-v1'>Opteron_G4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Opteron_G4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fma4'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xop'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>Opteron_G4-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Opteron_G4-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fma4'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xop'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD' canonical='Opteron_G5-v1'>Opteron_G5</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Opteron_G5'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fma4'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tbm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xop'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>Opteron_G5-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Opteron_G5-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fma4'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tbm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xop'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='Intel' canonical='Penryn-v1'>Penryn</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='Intel'>Penryn-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel' canonical='SandyBridge-v1'>SandyBridge</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel' canonical='SandyBridge-v2'>SandyBridge-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel'>SandyBridge-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel'>SandyBridge-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='SapphireRapids-v1'>SapphireRapids</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='SapphireRapids'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-tile'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrc'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fzrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tsx-ldtrk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>SapphireRapids-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='SapphireRapids-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-tile'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrc'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fzrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tsx-ldtrk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>SapphireRapids-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='SapphireRapids-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-tile'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fbsdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrc'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fzrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='psdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tsx-ldtrk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>SapphireRapids-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='SapphireRapids-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-tile'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cldemote'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fbsdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrc'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fzrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdir64b'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdiri'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='psdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tsx-ldtrk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='SierraForest-v1'>SierraForest</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='SierraForest'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-ne-convert'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cmpccxadd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fbsdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mcdt-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pbrsb-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='psdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>SierraForest-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='SierraForest-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-ne-convert'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cmpccxadd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fbsdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mcdt-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pbrsb-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='psdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Skylake-Client-v1'>Skylake-Client</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Client'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Skylake-Client-v2'>Skylake-Client-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Client-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Skylake-Client-v3'>Skylake-Client-noTSX-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Client-noTSX-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Client-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Client-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Client-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Client-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Client-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Client-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Client-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Client-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Skylake-Server-v1'>Skylake-Server</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Skylake-Server-v2'>Skylake-Server-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Skylake-Server-v3'>Skylake-Server-noTSX-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server-noTSX-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Server-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Server-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Server-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Server-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Server-v5</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server-v5'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Snowridge-v1'>Snowridge</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Snowridge'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cldemote'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='core-capability'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdir64b'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdiri'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mpx'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='split-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Snowridge-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Snowridge-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cldemote'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='core-capability'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdir64b'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdiri'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mpx'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='split-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Snowridge-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Snowridge-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cldemote'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='core-capability'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdir64b'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdiri'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='split-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Snowridge-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Snowridge-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cldemote'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='core-capability'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdir64b'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdiri'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='split-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Snowridge-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Snowridge-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cldemote'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdir64b'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdiri'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel' canonical='Westmere-v1'>Westmere</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel' canonical='Westmere-v2'>Westmere-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel'>Westmere-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel'>Westmere-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='AMD' canonical='athlon-v1'>athlon</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='athlon'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnow'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnowext'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='AMD'>athlon-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='athlon-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnow'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnowext'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='Intel' canonical='core2duo-v1'>core2duo</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='core2duo'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='Intel'>core2duo-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='core2duo-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='Intel' canonical='coreduo-v1'>coreduo</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='coreduo'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='Intel'>coreduo-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='coreduo-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='kvm32-v1'>kvm32</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>kvm32-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='kvm64-v1'>kvm64</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>kvm64-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='Intel' canonical='n270-v1'>n270</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='n270'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='Intel'>n270-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='n270-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='pentium-v1'>pentium</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>pentium-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='pentium2-v1'>pentium2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>pentium2-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='pentium3-v1'>pentium3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>pentium3-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='AMD' canonical='phenom-v1'>phenom</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='phenom'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnow'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnowext'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='AMD'>phenom-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='phenom-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnow'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnowext'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='qemu32-v1'>qemu32</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>qemu32-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='qemu64-v1'>qemu64</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>qemu64-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </mode>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   </cpu>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <memoryBacking supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <enum name='sourceType'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <value>file</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <value>anonymous</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <value>memfd</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   </memoryBacking>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <devices>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <disk supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='diskDevice'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>disk</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>cdrom</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>floppy</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>lun</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='bus'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>fdc</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>scsi</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>usb</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>sata</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='model'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio-transitional</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio-non-transitional</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </disk>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <graphics supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='type'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>vnc</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>egl-headless</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>dbus</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </graphics>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <video supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='modelType'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>vga</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>cirrus</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>none</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>bochs</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>ramfb</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </video>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <hostdev supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='mode'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>subsystem</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='startupPolicy'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>default</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>mandatory</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>requisite</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>optional</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='subsysType'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>usb</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>pci</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>scsi</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='capsType'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='pciBackend'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </hostdev>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <rng supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='model'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio-transitional</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio-non-transitional</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='backendModel'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>random</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>egd</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>builtin</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </rng>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <filesystem supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='driverType'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>path</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>handle</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtiofs</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </filesystem>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <tpm supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='model'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>tpm-tis</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>tpm-crb</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='backendModel'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>emulator</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>external</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='backendVersion'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>2.0</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </tpm>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <redirdev supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='bus'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>usb</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </redirdev>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <channel supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='type'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>pty</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>unix</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </channel>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <crypto supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='model'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='type'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>qemu</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='backendModel'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>builtin</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </crypto>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <interface supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='backendType'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>default</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>passt</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </interface>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <panic supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='model'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>isa</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>hyperv</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </panic>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   </devices>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <features>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <gic supported='no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <vmcoreinfo supported='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <genid supported='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <backingStoreInput supported='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <backup supported='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <async-teardown supported='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <ps2 supported='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <sev supported='no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <sgx supported='no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <hyperv supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='features'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>relaxed</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>vapic</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>spinlocks</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>vpindex</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>runtime</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>synic</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>stimer</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>reset</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>vendor_id</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>frequencies</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>reenlightenment</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>tlbflush</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>ipi</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>avic</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>emsr_bitmap</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>xmm_input</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </hyperv>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <launchSecurity supported='no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   </features>
Oct 02 11:54:05 compute-0 nova_compute[192079]: </domainCapabilities>
Oct 02 11:54:05 compute-0 nova_compute[192079]:  _get_domain_capabilities /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1037
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.487 2 DEBUG nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Getting domain capabilities for x86_64 via machine types: {'pc', 'q35'} _get_machine_types /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:952
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.488 2 WARNING nova.virt.libvirt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Cannot update service status on host "compute-0.ctlplane.example.com" since it is not registered.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host compute-0.ctlplane.example.com could not be found.
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.488 2 DEBUG nova.virt.libvirt.volume.mount [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Initialising _HostMountState generation 0 host_up /usr/lib/python3.9/site-packages/nova/virt/libvirt/volume/mount.py:130
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.492 2 DEBUG nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Libvirt host hypervisor capabilities for arch=x86_64 and machine_type=pc:
Oct 02 11:54:05 compute-0 nova_compute[192079]: <domainCapabilities>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <path>/usr/libexec/qemu-kvm</path>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <domain>kvm</domain>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <machine>pc-i440fx-rhel7.6.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <arch>x86_64</arch>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <vcpu max='240'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <iothreads supported='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <os supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <enum name='firmware'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <loader supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <value>/usr/share/OVMF/OVMF_CODE.secboot.fd</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='type'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>rom</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>pflash</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='readonly'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>yes</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>no</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='secure'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>no</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </loader>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   </os>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <cpu>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <mode name='host-passthrough' supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='hostPassthroughMigratable'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>on</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>off</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </mode>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <mode name='maximum' supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='maximumMigratable'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>on</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>off</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </mode>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <mode name='host-model' supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model fallback='forbid'>EPYC-Rome</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <vendor>AMD</vendor>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <maxphysaddr mode='passthrough' limit='40'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='x2apic'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='tsc-deadline'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='hypervisor'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='tsc_adjust'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='spec-ctrl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='stibp'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='arch-capabilities'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='ssbd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='cmp_legacy'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='overflow-recov'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='succor'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='ibrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='amd-ssbd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='virt-ssbd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='lbrv'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='tsc-scale'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='vmcb-clean'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='flushbyasid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='pause-filter'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='pfthreshold'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='svme-addr-chk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='lfence-always-serializing'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='rdctl-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='skip-l1dfl-vmentry'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='mds-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='pschange-mc-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='gds-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='rfds-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='disable' name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </mode>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <mode name='custom' supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='486-v1'>486</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>486-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Broadwell-v1'>Broadwell</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Broadwell-v3'>Broadwell-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Broadwell-v2'>Broadwell-noTSX</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell-noTSX'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Broadwell-v4'>Broadwell-noTSX-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell-noTSX-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Broadwell-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Broadwell-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Broadwell-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Broadwell-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Cascadelake-Server-v1'>Cascadelake-Server</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cascadelake-Server'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Cascadelake-Server-v3'>Cascadelake-Server-noTSX</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cascadelake-Server-noTSX'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cascadelake-Server-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cascadelake-Server-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cascadelake-Server-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cascadelake-Server-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v5</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cascadelake-Server-v5'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='Intel' canonical='Conroe-v1'>Conroe</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='Intel'>Conroe-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Cooperlake-v1'>Cooperlake</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cooperlake'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Cooperlake-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cooperlake-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Cooperlake-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cooperlake-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Denverton-v1'>Denverton</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Denverton'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mpx'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Denverton-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Denverton-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mpx'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Denverton-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Denverton-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Denverton-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Denverton-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Hygon' canonical='Dhyana-v1'>Dhyana</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Hygon'>Dhyana-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Hygon'>Dhyana-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Dhyana-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='AMD' canonical='EPYC-v1'>EPYC</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD' canonical='EPYC-Genoa-v1'>EPYC-Genoa</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Genoa'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amd-psfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='auto-ibrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='no-nested-data-bp'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='null-sel-clr-base'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='stibp-always-on'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-Genoa-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Genoa-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amd-psfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='auto-ibrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='no-nested-data-bp'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='null-sel-clr-base'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='stibp-always-on'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='AMD' canonical='EPYC-v2'>EPYC-IBPB</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD' canonical='EPYC-Milan-v1'>EPYC-Milan</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Milan'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-Milan-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Milan-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-Milan-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Milan-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amd-psfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='no-nested-data-bp'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='null-sel-clr-base'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='stibp-always-on'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD' canonical='EPYC-Rome-v1'>EPYC-Rome</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Rome'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-Rome-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Rome-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-Rome-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Rome-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-Rome-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Rome-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='AMD'>EPYC-Rome-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='AMD'>EPYC-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='AMD'>EPYC-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='GraniteRapids-v1'>GraniteRapids</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='GraniteRapids'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-tile'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fbsdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrc'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fzrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mcdt-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pbrsb-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='prefetchiti'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='psdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tsx-ldtrk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>GraniteRapids-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='GraniteRapids-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-tile'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fbsdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrc'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fzrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mcdt-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pbrsb-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='prefetchiti'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='psdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tsx-ldtrk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>GraniteRapids-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='GraniteRapids-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-tile'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx10'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx10-128'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx10-256'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx10-512'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cldemote'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fbsdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrc'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fzrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mcdt-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdir64b'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdiri'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pbrsb-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='prefetchiti'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='psdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tsx-ldtrk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Haswell-v1'>Haswell</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Haswell-v3'>Haswell-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Haswell-v2'>Haswell-noTSX</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell-noTSX'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Haswell-v4'>Haswell-noTSX-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell-noTSX-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Haswell-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Haswell-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Haswell-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Haswell-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Icelake-Server-v1'>Icelake-Server</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Icelake-Server-v2'>Icelake-Server-noTSX</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-noTSX'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Icelake-Server-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Icelake-Server-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Icelake-Server-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Icelake-Server-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Icelake-Server-v5</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-v5'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Icelake-Server-v6</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-v6'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Icelake-Server-v7</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-v7'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='IvyBridge-v1'>IvyBridge</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='IvyBridge'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='IvyBridge-v2'>IvyBridge-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='IvyBridge-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>IvyBridge-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='IvyBridge-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>IvyBridge-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='IvyBridge-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='KnightsMill-v1'>KnightsMill</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='KnightsMill'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-4fmaps'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-4vnniw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512er'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512pf'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>KnightsMill-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='KnightsMill-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-4fmaps'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-4vnniw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512er'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512pf'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel' canonical='Nehalem-v1'>Nehalem</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel' canonical='Nehalem-v2'>Nehalem-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel'>Nehalem-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel'>Nehalem-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='AMD' canonical='Opteron_G1-v1'>Opteron_G1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='AMD'>Opteron_G1-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='AMD' canonical='Opteron_G2-v1'>Opteron_G2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='AMD'>Opteron_G2-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='AMD' canonical='Opteron_G3-v1'>Opteron_G3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='AMD'>Opteron_G3-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD' canonical='Opteron_G4-v1'>Opteron_G4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Opteron_G4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fma4'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xop'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>Opteron_G4-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Opteron_G4-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fma4'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xop'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD' canonical='Opteron_G5-v1'>Opteron_G5</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Opteron_G5'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fma4'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tbm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xop'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>Opteron_G5-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Opteron_G5-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fma4'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tbm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xop'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='Intel' canonical='Penryn-v1'>Penryn</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='Intel'>Penryn-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel' canonical='SandyBridge-v1'>SandyBridge</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel' canonical='SandyBridge-v2'>SandyBridge-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel'>SandyBridge-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel'>SandyBridge-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='SapphireRapids-v1'>SapphireRapids</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='SapphireRapids'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-tile'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrc'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fzrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tsx-ldtrk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>SapphireRapids-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='SapphireRapids-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-tile'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrc'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fzrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tsx-ldtrk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>SapphireRapids-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='SapphireRapids-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-tile'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fbsdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrc'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fzrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='psdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tsx-ldtrk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>SapphireRapids-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='SapphireRapids-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-tile'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cldemote'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fbsdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrc'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fzrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdir64b'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdiri'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='psdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tsx-ldtrk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='SierraForest-v1'>SierraForest</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='SierraForest'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-ne-convert'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cmpccxadd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fbsdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mcdt-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pbrsb-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='psdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>SierraForest-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='SierraForest-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-ne-convert'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cmpccxadd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fbsdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mcdt-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pbrsb-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='psdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Skylake-Client-v1'>Skylake-Client</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Client'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Skylake-Client-v2'>Skylake-Client-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Client-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Skylake-Client-v3'>Skylake-Client-noTSX-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Client-noTSX-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Client-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Client-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Client-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Client-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Client-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Client-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Client-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Client-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Skylake-Server-v1'>Skylake-Server</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Skylake-Server-v2'>Skylake-Server-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Skylake-Server-v3'>Skylake-Server-noTSX-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server-noTSX-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Server-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Server-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Server-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Server-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Server-v5</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server-v5'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Snowridge-v1'>Snowridge</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Snowridge'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cldemote'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='core-capability'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdir64b'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdiri'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mpx'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='split-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Snowridge-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Snowridge-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cldemote'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='core-capability'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdir64b'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdiri'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mpx'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='split-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Snowridge-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Snowridge-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cldemote'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='core-capability'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdir64b'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdiri'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='split-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Snowridge-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Snowridge-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cldemote'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='core-capability'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdir64b'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdiri'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='split-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Snowridge-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Snowridge-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cldemote'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdir64b'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdiri'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel' canonical='Westmere-v1'>Westmere</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel' canonical='Westmere-v2'>Westmere-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel'>Westmere-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel'>Westmere-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='AMD' canonical='athlon-v1'>athlon</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='athlon'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnow'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnowext'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='AMD'>athlon-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='athlon-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnow'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnowext'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='Intel' canonical='core2duo-v1'>core2duo</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='core2duo'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='Intel'>core2duo-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='core2duo-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='Intel' canonical='coreduo-v1'>coreduo</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='coreduo'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='Intel'>coreduo-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='coreduo-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='kvm32-v1'>kvm32</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>kvm32-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='kvm64-v1'>kvm64</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>kvm64-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='Intel' canonical='n270-v1'>n270</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='n270'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='Intel'>n270-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='n270-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='pentium-v1'>pentium</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>pentium-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='pentium2-v1'>pentium2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>pentium2-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='pentium3-v1'>pentium3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>pentium3-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='AMD' canonical='phenom-v1'>phenom</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='phenom'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnow'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnowext'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='AMD'>phenom-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='phenom-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnow'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnowext'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='qemu32-v1'>qemu32</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>qemu32-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='qemu64-v1'>qemu64</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>qemu64-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </mode>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   </cpu>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <memoryBacking supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <enum name='sourceType'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <value>file</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <value>anonymous</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <value>memfd</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   </memoryBacking>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <devices>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <disk supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='diskDevice'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>disk</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>cdrom</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>floppy</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>lun</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='bus'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>ide</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>fdc</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>scsi</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>usb</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>sata</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='model'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio-transitional</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio-non-transitional</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </disk>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <graphics supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='type'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>vnc</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>egl-headless</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>dbus</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </graphics>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <video supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='modelType'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>vga</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>cirrus</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>none</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>bochs</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>ramfb</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </video>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <hostdev supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='mode'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>subsystem</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='startupPolicy'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>default</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>mandatory</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>requisite</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>optional</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='subsysType'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>usb</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>pci</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>scsi</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='capsType'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='pciBackend'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </hostdev>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <rng supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='model'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio-transitional</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio-non-transitional</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='backendModel'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>random</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>egd</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>builtin</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </rng>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <filesystem supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='driverType'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>path</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>handle</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtiofs</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </filesystem>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <tpm supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='model'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>tpm-tis</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>tpm-crb</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='backendModel'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>emulator</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>external</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='backendVersion'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>2.0</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </tpm>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <redirdev supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='bus'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>usb</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </redirdev>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <channel supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='type'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>pty</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>unix</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </channel>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <crypto supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='model'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='type'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>qemu</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='backendModel'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>builtin</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </crypto>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <interface supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='backendType'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>default</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>passt</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </interface>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <panic supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='model'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>isa</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>hyperv</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </panic>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   </devices>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <features>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <gic supported='no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <vmcoreinfo supported='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <genid supported='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <backingStoreInput supported='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <backup supported='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <async-teardown supported='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <ps2 supported='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <sev supported='no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <sgx supported='no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <hyperv supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='features'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>relaxed</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>vapic</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>spinlocks</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>vpindex</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>runtime</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>synic</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>stimer</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>reset</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>vendor_id</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>frequencies</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>reenlightenment</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>tlbflush</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>ipi</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>avic</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>emsr_bitmap</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>xmm_input</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </hyperv>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <launchSecurity supported='no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   </features>
Oct 02 11:54:05 compute-0 nova_compute[192079]: </domainCapabilities>
Oct 02 11:54:05 compute-0 nova_compute[192079]:  _get_domain_capabilities /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1037
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.550 2 DEBUG nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Libvirt host hypervisor capabilities for arch=x86_64 and machine_type=q35:
Oct 02 11:54:05 compute-0 nova_compute[192079]: <domainCapabilities>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <path>/usr/libexec/qemu-kvm</path>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <domain>kvm</domain>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <machine>pc-q35-rhel9.6.0</machine>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <arch>x86_64</arch>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <vcpu max='4096'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <iothreads supported='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <os supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <enum name='firmware'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <value>efi</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <loader supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <value>/usr/share/edk2/ovmf/OVMF_CODE.secboot.fd</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <value>/usr/share/edk2/ovmf/OVMF_CODE.fd</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <value>/usr/share/edk2/ovmf/OVMF.amdsev.fd</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <value>/usr/share/edk2/ovmf/OVMF.inteltdx.secboot.fd</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='type'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>rom</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>pflash</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='readonly'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>yes</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>no</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='secure'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>yes</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>no</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </loader>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   </os>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <cpu>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <mode name='host-passthrough' supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='hostPassthroughMigratable'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>on</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>off</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </mode>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <mode name='maximum' supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='maximumMigratable'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>on</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>off</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </mode>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <mode name='host-model' supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model fallback='forbid'>EPYC-Rome</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <vendor>AMD</vendor>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <maxphysaddr mode='passthrough' limit='40'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='x2apic'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='tsc-deadline'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='hypervisor'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='tsc_adjust'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='spec-ctrl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='stibp'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='arch-capabilities'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='ssbd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='cmp_legacy'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='overflow-recov'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='succor'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='ibrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='amd-ssbd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='virt-ssbd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='lbrv'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='tsc-scale'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='vmcb-clean'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='flushbyasid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='pause-filter'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='pfthreshold'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='svme-addr-chk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='lfence-always-serializing'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='rdctl-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='skip-l1dfl-vmentry'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='mds-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='pschange-mc-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='gds-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='require' name='rfds-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <feature policy='disable' name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </mode>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <mode name='custom' supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='486-v1'>486</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>486-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Broadwell-v1'>Broadwell</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Broadwell-v3'>Broadwell-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Broadwell-v2'>Broadwell-noTSX</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell-noTSX'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Broadwell-v4'>Broadwell-noTSX-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell-noTSX-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Broadwell-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Broadwell-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Broadwell-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Broadwell-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Broadwell-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Cascadelake-Server-v1'>Cascadelake-Server</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cascadelake-Server'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Cascadelake-Server-v3'>Cascadelake-Server-noTSX</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cascadelake-Server-noTSX'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cascadelake-Server-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cascadelake-Server-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cascadelake-Server-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cascadelake-Server-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Cascadelake-Server-v5</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cascadelake-Server-v5'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='Intel' canonical='Conroe-v1'>Conroe</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='Intel'>Conroe-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Cooperlake-v1'>Cooperlake</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cooperlake'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Cooperlake-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cooperlake-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Cooperlake-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Cooperlake-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Denverton-v1'>Denverton</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Denverton'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mpx'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Denverton-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Denverton-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mpx'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Denverton-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Denverton-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Denverton-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Denverton-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Hygon' canonical='Dhyana-v1'>Dhyana</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Hygon'>Dhyana-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Hygon'>Dhyana-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Dhyana-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='AMD' canonical='EPYC-v1'>EPYC</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD' canonical='EPYC-Genoa-v1'>EPYC-Genoa</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Genoa'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amd-psfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='auto-ibrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='no-nested-data-bp'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='null-sel-clr-base'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='stibp-always-on'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-Genoa-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Genoa-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amd-psfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='auto-ibrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='no-nested-data-bp'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='null-sel-clr-base'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='stibp-always-on'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='AMD' canonical='EPYC-v2'>EPYC-IBPB</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD' canonical='EPYC-Milan-v1'>EPYC-Milan</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Milan'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-Milan-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Milan-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-Milan-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Milan-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amd-psfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='no-nested-data-bp'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='null-sel-clr-base'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='stibp-always-on'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD' canonical='EPYC-Rome-v1'>EPYC-Rome</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Rome'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-Rome-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Rome-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-Rome-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Rome-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-Rome-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-Rome-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='AMD'>EPYC-Rome-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='AMD'>EPYC-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='AMD'>EPYC-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>EPYC-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='EPYC-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='GraniteRapids-v1'>GraniteRapids</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='GraniteRapids'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-tile'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fbsdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrc'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fzrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mcdt-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pbrsb-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='prefetchiti'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='psdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tsx-ldtrk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>GraniteRapids-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='GraniteRapids-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-tile'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fbsdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrc'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fzrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mcdt-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pbrsb-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='prefetchiti'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='psdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tsx-ldtrk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>GraniteRapids-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='GraniteRapids-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-tile'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx10'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx10-128'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx10-256'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx10-512'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cldemote'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fbsdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrc'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fzrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mcdt-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdir64b'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdiri'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pbrsb-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='prefetchiti'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='psdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tsx-ldtrk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Haswell-v1'>Haswell</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Haswell-v3'>Haswell-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Haswell-v2'>Haswell-noTSX</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell-noTSX'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Haswell-v4'>Haswell-noTSX-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell-noTSX-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Haswell-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Haswell-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Haswell-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Haswell-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Haswell-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Icelake-Server-v1'>Icelake-Server</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Icelake-Server-v2'>Icelake-Server-noTSX</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-noTSX'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Icelake-Server-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Icelake-Server-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Icelake-Server-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Icelake-Server-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Icelake-Server-v5</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-v5'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Icelake-Server-v6</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-v6'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Icelake-Server-v7</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Icelake-Server-v7'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='IvyBridge-v1'>IvyBridge</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='IvyBridge'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='IvyBridge-v2'>IvyBridge-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='IvyBridge-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>IvyBridge-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='IvyBridge-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>IvyBridge-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='IvyBridge-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='KnightsMill-v1'>KnightsMill</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='KnightsMill'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-4fmaps'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-4vnniw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512er'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512pf'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>KnightsMill-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='KnightsMill-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-4fmaps'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-4vnniw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512er'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512pf'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel' canonical='Nehalem-v1'>Nehalem</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel' canonical='Nehalem-v2'>Nehalem-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel'>Nehalem-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel'>Nehalem-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='AMD' canonical='Opteron_G1-v1'>Opteron_G1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='AMD'>Opteron_G1-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='AMD' canonical='Opteron_G2-v1'>Opteron_G2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='AMD'>Opteron_G2-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='AMD' canonical='Opteron_G3-v1'>Opteron_G3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='AMD'>Opteron_G3-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD' canonical='Opteron_G4-v1'>Opteron_G4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Opteron_G4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fma4'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xop'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>Opteron_G4-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Opteron_G4-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fma4'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xop'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD' canonical='Opteron_G5-v1'>Opteron_G5</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Opteron_G5'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fma4'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tbm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xop'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='AMD'>Opteron_G5-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Opteron_G5-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fma4'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tbm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xop'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='Intel' canonical='Penryn-v1'>Penryn</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='Intel'>Penryn-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel' canonical='SandyBridge-v1'>SandyBridge</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel' canonical='SandyBridge-v2'>SandyBridge-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel'>SandyBridge-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel'>SandyBridge-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='SapphireRapids-v1'>SapphireRapids</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='SapphireRapids'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-tile'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrc'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fzrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tsx-ldtrk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>SapphireRapids-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='SapphireRapids-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-tile'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrc'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fzrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tsx-ldtrk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>SapphireRapids-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='SapphireRapids-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-tile'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fbsdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrc'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fzrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='psdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tsx-ldtrk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>SapphireRapids-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='SapphireRapids-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='amx-tile'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-bf16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-fp16'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512-vpopcntdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bitalg'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vbmi2'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cldemote'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fbsdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrc'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fzrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='la57'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdir64b'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdiri'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='psdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='taa-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='tsx-ldtrk'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xfd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='SierraForest-v1'>SierraForest</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='SierraForest'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-ne-convert'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cmpccxadd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fbsdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mcdt-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pbrsb-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='psdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>SierraForest-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='SierraForest-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-ifma'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-ne-convert'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx-vnni-int8'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='bus-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cmpccxadd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fbsdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='fsrs'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ibrs-all'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mcdt-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pbrsb-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='psdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='sbdr-ssdp-no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='serialize'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vaes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='vpclmulqdq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Skylake-Client-v1'>Skylake-Client</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Client'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Skylake-Client-v2'>Skylake-Client-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Client-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Skylake-Client-v3'>Skylake-Client-noTSX-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Client-noTSX-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Client-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Client-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Client-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Client-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Client-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Client-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Client-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Client-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Skylake-Server-v1'>Skylake-Server</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Skylake-Server-v2'>Skylake-Server-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Skylake-Server-v3'>Skylake-Server-noTSX-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server-noTSX-IBRS'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Server-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Server-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='hle'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='rtm'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Server-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Server-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Skylake-Server-v5</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Skylake-Server-v5'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512bw'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512cd'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512dq'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512f'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='avx512vl'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='invpcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pcid'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='pku'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel' canonical='Snowridge-v1'>Snowridge</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Snowridge'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cldemote'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='core-capability'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdir64b'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdiri'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mpx'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='split-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Snowridge-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Snowridge-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cldemote'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='core-capability'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdir64b'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdiri'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='mpx'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='split-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Snowridge-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Snowridge-v2'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cldemote'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='core-capability'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdir64b'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdiri'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='split-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Snowridge-v3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Snowridge-v3'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cldemote'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='core-capability'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdir64b'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdiri'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='split-lock-detect'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' vendor='Intel'>Snowridge-v4</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='Snowridge-v4'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='cldemote'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='erms'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='gfni'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdir64b'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='movdiri'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='xsaves'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel' canonical='Westmere-v1'>Westmere</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel' canonical='Westmere-v2'>Westmere-IBRS</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel'>Westmere-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' vendor='Intel'>Westmere-v2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='AMD' canonical='athlon-v1'>athlon</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='athlon'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnow'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnowext'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='AMD'>athlon-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='athlon-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnow'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnowext'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='Intel' canonical='core2duo-v1'>core2duo</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='core2duo'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='Intel'>core2duo-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='core2duo-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='Intel' canonical='coreduo-v1'>coreduo</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='coreduo'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='Intel'>coreduo-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='coreduo-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='kvm32-v1'>kvm32</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>kvm32-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='kvm64-v1'>kvm64</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>kvm64-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='Intel' canonical='n270-v1'>n270</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='n270'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='Intel'>n270-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='n270-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='ss'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='pentium-v1'>pentium</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>pentium-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='pentium2-v1'>pentium2</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>pentium2-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='pentium3-v1'>pentium3</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>pentium3-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='AMD' canonical='phenom-v1'>phenom</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='phenom'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnow'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnowext'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='no' deprecated='yes' vendor='AMD'>phenom-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <blockers model='phenom-v1'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnow'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <feature name='3dnowext'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </blockers>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='qemu32-v1'>qemu32</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>qemu32-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown' canonical='qemu64-v1'>qemu64</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <model usable='yes' deprecated='yes' vendor='unknown'>qemu64-v1</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </mode>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   </cpu>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <memoryBacking supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <enum name='sourceType'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <value>file</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <value>anonymous</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <value>memfd</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   </memoryBacking>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <devices>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <disk supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='diskDevice'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>disk</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>cdrom</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>floppy</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>lun</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='bus'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>fdc</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>scsi</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>usb</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>sata</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='model'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio-transitional</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio-non-transitional</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </disk>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <graphics supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='type'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>vnc</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>egl-headless</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>dbus</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </graphics>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <video supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='modelType'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>vga</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>cirrus</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>none</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>bochs</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>ramfb</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </video>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <hostdev supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='mode'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>subsystem</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='startupPolicy'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>default</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>mandatory</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>requisite</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>optional</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='subsysType'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>usb</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>pci</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>scsi</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='capsType'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='pciBackend'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </hostdev>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <rng supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='model'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio-transitional</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtio-non-transitional</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='backendModel'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>random</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>egd</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>builtin</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </rng>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <filesystem supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='driverType'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>path</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>handle</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>virtiofs</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </filesystem>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <tpm supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='model'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>tpm-tis</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>tpm-crb</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='backendModel'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>emulator</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>external</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='backendVersion'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>2.0</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </tpm>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <redirdev supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='bus'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>usb</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </redirdev>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <channel supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='type'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>pty</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>unix</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </channel>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <crypto supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='model'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='type'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>qemu</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='backendModel'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>builtin</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </crypto>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <interface supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='backendType'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>default</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>passt</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </interface>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <panic supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='model'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>isa</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>hyperv</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </panic>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   </devices>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <features>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <gic supported='no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <vmcoreinfo supported='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <genid supported='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <backingStoreInput supported='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <backup supported='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <async-teardown supported='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <ps2 supported='yes'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <sev supported='no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <sgx supported='no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <hyperv supported='yes'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       <enum name='features'>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>relaxed</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>vapic</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>spinlocks</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>vpindex</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>runtime</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>synic</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>stimer</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>reset</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>vendor_id</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>frequencies</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>reenlightenment</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>tlbflush</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>ipi</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>avic</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>emsr_bitmap</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:         <value>xmm_input</value>
Oct 02 11:54:05 compute-0 nova_compute[192079]:       </enum>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     </hyperv>
Oct 02 11:54:05 compute-0 nova_compute[192079]:     <launchSecurity supported='no'/>
Oct 02 11:54:05 compute-0 nova_compute[192079]:   </features>
Oct 02 11:54:05 compute-0 nova_compute[192079]: </domainCapabilities>
Oct 02 11:54:05 compute-0 nova_compute[192079]:  _get_domain_capabilities /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1037
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.652 2 DEBUG nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Checking secure boot support for host arch (x86_64) supports_secure_boot /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1782
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.652 2 DEBUG nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Checking secure boot support for host arch (x86_64) supports_secure_boot /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1782
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.652 2 DEBUG nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Checking secure boot support for host arch (x86_64) supports_secure_boot /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1782
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.653 2 INFO nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Secure Boot support detected
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.654 2 INFO nova.virt.libvirt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] The live_migration_permit_post_copy is set to True and post copy live migration is available so auto-converge will not be in use.
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.654 2 INFO nova.virt.libvirt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] The live_migration_permit_post_copy is set to True and post copy live migration is available so auto-converge will not be in use.
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.664 2 DEBUG nova.virt.libvirt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] cpu compare xml: <cpu match="exact">
Oct 02 11:54:05 compute-0 nova_compute[192079]:   <model>Nehalem</model>
Oct 02 11:54:05 compute-0 nova_compute[192079]: </cpu>
Oct 02 11:54:05 compute-0 nova_compute[192079]:  _compare_cpu /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10019
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.666 2 DEBUG nova.virt.libvirt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Enabling emulated TPM support _check_vtpm_support /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:1097
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.714 2 INFO nova.virt.node [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Determined node identity 55f2ae21-42ea-47d7-8c73-c3134981d708 from /var/lib/nova/compute_id
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.749 2 WARNING nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Compute nodes ['55f2ae21-42ea-47d7-8c73-c3134981d708'] for host compute-0.ctlplane.example.com were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning.
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.811 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.957 2 WARNING nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] No compute node record found for host compute-0.ctlplane.example.com. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host compute-0.ctlplane.example.com could not be found.
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.958 2 DEBUG oslo_concurrency.lockutils [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.958 2 DEBUG oslo_concurrency.lockutils [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.958 2 DEBUG oslo_concurrency.lockutils [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 11:54:05 compute-0 nova_compute[192079]: 2025-10-02 11:54:05.959 2 DEBUG nova.compute.resource_tracker [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 11:54:05 compute-0 systemd[1]: Starting libvirt nodedev daemon...
Oct 02 11:54:06 compute-0 systemd[1]: Started libvirt nodedev daemon.
Oct 02 11:54:06 compute-0 podman[192379]: 2025-10-02 11:54:06.119826184 +0000 UTC m=+0.111850583 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0)
Oct 02 11:54:06 compute-0 nova_compute[192079]: 2025-10-02 11:54:06.257 2 WARNING nova.virt.libvirt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 11:54:06 compute-0 nova_compute[192079]: 2025-10-02 11:54:06.258 2 DEBUG nova.compute.resource_tracker [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=6212MB free_disk=73.66959381103516GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 11:54:06 compute-0 nova_compute[192079]: 2025-10-02 11:54:06.258 2 DEBUG oslo_concurrency.lockutils [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 11:54:06 compute-0 nova_compute[192079]: 2025-10-02 11:54:06.258 2 DEBUG oslo_concurrency.lockutils [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 11:54:08 compute-0 podman[192427]: 2025-10-02 11:54:08.122848548 +0000 UTC m=+0.042316914 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_managed=true, container_name=ovn_metadata_agent, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, org.label-schema.build-date=20251001)
Oct 02 11:54:08 compute-0 nova_compute[192079]: 2025-10-02 11:54:08.677 2 WARNING nova.compute.resource_tracker [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] No compute node record for compute-0.ctlplane.example.com:55f2ae21-42ea-47d7-8c73-c3134981d708: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 55f2ae21-42ea-47d7-8c73-c3134981d708 could not be found.
Oct 02 11:54:09 compute-0 nova_compute[192079]: 2025-10-02 11:54:09.034 2 INFO nova.compute.resource_tracker [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Compute node record created for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com with uuid: 55f2ae21-42ea-47d7-8c73-c3134981d708
Oct 02 11:54:09 compute-0 nova_compute[192079]: 2025-10-02 11:54:09.226 2 DEBUG nova.compute.resource_tracker [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 11:54:09 compute-0 nova_compute[192079]: 2025-10-02 11:54:09.227 2 DEBUG nova.compute.resource_tracker [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 11:54:09 compute-0 nova_compute[192079]: 2025-10-02 11:54:09.823 2 INFO nova.scheduler.client.report [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [req-740bf98b-9716-459b-b0f2-c5742d5a57f2] Created resource provider record via placement API for resource provider with UUID 55f2ae21-42ea-47d7-8c73-c3134981d708 and name compute-0.ctlplane.example.com.
Oct 02 11:54:09 compute-0 nova_compute[192079]: 2025-10-02 11:54:09.855 2 DEBUG nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] /sys/module/kvm_amd/parameters/sev contains [N
Oct 02 11:54:09 compute-0 nova_compute[192079]: ] _kernel_supports_amd_sev /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1803
Oct 02 11:54:09 compute-0 nova_compute[192079]: 2025-10-02 11:54:09.856 2 INFO nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] kernel doesn't support AMD SEV
Oct 02 11:54:09 compute-0 nova_compute[192079]: 2025-10-02 11:54:09.857 2 DEBUG nova.compute.provider_tree [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Updating inventory in ProviderTree for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 with inventory: {'MEMORY_MB': {'total': 7679, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0, 'reserved': 512}, 'VCPU': {'total': 8, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0, 'reserved': 0}, 'DISK_GB': {'total': 79, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9, 'reserved': 0}} update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:176
Oct 02 11:54:09 compute-0 nova_compute[192079]: 2025-10-02 11:54:09.857 2 DEBUG nova.virt.libvirt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 11:54:09 compute-0 nova_compute[192079]: 2025-10-02 11:54:09.859 2 DEBUG nova.virt.libvirt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Libvirt baseline CPU <cpu>
Oct 02 11:54:09 compute-0 nova_compute[192079]:   <arch>x86_64</arch>
Oct 02 11:54:09 compute-0 nova_compute[192079]:   <model>Nehalem</model>
Oct 02 11:54:09 compute-0 nova_compute[192079]:   <vendor>AMD</vendor>
Oct 02 11:54:09 compute-0 nova_compute[192079]:   <topology sockets="8" cores="1" threads="1"/>
Oct 02 11:54:09 compute-0 nova_compute[192079]: </cpu>
Oct 02 11:54:09 compute-0 nova_compute[192079]:  _get_guest_baseline_cpu_features /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12537
Oct 02 11:54:09 compute-0 nova_compute[192079]: 2025-10-02 11:54:09.991 2 DEBUG nova.scheduler.client.report [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Updated inventory for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 with generation 0 in Placement from set_inventory_for_provider using data: {'MEMORY_MB': {'total': 7679, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0, 'reserved': 512}, 'VCPU': {'total': 8, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0, 'reserved': 0}, 'DISK_GB': {'total': 79, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9, 'reserved': 0}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:957
Oct 02 11:54:09 compute-0 nova_compute[192079]: 2025-10-02 11:54:09.992 2 DEBUG nova.compute.provider_tree [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Updating resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708 generation from 0 to 1 during operation: update_inventory _update_generation /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:164
Oct 02 11:54:09 compute-0 nova_compute[192079]: 2025-10-02 11:54:09.992 2 DEBUG nova.compute.provider_tree [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Updating inventory in ProviderTree for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 with inventory: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 0, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:176
Oct 02 11:54:10 compute-0 nova_compute[192079]: 2025-10-02 11:54:10.272 2 DEBUG nova.compute.provider_tree [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Updating resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708 generation from 1 to 2 during operation: update_traits _update_generation /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:164
Oct 02 11:54:10 compute-0 nova_compute[192079]: 2025-10-02 11:54:10.342 2 DEBUG nova.compute.resource_tracker [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 11:54:10 compute-0 nova_compute[192079]: 2025-10-02 11:54:10.343 2 DEBUG oslo_concurrency.lockutils [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.085s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 11:54:10 compute-0 nova_compute[192079]: 2025-10-02 11:54:10.343 2 DEBUG nova.service [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Creating RPC server for service compute start /usr/lib/python3.9/site-packages/nova/service.py:182
Oct 02 11:54:10 compute-0 sshd-session[192447]: Accepted publickey for zuul from 192.168.122.30 port 42376 ssh2: ECDSA SHA256:tAEsFSop2MjuMQQ/UqKU2uCkxqWXGfsM5crdhd6tlI4
Oct 02 11:54:10 compute-0 systemd-logind[827]: New session 27 of user zuul.
Oct 02 11:54:10 compute-0 systemd[1]: Started Session 27 of User zuul.
Oct 02 11:54:10 compute-0 sshd-session[192447]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Oct 02 11:54:10 compute-0 nova_compute[192079]: 2025-10-02 11:54:10.467 2 DEBUG nova.service [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Join ServiceGroup membership for this service compute start /usr/lib/python3.9/site-packages/nova/service.py:199
Oct 02 11:54:10 compute-0 nova_compute[192079]: 2025-10-02 11:54:10.468 2 DEBUG nova.servicegroup.drivers.db [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] DB_Driver: join new ServiceGroup member compute-0.ctlplane.example.com to the compute group, service = <Service: host=compute-0.ctlplane.example.com, binary=nova-compute, manager_class_name=nova.compute.manager.ComputeManager> join /usr/lib/python3.9/site-packages/nova/servicegroup/drivers/db.py:44
Oct 02 11:54:12 compute-0 python3.9[192600]: ansible-ansible.builtin.setup Invoked with gather_subset=['!all', '!min', 'local'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 02 11:54:13 compute-0 sudo[192754]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vhbqvfzzlwcbzddoxidvzubvujsppyuy ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406053.0048268-72-248922271979111/AnsiballZ_systemd_service.py'
Oct 02 11:54:13 compute-0 sudo[192754]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:54:13 compute-0 python3.9[192756]: ansible-ansible.builtin.systemd_service Invoked with daemon_reload=True daemon_reexec=False scope=system no_block=False name=None state=None enabled=None force=None masked=None
Oct 02 11:54:14 compute-0 systemd[1]: Reloading.
Oct 02 11:54:14 compute-0 systemd-sysv-generator[192787]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:54:14 compute-0 systemd-rc-local-generator[192783]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:54:14 compute-0 sudo[192754]: pam_unix(sudo:session): session closed for user root
Oct 02 11:54:15 compute-0 python3.9[192940]: ansible-ansible.builtin.service_facts Invoked
Oct 02 11:54:15 compute-0 network[192957]: You are using 'network' service provided by 'network-scripts', which are now deprecated.
Oct 02 11:54:15 compute-0 network[192958]: 'network-scripts' will be removed from distribution in near future.
Oct 02 11:54:15 compute-0 network[192959]: It is advised to switch to 'NetworkManager' instead for network management.
Oct 02 11:54:17 compute-0 nova_compute[192079]: 2025-10-02 11:54:17.471 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_power_states run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:54:18 compute-0 sudo[193245]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wlyjjquviqvgcmlzstgnmnxskjpokvea ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406058.6667929-129-228081811815030/AnsiballZ_systemd_service.py'
Oct 02 11:54:18 compute-0 sudo[193245]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:54:18 compute-0 podman[193208]: 2025-10-02 11:54:18.986855258 +0000 UTC m=+0.055951668 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, config_id=multipathd, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, container_name=multipathd, org.label-schema.vendor=CentOS, tcib_managed=true)
Oct 02 11:54:19 compute-0 python3.9[193253]: ansible-ansible.builtin.systemd_service Invoked with enabled=False name=tripleo_ceilometer_agent_compute.service state=stopped daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:54:19 compute-0 sudo[193245]: pam_unix(sudo:session): session closed for user root
Oct 02 11:54:20 compute-0 sudo[193407]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fdwpcnvcawordjfaqtfbgyjitbujldnj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406059.7956975-159-267984099710201/AnsiballZ_file.py'
Oct 02 11:54:20 compute-0 sudo[193407]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:54:20 compute-0 python3.9[193409]: ansible-ansible.builtin.file Invoked with path=/usr/lib/systemd/system/tripleo_ceilometer_agent_compute.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:54:20 compute-0 sudo[193407]: pam_unix(sudo:session): session closed for user root
Oct 02 11:54:20 compute-0 rsyslogd[1013]: imjournal: journal files changed, reloading...  [v8.2506.0-2.el9 try https://www.rsyslog.com/e/0 ]
Oct 02 11:54:20 compute-0 rsyslogd[1013]: imjournal: journal files changed, reloading...  [v8.2506.0-2.el9 try https://www.rsyslog.com/e/0 ]
Oct 02 11:54:20 compute-0 nova_compute[192079]: 2025-10-02 11:54:20.888 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_running_deleted_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:54:20 compute-0 sudo[193560]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xfavjlacatuhfuikaruffebdwlzyudcb ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406060.6638393-183-189902182916289/AnsiballZ_file.py'
Oct 02 11:54:20 compute-0 sudo[193560]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:54:21 compute-0 python3.9[193562]: ansible-ansible.builtin.file Invoked with path=/etc/systemd/system/tripleo_ceilometer_agent_compute.service state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:54:21 compute-0 sudo[193560]: pam_unix(sudo:session): session closed for user root
Oct 02 11:54:22 compute-0 sudo[193712]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fhthuheqjussakyfuxniyacyxctjpfvv ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406061.567748-210-245624045264772/AnsiballZ_command.py'
Oct 02 11:54:22 compute-0 sudo[193712]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:54:22 compute-0 python3.9[193714]: ansible-ansible.legacy.command Invoked with _raw_params=if systemctl is-active certmonger.service; then
                                               systemctl disable --now certmonger.service
                                               test -f /etc/systemd/system/certmonger.service || systemctl mask certmonger.service
                                             fi
                                              _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:54:22 compute-0 sudo[193712]: pam_unix(sudo:session): session closed for user root
Oct 02 11:54:23 compute-0 python3.9[193866]: ansible-ansible.builtin.find Invoked with file_type=any hidden=True paths=['/var/lib/certmonger/requests'] patterns=[] read_whole_file=False age_stamp=mtime recurse=False follow=False get_checksum=False checksum_algorithm=sha1 use_regex=False exact_mode=True excludes=None contains=None age=None size=None depth=None mode=None encoding=None limit=None
Oct 02 11:54:23 compute-0 sudo[194016]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fyfroibkymvqzfptwlxmazzkbhispksw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406063.4769669-264-278942260003213/AnsiballZ_systemd_service.py'
Oct 02 11:54:23 compute-0 sudo[194016]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:54:24 compute-0 python3.9[194018]: ansible-ansible.builtin.systemd_service Invoked with daemon_reload=True daemon_reexec=False scope=system no_block=False name=None state=None enabled=None force=None masked=None
Oct 02 11:54:24 compute-0 systemd[1]: Reloading.
Oct 02 11:54:24 compute-0 systemd-rc-local-generator[194043]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:54:24 compute-0 systemd-sysv-generator[194048]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:54:24 compute-0 sudo[194016]: pam_unix(sudo:session): session closed for user root
Oct 02 11:54:25 compute-0 sudo[194203]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ihwsyrmzgmmugzcoqqrztvryzvewgmhh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406064.6787925-288-156302785809230/AnsiballZ_command.py'
Oct 02 11:54:25 compute-0 sudo[194203]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:54:25 compute-0 python3.9[194205]: ansible-ansible.legacy.command Invoked with cmd=/usr/bin/systemctl reset-failed tripleo_ceilometer_agent_compute.service _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True _raw_params=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:54:25 compute-0 sudo[194203]: pam_unix(sudo:session): session closed for user root
Oct 02 11:54:25 compute-0 sudo[194356]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qmapezctkmqrsxvxsexbrpbpbsmlidfz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406065.5782013-315-126455280067131/AnsiballZ_file.py'
Oct 02 11:54:25 compute-0 sudo[194356]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:54:26 compute-0 python3.9[194358]: ansible-ansible.builtin.file Invoked with group=zuul mode=0750 owner=zuul path=/var/lib/openstack/config/telemetry recurse=True setype=container_file_t state=directory force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:54:26 compute-0 sudo[194356]: pam_unix(sudo:session): session closed for user root
Oct 02 11:54:26 compute-0 podman[194359]: 2025-10-02 11:54:26.134667884 +0000 UTC m=+0.058546189 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_id=iscsid, managed_by=edpm_ansible, org.label-schema.license=GPLv2, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=iscsid, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 11:54:26 compute-0 python3.9[194528]: ansible-ansible.builtin.stat Invoked with path=/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:54:27 compute-0 python3.9[194680]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/config/telemetry/ceilometer-host-specific.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:54:28 compute-0 python3.9[194801]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/config/telemetry/ceilometer-host-specific.conf mode=0644 setype=container_file_t src=/home/zuul/.ansible/tmp/ansible-tmp-1759406067.1489446-363-47155425144399/.source.conf follow=False _original_basename=ceilometer-host-specific.conf.j2 checksum=e86e0e43000ce9ccfe5aefbf8e8f2e3d15d05584 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:54:28 compute-0 sudo[194951]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bombipaxucymaionbxlqrmtxkuussmjx ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406068.5486243-408-12819827763017/AnsiballZ_group.py'
Oct 02 11:54:28 compute-0 sudo[194951]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:54:29 compute-0 python3.9[194953]: ansible-ansible.builtin.group Invoked with name=libvirt state=present force=False system=False local=False non_unique=False gid=None gid_min=None gid_max=None
Oct 02 11:54:29 compute-0 sudo[194951]: pam_unix(sudo:session): session closed for user root
Oct 02 11:54:29 compute-0 sudo[195103]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kwrjpasxsivscpljveollxmwqqafqhhy ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406069.5491867-441-266007033782822/AnsiballZ_getent.py'
Oct 02 11:54:29 compute-0 sudo[195103]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:54:30 compute-0 python3.9[195105]: ansible-ansible.builtin.getent Invoked with database=passwd key=ceilometer fail_key=True service=None split=None
Oct 02 11:54:30 compute-0 sudo[195103]: pam_unix(sudo:session): session closed for user root
Oct 02 11:54:30 compute-0 sudo[195256]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ezadongjvffqaoqyosfzfbvobvwqkale ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406070.4092164-465-173692114608151/AnsiballZ_group.py'
Oct 02 11:54:30 compute-0 sudo[195256]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:54:31 compute-0 python3.9[195258]: ansible-ansible.builtin.group Invoked with gid=42405 name=ceilometer state=present force=False system=False local=False non_unique=False gid_min=None gid_max=None
Oct 02 11:54:31 compute-0 groupadd[195259]: group added to /etc/group: name=ceilometer, GID=42405
Oct 02 11:54:31 compute-0 groupadd[195259]: group added to /etc/gshadow: name=ceilometer
Oct 02 11:54:31 compute-0 groupadd[195259]: new group: name=ceilometer, GID=42405
Oct 02 11:54:31 compute-0 sudo[195256]: pam_unix(sudo:session): session closed for user root
Oct 02 11:54:32 compute-0 sudo[195414]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-utsgjhoimbmbhrerarmkkbmncuefozmh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406071.5605514-489-222500546722787/AnsiballZ_user.py'
Oct 02 11:54:32 compute-0 sudo[195414]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:54:32 compute-0 python3.9[195416]: ansible-ansible.builtin.user Invoked with comment=ceilometer user group=ceilometer groups=['libvirt'] name=ceilometer shell=/sbin/nologin state=present uid=42405 non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on compute-0 update_password=always home=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None password_expire_account_disable=None uid_min=None uid_max=None
Oct 02 11:54:32 compute-0 useradd[195418]: new user: name=ceilometer, UID=42405, GID=42405, home=/home/ceilometer, shell=/sbin/nologin, from=/dev/pts/0
Oct 02 11:54:32 compute-0 useradd[195418]: add 'ceilometer' to group 'libvirt'
Oct 02 11:54:32 compute-0 useradd[195418]: add 'ceilometer' to shadow group 'libvirt'
Oct 02 11:54:32 compute-0 sudo[195414]: pam_unix(sudo:session): session closed for user root
Oct 02 11:54:35 compute-0 python3.9[195574]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/config/telemetry/ceilometer.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:54:35 compute-0 python3.9[195695]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/config/telemetry/ceilometer.conf mode=0640 remote_src=False src=/home/zuul/.ansible/tmp/ansible-tmp-1759406074.887483-567-248245620960888/.source.conf _original_basename=ceilometer.conf follow=False checksum=f74f01c63e6cdeca5458ef9aff2a1db5d6a4e4b9 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:54:36 compute-0 podman[195819]: 2025-10-02 11:54:36.484884483 +0000 UTC m=+0.078968683 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_managed=true, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, config_id=ovn_controller, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2)
Oct 02 11:54:36 compute-0 python3.9[195858]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/config/telemetry/polling.yaml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:54:37 compute-0 python3.9[195992]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/config/telemetry/polling.yaml mode=0640 remote_src=False src=/home/zuul/.ansible/tmp/ansible-tmp-1759406076.1098437-567-190698560597249/.source.yaml _original_basename=polling.yaml follow=False checksum=6c8680a286285f2e0ef9fa528ca754765e5ed0e5 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:54:37 compute-0 python3.9[196142]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/config/telemetry/custom.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:54:38 compute-0 python3.9[196263]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/config/telemetry/custom.conf mode=0640 remote_src=False src=/home/zuul/.ansible/tmp/ansible-tmp-1759406077.2901428-567-273003688612888/.source.conf _original_basename=custom.conf follow=False checksum=838b8b0a7d7f72e55ab67d39f32e3cb3eca2139b backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:54:38 compute-0 podman[196264]: 2025-10-02 11:54:38.326327958 +0000 UTC m=+0.053408276 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, org.label-schema.build-date=20251001, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent)
Oct 02 11:54:39 compute-0 python3.9[196432]: ansible-ansible.builtin.stat Invoked with path=/var/lib/openstack/certs/telemetry/default/tls.crt follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:54:39 compute-0 python3.9[196584]: ansible-ansible.builtin.stat Invoked with path=/var/lib/openstack/certs/telemetry/default/tls.key follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:54:40 compute-0 python3.9[196736]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:54:40 compute-0 auditd[709]: Audit daemon rotating log files
Oct 02 11:54:41 compute-0 python3.9[196857]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json mode=420 src=/home/zuul/.ansible/tmp/ansible-tmp-1759406079.9282296-744-276189333285581/.source.json follow=False _original_basename=ceilometer-agent-compute.json.j2 checksum=264d11e8d3809e7ef745878dce7edd46098e25b2 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:54:41 compute-0 python3.9[197007]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/config/telemetry/ceilometer-host-specific.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:54:42 compute-0 python3.9[197083]: ansible-ansible.legacy.file Invoked with mode=420 dest=/var/lib/openstack/config/telemetry/ceilometer-host-specific.conf _original_basename=ceilometer-host-specific.conf.j2 recurse=False state=file path=/var/lib/openstack/config/telemetry/ceilometer-host-specific.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:54:42 compute-0 python3.9[197233]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/config/telemetry/ceilometer_agent_compute.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:54:43 compute-0 python3.9[197354]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/config/telemetry/ceilometer_agent_compute.json mode=420 src=/home/zuul/.ansible/tmp/ansible-tmp-1759406082.2272425-744-152946292609081/.source.json follow=False _original_basename=ceilometer_agent_compute.json.j2 checksum=17453a32c9d181134878b3e453cb84c3cd9bd67d backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:54:43 compute-0 python3.9[197504]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:54:44 compute-0 python3.9[197625]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml mode=420 src=/home/zuul/.ansible/tmp/ansible-tmp-1759406083.4278262-744-1228870807562/.source.yaml follow=False _original_basename=ceilometer_prom_exporter.yaml.j2 checksum=10157c879411ee6023e506dc85a343cedc52700f backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:54:44 compute-0 python3.9[197775]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/config/telemetry/firewall.yaml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:54:45 compute-0 python3.9[197896]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/config/telemetry/firewall.yaml mode=420 src=/home/zuul/.ansible/tmp/ansible-tmp-1759406084.5413373-744-117028533204124/.source.yaml follow=False _original_basename=firewall.yaml.j2 checksum=d942d984493b214bda2913f753ff68cdcedff00e backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:54:46 compute-0 python3.9[198046]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/config/telemetry/node_exporter.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:54:46 compute-0 python3.9[198167]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/config/telemetry/node_exporter.json mode=420 src=/home/zuul/.ansible/tmp/ansible-tmp-1759406085.8136435-744-46963914789256/.source.json follow=False _original_basename=node_exporter.json.j2 checksum=6e4982940d2bfae88404914dfaf72552f6356d81 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:54:47 compute-0 python3.9[198317]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/config/telemetry/node_exporter.yaml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:54:48 compute-0 python3.9[198438]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/config/telemetry/node_exporter.yaml mode=420 src=/home/zuul/.ansible/tmp/ansible-tmp-1759406087.0697277-744-46837835678690/.source.yaml follow=False _original_basename=node_exporter.yaml.j2 checksum=81d906d3e1e8c4f8367276f5d3a67b80ca7e989e backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:54:49 compute-0 python3.9[198588]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/config/telemetry/openstack_network_exporter.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:54:49 compute-0 podman[198589]: 2025-10-02 11:54:49.414576447 +0000 UTC m=+0.315327502 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=multipathd, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.name=CentOS Stream 9 Base Image, config_id=multipathd, managed_by=edpm_ansible, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS)
Oct 02 11:54:49 compute-0 python3.9[198729]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/config/telemetry/openstack_network_exporter.json mode=420 src=/home/zuul/.ansible/tmp/ansible-tmp-1759406088.3705025-744-275843816958454/.source.json follow=False _original_basename=openstack_network_exporter.json.j2 checksum=d474f1e4c3dbd24762592c51cbe5311f0a037273 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:54:50 compute-0 python3.9[198879]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:54:51 compute-0 python3.9[199000]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml mode=420 src=/home/zuul/.ansible/tmp/ansible-tmp-1759406090.124865-744-169401933869696/.source.yaml follow=False _original_basename=openstack_network_exporter.yaml.j2 checksum=2b6bd0891e609bf38a73282f42888052b750bed6 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:54:51 compute-0 python3.9[199150]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/config/telemetry/podman_exporter.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:54:52 compute-0 python3.9[199271]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/config/telemetry/podman_exporter.json mode=420 src=/home/zuul/.ansible/tmp/ansible-tmp-1759406091.4607334-744-281062900366340/.source.json follow=False _original_basename=podman_exporter.json.j2 checksum=e342121a88f67e2bae7ebc05d1e6d350470198a5 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:54:53 compute-0 python3.9[199421]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/config/telemetry/podman_exporter.yaml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:54:53 compute-0 python3.9[199542]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/config/telemetry/podman_exporter.yaml mode=420 src=/home/zuul/.ansible/tmp/ansible-tmp-1759406092.7126365-744-192355594731026/.source.yaml follow=False _original_basename=podman_exporter.yaml.j2 checksum=7ccb5eca2ff1dc337c3f3ecbbff5245af7149c47 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:54:54 compute-0 python3.9[199692]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/config/telemetry/node_exporter.yaml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:54:55 compute-0 python3.9[199768]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/var/lib/openstack/config/telemetry/node_exporter.yaml _original_basename=node_exporter.yaml.j2 recurse=False state=file path=/var/lib/openstack/config/telemetry/node_exporter.yaml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:54:55 compute-0 python3.9[199918]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/config/telemetry/podman_exporter.yaml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:54:56 compute-0 podman[199968]: 2025-10-02 11:54:56.253643679 +0000 UTC m=+0.070682693 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, io.buildah.version=1.41.3, container_name=iscsid, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=iscsid)
Oct 02 11:54:56 compute-0 python3.9[200005]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/var/lib/openstack/config/telemetry/podman_exporter.yaml _original_basename=podman_exporter.yaml.j2 recurse=False state=file path=/var/lib/openstack/config/telemetry/podman_exporter.yaml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:54:57 compute-0 python3.9[200164]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:54:57 compute-0 python3.9[200240]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml _original_basename=ceilometer_prom_exporter.yaml.j2 recurse=False state=file path=/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:54:58 compute-0 sudo[200390]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dudvlelorqgubvzbelbpzismrdkhsqdg ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406097.8302655-1311-246413341089483/AnsiballZ_file.py'
Oct 02 11:54:58 compute-0 sudo[200390]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:54:58 compute-0 python3.9[200392]: ansible-ansible.builtin.file Invoked with group=ceilometer mode=0644 owner=ceilometer path=/var/lib/openstack/certs/telemetry/default/tls.crt recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False state=None _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:54:58 compute-0 sudo[200390]: pam_unix(sudo:session): session closed for user root
Oct 02 11:54:58 compute-0 sudo[200542]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wguxofaiqhympwctyfjwnrnmxligxahc ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406098.6858375-1335-18977162534037/AnsiballZ_file.py'
Oct 02 11:54:58 compute-0 sudo[200542]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:54:59 compute-0 python3.9[200544]: ansible-ansible.builtin.file Invoked with group=ceilometer mode=0644 owner=ceilometer path=/var/lib/openstack/certs/telemetry/default/tls.key recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False state=None _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:54:59 compute-0 sudo[200542]: pam_unix(sudo:session): session closed for user root
Oct 02 11:54:59 compute-0 sudo[200694]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dlyrtheedrhhxhtjlunhqbumosrqodwj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406099.4378111-1359-221833124052886/AnsiballZ_file.py'
Oct 02 11:54:59 compute-0 sudo[200694]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:54:59 compute-0 python3.9[200696]: ansible-ansible.builtin.file Invoked with group=zuul mode=0755 owner=zuul path=/var/lib/openstack/healthchecks setype=container_file_t state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:54:59 compute-0 sudo[200694]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:00 compute-0 sudo[200846]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wehbssryyybtcndpwzmldoiwmqdmvpdf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406100.169157-1383-106755198757177/AnsiballZ_systemd_service.py'
Oct 02 11:55:00 compute-0 sudo[200846]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:00 compute-0 python3.9[200848]: ansible-ansible.builtin.systemd_service Invoked with enabled=True name=podman.socket state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:55:00 compute-0 systemd[1]: Reloading.
Oct 02 11:55:00 compute-0 systemd-rc-local-generator[200876]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:55:00 compute-0 systemd-sysv-generator[200880]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:55:01 compute-0 systemd[1]: Listening on Podman API Socket.
Oct 02 11:55:01 compute-0 sudo[200846]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:01 compute-0 sudo[201036]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ljewtawzuidqcwpilekfbqwyecccpmtr ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406101.463169-1410-141676490533211/AnsiballZ_stat.py'
Oct 02 11:55:01 compute-0 sudo[201036]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:01 compute-0 python3.9[201038]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/healthchecks/ceilometer_agent_compute/healthcheck follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:55:01 compute-0 sudo[201036]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:55:02.195 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 11:55:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:55:02.197 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.002s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 11:55:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:55:02.197 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 11:55:02 compute-0 sudo[201159]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-enefbqxwlcndvqdtwhgusipkkwuqzjyi ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406101.463169-1410-141676490533211/AnsiballZ_copy.py'
Oct 02 11:55:02 compute-0 sudo[201159]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:02 compute-0 python3.9[201161]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/healthchecks/ceilometer_agent_compute/ group=zuul mode=0700 owner=zuul setype=container_file_t src=/home/zuul/.ansible/tmp/ansible-tmp-1759406101.463169-1410-141676490533211/.source _original_basename=healthcheck follow=False checksum=ebb343c21fce35a02591a9351660cb7035a47d42 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:55:02 compute-0 sudo[201159]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:02 compute-0 nova_compute[192079]: 2025-10-02 11:55:02.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:55:02 compute-0 nova_compute[192079]: 2025-10-02 11:55:02.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:55:02 compute-0 nova_compute[192079]: 2025-10-02 11:55:02.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 11:55:02 compute-0 nova_compute[192079]: 2025-10-02 11:55:02.667 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 11:55:02 compute-0 nova_compute[192079]: 2025-10-02 11:55:02.695 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 11:55:02 compute-0 nova_compute[192079]: 2025-10-02 11:55:02.695 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:55:02 compute-0 nova_compute[192079]: 2025-10-02 11:55:02.695 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:55:02 compute-0 nova_compute[192079]: 2025-10-02 11:55:02.695 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:55:02 compute-0 nova_compute[192079]: 2025-10-02 11:55:02.696 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:55:02 compute-0 nova_compute[192079]: 2025-10-02 11:55:02.696 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:55:02 compute-0 nova_compute[192079]: 2025-10-02 11:55:02.696 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:55:02 compute-0 nova_compute[192079]: 2025-10-02 11:55:02.697 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 11:55:02 compute-0 nova_compute[192079]: 2025-10-02 11:55:02.697 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:55:02 compute-0 nova_compute[192079]: 2025-10-02 11:55:02.719 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 11:55:02 compute-0 nova_compute[192079]: 2025-10-02 11:55:02.719 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 11:55:02 compute-0 nova_compute[192079]: 2025-10-02 11:55:02.719 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 11:55:02 compute-0 nova_compute[192079]: 2025-10-02 11:55:02.719 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 11:55:02 compute-0 sudo[201235]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lbohrdwvstfdedkioiubhiqtzdornyyl ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406101.463169-1410-141676490533211/AnsiballZ_stat.py'
Oct 02 11:55:02 compute-0 sudo[201235]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:02 compute-0 nova_compute[192079]: 2025-10-02 11:55:02.856 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 11:55:02 compute-0 nova_compute[192079]: 2025-10-02 11:55:02.857 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=6154MB free_disk=73.66875839233398GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 11:55:02 compute-0 nova_compute[192079]: 2025-10-02 11:55:02.857 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 11:55:02 compute-0 nova_compute[192079]: 2025-10-02 11:55:02.857 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 11:55:02 compute-0 nova_compute[192079]: 2025-10-02 11:55:02.914 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 11:55:02 compute-0 nova_compute[192079]: 2025-10-02 11:55:02.915 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 11:55:02 compute-0 nova_compute[192079]: 2025-10-02 11:55:02.944 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 11:55:02 compute-0 python3.9[201237]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/healthchecks/ceilometer_agent_compute/healthcheck.future follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:55:02 compute-0 sudo[201235]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:02 compute-0 nova_compute[192079]: 2025-10-02 11:55:02.990 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 0, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 11:55:02 compute-0 nova_compute[192079]: 2025-10-02 11:55:02.991 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 11:55:02 compute-0 nova_compute[192079]: 2025-10-02 11:55:02.992 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.135s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 11:55:03 compute-0 sudo[201358]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ogwmeneawclzpapugtplryjgsqvrhlrd ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406101.463169-1410-141676490533211/AnsiballZ_copy.py'
Oct 02 11:55:03 compute-0 sudo[201358]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:03 compute-0 python3.9[201360]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/healthchecks/ceilometer_agent_compute/ group=zuul mode=0700 owner=zuul setype=container_file_t src=/home/zuul/.ansible/tmp/ansible-tmp-1759406101.463169-1410-141676490533211/.source.future _original_basename=healthcheck.future follow=False checksum=d500a98192f4ddd70b4dfdc059e2d81aed36a294 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:55:03 compute-0 sudo[201358]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:04 compute-0 sudo[201510]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tpqfgdphunlcjumyklmovdsgzuqqnqau ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406103.8867385-1494-132611074548529/AnsiballZ_container_config_data.py'
Oct 02 11:55:04 compute-0 sudo[201510]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:04 compute-0 python3.9[201512]: ansible-container_config_data Invoked with config_overrides={} config_path=/var/lib/openstack/config/telemetry config_pattern=ceilometer_agent_compute.json debug=False
Oct 02 11:55:04 compute-0 sudo[201510]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:05 compute-0 sudo[201662]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qxudnaejhozqjbdbeotwihqlcwqkrgrw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406104.939913-1521-246032077525369/AnsiballZ_container_config_hash.py'
Oct 02 11:55:05 compute-0 sudo[201662]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:05 compute-0 python3.9[201664]: ansible-container_config_hash Invoked with check_mode=False config_vol_prefix=/var/lib/config-data
Oct 02 11:55:05 compute-0 sudo[201662]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:06 compute-0 sudo[201825]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fdzqpopdynpuvqdxmmbhxldesoqrqbqu ; /usr/bin/python3 /home/zuul/.ansible/tmp/ansible-tmp-1759406105.956363-1551-79706049251134/AnsiballZ_edpm_container_manage.py'
Oct 02 11:55:06 compute-0 sudo[201825]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:06 compute-0 podman[201788]: 2025-10-02 11:55:06.677849691 +0000 UTC m=+0.083256962 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, tcib_managed=true, config_id=ovn_controller, container_name=ovn_controller, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0)
Oct 02 11:55:06 compute-0 python3[201834]: ansible-edpm_container_manage Invoked with concurrency=1 config_dir=/var/lib/openstack/config/telemetry config_id=edpm config_overrides={} config_patterns=ceilometer_agent_compute.json log_base_path=/var/log/containers/stdouts debug=False
Oct 02 11:55:07 compute-0 podman[201876]: 2025-10-02 11:55:07.049700845 +0000 UTC m=+0.025898837 image pull 5f0622bc7c13827171d93b3baf72157e23d24d44579ad79fe3a89ad88180a4bb quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified
Oct 02 11:55:07 compute-0 podman[201876]: 2025-10-02 11:55:07.628279769 +0000 UTC m=+0.604477661 container create 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, org.label-schema.build-date=20251001, container_name=ceilometer_agent_compute, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_id=edpm, 
io.buildah.version=1.41.3)
Oct 02 11:55:07 compute-0 python3[201834]: ansible-edpm_container_manage PODMAN-CONTAINER-DEBUG: podman create --name ceilometer_agent_compute --conmon-pidfile /run/ceilometer_agent_compute.pid --env KOLLA_CONFIG_STRATEGY=COPY_ALWAYS --env OS_ENDPOINT_TYPE=internal --healthcheck-command /openstack/healthcheck compute --label config_id=edpm --label container_name=ceilometer_agent_compute --label managed_by=edpm_ansible --label config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']} --log-driver journald --log-level info --network host --security-opt label:type:ceilometer_polling_t --user ceilometer --volume /var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z --volume 
/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z --volume /run/libvirt:/run/libvirt:shared,ro --volume /etc/hosts:/etc/hosts:ro --volume /etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro --volume /etc/localtime:/etc/localtime:ro --volume /etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro --volume /var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z --volume /var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z --volume /var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z --volume /dev/log:/dev/log --volume /var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified kolla_start
Oct 02 11:55:07 compute-0 sudo[201825]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:08 compute-0 sudo[202064]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vecqpyeneuldpkzgjnntieqrlmnoygzp ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406107.9703279-1575-85927836132962/AnsiballZ_stat.py'
Oct 02 11:55:08 compute-0 sudo[202064]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:08 compute-0 python3.9[202066]: ansible-ansible.builtin.stat Invoked with path=/etc/sysconfig/podman_drop_in follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:55:08 compute-0 sudo[202064]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:09 compute-0 sudo[202228]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ajbnpgwfzenprskylpdrrqirxwnjkget ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406108.778016-1602-202154986543436/AnsiballZ_file.py'
Oct 02 11:55:09 compute-0 sudo[202228]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:09 compute-0 podman[202192]: 2025-10-02 11:55:09.046619115 +0000 UTC m=+0.054000822 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, 
config_id=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=ovn_metadata_agent, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3)
Oct 02 11:55:09 compute-0 python3.9[202237]: ansible-file Invoked with path=/etc/systemd/system/edpm_ceilometer_agent_compute.requires state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:55:09 compute-0 sudo[202228]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:09 compute-0 sudo[202388]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ckpgmnovrazmkxdahtvvgacdjmlwwbrw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406109.3035371-1602-37714151355413/AnsiballZ_copy.py'
Oct 02 11:55:09 compute-0 sudo[202388]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:10 compute-0 python3.9[202390]: ansible-copy Invoked with src=/home/zuul/.ansible/tmp/ansible-tmp-1759406109.3035371-1602-37714151355413/source dest=/etc/systemd/system/edpm_ceilometer_agent_compute.service mode=0644 owner=root group=root backup=False force=True remote_src=False follow=False unsafe_writes=False _original_basename=None content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None checksum=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:55:10 compute-0 sudo[202388]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:10 compute-0 sudo[202464]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jerqaxppsqwlkbplzgotmztgbcnafket ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406109.3035371-1602-37714151355413/AnsiballZ_systemd.py'
Oct 02 11:55:10 compute-0 sudo[202464]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:10 compute-0 python3.9[202466]: ansible-systemd Invoked with daemon_reload=True daemon_reexec=False scope=system no_block=False name=None state=None enabled=None force=None masked=None
Oct 02 11:55:10 compute-0 systemd[1]: Reloading.
Oct 02 11:55:11 compute-0 systemd-sysv-generator[202497]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:55:11 compute-0 systemd-rc-local-generator[202492]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:55:11 compute-0 sudo[202464]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:11 compute-0 sudo[202575]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wfekiwbaamfeytychvzurjzdkwdlvxfp ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406109.3035371-1602-37714151355413/AnsiballZ_systemd.py'
Oct 02 11:55:11 compute-0 sudo[202575]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:11 compute-0 python3.9[202577]: ansible-systemd Invoked with state=restarted name=edpm_ceilometer_agent_compute.service enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:55:12 compute-0 systemd[1]: Reloading.
Oct 02 11:55:12 compute-0 systemd-rc-local-generator[202599]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:55:12 compute-0 systemd-sysv-generator[202607]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:55:12 compute-0 systemd[1]: Starting ceilometer_agent_compute container...
Oct 02 11:55:12 compute-0 systemd[1]: Started libcrun container.
Oct 02 11:55:12 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/db1a43e7f9bde62cb3b423268535817a444f1d68c2ac7ee32e080fad800a17bc/merged/etc/ceilometer/ceilometer_prom_exporter.yaml supports timestamps until 2038 (0x7fffffff)
Oct 02 11:55:12 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/db1a43e7f9bde62cb3b423268535817a444f1d68c2ac7ee32e080fad800a17bc/merged/etc/ceilometer/tls supports timestamps until 2038 (0x7fffffff)
Oct 02 11:55:12 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/db1a43e7f9bde62cb3b423268535817a444f1d68c2ac7ee32e080fad800a17bc/merged/var/lib/openstack/config supports timestamps until 2038 (0x7fffffff)
Oct 02 11:55:12 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/db1a43e7f9bde62cb3b423268535817a444f1d68c2ac7ee32e080fad800a17bc/merged/var/lib/kolla/config_files/config.json supports timestamps until 2038 (0x7fffffff)
Oct 02 11:55:12 compute-0 systemd[1]: Started /usr/bin/podman healthcheck run 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be.
Oct 02 11:55:13 compute-0 podman[202616]: 2025-10-02 11:55:13.182551334 +0000 UTC m=+0.675693839 container init 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, tcib_managed=true, container_name=ceilometer_agent_compute, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, 
config_id=edpm)
Oct 02 11:55:13 compute-0 ceilometer_agent_compute[202632]: + sudo -E kolla_set_configs
Oct 02 11:55:13 compute-0 sudo[202638]: ceilometer : PWD=/ ; USER=root ; COMMAND=/usr/local/bin/kolla_set_configs
Oct 02 11:55:13 compute-0 ceilometer_agent_compute[202632]: sudo: unable to send audit message: Operation not permitted
Oct 02 11:55:13 compute-0 sudo[202638]: pam_systemd(sudo:session): Failed to connect to system bus: No such file or directory
Oct 02 11:55:13 compute-0 sudo[202638]: pam_unix(sudo:session): session opened for user root(uid=0) by (uid=42405)
Oct 02 11:55:13 compute-0 podman[202616]: 2025-10-02 11:55:13.213599132 +0000 UTC m=+0.706741587 container start 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, container_name=ceilometer_agent_compute, 
io.buildah.version=1.41.3)
Oct 02 11:55:13 compute-0 ceilometer_agent_compute[202632]: INFO:__main__:Loading config file at /var/lib/kolla/config_files/config.json
Oct 02 11:55:13 compute-0 ceilometer_agent_compute[202632]: INFO:__main__:Validating config file
Oct 02 11:55:13 compute-0 ceilometer_agent_compute[202632]: INFO:__main__:Kolla config strategy set to: COPY_ALWAYS
Oct 02 11:55:13 compute-0 ceilometer_agent_compute[202632]: INFO:__main__:Copying service configuration files
Oct 02 11:55:13 compute-0 ceilometer_agent_compute[202632]: INFO:__main__:Deleting /etc/ceilometer/ceilometer.conf
Oct 02 11:55:13 compute-0 ceilometer_agent_compute[202632]: INFO:__main__:Copying /var/lib/openstack/config/ceilometer.conf to /etc/ceilometer/ceilometer.conf
Oct 02 11:55:13 compute-0 ceilometer_agent_compute[202632]: INFO:__main__:Setting permission for /etc/ceilometer/ceilometer.conf
Oct 02 11:55:13 compute-0 ceilometer_agent_compute[202632]: INFO:__main__:Deleting /etc/ceilometer/polling.yaml
Oct 02 11:55:13 compute-0 ceilometer_agent_compute[202632]: INFO:__main__:Copying /var/lib/openstack/config/polling.yaml to /etc/ceilometer/polling.yaml
Oct 02 11:55:13 compute-0 ceilometer_agent_compute[202632]: INFO:__main__:Setting permission for /etc/ceilometer/polling.yaml
Oct 02 11:55:13 compute-0 ceilometer_agent_compute[202632]: INFO:__main__:Copying /var/lib/openstack/config/custom.conf to /etc/ceilometer/ceilometer.conf.d/01-ceilometer-custom.conf
Oct 02 11:55:13 compute-0 ceilometer_agent_compute[202632]: INFO:__main__:Setting permission for /etc/ceilometer/ceilometer.conf.d/01-ceilometer-custom.conf
Oct 02 11:55:13 compute-0 ceilometer_agent_compute[202632]: INFO:__main__:Copying /var/lib/openstack/config/ceilometer-host-specific.conf to /etc/ceilometer/ceilometer.conf.d/02-ceilometer-host-specific.conf
Oct 02 11:55:13 compute-0 ceilometer_agent_compute[202632]: INFO:__main__:Setting permission for /etc/ceilometer/ceilometer.conf.d/02-ceilometer-host-specific.conf
Oct 02 11:55:13 compute-0 ceilometer_agent_compute[202632]: INFO:__main__:Writing out command to execute
Oct 02 11:55:13 compute-0 sudo[202638]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:13 compute-0 ceilometer_agent_compute[202632]: ++ cat /run_command
Oct 02 11:55:13 compute-0 ceilometer_agent_compute[202632]: + CMD='/usr/bin/ceilometer-polling --polling-namespaces compute --logfile /dev/stdout'
Oct 02 11:55:13 compute-0 ceilometer_agent_compute[202632]: + ARGS=
Oct 02 11:55:13 compute-0 ceilometer_agent_compute[202632]: + sudo kolla_copy_cacerts
Oct 02 11:55:13 compute-0 sudo[202653]: ceilometer : PWD=/ ; USER=root ; COMMAND=/usr/local/bin/kolla_copy_cacerts
Oct 02 11:55:13 compute-0 ceilometer_agent_compute[202632]: sudo: unable to send audit message: Operation not permitted
Oct 02 11:55:13 compute-0 sudo[202653]: pam_systemd(sudo:session): Failed to connect to system bus: No such file or directory
Oct 02 11:55:13 compute-0 sudo[202653]: pam_unix(sudo:session): session opened for user root(uid=0) by (uid=42405)
Oct 02 11:55:13 compute-0 sudo[202653]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:13 compute-0 ceilometer_agent_compute[202632]: + [[ ! -n '' ]]
Oct 02 11:55:13 compute-0 ceilometer_agent_compute[202632]: + . kolla_extend_start
Oct 02 11:55:13 compute-0 ceilometer_agent_compute[202632]: Running command: '/usr/bin/ceilometer-polling --polling-namespaces compute --logfile /dev/stdout'
Oct 02 11:55:13 compute-0 ceilometer_agent_compute[202632]: + echo 'Running command: '\''/usr/bin/ceilometer-polling --polling-namespaces compute --logfile /dev/stdout'\'''
Oct 02 11:55:13 compute-0 ceilometer_agent_compute[202632]: + umask 0022
Oct 02 11:55:13 compute-0 ceilometer_agent_compute[202632]: + exec /usr/bin/ceilometer-polling --polling-namespaces compute --logfile /dev/stdout
Oct 02 11:55:13 compute-0 podman[202616]: ceilometer_agent_compute
Oct 02 11:55:13 compute-0 systemd[1]: Started ceilometer_agent_compute container.
Oct 02 11:55:13 compute-0 sudo[202575]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:13 compute-0 podman[202639]: 2025-10-02 11:55:13.461746367 +0000 UTC m=+0.239380464 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=starting, health_failing_streak=1, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=ceilometer_agent_compute, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=edpm, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', 
'/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 11:55:13 compute-0 systemd[1]: 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be-2ce81449831fb250.service: Main process exited, code=exited, status=1/FAILURE
Oct 02 11:55:13 compute-0 systemd[1]: 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be-2ce81449831fb250.service: Failed with result 'exit-code'.
Oct 02 11:55:13 compute-0 sudo[202813]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kidxuarqvvehltjdwweclyspiukfryyn ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406113.5590527-1674-34489722898515/AnsiballZ_systemd.py'
Oct 02 11:55:13 compute-0 sudo[202813]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.124 2 DEBUG cotyledon.oslo_config_glue [-] Full set of CONF: _load_service_manager_options /usr/lib/python3.9/site-packages/cotyledon/oslo_config_glue.py:40
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.124 2 DEBUG cotyledon.oslo_config_glue [-] ******************************************************************************** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2589
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.124 2 DEBUG cotyledon.oslo_config_glue [-] Configuration options gathered from: log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2590
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.124 2 DEBUG cotyledon.oslo_config_glue [-] command line args: ['--polling-namespaces', 'compute', '--logfile', '/dev/stdout'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2591
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.124 2 DEBUG cotyledon.oslo_config_glue [-] config files: ['/etc/ceilometer/ceilometer.conf'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2592
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.124 2 DEBUG cotyledon.oslo_config_glue [-] ================================================================================ log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2594
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.124 2 DEBUG cotyledon.oslo_config_glue [-] batch_size                     = 50 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.125 2 DEBUG cotyledon.oslo_config_glue [-] cfg_file                       = polling.yaml log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.125 2 DEBUG cotyledon.oslo_config_glue [-] config_dir                     = ['/etc/ceilometer/ceilometer.conf.d'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.125 2 DEBUG cotyledon.oslo_config_glue [-] config_file                    = ['/etc/ceilometer/ceilometer.conf'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.125 2 DEBUG cotyledon.oslo_config_glue [-] config_source                  = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.125 2 DEBUG cotyledon.oslo_config_glue [-] debug                          = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.125 2 DEBUG cotyledon.oslo_config_glue [-] default_log_levels             = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'futurist=INFO', 'neutronclient=INFO', 'keystoneclient=INFO'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.125 2 DEBUG cotyledon.oslo_config_glue [-] event_pipeline_cfg_file        = event_pipeline.yaml log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.125 2 DEBUG cotyledon.oslo_config_glue [-] graceful_shutdown_timeout      = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.125 2 DEBUG cotyledon.oslo_config_glue [-] host                           = compute-0.ctlplane.example.com log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.125 2 DEBUG cotyledon.oslo_config_glue [-] http_timeout                   = 600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.126 2 DEBUG cotyledon.oslo_config_glue [-] hypervisor_inspector           = libvirt log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.126 2 DEBUG cotyledon.oslo_config_glue [-] instance_format                = [instance: %(uuid)s]  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.126 2 DEBUG cotyledon.oslo_config_glue [-] instance_uuid_format           = [instance: %(uuid)s]  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.126 2 DEBUG cotyledon.oslo_config_glue [-] libvirt_type                   = kvm log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.126 2 DEBUG cotyledon.oslo_config_glue [-] libvirt_uri                    =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.126 2 DEBUG cotyledon.oslo_config_glue [-] log_config_append              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.126 2 DEBUG cotyledon.oslo_config_glue [-] log_date_format                = %Y-%m-%d %H:%M:%S log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.126 2 DEBUG cotyledon.oslo_config_glue [-] log_dir                        = /var/log/ceilometer log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.126 2 DEBUG cotyledon.oslo_config_glue [-] log_file                       = /dev/stdout log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.126 2 DEBUG cotyledon.oslo_config_glue [-] log_options                    = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.126 2 DEBUG cotyledon.oslo_config_glue [-] log_rotate_interval            = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.126 2 DEBUG cotyledon.oslo_config_glue [-] log_rotate_interval_type       = days log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.127 2 DEBUG cotyledon.oslo_config_glue [-] log_rotation_type              = none log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.127 2 DEBUG cotyledon.oslo_config_glue [-] logging_context_format_string  = %(asctime)s.%(msecs)03d %(process)d %(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(user_identity)s] %(instance)s%(message)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.127 2 DEBUG cotyledon.oslo_config_glue [-] logging_debug_format_suffix    = %(funcName)s %(pathname)s:%(lineno)d log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.127 2 DEBUG cotyledon.oslo_config_glue [-] logging_default_format_string  = %(asctime)s.%(msecs)03d %(process)d %(levelname)s %(name)s [-] %(instance)s%(message)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.127 2 DEBUG cotyledon.oslo_config_glue [-] logging_exception_prefix       = %(asctime)s.%(msecs)03d %(process)d ERROR %(name)s %(instance)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.127 2 DEBUG cotyledon.oslo_config_glue [-] logging_user_identity_format   = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.127 2 DEBUG cotyledon.oslo_config_glue [-] max_logfile_count              = 30 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.127 2 DEBUG cotyledon.oslo_config_glue [-] max_logfile_size_mb            = 200 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.127 2 DEBUG cotyledon.oslo_config_glue [-] max_parallel_requests          = 64 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.127 2 DEBUG cotyledon.oslo_config_glue [-] partitioning_group_prefix      = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.127 2 DEBUG cotyledon.oslo_config_glue [-] pipeline_cfg_file              = pipeline.yaml log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.127 2 DEBUG cotyledon.oslo_config_glue [-] polling_namespaces             = ['compute'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.128 2 DEBUG cotyledon.oslo_config_glue [-] pollsters_definitions_dirs     = ['/etc/ceilometer/pollsters.d'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.128 2 DEBUG cotyledon.oslo_config_glue [-] publish_errors                 = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.128 2 DEBUG cotyledon.oslo_config_glue [-] rate_limit_burst               = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.128 2 DEBUG cotyledon.oslo_config_glue [-] rate_limit_except_level        = CRITICAL log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.128 2 DEBUG cotyledon.oslo_config_glue [-] rate_limit_interval            = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.128 2 DEBUG cotyledon.oslo_config_glue [-] reseller_prefix                = AUTH_ log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.128 2 DEBUG cotyledon.oslo_config_glue [-] reserved_metadata_keys         = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.128 2 DEBUG cotyledon.oslo_config_glue [-] reserved_metadata_length       = 256 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.128 2 DEBUG cotyledon.oslo_config_glue [-] reserved_metadata_namespace    = ['metering.'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.128 2 DEBUG cotyledon.oslo_config_glue [-] rootwrap_config                = /etc/ceilometer/rootwrap.conf log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.128 2 DEBUG cotyledon.oslo_config_glue [-] sample_source                  = openstack log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.128 2 DEBUG cotyledon.oslo_config_glue [-] syslog_log_facility            = LOG_USER log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.129 2 DEBUG cotyledon.oslo_config_glue [-] tenant_name_discovery          = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.129 2 DEBUG cotyledon.oslo_config_glue [-] use_eventlog                   = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.129 2 DEBUG cotyledon.oslo_config_glue [-] use_journal                    = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.129 2 DEBUG cotyledon.oslo_config_glue [-] use_json                       = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.129 2 DEBUG cotyledon.oslo_config_glue [-] use_stderr                     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.129 2 DEBUG cotyledon.oslo_config_glue [-] use_syslog                     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.129 2 DEBUG cotyledon.oslo_config_glue [-] watch_log_file                 = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.129 2 DEBUG cotyledon.oslo_config_glue [-] compute.instance_discovery_method = libvirt_metadata log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.129 2 DEBUG cotyledon.oslo_config_glue [-] compute.resource_cache_expiry  = 3600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.129 2 DEBUG cotyledon.oslo_config_glue [-] compute.resource_update_interval = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.130 2 DEBUG cotyledon.oslo_config_glue [-] coordination.backend_url       = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.130 2 DEBUG cotyledon.oslo_config_glue [-] event.definitions_cfg_file     = event_definitions.yaml log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.130 2 DEBUG cotyledon.oslo_config_glue [-] event.drop_unmatched_notifications = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.130 2 DEBUG cotyledon.oslo_config_glue [-] event.store_raw                = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.130 2 DEBUG cotyledon.oslo_config_glue [-] ipmi.node_manager_init_retry   = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.130 2 DEBUG cotyledon.oslo_config_glue [-] ipmi.polling_retry             = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.130 2 DEBUG cotyledon.oslo_config_glue [-] meter.meter_definitions_dirs   = ['/etc/ceilometer/meters.d', '/usr/lib/python3.9/site-packages/ceilometer/data/meters.d'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.130 2 DEBUG cotyledon.oslo_config_glue [-] monasca.archive_on_failure     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.130 2 DEBUG cotyledon.oslo_config_glue [-] monasca.archive_path           = mon_pub_failures.txt log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.130 2 DEBUG cotyledon.oslo_config_glue [-] monasca.auth_section           = service_credentials log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.130 2 DEBUG cotyledon.oslo_config_glue [-] monasca.auth_type              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.131 2 DEBUG cotyledon.oslo_config_glue [-] monasca.batch_count            = 1000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.131 2 DEBUG cotyledon.oslo_config_glue [-] monasca.batch_max_retries      = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.131 2 DEBUG cotyledon.oslo_config_glue [-] monasca.batch_mode             = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.131 2 DEBUG cotyledon.oslo_config_glue [-] monasca.batch_polling_interval = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.131 2 DEBUG cotyledon.oslo_config_glue [-] monasca.batch_timeout          = 15 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.131 2 DEBUG cotyledon.oslo_config_glue [-] monasca.cafile                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.131 2 DEBUG cotyledon.oslo_config_glue [-] monasca.certfile               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.131 2 DEBUG cotyledon.oslo_config_glue [-] monasca.client_max_retries     = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.131 2 DEBUG cotyledon.oslo_config_glue [-] monasca.client_retry_interval  = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.131 2 DEBUG cotyledon.oslo_config_glue [-] monasca.clientapi_version      = 2_0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.131 2 DEBUG cotyledon.oslo_config_glue [-] monasca.cloud_name             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.132 2 DEBUG cotyledon.oslo_config_glue [-] monasca.cluster                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.132 2 DEBUG cotyledon.oslo_config_glue [-] monasca.collect_timing         = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.132 2 DEBUG cotyledon.oslo_config_glue [-] monasca.control_plane          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.132 2 DEBUG cotyledon.oslo_config_glue [-] monasca.enable_api_pagination  = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.132 2 DEBUG cotyledon.oslo_config_glue [-] monasca.insecure               = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.132 2 DEBUG cotyledon.oslo_config_glue [-] monasca.interface              = internal log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.132 2 DEBUG cotyledon.oslo_config_glue [-] monasca.keyfile                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.132 2 DEBUG cotyledon.oslo_config_glue [-] monasca.monasca_mappings       = /etc/ceilometer/monasca_field_definitions.yaml log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.132 2 DEBUG cotyledon.oslo_config_glue [-] monasca.region_name            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.132 2 DEBUG cotyledon.oslo_config_glue [-] monasca.retry_on_failure       = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.132 2 DEBUG cotyledon.oslo_config_glue [-] monasca.split_loggers          = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.133 2 DEBUG cotyledon.oslo_config_glue [-] monasca.timeout                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.133 2 DEBUG cotyledon.oslo_config_glue [-] notification.ack_on_event_error = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.133 2 DEBUG cotyledon.oslo_config_glue [-] notification.batch_size        = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.133 2 DEBUG cotyledon.oslo_config_glue [-] notification.batch_timeout     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.133 2 DEBUG cotyledon.oslo_config_glue [-] notification.messaging_urls    = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.133 2 DEBUG cotyledon.oslo_config_glue [-] notification.notification_control_exchanges = ['nova', 'glance', 'neutron', 'cinder', 'heat', 'keystone', 'sahara', 'trove', 'zaqar', 'swift', 'ceilometer', 'magnum', 'dns', 'ironic', 'aodh'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.133 2 DEBUG cotyledon.oslo_config_glue [-] notification.pipelines         = ['meter', 'event'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.133 2 DEBUG cotyledon.oslo_config_glue [-] notification.workers           = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.133 2 DEBUG cotyledon.oslo_config_glue [-] polling.batch_size             = 50 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.133 2 DEBUG cotyledon.oslo_config_glue [-] polling.cfg_file               = polling.yaml log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.133 2 DEBUG cotyledon.oslo_config_glue [-] polling.partitioning_group_prefix = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.134 2 DEBUG cotyledon.oslo_config_glue [-] polling.pollsters_definitions_dirs = ['/etc/ceilometer/pollsters.d'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.134 2 DEBUG cotyledon.oslo_config_glue [-] polling.tenant_name_discovery  = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.134 2 DEBUG cotyledon.oslo_config_glue [-] publisher.telemetry_secret     = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.134 2 DEBUG cotyledon.oslo_config_glue [-] publisher_notifier.event_topic = event log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.134 2 DEBUG cotyledon.oslo_config_glue [-] publisher_notifier.metering_topic = metering log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.134 2 DEBUG cotyledon.oslo_config_glue [-] publisher_notifier.telemetry_driver = messagingv2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.134 2 DEBUG cotyledon.oslo_config_glue [-] rgw_admin_credentials.access_key = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.134 2 DEBUG cotyledon.oslo_config_glue [-] rgw_admin_credentials.secret_key = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.134 2 DEBUG cotyledon.oslo_config_glue [-] rgw_client.implicit_tenants    = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.135 2 DEBUG cotyledon.oslo_config_glue [-] service_types.cinder           = volumev3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.135 2 DEBUG cotyledon.oslo_config_glue [-] service_types.glance           = image log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.135 2 DEBUG cotyledon.oslo_config_glue [-] service_types.neutron          = network log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.135 2 DEBUG cotyledon.oslo_config_glue [-] service_types.nova             = compute log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.135 2 DEBUG cotyledon.oslo_config_glue [-] service_types.radosgw          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.135 2 DEBUG cotyledon.oslo_config_glue [-] service_types.swift            = object-store log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.135 2 DEBUG cotyledon.oslo_config_glue [-] vmware.api_retry_count         = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.135 2 DEBUG cotyledon.oslo_config_glue [-] vmware.ca_file                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.135 2 DEBUG cotyledon.oslo_config_glue [-] vmware.host_ip                 = 127.0.0.1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.135 2 DEBUG cotyledon.oslo_config_glue [-] vmware.host_password           = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.135 2 DEBUG cotyledon.oslo_config_glue [-] vmware.host_port               = 443 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.136 2 DEBUG cotyledon.oslo_config_glue [-] vmware.host_username           =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.136 2 DEBUG cotyledon.oslo_config_glue [-] vmware.insecure                = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.136 2 DEBUG cotyledon.oslo_config_glue [-] vmware.task_poll_interval      = 0.5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.136 2 DEBUG cotyledon.oslo_config_glue [-] vmware.wsdl_location           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.136 2 DEBUG cotyledon.oslo_config_glue [-] service_credentials.auth_section = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.136 2 DEBUG cotyledon.oslo_config_glue [-] service_credentials.auth_type  = password log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.136 2 DEBUG cotyledon.oslo_config_glue [-] service_credentials.cafile     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.136 2 DEBUG cotyledon.oslo_config_glue [-] service_credentials.certfile   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.136 2 DEBUG cotyledon.oslo_config_glue [-] service_credentials.collect_timing = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.136 2 DEBUG cotyledon.oslo_config_glue [-] service_credentials.insecure   = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.136 2 DEBUG cotyledon.oslo_config_glue [-] service_credentials.interface  = internalURL log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.137 2 DEBUG cotyledon.oslo_config_glue [-] service_credentials.keyfile    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.137 2 DEBUG cotyledon.oslo_config_glue [-] service_credentials.region_name = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.137 2 DEBUG cotyledon.oslo_config_glue [-] service_credentials.split_loggers = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.137 2 DEBUG cotyledon.oslo_config_glue [-] service_credentials.timeout    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.137 2 DEBUG cotyledon.oslo_config_glue [-] gnocchi.auth_section           = service_credentials log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.137 2 DEBUG cotyledon.oslo_config_glue [-] gnocchi.auth_type              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.137 2 DEBUG cotyledon.oslo_config_glue [-] gnocchi.cafile                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.137 2 DEBUG cotyledon.oslo_config_glue [-] gnocchi.certfile               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.137 2 DEBUG cotyledon.oslo_config_glue [-] gnocchi.collect_timing         = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.137 2 DEBUG cotyledon.oslo_config_glue [-] gnocchi.insecure               = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.137 2 DEBUG cotyledon.oslo_config_glue [-] gnocchi.interface              = internal log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.137 2 DEBUG cotyledon.oslo_config_glue [-] gnocchi.keyfile                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.138 2 DEBUG cotyledon.oslo_config_glue [-] gnocchi.region_name            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.138 2 DEBUG cotyledon.oslo_config_glue [-] gnocchi.split_loggers          = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.138 2 DEBUG cotyledon.oslo_config_glue [-] gnocchi.timeout                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.138 2 DEBUG cotyledon.oslo_config_glue [-] zaqar.auth_section             = service_credentials log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.138 2 DEBUG cotyledon.oslo_config_glue [-] zaqar.auth_type                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.138 2 DEBUG cotyledon.oslo_config_glue [-] zaqar.cafile                   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.138 2 DEBUG cotyledon.oslo_config_glue [-] zaqar.certfile                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.138 2 DEBUG cotyledon.oslo_config_glue [-] zaqar.collect_timing           = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.138 2 DEBUG cotyledon.oslo_config_glue [-] zaqar.insecure                 = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.138 2 DEBUG cotyledon.oslo_config_glue [-] zaqar.interface                = internal log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.138 2 DEBUG cotyledon.oslo_config_glue [-] zaqar.keyfile                  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.139 2 DEBUG cotyledon.oslo_config_glue [-] zaqar.region_name              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.139 2 DEBUG cotyledon.oslo_config_glue [-] zaqar.split_loggers            = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.139 2 DEBUG cotyledon.oslo_config_glue [-] zaqar.timeout                  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.139 2 DEBUG cotyledon.oslo_config_glue [-] ******************************************************************************** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2613
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.157 12 INFO ceilometer.polling.manager [-] Looking for dynamic pollsters configurations at [['/etc/ceilometer/pollsters.d']].
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.159 12 INFO ceilometer.polling.manager [-] No dynamic pollsters found in folder [/etc/ceilometer/pollsters.d].
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.160 12 INFO ceilometer.polling.manager [-] No dynamic pollsters file found in dirs [['/etc/ceilometer/pollsters.d']].
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.243 12 DEBUG ceilometer.compute.virt.libvirt.utils [-] Connecting to libvirt: qemu:///system new_libvirt_connection /usr/lib/python3.9/site-packages/ceilometer/compute/virt/libvirt/utils.py:93
Oct 02 11:55:14 compute-0 python3.9[202815]: ansible-ansible.builtin.systemd Invoked with name=edpm_ceilometer_agent_compute.service state=restarted daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Oct 02 11:55:14 compute-0 systemd[1]: Stopping ceilometer_agent_compute container...
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.316 12 DEBUG cotyledon.oslo_config_glue [-] Full set of CONF: _load_service_options /usr/lib/python3.9/site-packages/cotyledon/oslo_config_glue.py:48
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.316 12 DEBUG cotyledon.oslo_config_glue [-] ******************************************************************************** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2589
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.316 12 DEBUG cotyledon.oslo_config_glue [-] Configuration options gathered from: log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2590
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.316 12 DEBUG cotyledon.oslo_config_glue [-] command line args: ['--polling-namespaces', 'compute', '--logfile', '/dev/stdout'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2591
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.317 12 DEBUG cotyledon.oslo_config_glue [-] config files: ['/etc/ceilometer/ceilometer.conf'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2592
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.317 12 DEBUG cotyledon.oslo_config_glue [-] ================================================================================ log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2594
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.317 12 DEBUG cotyledon.oslo_config_glue [-] batch_size                     = 50 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.317 12 DEBUG cotyledon.oslo_config_glue [-] cfg_file                       = polling.yaml log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.317 12 DEBUG cotyledon.oslo_config_glue [-] config_dir                     = ['/etc/ceilometer/ceilometer.conf.d'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.317 12 DEBUG cotyledon.oslo_config_glue [-] config_file                    = ['/etc/ceilometer/ceilometer.conf'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.317 12 DEBUG cotyledon.oslo_config_glue [-] config_source                  = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.317 12 DEBUG cotyledon.oslo_config_glue [-] control_exchange               = ceilometer log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.317 12 DEBUG cotyledon.oslo_config_glue [-] debug                          = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.317 12 DEBUG cotyledon.oslo_config_glue [-] default_log_levels             = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'futurist=INFO', 'neutronclient=INFO', 'keystoneclient=INFO'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.318 12 DEBUG cotyledon.oslo_config_glue [-] event_pipeline_cfg_file        = event_pipeline.yaml log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.318 12 DEBUG cotyledon.oslo_config_glue [-] graceful_shutdown_timeout      = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.318 12 DEBUG cotyledon.oslo_config_glue [-] host                           = compute-0.ctlplane.example.com log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.318 12 DEBUG cotyledon.oslo_config_glue [-] http_timeout                   = 600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.318 12 DEBUG cotyledon.oslo_config_glue [-] hypervisor_inspector           = libvirt log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.318 12 DEBUG cotyledon.oslo_config_glue [-] instance_format                = [instance: %(uuid)s]  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.318 12 DEBUG cotyledon.oslo_config_glue [-] instance_uuid_format           = [instance: %(uuid)s]  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.318 12 DEBUG cotyledon.oslo_config_glue [-] libvirt_type                   = kvm log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.318 12 DEBUG cotyledon.oslo_config_glue [-] libvirt_uri                    =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.318 12 DEBUG cotyledon.oslo_config_glue [-] log_config_append              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.318 12 DEBUG cotyledon.oslo_config_glue [-] log_date_format                = %Y-%m-%d %H:%M:%S log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.319 12 DEBUG cotyledon.oslo_config_glue [-] log_dir                        = /var/log/ceilometer log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.319 12 DEBUG cotyledon.oslo_config_glue [-] log_file                       = /dev/stdout log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.319 12 DEBUG cotyledon.oslo_config_glue [-] log_options                    = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.319 12 DEBUG cotyledon.oslo_config_glue [-] log_rotate_interval            = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.319 12 DEBUG cotyledon.oslo_config_glue [-] log_rotate_interval_type       = days log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.319 12 DEBUG cotyledon.oslo_config_glue [-] log_rotation_type              = none log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.319 12 DEBUG cotyledon.oslo_config_glue [-] logging_context_format_string  = %(asctime)s.%(msecs)03d %(process)d %(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(user_identity)s] %(instance)s%(message)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.319 12 DEBUG cotyledon.oslo_config_glue [-] logging_debug_format_suffix    = %(funcName)s %(pathname)s:%(lineno)d log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.319 12 DEBUG cotyledon.oslo_config_glue [-] logging_default_format_string  = %(asctime)s.%(msecs)03d %(process)d %(levelname)s %(name)s [-] %(instance)s%(message)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.319 12 DEBUG cotyledon.oslo_config_glue [-] logging_exception_prefix       = %(asctime)s.%(msecs)03d %(process)d ERROR %(name)s %(instance)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.319 12 DEBUG cotyledon.oslo_config_glue [-] logging_user_identity_format   = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.319 12 DEBUG cotyledon.oslo_config_glue [-] max_logfile_count              = 30 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.320 12 DEBUG cotyledon.oslo_config_glue [-] max_logfile_size_mb            = 200 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.320 12 DEBUG cotyledon.oslo_config_glue [-] max_parallel_requests          = 64 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.320 12 DEBUG cotyledon.oslo_config_glue [-] partitioning_group_prefix      = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.320 12 DEBUG cotyledon.oslo_config_glue [-] pipeline_cfg_file              = pipeline.yaml log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.320 12 DEBUG cotyledon.oslo_config_glue [-] polling_namespaces             = ['compute'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.320 12 DEBUG cotyledon.oslo_config_glue [-] pollsters_definitions_dirs     = ['/etc/ceilometer/pollsters.d'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.320 12 DEBUG cotyledon.oslo_config_glue [-] publish_errors                 = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.320 12 DEBUG cotyledon.oslo_config_glue [-] rate_limit_burst               = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.320 12 DEBUG cotyledon.oslo_config_glue [-] rate_limit_except_level        = CRITICAL log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.320 12 DEBUG cotyledon.oslo_config_glue [-] rate_limit_interval            = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.320 12 DEBUG cotyledon.oslo_config_glue [-] reseller_prefix                = AUTH_ log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.321 12 DEBUG cotyledon.oslo_config_glue [-] reserved_metadata_keys         = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.321 12 DEBUG cotyledon.oslo_config_glue [-] reserved_metadata_length       = 256 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.321 12 DEBUG cotyledon.oslo_config_glue [-] reserved_metadata_namespace    = ['metering.'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.321 12 DEBUG cotyledon.oslo_config_glue [-] rootwrap_config                = /etc/ceilometer/rootwrap.conf log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.321 12 DEBUG cotyledon.oslo_config_glue [-] sample_source                  = openstack log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.321 12 DEBUG cotyledon.oslo_config_glue [-] syslog_log_facility            = LOG_USER log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.321 12 DEBUG cotyledon.oslo_config_glue [-] tenant_name_discovery          = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.321 12 DEBUG cotyledon.oslo_config_glue [-] transport_url                  = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.321 12 DEBUG cotyledon.oslo_config_glue [-] use_eventlog                   = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.321 12 DEBUG cotyledon.oslo_config_glue [-] use_journal                    = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.321 12 DEBUG cotyledon.oslo_config_glue [-] use_json                       = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.321 12 DEBUG cotyledon.oslo_config_glue [-] use_stderr                     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.322 12 DEBUG cotyledon.oslo_config_glue [-] use_syslog                     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.322 12 DEBUG cotyledon.oslo_config_glue [-] watch_log_file                 = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.322 12 DEBUG cotyledon.oslo_config_glue [-] compute.instance_discovery_method = libvirt_metadata log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.322 12 DEBUG cotyledon.oslo_config_glue [-] compute.resource_cache_expiry  = 3600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.322 12 DEBUG cotyledon.oslo_config_glue [-] compute.resource_update_interval = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.322 12 DEBUG cotyledon.oslo_config_glue [-] coordination.backend_url       = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.322 12 DEBUG cotyledon.oslo_config_glue [-] event.definitions_cfg_file     = event_definitions.yaml log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.322 12 DEBUG cotyledon.oslo_config_glue [-] event.drop_unmatched_notifications = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.322 12 DEBUG cotyledon.oslo_config_glue [-] event.store_raw                = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.322 12 DEBUG cotyledon.oslo_config_glue [-] ipmi.node_manager_init_retry   = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.322 12 DEBUG cotyledon.oslo_config_glue [-] ipmi.polling_retry             = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.323 12 DEBUG cotyledon.oslo_config_glue [-] meter.meter_definitions_dirs   = ['/etc/ceilometer/meters.d', '/usr/lib/python3.9/site-packages/ceilometer/data/meters.d'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.323 12 DEBUG cotyledon.oslo_config_glue [-] monasca.archive_on_failure     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.323 12 DEBUG cotyledon.oslo_config_glue [-] monasca.archive_path           = mon_pub_failures.txt log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.323 12 DEBUG cotyledon.oslo_config_glue [-] monasca.auth_section           = service_credentials log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.323 12 DEBUG cotyledon.oslo_config_glue [-] monasca.auth_type              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.323 12 DEBUG cotyledon.oslo_config_glue [-] monasca.batch_count            = 1000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.323 12 DEBUG cotyledon.oslo_config_glue [-] monasca.batch_max_retries      = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.323 12 DEBUG cotyledon.oslo_config_glue [-] monasca.batch_mode             = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.323 12 DEBUG cotyledon.oslo_config_glue [-] monasca.batch_polling_interval = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.323 12 DEBUG cotyledon.oslo_config_glue [-] monasca.batch_timeout          = 15 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.323 12 DEBUG cotyledon.oslo_config_glue [-] monasca.cafile                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.323 12 DEBUG cotyledon.oslo_config_glue [-] monasca.certfile               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.324 12 DEBUG cotyledon.oslo_config_glue [-] monasca.client_max_retries     = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.324 12 DEBUG cotyledon.oslo_config_glue [-] monasca.client_retry_interval  = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.324 12 DEBUG cotyledon.oslo_config_glue [-] monasca.clientapi_version      = 2_0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.324 12 DEBUG cotyledon.oslo_config_glue [-] monasca.cloud_name             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.324 12 DEBUG cotyledon.oslo_config_glue [-] monasca.cluster                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.324 12 DEBUG cotyledon.oslo_config_glue [-] monasca.collect_timing         = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.324 12 DEBUG cotyledon.oslo_config_glue [-] monasca.control_plane          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.324 12 DEBUG cotyledon.oslo_config_glue [-] monasca.enable_api_pagination  = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.324 12 DEBUG cotyledon.oslo_config_glue [-] monasca.insecure               = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.324 12 DEBUG cotyledon.oslo_config_glue [-] monasca.interface              = internal log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.325 12 DEBUG cotyledon.oslo_config_glue [-] monasca.keyfile                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.325 12 DEBUG cotyledon.oslo_config_glue [-] monasca.monasca_mappings       = /etc/ceilometer/monasca_field_definitions.yaml log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.325 12 DEBUG cotyledon.oslo_config_glue [-] monasca.region_name            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.325 12 DEBUG cotyledon.oslo_config_glue [-] monasca.retry_on_failure       = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.325 12 DEBUG cotyledon.oslo_config_glue [-] monasca.split_loggers          = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.325 12 DEBUG cotyledon.oslo_config_glue [-] monasca.timeout                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.325 12 DEBUG cotyledon.oslo_config_glue [-] notification.ack_on_event_error = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.325 12 DEBUG cotyledon.oslo_config_glue [-] notification.batch_size        = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.325 12 DEBUG cotyledon.oslo_config_glue [-] notification.batch_timeout     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.325 12 DEBUG cotyledon.oslo_config_glue [-] notification.messaging_urls    = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.325 12 DEBUG cotyledon.oslo_config_glue [-] notification.notification_control_exchanges = ['nova', 'glance', 'neutron', 'cinder', 'heat', 'keystone', 'sahara', 'trove', 'zaqar', 'swift', 'ceilometer', 'magnum', 'dns', 'ironic', 'aodh'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.326 12 DEBUG cotyledon.oslo_config_glue [-] notification.pipelines         = ['meter', 'event'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.326 12 DEBUG cotyledon.oslo_config_glue [-] notification.workers           = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.326 12 DEBUG cotyledon.oslo_config_glue [-] polling.batch_size             = 50 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.326 12 DEBUG cotyledon.oslo_config_glue [-] polling.cfg_file               = polling.yaml log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.326 12 DEBUG cotyledon.oslo_config_glue [-] polling.partitioning_group_prefix = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.326 12 DEBUG cotyledon.oslo_config_glue [-] polling.pollsters_definitions_dirs = ['/etc/ceilometer/pollsters.d'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.326 12 DEBUG cotyledon.oslo_config_glue [-] polling.tenant_name_discovery  = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.326 12 DEBUG cotyledon.oslo_config_glue [-] publisher.telemetry_secret     = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.326 12 DEBUG cotyledon.oslo_config_glue [-] publisher_notifier.event_topic = event log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.326 12 DEBUG cotyledon.oslo_config_glue [-] publisher_notifier.metering_topic = metering log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.326 12 DEBUG cotyledon.oslo_config_glue [-] publisher_notifier.telemetry_driver = messagingv2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.327 12 DEBUG cotyledon.oslo_config_glue [-] rgw_admin_credentials.access_key = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.327 12 DEBUG cotyledon.oslo_config_glue [-] rgw_admin_credentials.secret_key = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.327 12 DEBUG cotyledon.oslo_config_glue [-] rgw_client.implicit_tenants    = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.327 12 DEBUG cotyledon.oslo_config_glue [-] service_types.cinder           = volumev3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.327 12 DEBUG cotyledon.oslo_config_glue [-] service_types.glance           = image log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.327 12 DEBUG cotyledon.oslo_config_glue [-] service_types.neutron          = network log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.327 12 DEBUG cotyledon.oslo_config_glue [-] service_types.nova             = compute log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.327 12 DEBUG cotyledon.oslo_config_glue [-] service_types.radosgw          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.327 12 DEBUG cotyledon.oslo_config_glue [-] service_types.swift            = object-store log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.327 12 DEBUG cotyledon.oslo_config_glue [-] vmware.api_retry_count         = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.328 12 DEBUG cotyledon.oslo_config_glue [-] vmware.ca_file                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.328 12 DEBUG cotyledon.oslo_config_glue [-] vmware.host_ip                 = 127.0.0.1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.328 12 DEBUG cotyledon.oslo_config_glue [-] vmware.host_password           = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.328 12 DEBUG cotyledon.oslo_config_glue [-] vmware.host_port               = 443 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.328 12 DEBUG cotyledon.oslo_config_glue [-] vmware.host_username           =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.328 12 DEBUG cotyledon.oslo_config_glue [-] vmware.insecure                = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.328 12 DEBUG cotyledon.oslo_config_glue [-] vmware.task_poll_interval      = 0.5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.328 12 DEBUG cotyledon.oslo_config_glue [-] vmware.wsdl_location           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.328 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.auth_section = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.328 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.auth_type  = password log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.329 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.auth_url   = https://keystone-internal.openstack.svc:5000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.329 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.cafile     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.329 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.certfile   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.329 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.collect_timing = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.329 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.default_domain_id = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.329 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.default_domain_name = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.329 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.domain_id  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.329 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.domain_name = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.329 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.insecure   = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.329 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.interface  = internalURL log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.329 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.keyfile    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.330 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.password   = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.330 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.project_domain_id = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.330 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.project_domain_name = Default log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.330 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.project_id = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.330 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.project_name = service log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.330 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.region_name = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.330 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.split_loggers = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.330 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.system_scope = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.330 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.timeout    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.330 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.trust_id   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.331 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.user_domain_id = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.331 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.user_domain_name = Default log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.331 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.user_id    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.331 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.username   = ceilometer log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.331 12 DEBUG cotyledon.oslo_config_glue [-] gnocchi.auth_section           = service_credentials log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.331 12 DEBUG cotyledon.oslo_config_glue [-] gnocchi.auth_type              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.331 12 DEBUG cotyledon.oslo_config_glue [-] gnocchi.cafile                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.331 12 DEBUG cotyledon.oslo_config_glue [-] gnocchi.certfile               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.331 12 DEBUG cotyledon.oslo_config_glue [-] gnocchi.collect_timing         = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.331 12 DEBUG cotyledon.oslo_config_glue [-] gnocchi.insecure               = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.331 12 DEBUG cotyledon.oslo_config_glue [-] gnocchi.interface              = internal log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.332 12 DEBUG cotyledon.oslo_config_glue [-] gnocchi.keyfile                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.332 12 DEBUG cotyledon.oslo_config_glue [-] gnocchi.region_name            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.332 12 DEBUG cotyledon.oslo_config_glue [-] gnocchi.split_loggers          = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.332 12 DEBUG cotyledon.oslo_config_glue [-] gnocchi.timeout                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.332 12 DEBUG cotyledon.oslo_config_glue [-] zaqar.auth_section             = service_credentials log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.332 12 DEBUG cotyledon.oslo_config_glue [-] zaqar.auth_type                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.332 12 DEBUG cotyledon.oslo_config_glue [-] zaqar.cafile                   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.332 12 DEBUG cotyledon.oslo_config_glue [-] zaqar.certfile                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.332 12 DEBUG cotyledon.oslo_config_glue [-] zaqar.collect_timing           = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.332 12 DEBUG cotyledon.oslo_config_glue [-] zaqar.insecure                 = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.332 12 DEBUG cotyledon.oslo_config_glue [-] zaqar.interface                = internal log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.333 12 DEBUG cotyledon.oslo_config_glue [-] zaqar.keyfile                  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.333 12 DEBUG cotyledon.oslo_config_glue [-] zaqar.region_name              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.333 12 DEBUG cotyledon.oslo_config_glue [-] zaqar.split_loggers            = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.333 12 DEBUG cotyledon.oslo_config_glue [-] zaqar.timeout                  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.333 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_notifications.driver = ['noop'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.333 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_notifications.retry = -1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.333 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_notifications.topics = ['notifications'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.333 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_notifications.transport_url = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.333 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.amqp_auto_delete = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.333 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.amqp_durable_queues = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.333 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.conn_pool_min_size = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.333 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.conn_pool_ttl = 1200 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.334 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.direct_mandatory_flag = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.334 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.enable_cancel_on_failover = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.334 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.heartbeat_in_pthread = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.334 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.heartbeat_rate = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.334 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.334 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.kombu_compression = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.334 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.kombu_failover_strategy = round-robin log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.334 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.334 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.334 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.rabbit_ha_queues = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.334 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.rabbit_interval_max = 30 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.335 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.335 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 100 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.335 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.335 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.335 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.335 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.rabbit_quorum_queue = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.335 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.rabbit_retry_backoff = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.335 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.rabbit_retry_interval = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.335 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.335 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.rpc_conn_pool_size = 30 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.336 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.ssl      = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.336 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.ssl_ca_file =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.336 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.ssl_cert_file =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.336 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.ssl_enforce_fips_mode = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.336 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.ssl_key_file =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.336 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.ssl_version =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.336 12 DEBUG cotyledon.oslo_config_glue [-] ******************************************************************************** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2613
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.336 12 DEBUG cotyledon._service [-] Run service AgentManager(0) [12] wait_forever /usr/lib/python3.9/site-packages/cotyledon/_service.py:241
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.339 12 DEBUG ceilometer.agent [-] Config file: {'sources': [{'name': 'pollsters', 'interval': 120, 'meters': ['power.state', 'cpu', 'memory.usage', 'disk.*', 'network.*']}]} load_config /usr/lib/python3.9/site-packages/ceilometer/agent.py:64
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.346 12 DEBUG ceilometer.compute.virt.libvirt.utils [-] Connecting to libvirt: qemu:///system new_libvirt_connection /usr/lib/python3.9/site-packages/ceilometer/compute/virt/libvirt/utils.py:93
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.352 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.capacity, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.352 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.352 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.352 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.353 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.353 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.allocation, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.353 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.353 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.353 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.353 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.353 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.iops, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.353 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.353 12 DEBUG ceilometer.polling.manager [-] Skip pollster memory.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.353 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.353 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.353 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.353 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.353 12 DEBUG ceilometer.polling.manager [-] Skip pollster cpu, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.354 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.354 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.354 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.354 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.354 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.354 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.354 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.460 2 INFO cotyledon._service_manager [-] Caught SIGTERM signal, graceful exiting of master process
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.561 2 DEBUG cotyledon._service_manager [-] Killing services with signal SIGTERM _shutdown /usr/lib/python3.9/site-packages/cotyledon/_service_manager.py:304
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.561 2 DEBUG cotyledon._service_manager [-] Waiting services to terminate _shutdown /usr/lib/python3.9/site-packages/cotyledon/_service_manager.py:308
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.561 12 INFO cotyledon._service [-] Caught SIGTERM signal, graceful exiting of service AgentManager(0) [12]
Oct 02 11:55:14 compute-0 ceilometer_agent_compute[202632]: 2025-10-02 11:55:14.569 2 DEBUG cotyledon._service_manager [-] Shutdown finish _shutdown /usr/lib/python3.9/site-packages/cotyledon/_service_manager.py:320
Oct 02 11:55:14 compute-0 virtqemud[191807]: End of file while reading data: Input/output error
Oct 02 11:55:14 compute-0 virtqemud[191807]: End of file while reading data: Input/output error
Oct 02 11:55:14 compute-0 systemd[1]: libpod-48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be.scope: Deactivated successfully.
Oct 02 11:55:14 compute-0 systemd[1]: libpod-48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be.scope: Consumed 1.332s CPU time.
Oct 02 11:55:14 compute-0 podman[202822]: 2025-10-02 11:55:14.736555949 +0000 UTC m=+0.407919661 container died 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ceilometer_agent_compute, org.label-schema.build-date=20251001, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, 
managed_by=edpm_ansible)
Oct 02 11:55:14 compute-0 systemd[1]: 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be-2ce81449831fb250.timer: Deactivated successfully.
Oct 02 11:55:14 compute-0 systemd[1]: Stopped /usr/bin/podman healthcheck run 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be.
Oct 02 11:55:15 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be-userdata-shm.mount: Deactivated successfully.
Oct 02 11:55:15 compute-0 systemd[1]: var-lib-containers-storage-overlay-db1a43e7f9bde62cb3b423268535817a444f1d68c2ac7ee32e080fad800a17bc-merged.mount: Deactivated successfully.
Oct 02 11:55:15 compute-0 podman[202822]: 2025-10-02 11:55:15.315648447 +0000 UTC m=+0.987012129 container cleanup 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=edpm, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, container_name=ceilometer_agent_compute, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, tcib_managed=true, managed_by=edpm_ansible, org.label-schema.build-date=20251001, 
org.label-schema.license=GPLv2)
Oct 02 11:55:15 compute-0 podman[202822]: ceilometer_agent_compute
Oct 02 11:55:15 compute-0 podman[202854]: ceilometer_agent_compute
Oct 02 11:55:15 compute-0 systemd[1]: edpm_ceilometer_agent_compute.service: Deactivated successfully.
Oct 02 11:55:15 compute-0 systemd[1]: Stopped ceilometer_agent_compute container.
Oct 02 11:55:15 compute-0 systemd[1]: Starting ceilometer_agent_compute container...
Oct 02 11:55:15 compute-0 systemd[1]: Started libcrun container.
Oct 02 11:55:15 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/db1a43e7f9bde62cb3b423268535817a444f1d68c2ac7ee32e080fad800a17bc/merged/etc/ceilometer/ceilometer_prom_exporter.yaml supports timestamps until 2038 (0x7fffffff)
Oct 02 11:55:15 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/db1a43e7f9bde62cb3b423268535817a444f1d68c2ac7ee32e080fad800a17bc/merged/etc/ceilometer/tls supports timestamps until 2038 (0x7fffffff)
Oct 02 11:55:15 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/db1a43e7f9bde62cb3b423268535817a444f1d68c2ac7ee32e080fad800a17bc/merged/var/lib/openstack/config supports timestamps until 2038 (0x7fffffff)
Oct 02 11:55:15 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/db1a43e7f9bde62cb3b423268535817a444f1d68c2ac7ee32e080fad800a17bc/merged/var/lib/kolla/config_files/config.json supports timestamps until 2038 (0x7fffffff)
Oct 02 11:55:15 compute-0 systemd[1]: Started /usr/bin/podman healthcheck run 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be.
Oct 02 11:55:15 compute-0 podman[202865]: 2025-10-02 11:55:15.865741876 +0000 UTC m=+0.424592842 container init 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, org.label-schema.build-date=20251001, tcib_managed=true, config_id=edpm, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ceilometer_agent_compute, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, 
io.buildah.version=1.41.3)
Oct 02 11:55:15 compute-0 ceilometer_agent_compute[202881]: + sudo -E kolla_set_configs
Oct 02 11:55:15 compute-0 podman[202865]: 2025-10-02 11:55:15.896245568 +0000 UTC m=+0.455096574 container start 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, container_name=ceilometer_agent_compute, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_managed=true, config_id=edpm, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 
9 Base Image)
Oct 02 11:55:15 compute-0 sudo[202887]: ceilometer : PWD=/ ; USER=root ; COMMAND=/usr/local/bin/kolla_set_configs
Oct 02 11:55:15 compute-0 sudo[202887]: pam_systemd(sudo:session): Failed to connect to system bus: No such file or directory
Oct 02 11:55:15 compute-0 sudo[202887]: pam_unix(sudo:session): session opened for user root(uid=0) by (uid=42405)
Oct 02 11:55:15 compute-0 ceilometer_agent_compute[202881]: sudo: unable to send audit message: Operation not permitted
Oct 02 11:55:15 compute-0 ceilometer_agent_compute[202881]: INFO:__main__:Loading config file at /var/lib/kolla/config_files/config.json
Oct 02 11:55:15 compute-0 ceilometer_agent_compute[202881]: INFO:__main__:Validating config file
Oct 02 11:55:15 compute-0 ceilometer_agent_compute[202881]: INFO:__main__:Kolla config strategy set to: COPY_ALWAYS
Oct 02 11:55:15 compute-0 ceilometer_agent_compute[202881]: INFO:__main__:Copying service configuration files
Oct 02 11:55:15 compute-0 ceilometer_agent_compute[202881]: INFO:__main__:Deleting /etc/ceilometer/ceilometer.conf
Oct 02 11:55:15 compute-0 ceilometer_agent_compute[202881]: INFO:__main__:Copying /var/lib/openstack/config/ceilometer.conf to /etc/ceilometer/ceilometer.conf
Oct 02 11:55:15 compute-0 ceilometer_agent_compute[202881]: INFO:__main__:Setting permission for /etc/ceilometer/ceilometer.conf
Oct 02 11:55:15 compute-0 ceilometer_agent_compute[202881]: INFO:__main__:Deleting /etc/ceilometer/polling.yaml
Oct 02 11:55:15 compute-0 ceilometer_agent_compute[202881]: INFO:__main__:Copying /var/lib/openstack/config/polling.yaml to /etc/ceilometer/polling.yaml
Oct 02 11:55:15 compute-0 ceilometer_agent_compute[202881]: INFO:__main__:Setting permission for /etc/ceilometer/polling.yaml
Oct 02 11:55:15 compute-0 ceilometer_agent_compute[202881]: INFO:__main__:Deleting /etc/ceilometer/ceilometer.conf.d/01-ceilometer-custom.conf
Oct 02 11:55:15 compute-0 ceilometer_agent_compute[202881]: INFO:__main__:Copying /var/lib/openstack/config/custom.conf to /etc/ceilometer/ceilometer.conf.d/01-ceilometer-custom.conf
Oct 02 11:55:15 compute-0 ceilometer_agent_compute[202881]: INFO:__main__:Setting permission for /etc/ceilometer/ceilometer.conf.d/01-ceilometer-custom.conf
Oct 02 11:55:15 compute-0 ceilometer_agent_compute[202881]: INFO:__main__:Deleting /etc/ceilometer/ceilometer.conf.d/02-ceilometer-host-specific.conf
Oct 02 11:55:15 compute-0 ceilometer_agent_compute[202881]: INFO:__main__:Copying /var/lib/openstack/config/ceilometer-host-specific.conf to /etc/ceilometer/ceilometer.conf.d/02-ceilometer-host-specific.conf
Oct 02 11:55:15 compute-0 ceilometer_agent_compute[202881]: INFO:__main__:Setting permission for /etc/ceilometer/ceilometer.conf.d/02-ceilometer-host-specific.conf
Oct 02 11:55:15 compute-0 ceilometer_agent_compute[202881]: INFO:__main__:Writing out command to execute
Oct 02 11:55:15 compute-0 sudo[202887]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:15 compute-0 ceilometer_agent_compute[202881]: ++ cat /run_command
Oct 02 11:55:15 compute-0 ceilometer_agent_compute[202881]: + CMD='/usr/bin/ceilometer-polling --polling-namespaces compute --logfile /dev/stdout'
Oct 02 11:55:15 compute-0 ceilometer_agent_compute[202881]: + ARGS=
Oct 02 11:55:15 compute-0 ceilometer_agent_compute[202881]: + sudo kolla_copy_cacerts
Oct 02 11:55:15 compute-0 podman[202865]: ceilometer_agent_compute
Oct 02 11:55:15 compute-0 systemd[1]: Started ceilometer_agent_compute container.
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: sudo: unable to send audit message: Operation not permitted
Oct 02 11:55:16 compute-0 sudo[202903]: ceilometer : PWD=/ ; USER=root ; COMMAND=/usr/local/bin/kolla_copy_cacerts
Oct 02 11:55:16 compute-0 sudo[202903]: pam_systemd(sudo:session): Failed to connect to system bus: No such file or directory
Oct 02 11:55:16 compute-0 sudo[202903]: pam_unix(sudo:session): session opened for user root(uid=0) by (uid=42405)
Oct 02 11:55:16 compute-0 sudo[202903]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: + [[ ! -n '' ]]
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: + . kolla_extend_start
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: Running command: '/usr/bin/ceilometer-polling --polling-namespaces compute --logfile /dev/stdout'
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: + echo 'Running command: '\''/usr/bin/ceilometer-polling --polling-namespaces compute --logfile /dev/stdout'\'''
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: + umask 0022
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: + exec /usr/bin/ceilometer-polling --polling-namespaces compute --logfile /dev/stdout
Oct 02 11:55:16 compute-0 sudo[202813]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:16 compute-0 podman[202888]: 2025-10-02 11:55:16.04469683 +0000 UTC m=+0.127273477 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=starting, health_failing_streak=1, health_log=, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, config_id=edpm, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, container_name=ceilometer_agent_compute, 
maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 11:55:16 compute-0 systemd[1]: 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be-77f5e9cb6d8164f7.service: Main process exited, code=exited, status=1/FAILURE
Oct 02 11:55:16 compute-0 systemd[1]: 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be-77f5e9cb6d8164f7.service: Failed with result 'exit-code'.
Oct 02 11:55:16 compute-0 sudo[203061]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cxdrblnpgtwtnvtkutmtmptpvepuqohd ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406116.2004488-1698-166607483929504/AnsiballZ_stat.py'
Oct 02 11:55:16 compute-0 sudo[203061]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:16 compute-0 python3.9[203063]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/healthchecks/node_exporter/healthcheck follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:55:16 compute-0 sudo[203061]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.878 2 DEBUG cotyledon.oslo_config_glue [-] Full set of CONF: _load_service_manager_options /usr/lib/python3.9/site-packages/cotyledon/oslo_config_glue.py:40
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.880 2 DEBUG cotyledon.oslo_config_glue [-] ******************************************************************************** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2589
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.880 2 DEBUG cotyledon.oslo_config_glue [-] Configuration options gathered from: log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2590
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.880 2 DEBUG cotyledon.oslo_config_glue [-] command line args: ['--polling-namespaces', 'compute', '--logfile', '/dev/stdout'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2591
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.880 2 DEBUG cotyledon.oslo_config_glue [-] config files: ['/etc/ceilometer/ceilometer.conf'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2592
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.880 2 DEBUG cotyledon.oslo_config_glue [-] ================================================================================ log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2594
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.880 2 DEBUG cotyledon.oslo_config_glue [-] batch_size                     = 50 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.881 2 DEBUG cotyledon.oslo_config_glue [-] cfg_file                       = polling.yaml log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.881 2 DEBUG cotyledon.oslo_config_glue [-] config_dir                     = ['/etc/ceilometer/ceilometer.conf.d'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.881 2 DEBUG cotyledon.oslo_config_glue [-] config_file                    = ['/etc/ceilometer/ceilometer.conf'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.881 2 DEBUG cotyledon.oslo_config_glue [-] config_source                  = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.881 2 DEBUG cotyledon.oslo_config_glue [-] debug                          = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.882 2 DEBUG cotyledon.oslo_config_glue [-] default_log_levels             = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'futurist=INFO', 'neutronclient=INFO', 'keystoneclient=INFO'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.882 2 DEBUG cotyledon.oslo_config_glue [-] event_pipeline_cfg_file        = event_pipeline.yaml log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.882 2 DEBUG cotyledon.oslo_config_glue [-] graceful_shutdown_timeout      = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.882 2 DEBUG cotyledon.oslo_config_glue [-] host                           = compute-0.ctlplane.example.com log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.882 2 DEBUG cotyledon.oslo_config_glue [-] http_timeout                   = 600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.882 2 DEBUG cotyledon.oslo_config_glue [-] hypervisor_inspector           = libvirt log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.882 2 DEBUG cotyledon.oslo_config_glue [-] instance_format                = [instance: %(uuid)s]  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.883 2 DEBUG cotyledon.oslo_config_glue [-] instance_uuid_format           = [instance: %(uuid)s]  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.883 2 DEBUG cotyledon.oslo_config_glue [-] libvirt_type                   = kvm log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.883 2 DEBUG cotyledon.oslo_config_glue [-] libvirt_uri                    =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.883 2 DEBUG cotyledon.oslo_config_glue [-] log_config_append              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.883 2 DEBUG cotyledon.oslo_config_glue [-] log_date_format                = %Y-%m-%d %H:%M:%S log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.883 2 DEBUG cotyledon.oslo_config_glue [-] log_dir                        = /var/log/ceilometer log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.883 2 DEBUG cotyledon.oslo_config_glue [-] log_file                       = /dev/stdout log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.883 2 DEBUG cotyledon.oslo_config_glue [-] log_options                    = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.884 2 DEBUG cotyledon.oslo_config_glue [-] log_rotate_interval            = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.884 2 DEBUG cotyledon.oslo_config_glue [-] log_rotate_interval_type       = days log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.884 2 DEBUG cotyledon.oslo_config_glue [-] log_rotation_type              = none log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.884 2 DEBUG cotyledon.oslo_config_glue [-] logging_context_format_string  = %(asctime)s.%(msecs)03d %(process)d %(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(user_identity)s] %(instance)s%(message)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.884 2 DEBUG cotyledon.oslo_config_glue [-] logging_debug_format_suffix    = %(funcName)s %(pathname)s:%(lineno)d log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.884 2 DEBUG cotyledon.oslo_config_glue [-] logging_default_format_string  = %(asctime)s.%(msecs)03d %(process)d %(levelname)s %(name)s [-] %(instance)s%(message)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.884 2 DEBUG cotyledon.oslo_config_glue [-] logging_exception_prefix       = %(asctime)s.%(msecs)03d %(process)d ERROR %(name)s %(instance)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.884 2 DEBUG cotyledon.oslo_config_glue [-] logging_user_identity_format   = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.885 2 DEBUG cotyledon.oslo_config_glue [-] max_logfile_count              = 30 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.885 2 DEBUG cotyledon.oslo_config_glue [-] max_logfile_size_mb            = 200 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.885 2 DEBUG cotyledon.oslo_config_glue [-] max_parallel_requests          = 64 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.885 2 DEBUG cotyledon.oslo_config_glue [-] partitioning_group_prefix      = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.885 2 DEBUG cotyledon.oslo_config_glue [-] pipeline_cfg_file              = pipeline.yaml log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.885 2 DEBUG cotyledon.oslo_config_glue [-] polling_namespaces             = ['compute'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.885 2 DEBUG cotyledon.oslo_config_glue [-] pollsters_definitions_dirs     = ['/etc/ceilometer/pollsters.d'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.886 2 DEBUG cotyledon.oslo_config_glue [-] publish_errors                 = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.886 2 DEBUG cotyledon.oslo_config_glue [-] rate_limit_burst               = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.886 2 DEBUG cotyledon.oslo_config_glue [-] rate_limit_except_level        = CRITICAL log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.886 2 DEBUG cotyledon.oslo_config_glue [-] rate_limit_interval            = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.886 2 DEBUG cotyledon.oslo_config_glue [-] reseller_prefix                = AUTH_ log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.886 2 DEBUG cotyledon.oslo_config_glue [-] reserved_metadata_keys         = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.886 2 DEBUG cotyledon.oslo_config_glue [-] reserved_metadata_length       = 256 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.886 2 DEBUG cotyledon.oslo_config_glue [-] reserved_metadata_namespace    = ['metering.'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.887 2 DEBUG cotyledon.oslo_config_glue [-] rootwrap_config                = /etc/ceilometer/rootwrap.conf log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.887 2 DEBUG cotyledon.oslo_config_glue [-] sample_source                  = openstack log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.887 2 DEBUG cotyledon.oslo_config_glue [-] syslog_log_facility            = LOG_USER log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.887 2 DEBUG cotyledon.oslo_config_glue [-] tenant_name_discovery          = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.887 2 DEBUG cotyledon.oslo_config_glue [-] use_eventlog                   = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.887 2 DEBUG cotyledon.oslo_config_glue [-] use_journal                    = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.887 2 DEBUG cotyledon.oslo_config_glue [-] use_json                       = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.887 2 DEBUG cotyledon.oslo_config_glue [-] use_stderr                     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.888 2 DEBUG cotyledon.oslo_config_glue [-] use_syslog                     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.888 2 DEBUG cotyledon.oslo_config_glue [-] watch_log_file                 = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.888 2 DEBUG cotyledon.oslo_config_glue [-] compute.instance_discovery_method = libvirt_metadata log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.888 2 DEBUG cotyledon.oslo_config_glue [-] compute.resource_cache_expiry  = 3600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.888 2 DEBUG cotyledon.oslo_config_glue [-] compute.resource_update_interval = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.888 2 DEBUG cotyledon.oslo_config_glue [-] coordination.backend_url       = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.888 2 DEBUG cotyledon.oslo_config_glue [-] event.definitions_cfg_file     = event_definitions.yaml log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.889 2 DEBUG cotyledon.oslo_config_glue [-] event.drop_unmatched_notifications = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.889 2 DEBUG cotyledon.oslo_config_glue [-] event.store_raw                = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.889 2 DEBUG cotyledon.oslo_config_glue [-] ipmi.node_manager_init_retry   = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.889 2 DEBUG cotyledon.oslo_config_glue [-] ipmi.polling_retry             = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.889 2 DEBUG cotyledon.oslo_config_glue [-] meter.meter_definitions_dirs   = ['/etc/ceilometer/meters.d', '/usr/lib/python3.9/site-packages/ceilometer/data/meters.d'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.889 2 DEBUG cotyledon.oslo_config_glue [-] monasca.archive_on_failure     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.889 2 DEBUG cotyledon.oslo_config_glue [-] monasca.archive_path           = mon_pub_failures.txt log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.890 2 DEBUG cotyledon.oslo_config_glue [-] monasca.auth_section           = service_credentials log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.890 2 DEBUG cotyledon.oslo_config_glue [-] monasca.auth_type              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.890 2 DEBUG cotyledon.oslo_config_glue [-] monasca.batch_count            = 1000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.890 2 DEBUG cotyledon.oslo_config_glue [-] monasca.batch_max_retries      = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.890 2 DEBUG cotyledon.oslo_config_glue [-] monasca.batch_mode             = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.890 2 DEBUG cotyledon.oslo_config_glue [-] monasca.batch_polling_interval = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.890 2 DEBUG cotyledon.oslo_config_glue [-] monasca.batch_timeout          = 15 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.890 2 DEBUG cotyledon.oslo_config_glue [-] monasca.cafile                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.891 2 DEBUG cotyledon.oslo_config_glue [-] monasca.certfile               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.891 2 DEBUG cotyledon.oslo_config_glue [-] monasca.client_max_retries     = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.891 2 DEBUG cotyledon.oslo_config_glue [-] monasca.client_retry_interval  = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.891 2 DEBUG cotyledon.oslo_config_glue [-] monasca.clientapi_version      = 2_0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.891 2 DEBUG cotyledon.oslo_config_glue [-] monasca.cloud_name             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.891 2 DEBUG cotyledon.oslo_config_glue [-] monasca.cluster                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.892 2 DEBUG cotyledon.oslo_config_glue [-] monasca.collect_timing         = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.892 2 DEBUG cotyledon.oslo_config_glue [-] monasca.control_plane          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.892 2 DEBUG cotyledon.oslo_config_glue [-] monasca.enable_api_pagination  = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.892 2 DEBUG cotyledon.oslo_config_glue [-] monasca.insecure               = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.892 2 DEBUG cotyledon.oslo_config_glue [-] monasca.interface              = internal log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.892 2 DEBUG cotyledon.oslo_config_glue [-] monasca.keyfile                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.892 2 DEBUG cotyledon.oslo_config_glue [-] monasca.monasca_mappings       = /etc/ceilometer/monasca_field_definitions.yaml log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.893 2 DEBUG cotyledon.oslo_config_glue [-] monasca.region_name            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.893 2 DEBUG cotyledon.oslo_config_glue [-] monasca.retry_on_failure       = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.893 2 DEBUG cotyledon.oslo_config_glue [-] monasca.split_loggers          = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.893 2 DEBUG cotyledon.oslo_config_glue [-] monasca.timeout                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.893 2 DEBUG cotyledon.oslo_config_glue [-] notification.ack_on_event_error = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.893 2 DEBUG cotyledon.oslo_config_glue [-] notification.batch_size        = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.893 2 DEBUG cotyledon.oslo_config_glue [-] notification.batch_timeout     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.894 2 DEBUG cotyledon.oslo_config_glue [-] notification.messaging_urls    = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.894 2 DEBUG cotyledon.oslo_config_glue [-] notification.notification_control_exchanges = ['nova', 'glance', 'neutron', 'cinder', 'heat', 'keystone', 'sahara', 'trove', 'zaqar', 'swift', 'ceilometer', 'magnum', 'dns', 'ironic', 'aodh'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.894 2 DEBUG cotyledon.oslo_config_glue [-] notification.pipelines         = ['meter', 'event'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.894 2 DEBUG cotyledon.oslo_config_glue [-] notification.workers           = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.894 2 DEBUG cotyledon.oslo_config_glue [-] polling.batch_size             = 50 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.894 2 DEBUG cotyledon.oslo_config_glue [-] polling.cfg_file               = polling.yaml log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.894 2 DEBUG cotyledon.oslo_config_glue [-] polling.partitioning_group_prefix = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.895 2 DEBUG cotyledon.oslo_config_glue [-] polling.pollsters_definitions_dirs = ['/etc/ceilometer/pollsters.d'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.895 2 DEBUG cotyledon.oslo_config_glue [-] polling.tenant_name_discovery  = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.895 2 DEBUG cotyledon.oslo_config_glue [-] publisher.telemetry_secret     = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.895 2 DEBUG cotyledon.oslo_config_glue [-] publisher_notifier.event_topic = event log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.895 2 DEBUG cotyledon.oslo_config_glue [-] publisher_notifier.metering_topic = metering log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.895 2 DEBUG cotyledon.oslo_config_glue [-] publisher_notifier.telemetry_driver = messagingv2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.895 2 DEBUG cotyledon.oslo_config_glue [-] rgw_admin_credentials.access_key = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.895 2 DEBUG cotyledon.oslo_config_glue [-] rgw_admin_credentials.secret_key = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.896 2 DEBUG cotyledon.oslo_config_glue [-] rgw_client.implicit_tenants    = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.896 2 DEBUG cotyledon.oslo_config_glue [-] service_types.cinder           = volumev3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.896 2 DEBUG cotyledon.oslo_config_glue [-] service_types.glance           = image log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.896 2 DEBUG cotyledon.oslo_config_glue [-] service_types.neutron          = network log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.896 2 DEBUG cotyledon.oslo_config_glue [-] service_types.nova             = compute log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.896 2 DEBUG cotyledon.oslo_config_glue [-] service_types.radosgw          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.896 2 DEBUG cotyledon.oslo_config_glue [-] service_types.swift            = object-store log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.897 2 DEBUG cotyledon.oslo_config_glue [-] vmware.api_retry_count         = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.897 2 DEBUG cotyledon.oslo_config_glue [-] vmware.ca_file                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.897 2 DEBUG cotyledon.oslo_config_glue [-] vmware.host_ip                 = 127.0.0.1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.897 2 DEBUG cotyledon.oslo_config_glue [-] vmware.host_password           = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.897 2 DEBUG cotyledon.oslo_config_glue [-] vmware.host_port               = 443 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.897 2 DEBUG cotyledon.oslo_config_glue [-] vmware.host_username           =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.897 2 DEBUG cotyledon.oslo_config_glue [-] vmware.insecure                = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.897 2 DEBUG cotyledon.oslo_config_glue [-] vmware.task_poll_interval      = 0.5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.898 2 DEBUG cotyledon.oslo_config_glue [-] vmware.wsdl_location           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.898 2 DEBUG cotyledon.oslo_config_glue [-] service_credentials.auth_section = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.898 2 DEBUG cotyledon.oslo_config_glue [-] service_credentials.auth_type  = password log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.898 2 DEBUG cotyledon.oslo_config_glue [-] service_credentials.cafile     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.898 2 DEBUG cotyledon.oslo_config_glue [-] service_credentials.certfile   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.898 2 DEBUG cotyledon.oslo_config_glue [-] service_credentials.collect_timing = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.899 2 DEBUG cotyledon.oslo_config_glue [-] service_credentials.insecure   = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.899 2 DEBUG cotyledon.oslo_config_glue [-] service_credentials.interface  = internalURL log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.899 2 DEBUG cotyledon.oslo_config_glue [-] service_credentials.keyfile    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.899 2 DEBUG cotyledon.oslo_config_glue [-] service_credentials.region_name = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.899 2 DEBUG cotyledon.oslo_config_glue [-] service_credentials.split_loggers = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.899 2 DEBUG cotyledon.oslo_config_glue [-] service_credentials.timeout    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.899 2 DEBUG cotyledon.oslo_config_glue [-] gnocchi.auth_section           = service_credentials log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.900 2 DEBUG cotyledon.oslo_config_glue [-] gnocchi.auth_type              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.900 2 DEBUG cotyledon.oslo_config_glue [-] gnocchi.cafile                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.900 2 DEBUG cotyledon.oslo_config_glue [-] gnocchi.certfile               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.900 2 DEBUG cotyledon.oslo_config_glue [-] gnocchi.collect_timing         = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.900 2 DEBUG cotyledon.oslo_config_glue [-] gnocchi.insecure               = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.900 2 DEBUG cotyledon.oslo_config_glue [-] gnocchi.interface              = internal log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.900 2 DEBUG cotyledon.oslo_config_glue [-] gnocchi.keyfile                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.901 2 DEBUG cotyledon.oslo_config_glue [-] gnocchi.region_name            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.901 2 DEBUG cotyledon.oslo_config_glue [-] gnocchi.split_loggers          = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.901 2 DEBUG cotyledon.oslo_config_glue [-] gnocchi.timeout                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.901 2 DEBUG cotyledon.oslo_config_glue [-] zaqar.auth_section             = service_credentials log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.901 2 DEBUG cotyledon.oslo_config_glue [-] zaqar.auth_type                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.901 2 DEBUG cotyledon.oslo_config_glue [-] zaqar.cafile                   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.901 2 DEBUG cotyledon.oslo_config_glue [-] zaqar.certfile                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.902 2 DEBUG cotyledon.oslo_config_glue [-] zaqar.collect_timing           = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.902 2 DEBUG cotyledon.oslo_config_glue [-] zaqar.insecure                 = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.902 2 DEBUG cotyledon.oslo_config_glue [-] zaqar.interface                = internal log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.902 2 DEBUG cotyledon.oslo_config_glue [-] zaqar.keyfile                  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.902 2 DEBUG cotyledon.oslo_config_glue [-] zaqar.region_name              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.902 2 DEBUG cotyledon.oslo_config_glue [-] zaqar.split_loggers            = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.902 2 DEBUG cotyledon.oslo_config_glue [-] zaqar.timeout                  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.902 2 DEBUG cotyledon.oslo_config_glue [-] ******************************************************************************** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2613
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.923 12 INFO ceilometer.polling.manager [-] Looking for dynamic pollsters configurations at [['/etc/ceilometer/pollsters.d']].
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.924 12 INFO ceilometer.polling.manager [-] No dynamic pollsters found in folder [/etc/ceilometer/pollsters.d].
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.925 12 INFO ceilometer.polling.manager [-] No dynamic pollsters file found in dirs [['/etc/ceilometer/pollsters.d']].
Oct 02 11:55:16 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:16.936 12 DEBUG ceilometer.compute.virt.libvirt.utils [-] Connecting to libvirt: qemu:///system new_libvirt_connection /usr/lib/python3.9/site-packages/ceilometer/compute/virt/libvirt/utils.py:93
Oct 02 11:55:17 compute-0 sudo[203187]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tvypflfraqrdnvlapcusbwkmpamjoffy ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406116.2004488-1698-166607483929504/AnsiballZ_copy.py'
Oct 02 11:55:17 compute-0 sudo[203187]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.067 12 DEBUG cotyledon.oslo_config_glue [-] Full set of CONF: _load_service_options /usr/lib/python3.9/site-packages/cotyledon/oslo_config_glue.py:48
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.067 12 DEBUG cotyledon.oslo_config_glue [-] ******************************************************************************** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2589
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.067 12 DEBUG cotyledon.oslo_config_glue [-] Configuration options gathered from: log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2590
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.067 12 DEBUG cotyledon.oslo_config_glue [-] command line args: ['--polling-namespaces', 'compute', '--logfile', '/dev/stdout'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2591
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.067 12 DEBUG cotyledon.oslo_config_glue [-] config files: ['/etc/ceilometer/ceilometer.conf'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2592
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.067 12 DEBUG cotyledon.oslo_config_glue [-] ================================================================================ log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2594
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.068 12 DEBUG cotyledon.oslo_config_glue [-] batch_size                     = 50 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.068 12 DEBUG cotyledon.oslo_config_glue [-] cfg_file                       = polling.yaml log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.068 12 DEBUG cotyledon.oslo_config_glue [-] config_dir                     = ['/etc/ceilometer/ceilometer.conf.d'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.068 12 DEBUG cotyledon.oslo_config_glue [-] config_file                    = ['/etc/ceilometer/ceilometer.conf'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.068 12 DEBUG cotyledon.oslo_config_glue [-] config_source                  = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.068 12 DEBUG cotyledon.oslo_config_glue [-] control_exchange               = ceilometer log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.068 12 DEBUG cotyledon.oslo_config_glue [-] debug                          = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.069 12 DEBUG cotyledon.oslo_config_glue [-] default_log_levels             = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'futurist=INFO', 'neutronclient=INFO', 'keystoneclient=INFO'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.069 12 DEBUG cotyledon.oslo_config_glue [-] event_pipeline_cfg_file        = event_pipeline.yaml log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.069 12 DEBUG cotyledon.oslo_config_glue [-] graceful_shutdown_timeout      = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.069 12 DEBUG cotyledon.oslo_config_glue [-] host                           = compute-0.ctlplane.example.com log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.069 12 DEBUG cotyledon.oslo_config_glue [-] http_timeout                   = 600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.069 12 DEBUG cotyledon.oslo_config_glue [-] hypervisor_inspector           = libvirt log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.069 12 DEBUG cotyledon.oslo_config_glue [-] instance_format                = [instance: %(uuid)s]  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.069 12 DEBUG cotyledon.oslo_config_glue [-] instance_uuid_format           = [instance: %(uuid)s]  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.069 12 DEBUG cotyledon.oslo_config_glue [-] libvirt_type                   = kvm log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.070 12 DEBUG cotyledon.oslo_config_glue [-] libvirt_uri                    =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.070 12 DEBUG cotyledon.oslo_config_glue [-] log_config_append              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.070 12 DEBUG cotyledon.oslo_config_glue [-] log_date_format                = %Y-%m-%d %H:%M:%S log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.070 12 DEBUG cotyledon.oslo_config_glue [-] log_dir                        = /var/log/ceilometer log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.070 12 DEBUG cotyledon.oslo_config_glue [-] log_file                       = /dev/stdout log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.070 12 DEBUG cotyledon.oslo_config_glue [-] log_options                    = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.070 12 DEBUG cotyledon.oslo_config_glue [-] log_rotate_interval            = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.070 12 DEBUG cotyledon.oslo_config_glue [-] log_rotate_interval_type       = days log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.070 12 DEBUG cotyledon.oslo_config_glue [-] log_rotation_type              = none log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.070 12 DEBUG cotyledon.oslo_config_glue [-] logging_context_format_string  = %(asctime)s.%(msecs)03d %(process)d %(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(user_identity)s] %(instance)s%(message)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.070 12 DEBUG cotyledon.oslo_config_glue [-] logging_debug_format_suffix    = %(funcName)s %(pathname)s:%(lineno)d log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.070 12 DEBUG cotyledon.oslo_config_glue [-] logging_default_format_string  = %(asctime)s.%(msecs)03d %(process)d %(levelname)s %(name)s [-] %(instance)s%(message)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.071 12 DEBUG cotyledon.oslo_config_glue [-] logging_exception_prefix       = %(asctime)s.%(msecs)03d %(process)d ERROR %(name)s %(instance)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.071 12 DEBUG cotyledon.oslo_config_glue [-] logging_user_identity_format   = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.071 12 DEBUG cotyledon.oslo_config_glue [-] max_logfile_count              = 30 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.071 12 DEBUG cotyledon.oslo_config_glue [-] max_logfile_size_mb            = 200 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.071 12 DEBUG cotyledon.oslo_config_glue [-] max_parallel_requests          = 64 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.071 12 DEBUG cotyledon.oslo_config_glue [-] partitioning_group_prefix      = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.071 12 DEBUG cotyledon.oslo_config_glue [-] pipeline_cfg_file              = pipeline.yaml log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.071 12 DEBUG cotyledon.oslo_config_glue [-] polling_namespaces             = ['compute'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.071 12 DEBUG cotyledon.oslo_config_glue [-] pollsters_definitions_dirs     = ['/etc/ceilometer/pollsters.d'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.071 12 DEBUG cotyledon.oslo_config_glue [-] publish_errors                 = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.071 12 DEBUG cotyledon.oslo_config_glue [-] rate_limit_burst               = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.072 12 DEBUG cotyledon.oslo_config_glue [-] rate_limit_except_level        = CRITICAL log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.072 12 DEBUG cotyledon.oslo_config_glue [-] rate_limit_interval            = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.072 12 DEBUG cotyledon.oslo_config_glue [-] reseller_prefix                = AUTH_ log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.072 12 DEBUG cotyledon.oslo_config_glue [-] reserved_metadata_keys         = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.072 12 DEBUG cotyledon.oslo_config_glue [-] reserved_metadata_length       = 256 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.072 12 DEBUG cotyledon.oslo_config_glue [-] reserved_metadata_namespace    = ['metering.'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.072 12 DEBUG cotyledon.oslo_config_glue [-] rootwrap_config                = /etc/ceilometer/rootwrap.conf log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.072 12 DEBUG cotyledon.oslo_config_glue [-] sample_source                  = openstack log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.072 12 DEBUG cotyledon.oslo_config_glue [-] syslog_log_facility            = LOG_USER log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.072 12 DEBUG cotyledon.oslo_config_glue [-] tenant_name_discovery          = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.072 12 DEBUG cotyledon.oslo_config_glue [-] transport_url                  = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.073 12 DEBUG cotyledon.oslo_config_glue [-] use_eventlog                   = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.073 12 DEBUG cotyledon.oslo_config_glue [-] use_journal                    = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.073 12 DEBUG cotyledon.oslo_config_glue [-] use_json                       = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.073 12 DEBUG cotyledon.oslo_config_glue [-] use_stderr                     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.073 12 DEBUG cotyledon.oslo_config_glue [-] use_syslog                     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.073 12 DEBUG cotyledon.oslo_config_glue [-] watch_log_file                 = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2602
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.073 12 DEBUG cotyledon.oslo_config_glue [-] compute.instance_discovery_method = libvirt_metadata log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.073 12 DEBUG cotyledon.oslo_config_glue [-] compute.resource_cache_expiry  = 3600 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.073 12 DEBUG cotyledon.oslo_config_glue [-] compute.resource_update_interval = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.073 12 DEBUG cotyledon.oslo_config_glue [-] coordination.backend_url       = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.073 12 DEBUG cotyledon.oslo_config_glue [-] event.definitions_cfg_file     = event_definitions.yaml log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.074 12 DEBUG cotyledon.oslo_config_glue [-] event.drop_unmatched_notifications = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.074 12 DEBUG cotyledon.oslo_config_glue [-] event.store_raw                = [] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.074 12 DEBUG cotyledon.oslo_config_glue [-] ipmi.node_manager_init_retry   = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.074 12 DEBUG cotyledon.oslo_config_glue [-] ipmi.polling_retry             = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.074 12 DEBUG cotyledon.oslo_config_glue [-] meter.meter_definitions_dirs   = ['/etc/ceilometer/meters.d', '/usr/lib/python3.9/site-packages/ceilometer/data/meters.d'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.074 12 DEBUG cotyledon.oslo_config_glue [-] monasca.archive_on_failure     = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.074 12 DEBUG cotyledon.oslo_config_glue [-] monasca.archive_path           = mon_pub_failures.txt log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.074 12 DEBUG cotyledon.oslo_config_glue [-] monasca.auth_section           = service_credentials log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.074 12 DEBUG cotyledon.oslo_config_glue [-] monasca.auth_type              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.074 12 DEBUG cotyledon.oslo_config_glue [-] monasca.batch_count            = 1000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.074 12 DEBUG cotyledon.oslo_config_glue [-] monasca.batch_max_retries      = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.075 12 DEBUG cotyledon.oslo_config_glue [-] monasca.batch_mode             = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.075 12 DEBUG cotyledon.oslo_config_glue [-] monasca.batch_polling_interval = 5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.075 12 DEBUG cotyledon.oslo_config_glue [-] monasca.batch_timeout          = 15 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.075 12 DEBUG cotyledon.oslo_config_glue [-] monasca.cafile                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.075 12 DEBUG cotyledon.oslo_config_glue [-] monasca.certfile               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.075 12 DEBUG cotyledon.oslo_config_glue [-] monasca.client_max_retries     = 3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.075 12 DEBUG cotyledon.oslo_config_glue [-] monasca.client_retry_interval  = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.075 12 DEBUG cotyledon.oslo_config_glue [-] monasca.clientapi_version      = 2_0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.075 12 DEBUG cotyledon.oslo_config_glue [-] monasca.cloud_name             = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.075 12 DEBUG cotyledon.oslo_config_glue [-] monasca.cluster                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.075 12 DEBUG cotyledon.oslo_config_glue [-] monasca.collect_timing         = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.075 12 DEBUG cotyledon.oslo_config_glue [-] monasca.control_plane          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.076 12 DEBUG cotyledon.oslo_config_glue [-] monasca.enable_api_pagination  = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.076 12 DEBUG cotyledon.oslo_config_glue [-] monasca.insecure               = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.076 12 DEBUG cotyledon.oslo_config_glue [-] monasca.interface              = internal log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.076 12 DEBUG cotyledon.oslo_config_glue [-] monasca.keyfile                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.076 12 DEBUG cotyledon.oslo_config_glue [-] monasca.monasca_mappings       = /etc/ceilometer/monasca_field_definitions.yaml log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.076 12 DEBUG cotyledon.oslo_config_glue [-] monasca.region_name            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.076 12 DEBUG cotyledon.oslo_config_glue [-] monasca.retry_on_failure       = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.076 12 DEBUG cotyledon.oslo_config_glue [-] monasca.split_loggers          = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.076 12 DEBUG cotyledon.oslo_config_glue [-] monasca.timeout                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.076 12 DEBUG cotyledon.oslo_config_glue [-] notification.ack_on_event_error = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.076 12 DEBUG cotyledon.oslo_config_glue [-] notification.batch_size        = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.076 12 DEBUG cotyledon.oslo_config_glue [-] notification.batch_timeout     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.077 12 DEBUG cotyledon.oslo_config_glue [-] notification.messaging_urls    = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.077 12 DEBUG cotyledon.oslo_config_glue [-] notification.notification_control_exchanges = ['nova', 'glance', 'neutron', 'cinder', 'heat', 'keystone', 'sahara', 'trove', 'zaqar', 'swift', 'ceilometer', 'magnum', 'dns', 'ironic', 'aodh'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.077 12 DEBUG cotyledon.oslo_config_glue [-] notification.pipelines         = ['meter', 'event'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.077 12 DEBUG cotyledon.oslo_config_glue [-] notification.workers           = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.077 12 DEBUG cotyledon.oslo_config_glue [-] polling.batch_size             = 50 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.077 12 DEBUG cotyledon.oslo_config_glue [-] polling.cfg_file               = polling.yaml log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.077 12 DEBUG cotyledon.oslo_config_glue [-] polling.partitioning_group_prefix = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.077 12 DEBUG cotyledon.oslo_config_glue [-] polling.pollsters_definitions_dirs = ['/etc/ceilometer/pollsters.d'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.077 12 DEBUG cotyledon.oslo_config_glue [-] polling.tenant_name_discovery  = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.077 12 DEBUG cotyledon.oslo_config_glue [-] publisher.telemetry_secret     = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.078 12 DEBUG cotyledon.oslo_config_glue [-] publisher_notifier.event_topic = event log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.078 12 DEBUG cotyledon.oslo_config_glue [-] publisher_notifier.metering_topic = metering log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.078 12 DEBUG cotyledon.oslo_config_glue [-] publisher_notifier.telemetry_driver = messagingv2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.078 12 DEBUG cotyledon.oslo_config_glue [-] rgw_admin_credentials.access_key = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.078 12 DEBUG cotyledon.oslo_config_glue [-] rgw_admin_credentials.secret_key = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.078 12 DEBUG cotyledon.oslo_config_glue [-] rgw_client.implicit_tenants    = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.078 12 DEBUG cotyledon.oslo_config_glue [-] service_types.cinder           = volumev3 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.078 12 DEBUG cotyledon.oslo_config_glue [-] service_types.glance           = image log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.078 12 DEBUG cotyledon.oslo_config_glue [-] service_types.neutron          = network log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.078 12 DEBUG cotyledon.oslo_config_glue [-] service_types.nova             = compute log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.078 12 DEBUG cotyledon.oslo_config_glue [-] service_types.radosgw          = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.079 12 DEBUG cotyledon.oslo_config_glue [-] service_types.swift            = object-store log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.079 12 DEBUG cotyledon.oslo_config_glue [-] vmware.api_retry_count         = 10 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.079 12 DEBUG cotyledon.oslo_config_glue [-] vmware.ca_file                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.079 12 DEBUG cotyledon.oslo_config_glue [-] vmware.host_ip                 = 127.0.0.1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.079 12 DEBUG cotyledon.oslo_config_glue [-] vmware.host_password           = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.079 12 DEBUG cotyledon.oslo_config_glue [-] vmware.host_port               = 443 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.079 12 DEBUG cotyledon.oslo_config_glue [-] vmware.host_username           =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.079 12 DEBUG cotyledon.oslo_config_glue [-] vmware.insecure                = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.079 12 DEBUG cotyledon.oslo_config_glue [-] vmware.task_poll_interval      = 0.5 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.079 12 DEBUG cotyledon.oslo_config_glue [-] vmware.wsdl_location           = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.079 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.auth_section = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.080 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.auth_type  = password log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.080 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.auth_url   = https://keystone-internal.openstack.svc:5000 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.080 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.cafile     = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.080 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.certfile   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.080 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.collect_timing = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.080 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.default_domain_id = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.080 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.default_domain_name = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.080 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.domain_id  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.080 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.domain_name = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.080 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.insecure   = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.080 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.interface  = internalURL log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.080 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.keyfile    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.081 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.password   = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.081 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.project_domain_id = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.081 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.project_domain_name = Default log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.081 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.project_id = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.081 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.project_name = service log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.081 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.region_name = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.081 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.split_loggers = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.081 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.system_scope = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.081 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.timeout    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.081 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.trust_id   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.082 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.user_domain_id = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.082 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.user_domain_name = Default log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.082 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.user_id    = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.082 12 DEBUG cotyledon.oslo_config_glue [-] service_credentials.username   = ceilometer log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.082 12 DEBUG cotyledon.oslo_config_glue [-] gnocchi.auth_section           = service_credentials log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.082 12 DEBUG cotyledon.oslo_config_glue [-] gnocchi.auth_type              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.082 12 DEBUG cotyledon.oslo_config_glue [-] gnocchi.cafile                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.082 12 DEBUG cotyledon.oslo_config_glue [-] gnocchi.certfile               = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.082 12 DEBUG cotyledon.oslo_config_glue [-] gnocchi.collect_timing         = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.082 12 DEBUG cotyledon.oslo_config_glue [-] gnocchi.insecure               = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.083 12 DEBUG cotyledon.oslo_config_glue [-] gnocchi.interface              = internal log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.083 12 DEBUG cotyledon.oslo_config_glue [-] gnocchi.keyfile                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.083 12 DEBUG cotyledon.oslo_config_glue [-] gnocchi.region_name            = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.083 12 DEBUG cotyledon.oslo_config_glue [-] gnocchi.split_loggers          = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.083 12 DEBUG cotyledon.oslo_config_glue [-] gnocchi.timeout                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.083 12 DEBUG cotyledon.oslo_config_glue [-] zaqar.auth_section             = service_credentials log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.083 12 DEBUG cotyledon.oslo_config_glue [-] zaqar.auth_type                = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.083 12 DEBUG cotyledon.oslo_config_glue [-] zaqar.cafile                   = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.083 12 DEBUG cotyledon.oslo_config_glue [-] zaqar.certfile                 = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.083 12 DEBUG cotyledon.oslo_config_glue [-] zaqar.collect_timing           = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.084 12 DEBUG cotyledon.oslo_config_glue [-] zaqar.insecure                 = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.084 12 DEBUG cotyledon.oslo_config_glue [-] zaqar.interface                = internal log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.084 12 DEBUG cotyledon.oslo_config_glue [-] zaqar.keyfile                  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.084 12 DEBUG cotyledon.oslo_config_glue [-] zaqar.region_name              = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.084 12 DEBUG cotyledon.oslo_config_glue [-] zaqar.split_loggers            = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.084 12 DEBUG cotyledon.oslo_config_glue [-] zaqar.timeout                  = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.084 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_notifications.driver = ['noop'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.084 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_notifications.retry = -1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.084 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_notifications.topics = ['notifications'] log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.084 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_notifications.transport_url = **** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.084 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.amqp_auto_delete = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.085 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.amqp_durable_queues = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.085 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.conn_pool_min_size = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.085 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.conn_pool_ttl = 1200 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.085 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.direct_mandatory_flag = True log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.085 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.enable_cancel_on_failover = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.085 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.heartbeat_in_pthread = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.085 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.heartbeat_rate = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.085 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.085 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.kombu_compression = None log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.085 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.kombu_failover_strategy = round-robin log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.085 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.085 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.086 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.rabbit_ha_queues = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.086 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.rabbit_interval_max = 30 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.086 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.086 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 100 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.086 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.086 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.086 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.086 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.rabbit_quorum_queue = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.086 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.rabbit_retry_backoff = 2 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.086 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.rabbit_retry_interval = 1 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.086 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.086 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.rpc_conn_pool_size = 30 log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.087 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.ssl      = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.087 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.ssl_ca_file =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.087 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.ssl_cert_file =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.087 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.ssl_enforce_fips_mode = False log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.087 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.ssl_key_file =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.087 12 DEBUG cotyledon.oslo_config_glue [-] oslo_messaging_rabbit.ssl_version =  log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2609
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.087 12 DEBUG cotyledon.oslo_config_glue [-] ******************************************************************************** log_opt_values /usr/lib/python3.9/site-packages/oslo_config/cfg.py:2613
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.087 12 DEBUG cotyledon._service [-] Run service AgentManager(0) [12] wait_forever /usr/lib/python3.9/site-packages/cotyledon/_service.py:241
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.089 12 DEBUG ceilometer.agent [-] Config file: {'sources': [{'name': 'pollsters', 'interval': 120, 'meters': ['power.state', 'cpu', 'memory.usage', 'disk.*', 'network.*']}]} load_config /usr/lib/python3.9/site-packages/ceilometer/agent.py:64
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.095 12 DEBUG ceilometer.compute.virt.libvirt.utils [-] Connecting to libvirt: qemu:///system new_libvirt_connection /usr/lib/python3.9/site-packages/ceilometer/compute/virt/libvirt/utils.py:93
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.101 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.101 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.101 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.102 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.102 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.102 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.102 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.102 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.102 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.103 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.103 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.103 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.103 12 DEBUG ceilometer.polling.manager [-] Skip pollster memory.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.103 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.allocation, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.103 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.103 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.103 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.103 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.103 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.capacity, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster cpu, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.iops, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:55:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:55:17 compute-0 python3.9[203189]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/healthchecks/node_exporter/ group=zuul mode=0700 owner=zuul setype=container_file_t src=/home/zuul/.ansible/tmp/ansible-tmp-1759406116.2004488-1698-166607483929504/.source _original_basename=healthcheck follow=False checksum=e380c11c36804bfc65a818f2960cfa663daacfe5 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:55:17 compute-0 sudo[203187]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:18 compute-0 sudo[203342]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ukwueovrxyswzktbmjsvgpbhpxpbulgh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406117.7786045-1749-151741581551708/AnsiballZ_container_config_data.py'
Oct 02 11:55:18 compute-0 sudo[203342]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:18 compute-0 python3.9[203344]: ansible-container_config_data Invoked with config_overrides={} config_path=/var/lib/openstack/config/telemetry config_pattern=node_exporter.json debug=False
Oct 02 11:55:18 compute-0 sudo[203342]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:18 compute-0 sudo[203494]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wgnvxfzszelvhvhdzgvckygwsygkyfbm ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406118.6182349-1776-255617600964892/AnsiballZ_container_config_hash.py'
Oct 02 11:55:18 compute-0 sudo[203494]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:19 compute-0 python3.9[203496]: ansible-container_config_hash Invoked with check_mode=False config_vol_prefix=/var/lib/config-data
Oct 02 11:55:19 compute-0 sudo[203494]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:20 compute-0 podman[203596]: 2025-10-02 11:55:20.169888572 +0000 UTC m=+0.084734122 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, io.buildah.version=1.41.3, managed_by=edpm_ansible, container_name=multipathd, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 11:55:20 compute-0 sudo[203665]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vkyrkiplzpbggsgeeazuxuxvxuyncbld ; /usr/bin/python3 /home/zuul/.ansible/tmp/ansible-tmp-1759406119.9171546-1806-279101603153537/AnsiballZ_edpm_container_manage.py'
Oct 02 11:55:20 compute-0 sudo[203665]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:20 compute-0 python3[203667]: ansible-edpm_container_manage Invoked with concurrency=1 config_dir=/var/lib/openstack/config/telemetry config_id=edpm config_overrides={} config_patterns=node_exporter.json log_base_path=/var/log/containers/stdouts debug=False
Oct 02 11:55:20 compute-0 podman[203703]: 2025-10-02 11:55:20.625932812 +0000 UTC m=+0.021140876 image pull 0da6a335fe1356545476b749c68f022c897de3a2139e8f0054f6937349ee2b83 quay.io/prometheus/node-exporter:v1.5.0
Oct 02 11:55:21 compute-0 podman[203703]: 2025-10-02 11:55:21.406383054 +0000 UTC m=+0.801591058 container create cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, config_id=edpm, container_name=node_exporter, managed_by=edpm_ansible)
Oct 02 11:55:21 compute-0 python3[203667]: ansible-edpm_container_manage PODMAN-CONTAINER-DEBUG: podman create --name node_exporter --conmon-pidfile /run/node_exporter.pid --env OS_ENDPOINT_TYPE=internal --healthcheck-command /openstack/healthcheck node_exporter --label config_id=edpm --label container_name=node_exporter --label managed_by=edpm_ansible --label config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']} --log-driver journald --log-level info --network host --privileged=True --publish 9100:9100 --user root --volume /var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z --volume /var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z --volume /var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw --volume 
/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z quay.io/prometheus/node-exporter:v1.5.0 --web.config.file=/etc/node_exporter/node_exporter.yaml --web.disable-exporter-metrics --collector.systemd --collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\.service --no-collector.dmi --no-collector.entropy --no-collector.thermal_zone --no-collector.time --no-collector.timex --no-collector.uname --no-collector.stat --no-collector.hwmon --no-collector.os --no-collector.selinux --no-collector.textfile --no-collector.powersupplyclass --no-collector.pressure --no-collector.rapl
Oct 02 11:55:21 compute-0 sudo[203665]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:21 compute-0 sudo[203891]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ygzqzwzspcxbxtfceasqeybfaafkjoxa ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406121.7420626-1830-38011084537127/AnsiballZ_stat.py'
Oct 02 11:55:21 compute-0 sudo[203891]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:22 compute-0 python3.9[203893]: ansible-ansible.builtin.stat Invoked with path=/etc/sysconfig/podman_drop_in follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:55:22 compute-0 sudo[203891]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:22 compute-0 sudo[204045]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-onykctdxhvjrmiursrjmiwhkaozzqjgh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406122.5147853-1857-56073206985267/AnsiballZ_file.py'
Oct 02 11:55:22 compute-0 sudo[204045]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:23 compute-0 python3.9[204047]: ansible-file Invoked with path=/etc/systemd/system/edpm_node_exporter.requires state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:55:23 compute-0 sudo[204045]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:23 compute-0 sudo[204196]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wnqnklwelijjohtsqvqujbkhiytqnqng ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406123.1151774-1857-58089320594439/AnsiballZ_copy.py'
Oct 02 11:55:23 compute-0 sudo[204196]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:23 compute-0 python3.9[204198]: ansible-copy Invoked with src=/home/zuul/.ansible/tmp/ansible-tmp-1759406123.1151774-1857-58089320594439/source dest=/etc/systemd/system/edpm_node_exporter.service mode=0644 owner=root group=root backup=False force=True remote_src=False follow=False unsafe_writes=False _original_basename=None content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None checksum=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:55:23 compute-0 sudo[204196]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:24 compute-0 sudo[204272]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ebbngmzbgugfyudbbwhkvbokfluoicrt ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406123.1151774-1857-58089320594439/AnsiballZ_systemd.py'
Oct 02 11:55:24 compute-0 sudo[204272]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:24 compute-0 python3.9[204274]: ansible-systemd Invoked with daemon_reload=True daemon_reexec=False scope=system no_block=False name=None state=None enabled=None force=None masked=None
Oct 02 11:55:24 compute-0 systemd[1]: Reloading.
Oct 02 11:55:24 compute-0 systemd-rc-local-generator[204302]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:55:24 compute-0 systemd-sysv-generator[204305]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:55:24 compute-0 sudo[204272]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:24 compute-0 sudo[204383]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rproquiwnnowxsiputdnxytqxoptotkm ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406123.1151774-1857-58089320594439/AnsiballZ_systemd.py'
Oct 02 11:55:24 compute-0 sudo[204383]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:25 compute-0 python3.9[204385]: ansible-systemd Invoked with state=restarted name=edpm_node_exporter.service enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:55:25 compute-0 systemd[1]: Reloading.
Oct 02 11:55:25 compute-0 systemd-rc-local-generator[204416]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:55:25 compute-0 systemd-sysv-generator[204420]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:55:25 compute-0 systemd[1]: Starting node_exporter container...
Oct 02 11:55:25 compute-0 systemd[1]: Started libcrun container.
Oct 02 11:55:25 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/dee776ad332ccb7657bbe12da8fded1f6b77b358681cbefe6b82f309393481c3/merged/etc/node_exporter/tls supports timestamps until 2038 (0x7fffffff)
Oct 02 11:55:25 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/dee776ad332ccb7657bbe12da8fded1f6b77b358681cbefe6b82f309393481c3/merged/etc/node_exporter/node_exporter.yaml supports timestamps until 2038 (0x7fffffff)
Oct 02 11:55:26 compute-0 systemd[1]: Started /usr/bin/podman healthcheck run cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2.
Oct 02 11:55:26 compute-0 podman[204425]: 2025-10-02 11:55:26.114383228 +0000 UTC m=+0.358974489 container init cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter)
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.127Z caller=node_exporter.go:180 level=info msg="Starting node_exporter" version="(version=1.5.0, branch=HEAD, revision=1b48970ffcf5630534fb00bb0687d73c66d1c959)"
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.127Z caller=node_exporter.go:181 level=info msg="Build context" build_context="(go=go1.19.3, user=root@6e7732a7b81b, date=20221129-18:59:09)"
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.127Z caller=node_exporter.go:183 level=warn msg="Node Exporter is running as root user. This exporter is designed to run as unprivileged user, root is not required."
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.127Z caller=filesystem_common.go:111 level=info collector=filesystem msg="Parsed flag --collector.filesystem.mount-points-exclude" flag=^/(dev|proc|run/credentials/.+|sys|var/lib/docker/.+|var/lib/containers/storage/.+)($|/)
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.128Z caller=filesystem_common.go:113 level=info collector=filesystem msg="Parsed flag --collector.filesystem.fs-types-exclude" flag=^(autofs|binfmt_misc|bpf|cgroup2?|configfs|debugfs|devpts|devtmpfs|fusectl|hugetlbfs|iso9660|mqueue|nsfs|overlay|proc|procfs|pstore|rpc_pipefs|securityfs|selinuxfs|squashfs|sysfs|tracefs)$
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.128Z caller=diskstats_common.go:111 level=info collector=diskstats msg="Parsed flag --collector.diskstats.device-exclude" flag=^(ram|loop|fd|(h|s|v|xv)d[a-z]|nvme\d+n\d+p)\d+$
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.128Z caller=diskstats_linux.go:264 level=error collector=diskstats msg="Failed to open directory, disabling udev device properties" path=/run/udev/data
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.128Z caller=systemd_linux.go:152 level=info collector=systemd msg="Parsed flag --collector.systemd.unit-include" flag=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\.service
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.128Z caller=systemd_linux.go:154 level=info collector=systemd msg="Parsed flag --collector.systemd.unit-exclude" flag=.+\.(automount|device|mount|scope|slice)
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.128Z caller=node_exporter.go:110 level=info msg="Enabled collectors"
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=arp
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=bcache
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=bonding
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=btrfs
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=conntrack
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=cpu
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=cpufreq
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=diskstats
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=edac
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=fibrechannel
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=filefd
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=filesystem
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=infiniband
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=ipvs
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=loadavg
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=mdadm
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=meminfo
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=netclass
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=netdev
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=netstat
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=nfs
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=nfsd
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=nvme
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=schedstat
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=sockstat
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=softnet
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=systemd
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=tapestats
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=udp_queues
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=vmstat
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=xfs
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=node_exporter.go:117 level=info collector=zfs
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.129Z caller=tls_config.go:232 level=info msg="Listening on" address=[::]:9100
Oct 02 11:55:26 compute-0 node_exporter[204441]: ts=2025-10-02T11:55:26.130Z caller=tls_config.go:268 level=info msg="TLS is enabled." http2=true address=[::]:9100
Oct 02 11:55:26 compute-0 podman[204425]: 2025-10-02 11:55:26.143523823 +0000 UTC m=+0.388115054 container start cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']})
Oct 02 11:55:26 compute-0 podman[204425]: node_exporter
Oct 02 11:55:26 compute-0 systemd[1]: Started node_exporter container.
Oct 02 11:55:26 compute-0 sudo[204383]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:26 compute-0 podman[204450]: 2025-10-02 11:55:26.232607884 +0000 UTC m=+0.080363191 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 11:55:26 compute-0 sudo[204637]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ycjnnmdxdvzztyndxkmafltaxxgdlggl ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406126.634467-1929-27051197410608/AnsiballZ_systemd.py'
Oct 02 11:55:26 compute-0 sudo[204637]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:26 compute-0 podman[204598]: 2025-10-02 11:55:26.929536289 +0000 UTC m=+0.062866747 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, container_name=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible, org.label-schema.license=GPLv2, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid, io.buildah.version=1.41.3, org.label-schema.build-date=20251001)
Oct 02 11:55:27 compute-0 python3.9[204644]: ansible-ansible.builtin.systemd Invoked with name=edpm_node_exporter.service state=restarted daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Oct 02 11:55:27 compute-0 systemd[1]: Stopping node_exporter container...
Oct 02 11:55:27 compute-0 systemd[1]: libpod-cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2.scope: Deactivated successfully.
Oct 02 11:55:27 compute-0 podman[204648]: 2025-10-02 11:55:27.333900001 +0000 UTC m=+0.041585651 container died cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>)
Oct 02 11:55:27 compute-0 systemd[1]: cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2-2aacf1a57ec1cb44.timer: Deactivated successfully.
Oct 02 11:55:27 compute-0 systemd[1]: Stopped /usr/bin/podman healthcheck run cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2.
Oct 02 11:55:27 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2-userdata-shm.mount: Deactivated successfully.
Oct 02 11:55:27 compute-0 systemd[1]: var-lib-containers-storage-overlay-dee776ad332ccb7657bbe12da8fded1f6b77b358681cbefe6b82f309393481c3-merged.mount: Deactivated successfully.
Oct 02 11:55:27 compute-0 podman[204648]: 2025-10-02 11:55:27.377496265 +0000 UTC m=+0.085181915 container cleanup cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter)
Oct 02 11:55:27 compute-0 podman[204648]: node_exporter
Oct 02 11:55:27 compute-0 systemd[1]: edpm_node_exporter.service: Main process exited, code=exited, status=2/INVALIDARGUMENT
Oct 02 11:55:27 compute-0 podman[204677]: node_exporter
Oct 02 11:55:27 compute-0 systemd[1]: edpm_node_exporter.service: Failed with result 'exit-code'.
Oct 02 11:55:27 compute-0 systemd[1]: Stopped node_exporter container.
Oct 02 11:55:27 compute-0 systemd[1]: Starting node_exporter container...
Oct 02 11:55:27 compute-0 systemd[1]: Started libcrun container.
Oct 02 11:55:27 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/dee776ad332ccb7657bbe12da8fded1f6b77b358681cbefe6b82f309393481c3/merged/etc/node_exporter/tls supports timestamps until 2038 (0x7fffffff)
Oct 02 11:55:27 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/dee776ad332ccb7657bbe12da8fded1f6b77b358681cbefe6b82f309393481c3/merged/etc/node_exporter/node_exporter.yaml supports timestamps until 2038 (0x7fffffff)
Oct 02 11:55:27 compute-0 systemd[1]: Started /usr/bin/podman healthcheck run cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2.
Oct 02 11:55:27 compute-0 podman[204690]: 2025-10-02 11:55:27.577398518 +0000 UTC m=+0.104282192 container init cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter)
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.587Z caller=node_exporter.go:180 level=info msg="Starting node_exporter" version="(version=1.5.0, branch=HEAD, revision=1b48970ffcf5630534fb00bb0687d73c66d1c959)"
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.587Z caller=node_exporter.go:181 level=info msg="Build context" build_context="(go=go1.19.3, user=root@6e7732a7b81b, date=20221129-18:59:09)"
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.587Z caller=node_exporter.go:183 level=warn msg="Node Exporter is running as root user. This exporter is designed to run as unprivileged user, root is not required."
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.587Z caller=systemd_linux.go:152 level=info collector=systemd msg="Parsed flag --collector.systemd.unit-include" flag=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\.service
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.587Z caller=systemd_linux.go:154 level=info collector=systemd msg="Parsed flag --collector.systemd.unit-exclude" flag=.+\.(automount|device|mount|scope|slice)
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.587Z caller=filesystem_common.go:111 level=info collector=filesystem msg="Parsed flag --collector.filesystem.mount-points-exclude" flag=^/(dev|proc|run/credentials/.+|sys|var/lib/docker/.+|var/lib/containers/storage/.+)($|/)
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.587Z caller=filesystem_common.go:113 level=info collector=filesystem msg="Parsed flag --collector.filesystem.fs-types-exclude" flag=^(autofs|binfmt_misc|bpf|cgroup2?|configfs|debugfs|devpts|devtmpfs|fusectl|hugetlbfs|iso9660|mqueue|nsfs|overlay|proc|procfs|pstore|rpc_pipefs|securityfs|selinuxfs|squashfs|sysfs|tracefs)$
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=diskstats_common.go:111 level=info collector=diskstats msg="Parsed flag --collector.diskstats.device-exclude" flag=^(ram|loop|fd|(h|s|v|xv)d[a-z]|nvme\d+n\d+p)\d+$
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=diskstats_linux.go:264 level=error collector=diskstats msg="Failed to open directory, disabling udev device properties" path=/run/udev/data
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:110 level=info msg="Enabled collectors"
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=arp
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=bcache
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=bonding
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=btrfs
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=conntrack
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=cpu
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=cpufreq
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=diskstats
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=edac
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=fibrechannel
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=filefd
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=filesystem
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=infiniband
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=ipvs
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=loadavg
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=mdadm
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=meminfo
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=netclass
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=netdev
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=netstat
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=nfs
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=nfsd
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=nvme
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=schedstat
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=sockstat
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=softnet
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=systemd
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=tapestats
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=udp_queues
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=vmstat
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=xfs
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=node_exporter.go:117 level=info collector=zfs
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.588Z caller=tls_config.go:232 level=info msg="Listening on" address=[::]:9100
Oct 02 11:55:27 compute-0 node_exporter[204706]: ts=2025-10-02T11:55:27.590Z caller=tls_config.go:268 level=info msg="TLS is enabled." http2=true address=[::]:9100
Oct 02 11:55:27 compute-0 podman[204690]: 2025-10-02 11:55:27.601591217 +0000 UTC m=+0.128474871 container start cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 11:55:27 compute-0 podman[204690]: node_exporter
Oct 02 11:55:27 compute-0 systemd[1]: Started node_exporter container.
Oct 02 11:55:27 compute-0 sudo[204637]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:27 compute-0 podman[204715]: 2025-10-02 11:55:27.658279353 +0000 UTC m=+0.047857613 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']})
Oct 02 11:55:28 compute-0 sudo[204889]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-egfxzlgoqjqpqzpgxghuwysooizplgfc ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406127.7845821-1953-47525078916827/AnsiballZ_stat.py'
Oct 02 11:55:28 compute-0 sudo[204889]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:28 compute-0 python3.9[204891]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/healthchecks/podman_exporter/healthcheck follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:55:28 compute-0 sudo[204889]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:28 compute-0 sudo[205012]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-aswkvxvbwatjeqlkmwlsrawlsdcxcxib ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406127.7845821-1953-47525078916827/AnsiballZ_copy.py'
Oct 02 11:55:28 compute-0 sudo[205012]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:28 compute-0 python3.9[205014]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/healthchecks/podman_exporter/ group=zuul mode=0700 owner=zuul setype=container_file_t src=/home/zuul/.ansible/tmp/ansible-tmp-1759406127.7845821-1953-47525078916827/.source _original_basename=healthcheck follow=False checksum=e380c11c36804bfc65a818f2960cfa663daacfe5 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:55:28 compute-0 sudo[205012]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:29 compute-0 sudo[205164]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-splfcsglmatcliqbsculivxyvekqodiz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406129.3556619-2004-174004681713589/AnsiballZ_container_config_data.py'
Oct 02 11:55:29 compute-0 sudo[205164]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:29 compute-0 python3.9[205166]: ansible-container_config_data Invoked with config_overrides={} config_path=/var/lib/openstack/config/telemetry config_pattern=podman_exporter.json debug=False
Oct 02 11:55:29 compute-0 sudo[205164]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:30 compute-0 sudo[205316]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ogkneqxlzzkxvycispuyynqyasnilozv ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406130.2070508-2031-249989444826045/AnsiballZ_container_config_hash.py'
Oct 02 11:55:30 compute-0 sudo[205316]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:30 compute-0 python3.9[205318]: ansible-container_config_hash Invoked with check_mode=False config_vol_prefix=/var/lib/config-data
Oct 02 11:55:30 compute-0 sudo[205316]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:31 compute-0 sudo[205468]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pzvwicmrgweylgdfxskzulfluovaqobj ; /usr/bin/python3 /home/zuul/.ansible/tmp/ansible-tmp-1759406131.065643-2061-148464365460759/AnsiballZ_edpm_container_manage.py'
Oct 02 11:55:31 compute-0 sudo[205468]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:31 compute-0 python3[205470]: ansible-edpm_container_manage Invoked with concurrency=1 config_dir=/var/lib/openstack/config/telemetry config_id=edpm config_overrides={} config_patterns=podman_exporter.json log_base_path=/var/log/containers/stdouts debug=False
Oct 02 11:55:32 compute-0 podman[205483]: 2025-10-02 11:55:32.76611151 +0000 UTC m=+1.110863837 image pull e56d40e393eb5ea8704d9af8cf0d74665df83747106713fda91530f201837815 quay.io/navidys/prometheus-podman-exporter:v1.10.1
Oct 02 11:55:32 compute-0 podman[205578]: 2025-10-02 11:55:32.900977113 +0000 UTC m=+0.049933234 container create c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, config_id=edpm, container_name=podman_exporter)
Oct 02 11:55:32 compute-0 podman[205578]: 2025-10-02 11:55:32.870554882 +0000 UTC m=+0.019510983 image pull e56d40e393eb5ea8704d9af8cf0d74665df83747106713fda91530f201837815 quay.io/navidys/prometheus-podman-exporter:v1.10.1
Oct 02 11:55:32 compute-0 python3[205470]: ansible-edpm_container_manage PODMAN-CONTAINER-DEBUG: podman create --name podman_exporter --conmon-pidfile /run/podman_exporter.pid --env OS_ENDPOINT_TYPE=internal --env CONTAINER_HOST=unix:///run/podman/podman.sock --healthcheck-command /openstack/healthcheck podman_exporter --label config_id=edpm --label container_name=podman_exporter --label managed_by=edpm_ansible --label config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']} --log-driver journald --log-level info --network host --privileged=True --publish 9882:9882 --user root --volume /var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z --volume /var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z --volume /run/podman/podman.sock:/run/podman/podman.sock:rw,z --volume /var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z quay.io/navidys/prometheus-podman-exporter:v1.10.1 --web.config.file=/etc/podman_exporter/podman_exporter.yaml
Oct 02 11:55:33 compute-0 sudo[205468]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:33 compute-0 sudo[205766]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mmbqjvgjfiassjohmqvhqqlcrszofnan ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406133.5150878-2085-106468787473842/AnsiballZ_stat.py'
Oct 02 11:55:33 compute-0 sudo[205766]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:33 compute-0 python3.9[205768]: ansible-ansible.builtin.stat Invoked with path=/etc/sysconfig/podman_drop_in follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:55:33 compute-0 sudo[205766]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:34 compute-0 sudo[205920]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wunnkscptzugziyuqklwunnxprffzzxk ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406134.303166-2112-95700315807695/AnsiballZ_file.py'
Oct 02 11:55:34 compute-0 sudo[205920]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:34 compute-0 python3.9[205922]: ansible-file Invoked with path=/etc/systemd/system/edpm_podman_exporter.requires state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:55:34 compute-0 sudo[205920]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:35 compute-0 sudo[206071]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-htocylojbrnmicgkbavbngchldcolppf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406134.8013902-2112-101245769631346/AnsiballZ_copy.py'
Oct 02 11:55:35 compute-0 sudo[206071]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:35 compute-0 python3.9[206073]: ansible-copy Invoked with src=/home/zuul/.ansible/tmp/ansible-tmp-1759406134.8013902-2112-101245769631346/source dest=/etc/systemd/system/edpm_podman_exporter.service mode=0644 owner=root group=root backup=False force=True remote_src=False follow=False unsafe_writes=False _original_basename=None content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None checksum=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:55:35 compute-0 sudo[206071]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:35 compute-0 sudo[206147]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ruqhqoeedzmfrbjkeqisktrskcxbgvkf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406134.8013902-2112-101245769631346/AnsiballZ_systemd.py'
Oct 02 11:55:35 compute-0 sudo[206147]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:36 compute-0 python3.9[206149]: ansible-systemd Invoked with daemon_reload=True daemon_reexec=False scope=system no_block=False name=None state=None enabled=None force=None masked=None
Oct 02 11:55:36 compute-0 systemd[1]: Reloading.
Oct 02 11:55:36 compute-0 systemd-rc-local-generator[206174]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:55:36 compute-0 systemd-sysv-generator[206178]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:55:36 compute-0 sudo[206147]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:36 compute-0 sudo[206258]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-gtdadlnpyghyauhteizcqqtoxwsbmvex ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406134.8013902-2112-101245769631346/AnsiballZ_systemd.py'
Oct 02 11:55:36 compute-0 sudo[206258]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:36 compute-0 podman[206260]: 2025-10-02 11:55:36.812046216 +0000 UTC m=+0.080848248 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, config_id=ovn_controller, managed_by=edpm_ansible, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, container_name=ovn_controller, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_managed=true)
Oct 02 11:55:37 compute-0 python3.9[206261]: ansible-systemd Invoked with state=restarted name=edpm_podman_exporter.service enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:55:37 compute-0 systemd[1]: Reloading.
Oct 02 11:55:37 compute-0 systemd-sysv-generator[206319]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:55:37 compute-0 systemd-rc-local-generator[206316]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:55:37 compute-0 systemd[1]: Starting podman_exporter container...
Oct 02 11:55:37 compute-0 systemd[1]: Started libcrun container.
Oct 02 11:55:37 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/353969c66448016345dbb1883590bc3cb9b7aaf5bdda37626acc0512fe37c3c7/merged/etc/podman_exporter/tls supports timestamps until 2038 (0x7fffffff)
Oct 02 11:55:37 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/353969c66448016345dbb1883590bc3cb9b7aaf5bdda37626acc0512fe37c3c7/merged/etc/podman_exporter/podman_exporter.yaml supports timestamps until 2038 (0x7fffffff)
Oct 02 11:55:37 compute-0 systemd[1]: Started /usr/bin/podman healthcheck run c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e.
Oct 02 11:55:37 compute-0 podman[206326]: 2025-10-02 11:55:37.519608562 +0000 UTC m=+0.126677199 container init c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 11:55:37 compute-0 podman_exporter[206341]: ts=2025-10-02T11:55:37.534Z caller=exporter.go:68 level=info msg="Starting podman-prometheus-exporter" version="(version=1.10.1, branch=HEAD, revision=1)"
Oct 02 11:55:37 compute-0 podman_exporter[206341]: ts=2025-10-02T11:55:37.534Z caller=exporter.go:69 level=info msg=metrics enhanced=false
Oct 02 11:55:37 compute-0 podman_exporter[206341]: ts=2025-10-02T11:55:37.534Z caller=handler.go:94 level=info msg="enabled collectors"
Oct 02 11:55:37 compute-0 podman_exporter[206341]: ts=2025-10-02T11:55:37.534Z caller=handler.go:105 level=info collector=container
Oct 02 11:55:37 compute-0 podman[206326]: 2025-10-02 11:55:37.543917936 +0000 UTC m=+0.150986573 container start c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 11:55:37 compute-0 podman[206326]: podman_exporter
Oct 02 11:55:37 compute-0 systemd[1]: Starting Podman API Service...
Oct 02 11:55:37 compute-0 systemd[1]: Started Podman API Service.
Oct 02 11:55:37 compute-0 systemd[1]: Started podman_exporter container.
Oct 02 11:55:37 compute-0 podman[206352]: time="2025-10-02T11:55:37Z" level=info msg="/usr/bin/podman filtering at log level info"
Oct 02 11:55:37 compute-0 podman[206352]: time="2025-10-02T11:55:37Z" level=info msg="Setting parallel job count to 25"
Oct 02 11:55:37 compute-0 podman[206352]: time="2025-10-02T11:55:37Z" level=info msg="Using sqlite as database backend"
Oct 02 11:55:37 compute-0 podman[206352]: time="2025-10-02T11:55:37Z" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled"
Oct 02 11:55:37 compute-0 podman[206352]: time="2025-10-02T11:55:37Z" level=info msg="Using systemd socket activation to determine API endpoint"
Oct 02 11:55:37 compute-0 podman[206352]: time="2025-10-02T11:55:37Z" level=info msg="API service listening on \"/run/podman/podman.sock\". URI: \"unix:///run/podman/podman.sock\""
Oct 02 11:55:37 compute-0 sudo[206258]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:37 compute-0 podman[206352]: @ - - [02/Oct/2025:11:55:37 +0000] "GET /v4.9.3/libpod/_ping HTTP/1.1" 200 2 "" "Go-http-client/1.1"
Oct 02 11:55:37 compute-0 podman[206352]: time="2025-10-02T11:55:37Z" level=info msg="List containers: received `last` parameter - overwriting `limit`"
Oct 02 11:55:37 compute-0 podman[206351]: 2025-10-02 11:55:37.601859868 +0000 UTC m=+0.048283500 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=starting, health_failing_streak=1, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>)
Oct 02 11:55:37 compute-0 systemd[1]: c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e-39fcba6db8057acf.service: Main process exited, code=exited, status=1/FAILURE
Oct 02 11:55:37 compute-0 systemd[1]: c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e-39fcba6db8057acf.service: Failed with result 'exit-code'.
Oct 02 11:55:37 compute-0 podman[206352]: @ - - [02/Oct/2025:11:55:37 +0000] "GET /v4.9.3/libpod/containers/json?all=true&external=false&last=0&namespace=false&size=true&sync=false HTTP/1.1" 200 22059 "" "Go-http-client/1.1"
Oct 02 11:55:37 compute-0 podman_exporter[206341]: ts=2025-10-02T11:55:37.611Z caller=exporter.go:96 level=info msg="Listening on" address=:9882
Oct 02 11:55:37 compute-0 podman_exporter[206341]: ts=2025-10-02T11:55:37.612Z caller=tls_config.go:313 level=info msg="Listening on" address=[::]:9882
Oct 02 11:55:37 compute-0 podman_exporter[206341]: ts=2025-10-02T11:55:37.612Z caller=tls_config.go:349 level=info msg="TLS is enabled." http2=true address=[::]:9882
Oct 02 11:55:38 compute-0 sudo[206533]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-arlzmspkyxudhqyubrlosbqqyapqicay ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406137.7437148-2184-69809958756539/AnsiballZ_systemd.py'
Oct 02 11:55:38 compute-0 sudo[206533]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:38 compute-0 python3.9[206535]: ansible-ansible.builtin.systemd Invoked with name=edpm_podman_exporter.service state=restarted daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Oct 02 11:55:38 compute-0 systemd[1]: Stopping podman_exporter container...
Oct 02 11:55:38 compute-0 podman[206352]: @ - - [02/Oct/2025:11:55:37 +0000] "GET /v4.9.3/libpod/events?filters=%7B%7D&since=&stream=true&until= HTTP/1.1" 200 1449 "" "Go-http-client/1.1"
Oct 02 11:55:38 compute-0 systemd[1]: libpod-c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e.scope: Deactivated successfully.
Oct 02 11:55:38 compute-0 conmon[206341]: conmon c2c435107ee78ae9d455 <nwarn>: Failed to open cgroups file: /sys/fs/cgroup/machine.slice/libpod-c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e.scope/container/memory.events
Oct 02 11:55:38 compute-0 podman[206539]: 2025-10-02 11:55:38.450782334 +0000 UTC m=+0.059627019 container died c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 11:55:38 compute-0 systemd[1]: c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e-39fcba6db8057acf.timer: Deactivated successfully.
Oct 02 11:55:38 compute-0 systemd[1]: Stopped /usr/bin/podman healthcheck run c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e.
Oct 02 11:55:38 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e-userdata-shm.mount: Deactivated successfully.
Oct 02 11:55:38 compute-0 systemd[1]: var-lib-containers-storage-overlay-353969c66448016345dbb1883590bc3cb9b7aaf5bdda37626acc0512fe37c3c7-merged.mount: Deactivated successfully.
Oct 02 11:55:38 compute-0 podman[206539]: 2025-10-02 11:55:38.744110221 +0000 UTC m=+0.352954956 container cleanup c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 11:55:38 compute-0 podman[206539]: podman_exporter
Oct 02 11:55:38 compute-0 systemd[1]: edpm_podman_exporter.service: Main process exited, code=exited, status=2/INVALIDARGUMENT
Oct 02 11:55:38 compute-0 podman[206568]: podman_exporter
Oct 02 11:55:38 compute-0 systemd[1]: edpm_podman_exporter.service: Failed with result 'exit-code'.
Oct 02 11:55:38 compute-0 systemd[1]: Stopped podman_exporter container.
Oct 02 11:55:38 compute-0 systemd[1]: Starting podman_exporter container...
Oct 02 11:55:39 compute-0 systemd[1]: Started libcrun container.
Oct 02 11:55:39 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/353969c66448016345dbb1883590bc3cb9b7aaf5bdda37626acc0512fe37c3c7/merged/etc/podman_exporter/tls supports timestamps until 2038 (0x7fffffff)
Oct 02 11:55:39 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/353969c66448016345dbb1883590bc3cb9b7aaf5bdda37626acc0512fe37c3c7/merged/etc/podman_exporter/podman_exporter.yaml supports timestamps until 2038 (0x7fffffff)
Oct 02 11:55:39 compute-0 systemd[1]: Started /usr/bin/podman healthcheck run c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e.
Oct 02 11:55:39 compute-0 podman[206581]: 2025-10-02 11:55:39.092152004 +0000 UTC m=+0.239444119 container init c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 11:55:39 compute-0 podman_exporter[206596]: ts=2025-10-02T11:55:39.106Z caller=exporter.go:68 level=info msg="Starting podman-prometheus-exporter" version="(version=1.10.1, branch=HEAD, revision=1)"
Oct 02 11:55:39 compute-0 podman_exporter[206596]: ts=2025-10-02T11:55:39.106Z caller=exporter.go:69 level=info msg=metrics enhanced=false
Oct 02 11:55:39 compute-0 podman_exporter[206596]: ts=2025-10-02T11:55:39.107Z caller=handler.go:94 level=info msg="enabled collectors"
Oct 02 11:55:39 compute-0 podman_exporter[206596]: ts=2025-10-02T11:55:39.107Z caller=handler.go:105 level=info collector=container
Oct 02 11:55:39 compute-0 podman[206352]: @ - - [02/Oct/2025:11:55:39 +0000] "GET /v4.9.3/libpod/_ping HTTP/1.1" 200 2 "" "Go-http-client/1.1"
Oct 02 11:55:39 compute-0 podman[206352]: time="2025-10-02T11:55:39Z" level=info msg="List containers: received `last` parameter - overwriting `limit`"
Oct 02 11:55:39 compute-0 podman[206581]: 2025-10-02 11:55:39.134591782 +0000 UTC m=+0.281883877 container start c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']})
Oct 02 11:55:39 compute-0 podman[206581]: podman_exporter
Oct 02 11:55:39 compute-0 systemd[1]: Started podman_exporter container.
Oct 02 11:55:39 compute-0 podman[206599]: 2025-10-02 11:55:39.157429626 +0000 UTC m=+0.082273167 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, config_id=ovn_metadata_agent, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, org.label-schema.name=CentOS Stream 9 Base Image, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team)
Oct 02 11:55:39 compute-0 podman[206352]: @ - - [02/Oct/2025:11:55:39 +0000] "GET /v4.9.3/libpod/containers/json?all=true&external=false&last=0&namespace=false&size=true&sync=false HTTP/1.1" 200 22061 "" "Go-http-client/1.1"
Oct 02 11:55:39 compute-0 podman_exporter[206596]: ts=2025-10-02T11:55:39.168Z caller=exporter.go:96 level=info msg="Listening on" address=:9882
Oct 02 11:55:39 compute-0 podman_exporter[206596]: ts=2025-10-02T11:55:39.169Z caller=tls_config.go:313 level=info msg="Listening on" address=[::]:9882
Oct 02 11:55:39 compute-0 podman_exporter[206596]: ts=2025-10-02T11:55:39.169Z caller=tls_config.go:349 level=info msg="TLS is enabled." http2=true address=[::]:9882
Oct 02 11:55:39 compute-0 sudo[206533]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:39 compute-0 podman[206622]: 2025-10-02 11:55:39.201041317 +0000 UTC m=+0.057272406 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']})
Oct 02 11:55:39 compute-0 sudo[206797]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ttisqhnmrehhztxmajmiylfwnykjuzsj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406139.5120528-2208-227969257630203/AnsiballZ_stat.py'
Oct 02 11:55:39 compute-0 sudo[206797]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:40 compute-0 python3.9[206799]: ansible-ansible.legacy.stat Invoked with path=/var/lib/openstack/healthchecks/openstack_network_exporter/healthcheck follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:55:40 compute-0 sudo[206797]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:40 compute-0 sudo[206920]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mczmyrauafqfnplcdsydrpulwujmdstk ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406139.5120528-2208-227969257630203/AnsiballZ_copy.py'
Oct 02 11:55:40 compute-0 sudo[206920]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:40 compute-0 python3.9[206922]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/openstack/healthchecks/openstack_network_exporter/ group=zuul mode=0700 owner=zuul setype=container_file_t src=/home/zuul/.ansible/tmp/ansible-tmp-1759406139.5120528-2208-227969257630203/.source _original_basename=healthcheck follow=False checksum=e380c11c36804bfc65a818f2960cfa663daacfe5 backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None attributes=None
Oct 02 11:55:40 compute-0 sudo[206920]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:41 compute-0 sudo[207072]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ffzaggbnntcmbzcoeywzxgmngmjonfyh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406140.9235892-2259-244310895172140/AnsiballZ_container_config_data.py'
Oct 02 11:55:41 compute-0 sudo[207072]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:41 compute-0 python3.9[207074]: ansible-container_config_data Invoked with config_overrides={} config_path=/var/lib/openstack/config/telemetry config_pattern=openstack_network_exporter.json debug=False
Oct 02 11:55:41 compute-0 sudo[207072]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:41 compute-0 sudo[207224]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-phrrmoezsvrsbljedjzjunalamegbtea ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406141.7333615-2286-196749532186160/AnsiballZ_container_config_hash.py'
Oct 02 11:55:41 compute-0 sudo[207224]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:42 compute-0 python3.9[207226]: ansible-container_config_hash Invoked with check_mode=False config_vol_prefix=/var/lib/config-data
Oct 02 11:55:42 compute-0 sudo[207224]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:42 compute-0 sudo[207376]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cgftxgrcwafpyrmkpwbfibummfltghyq ; /usr/bin/python3 /home/zuul/.ansible/tmp/ansible-tmp-1759406142.5719948-2316-260378407510602/AnsiballZ_edpm_container_manage.py'
Oct 02 11:55:42 compute-0 sudo[207376]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:43 compute-0 python3[207378]: ansible-edpm_container_manage Invoked with concurrency=1 config_dir=/var/lib/openstack/config/telemetry config_id=edpm config_overrides={} config_patterns=openstack_network_exporter.json log_base_path=/var/log/containers/stdouts debug=False
Oct 02 11:55:45 compute-0 podman[207393]: 2025-10-02 11:55:45.405405737 +0000 UTC m=+2.239120799 image pull 186c5e97c6f6912533851a0044ea6da23938910e7bddfb4a6c0be9b48ab2a1d1 quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified
Oct 02 11:55:45 compute-0 podman[207487]: 2025-10-02 11:55:45.542072248 +0000 UTC m=+0.049773900 container create 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, container_name=openstack_network_exporter, io.openshift.expose-services=, release=1755695350, vcs-type=git, url=https://catalog.redhat.com/en/search?searchType=containers, io.openshift.tags=minimal rhel9, architecture=x86_64, vendor=Red Hat, Inc., com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.buildah.version=1.33.7, managed_by=edpm_ansible, com.redhat.component=ubi9-minimal-container, version=9.6, distribution-scope=public, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, config_id=edpm, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., name=ubi9-minimal, build-date=2025-08-20T13:12:41, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, maintainer=Red Hat, Inc.)
Oct 02 11:55:45 compute-0 podman[207487]: 2025-10-02 11:55:45.516453159 +0000 UTC m=+0.024154841 image pull 186c5e97c6f6912533851a0044ea6da23938910e7bddfb4a6c0be9b48ab2a1d1 quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified
Oct 02 11:55:45 compute-0 python3[207378]: ansible-edpm_container_manage PODMAN-CONTAINER-DEBUG: podman create --name openstack_network_exporter --conmon-pidfile /run/openstack_network_exporter.pid --env OS_ENDPOINT_TYPE=internal --env OPENSTACK_NETWORK_EXPORTER_YAML=/etc/openstack_network_exporter/openstack_network_exporter.yaml --healthcheck-command /openstack/healthcheck openstack-netwo --label config_id=edpm --label container_name=openstack_network_exporter --label managed_by=edpm_ansible --label config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']} --log-driver journald --log-level info --network host --privileged=True --publish 9105:9105 --volume /var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z --volume /var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z --volume /var/run/openvswitch:/run/openvswitch:rw,z --volume /var/lib/openvswitch/ovn:/run/ovn:rw,z --volume /proc:/host/proc:ro --volume /var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z 
quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified
Oct 02 11:55:45 compute-0 sudo[207376]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:46 compute-0 podman[207549]: 2025-10-02 11:55:46.14176958 +0000 UTC m=+0.061406468 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=starting, health_failing_streak=2, health_log=, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=edpm, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, tcib_managed=true, maintainer=OpenStack Kubernetes 
Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2)
Oct 02 11:55:46 compute-0 systemd[1]: 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be-77f5e9cb6d8164f7.service: Main process exited, code=exited, status=1/FAILURE
Oct 02 11:55:46 compute-0 systemd[1]: 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be-77f5e9cb6d8164f7.service: Failed with result 'exit-code'.
Oct 02 11:55:46 compute-0 sudo[207694]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-abghocqvgzvzhpelrgbpenoafdxjziqu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406146.156435-2340-250978866499946/AnsiballZ_stat.py'
Oct 02 11:55:46 compute-0 sudo[207694]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:46 compute-0 python3.9[207696]: ansible-ansible.builtin.stat Invoked with path=/etc/sysconfig/podman_drop_in follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:55:46 compute-0 sudo[207694]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:47 compute-0 sudo[207848]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ymwkiwywextrzzccbpwflgjroikrckeq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406146.910432-2367-51695547824770/AnsiballZ_file.py'
Oct 02 11:55:47 compute-0 sudo[207848]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:47 compute-0 python3.9[207850]: ansible-file Invoked with path=/etc/systemd/system/edpm_openstack_network_exporter.requires state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:55:47 compute-0 sudo[207848]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:47 compute-0 sudo[207999]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zjmktagabbanhjguhgyploslfgkhdqwq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406147.4295146-2367-272757153437217/AnsiballZ_copy.py'
Oct 02 11:55:47 compute-0 sudo[207999]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:48 compute-0 python3.9[208001]: ansible-copy Invoked with src=/home/zuul/.ansible/tmp/ansible-tmp-1759406147.4295146-2367-272757153437217/source dest=/etc/systemd/system/edpm_openstack_network_exporter.service mode=0644 owner=root group=root backup=False force=True remote_src=False follow=False unsafe_writes=False _original_basename=None content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None checksum=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:55:48 compute-0 sudo[207999]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:48 compute-0 sudo[208075]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zriffbpbwvztfwxhakshfascfzwduowb ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406147.4295146-2367-272757153437217/AnsiballZ_systemd.py'
Oct 02 11:55:48 compute-0 sudo[208075]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:48 compute-0 python3.9[208077]: ansible-systemd Invoked with daemon_reload=True daemon_reexec=False scope=system no_block=False name=None state=None enabled=None force=None masked=None
Oct 02 11:55:48 compute-0 systemd[1]: Reloading.
Oct 02 11:55:48 compute-0 systemd-sysv-generator[208108]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:55:48 compute-0 systemd-rc-local-generator[208102]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:55:48 compute-0 sudo[208075]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:49 compute-0 sudo[208186]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kaolncfyrqreamoqnjlcuftmwhszcmuk ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406147.4295146-2367-272757153437217/AnsiballZ_systemd.py'
Oct 02 11:55:49 compute-0 sudo[208186]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:49 compute-0 python3.9[208188]: ansible-systemd Invoked with state=restarted name=edpm_openstack_network_exporter.service enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 02 11:55:49 compute-0 systemd[1]: Reloading.
Oct 02 11:55:49 compute-0 systemd-rc-local-generator[208215]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 02 11:55:49 compute-0 systemd-sysv-generator[208218]: SysV service '/etc/rc.d/init.d/network' lacks a native systemd unit file. Automatically generating a unit file for compatibility. Please update package to include a native systemd unit file, in order to make it more safe and robust.
Oct 02 11:55:49 compute-0 systemd[1]: Starting openstack_network_exporter container...
Oct 02 11:55:49 compute-0 systemd[1]: Started libcrun container.
Oct 02 11:55:49 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/5f1fa8879233a8c2a98c3d98834ec65993896761ec99af651c253eac2446d732/merged/run/ovn supports timestamps until 2038 (0x7fffffff)
Oct 02 11:55:49 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/5f1fa8879233a8c2a98c3d98834ec65993896761ec99af651c253eac2446d732/merged/etc/openstack_network_exporter/openstack_network_exporter.yaml supports timestamps until 2038 (0x7fffffff)
Oct 02 11:55:49 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/5f1fa8879233a8c2a98c3d98834ec65993896761ec99af651c253eac2446d732/merged/etc/openstack_network_exporter/tls supports timestamps until 2038 (0x7fffffff)
Oct 02 11:55:49 compute-0 systemd[1]: Started /usr/bin/podman healthcheck run 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00.
Oct 02 11:55:49 compute-0 podman[208228]: 2025-10-02 11:55:49.948643489 +0000 UTC m=+0.138849002 container init 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, vcs-type=git, version=9.6, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., url=https://catalog.redhat.com/en/search?searchType=containers, config_id=edpm, release=1755695350, container_name=openstack_network_exporter, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.openshift.tags=minimal rhel9, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vendor=Red Hat, Inc., io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, io.openshift.expose-services=, build-date=2025-08-20T13:12:41, architecture=x86_64, managed_by=edpm_ansible, com.redhat.component=ubi9-minimal-container, distribution-scope=public, io.buildah.version=1.33.7, name=ubi9-minimal, maintainer=Red Hat, Inc., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']})
Oct 02 11:55:49 compute-0 openstack_network_exporter[208243]: INFO    11:55:49 main.go:48: registering *bridge.Collector
Oct 02 11:55:49 compute-0 openstack_network_exporter[208243]: INFO    11:55:49 main.go:48: registering *coverage.Collector
Oct 02 11:55:49 compute-0 openstack_network_exporter[208243]: INFO    11:55:49 main.go:48: registering *datapath.Collector
Oct 02 11:55:49 compute-0 openstack_network_exporter[208243]: INFO    11:55:49 main.go:48: registering *iface.Collector
Oct 02 11:55:49 compute-0 openstack_network_exporter[208243]: INFO    11:55:49 main.go:48: registering *memory.Collector
Oct 02 11:55:49 compute-0 openstack_network_exporter[208243]: INFO    11:55:49 main.go:48: registering *ovnnorthd.Collector
Oct 02 11:55:49 compute-0 openstack_network_exporter[208243]: INFO    11:55:49 main.go:48: registering *ovn.Collector
Oct 02 11:55:49 compute-0 openstack_network_exporter[208243]: INFO    11:55:49 main.go:48: registering *ovsdbserver.Collector
Oct 02 11:55:49 compute-0 openstack_network_exporter[208243]: INFO    11:55:49 main.go:48: registering *pmd_perf.Collector
Oct 02 11:55:49 compute-0 openstack_network_exporter[208243]: INFO    11:55:49 main.go:48: registering *pmd_rxq.Collector
Oct 02 11:55:49 compute-0 openstack_network_exporter[208243]: INFO    11:55:49 main.go:48: registering *vswitch.Collector
Oct 02 11:55:49 compute-0 openstack_network_exporter[208243]: NOTICE  11:55:49 main.go:76: listening on https://:9105/metrics
Oct 02 11:55:49 compute-0 podman[208228]: 2025-10-02 11:55:49.979206343 +0000 UTC m=+0.169411806 container start 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, vcs-type=git, version=9.6, distribution-scope=public, io.openshift.expose-services=, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., managed_by=edpm_ansible, container_name=openstack_network_exporter, io.buildah.version=1.33.7, maintainer=Red Hat, Inc., url=https://catalog.redhat.com/en/search?searchType=containers, build-date=2025-08-20T13:12:41, config_id=edpm, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., architecture=x86_64, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, name=ubi9-minimal, io.openshift.tags=minimal rhel9, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, vendor=Red Hat, Inc., release=1755695350, com.redhat.component=ubi9-minimal-container, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly.)
Oct 02 11:55:49 compute-0 podman[208228]: openstack_network_exporter
Oct 02 11:55:49 compute-0 systemd[1]: Started openstack_network_exporter container.
Oct 02 11:55:50 compute-0 sudo[208186]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:50 compute-0 podman[208254]: 2025-10-02 11:55:50.054745255 +0000 UTC m=+0.065393065 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, build-date=2025-08-20T13:12:41, maintainer=Red Hat, Inc., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, vcs-type=git, name=ubi9-minimal, version=9.6, architecture=x86_64, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.openshift.tags=minimal rhel9, managed_by=edpm_ansible, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vendor=Red Hat, Inc., com.redhat.component=ubi9-minimal-container, container_name=openstack_network_exporter, release=1755695350, url=https://catalog.redhat.com/en/search?searchType=containers, config_id=edpm, io.buildah.version=1.33.7, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, description=The Universal Base Image Minimal is a 
stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, io.openshift.expose-services=, distribution-scope=public, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly.)
Oct 02 11:55:50 compute-0 sudo[208440]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ibtcqqcfljzxtiwtpmezmzydcfffixen ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406150.225628-2439-235283985227113/AnsiballZ_systemd.py'
Oct 02 11:55:50 compute-0 sudo[208440]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:50 compute-0 podman[208401]: 2025-10-02 11:55:50.561215313 +0000 UTC m=+0.066871607 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, tcib_managed=true, container_name=multipathd, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, config_id=multipathd, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible)
Oct 02 11:55:50 compute-0 python3.9[208448]: ansible-ansible.builtin.systemd Invoked with name=edpm_openstack_network_exporter.service state=restarted daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Oct 02 11:55:50 compute-0 systemd[1]: Stopping openstack_network_exporter container...
Oct 02 11:55:50 compute-0 systemd[1]: libpod-468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00.scope: Deactivated successfully.
Oct 02 11:55:50 compute-0 podman[208453]: 2025-10-02 11:55:50.933898587 +0000 UTC m=+0.039925162 container died 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, container_name=openstack_network_exporter, io.buildah.version=1.33.7, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vcs-type=git, version=9.6, maintainer=Red Hat, Inc., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, vendor=Red Hat, Inc., config_id=edpm, io.openshift.expose-services=, release=1755695350, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.openshift.tags=minimal rhel9, 
url=https://catalog.redhat.com/en/search?searchType=containers, name=ubi9-minimal, architecture=x86_64, build-date=2025-08-20T13:12:41, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., distribution-scope=public, managed_by=edpm_ansible, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, com.redhat.component=ubi9-minimal-container)
Oct 02 11:55:50 compute-0 systemd[1]: 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00-32a6e0183faca26f.timer: Deactivated successfully.
Oct 02 11:55:50 compute-0 systemd[1]: Stopped /usr/bin/podman healthcheck run 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00.
Oct 02 11:55:50 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00-userdata-shm.mount: Deactivated successfully.
Oct 02 11:55:50 compute-0 systemd[1]: var-lib-containers-storage-overlay-5f1fa8879233a8c2a98c3d98834ec65993896761ec99af651c253eac2446d732-merged.mount: Deactivated successfully.
Oct 02 11:55:52 compute-0 podman[208453]: 2025-10-02 11:55:52.247154619 +0000 UTC m=+1.353181184 container cleanup 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, distribution-scope=public, managed_by=edpm_ansible, version=9.6, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, config_id=edpm, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., vcs-type=git, vendor=Red Hat, Inc., url=https://catalog.redhat.com/en/search?searchType=containers, com.redhat.component=ubi9-minimal-container, container_name=openstack_network_exporter, io.buildah.version=1.33.7, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., architecture=x86_64, build-date=2025-08-20T13:12:41, io.openshift.tags=minimal rhel9, release=1755695350, maintainer=Red Hat, Inc., io.openshift.expose-services=, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, name=ubi9-minimal, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b)
Oct 02 11:55:52 compute-0 podman[208453]: openstack_network_exporter
Oct 02 11:55:52 compute-0 systemd[1]: edpm_openstack_network_exporter.service: Main process exited, code=exited, status=2/INVALIDARGUMENT
Oct 02 11:55:52 compute-0 podman[208479]: openstack_network_exporter
Oct 02 11:55:52 compute-0 systemd[1]: edpm_openstack_network_exporter.service: Failed with result 'exit-code'.
Oct 02 11:55:52 compute-0 systemd[1]: Stopped openstack_network_exporter container.
Oct 02 11:55:52 compute-0 systemd[1]: Starting openstack_network_exporter container...
Oct 02 11:55:52 compute-0 systemd[1]: Started libcrun container.
Oct 02 11:55:52 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/5f1fa8879233a8c2a98c3d98834ec65993896761ec99af651c253eac2446d732/merged/run/ovn supports timestamps until 2038 (0x7fffffff)
Oct 02 11:55:52 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/5f1fa8879233a8c2a98c3d98834ec65993896761ec99af651c253eac2446d732/merged/etc/openstack_network_exporter/openstack_network_exporter.yaml supports timestamps until 2038 (0x7fffffff)
Oct 02 11:55:52 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/5f1fa8879233a8c2a98c3d98834ec65993896761ec99af651c253eac2446d732/merged/etc/openstack_network_exporter/tls supports timestamps until 2038 (0x7fffffff)
Oct 02 11:55:52 compute-0 systemd[1]: Started /usr/bin/podman healthcheck run 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00.
Oct 02 11:55:52 compute-0 podman[208492]: 2025-10-02 11:55:52.465388126 +0000 UTC m=+0.118442424 container init 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, release=1755695350, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.buildah.version=1.33.7, version=9.6, build-date=2025-08-20T13:12:41, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., managed_by=edpm_ansible, url=https://catalog.redhat.com/en/search?searchType=containers, architecture=x86_64, io.openshift.tags=minimal rhel9, io.openshift.expose-services=, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., vcs-type=git, distribution-scope=public, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, com.redhat.component=ubi9-minimal-container, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, config_id=edpm, maintainer=Red Hat, Inc., vendor=Red Hat, Inc., name=ubi9-minimal, container_name=openstack_network_exporter, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b)
Oct 02 11:55:52 compute-0 openstack_network_exporter[208509]: INFO    11:55:52 main.go:48: registering *bridge.Collector
Oct 02 11:55:52 compute-0 openstack_network_exporter[208509]: INFO    11:55:52 main.go:48: registering *coverage.Collector
Oct 02 11:55:52 compute-0 openstack_network_exporter[208509]: INFO    11:55:52 main.go:48: registering *datapath.Collector
Oct 02 11:55:52 compute-0 openstack_network_exporter[208509]: INFO    11:55:52 main.go:48: registering *iface.Collector
Oct 02 11:55:52 compute-0 openstack_network_exporter[208509]: INFO    11:55:52 main.go:48: registering *memory.Collector
Oct 02 11:55:52 compute-0 openstack_network_exporter[208509]: INFO    11:55:52 main.go:48: registering *ovnnorthd.Collector
Oct 02 11:55:52 compute-0 openstack_network_exporter[208509]: INFO    11:55:52 main.go:48: registering *ovn.Collector
Oct 02 11:55:52 compute-0 openstack_network_exporter[208509]: INFO    11:55:52 main.go:48: registering *ovsdbserver.Collector
Oct 02 11:55:52 compute-0 openstack_network_exporter[208509]: INFO    11:55:52 main.go:48: registering *pmd_perf.Collector
Oct 02 11:55:52 compute-0 openstack_network_exporter[208509]: INFO    11:55:52 main.go:48: registering *pmd_rxq.Collector
Oct 02 11:55:52 compute-0 openstack_network_exporter[208509]: INFO    11:55:52 main.go:48: registering *vswitch.Collector
Oct 02 11:55:52 compute-0 openstack_network_exporter[208509]: NOTICE  11:55:52 main.go:76: listening on https://:9105/metrics
Oct 02 11:55:52 compute-0 podman[208492]: 2025-10-02 11:55:52.498101899 +0000 UTC m=+0.151156177 container start 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, distribution-scope=public, io.buildah.version=1.33.7, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., maintainer=Red Hat, Inc., url=https://catalog.redhat.com/en/search?searchType=containers, version=9.6, config_id=edpm, vcs-type=git, architecture=x86_64, name=ubi9-minimal, io.openshift.tags=minimal rhel9, build-date=2025-08-20T13:12:41, container_name=openstack_network_exporter, io.openshift.expose-services=, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., com.redhat.component=ubi9-minimal-container, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', 
'/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, release=1755695350, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., managed_by=edpm_ansible, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, vendor=Red Hat, Inc.)
Oct 02 11:55:52 compute-0 podman[208492]: openstack_network_exporter
Oct 02 11:55:52 compute-0 systemd[1]: Started openstack_network_exporter container.
Oct 02 11:55:52 compute-0 sudo[208440]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:52 compute-0 podman[208519]: 2025-10-02 11:55:52.570736512 +0000 UTC m=+0.063084292 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, vendor=Red Hat, Inc., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., io.openshift.tags=minimal rhel9, com.redhat.component=ubi9-minimal-container, io.buildah.version=1.33.7, vcs-type=git, name=ubi9-minimal, container_name=openstack_network_exporter, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., url=https://catalog.redhat.com/en/search?searchType=containers, release=1755695350, build-date=2025-08-20T13:12:41, distribution-scope=public, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, maintainer=Red Hat, Inc., architecture=x86_64, config_id=edpm, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, io.openshift.expose-services=, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, version=9.6)
Oct 02 11:55:52 compute-0 sudo[208688]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nobpdbaisxbuvtrdrpqfgwejkskcgrzh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406152.71539-2463-127475397154023/AnsiballZ_find.py'
Oct 02 11:55:52 compute-0 sudo[208688]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:53 compute-0 python3.9[208690]: ansible-ansible.builtin.find Invoked with file_type=directory paths=['/var/lib/openstack/healthchecks/'] patterns=[] read_whole_file=False age_stamp=mtime recurse=False hidden=False follow=False get_checksum=False checksum_algorithm=sha1 use_regex=False exact_mode=True excludes=None contains=None age=None size=None depth=None mode=None encoding=None limit=None
Oct 02 11:55:53 compute-0 sudo[208688]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:54 compute-0 sudo[208840]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hucdizjukbmmautuioivtyzwfqudjapd ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406153.6812785-2491-216249495025455/AnsiballZ_podman_container_info.py'
Oct 02 11:55:54 compute-0 sudo[208840]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:54 compute-0 python3.9[208842]: ansible-containers.podman.podman_container_info Invoked with name=['ovn_controller'] executable=podman
Oct 02 11:55:54 compute-0 sudo[208840]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:54 compute-0 sudo[209006]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ktkfdjdxyrglxbtzxtshpvimmbipesat ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406154.5197656-2499-52165680320017/AnsiballZ_podman_container_exec.py'
Oct 02 11:55:54 compute-0 sudo[209006]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:55 compute-0 python3.9[209008]: ansible-containers.podman.podman_container_exec Invoked with command=id -u name=ovn_controller detach=False executable=podman privileged=False tty=False argv=None env=None user=None workdir=None
Oct 02 11:55:55 compute-0 systemd[1]: Started libpod-conmon-9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d.scope.
Oct 02 11:55:55 compute-0 podman[209009]: 2025-10-02 11:55:55.225683674 +0000 UTC m=+0.073985562 container exec 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, container_name=ovn_controller, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, config_id=ovn_controller)
Oct 02 11:55:55 compute-0 podman[209009]: 2025-10-02 11:55:55.255536758 +0000 UTC m=+0.103838716 container exec_died 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, config_id=ovn_controller, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS)
Oct 02 11:55:55 compute-0 systemd[1]: libpod-conmon-9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d.scope: Deactivated successfully.
Oct 02 11:55:55 compute-0 sudo[209006]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:55 compute-0 sudo[209191]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-almanupqvvzzejkdulewxwbxwznbjtjz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406155.4324315-2507-224145961429384/AnsiballZ_podman_container_exec.py'
Oct 02 11:55:55 compute-0 sudo[209191]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:55 compute-0 python3.9[209193]: ansible-containers.podman.podman_container_exec Invoked with command=id -g name=ovn_controller detach=False executable=podman privileged=False tty=False argv=None env=None user=None workdir=None
Oct 02 11:55:55 compute-0 systemd[1]: Started libpod-conmon-9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d.scope.
Oct 02 11:55:55 compute-0 podman[209194]: 2025-10-02 11:55:55.967060583 +0000 UTC m=+0.068508671 container exec 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_managed=true, container_name=ovn_controller, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, maintainer=OpenStack Kubernetes Operator team)
Oct 02 11:55:56 compute-0 podman[209194]: 2025-10-02 11:55:56.002381807 +0000 UTC m=+0.103829885 container exec_died 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, config_id=ovn_controller, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_managed=true)
Oct 02 11:55:56 compute-0 systemd[1]: libpod-conmon-9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d.scope: Deactivated successfully.
Oct 02 11:55:56 compute-0 sudo[209191]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:56 compute-0 sudo[209376]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ahnlpytgckmasrxarkpqeepyvwogcvef ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406156.2051513-2515-86537873144984/AnsiballZ_file.py'
Oct 02 11:55:56 compute-0 sudo[209376]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:56 compute-0 python3.9[209378]: ansible-ansible.builtin.file Invoked with group=0 mode=0700 owner=0 path=/var/lib/openstack/healthchecks/ovn_controller recurse=True state=directory force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:55:56 compute-0 sudo[209376]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:57 compute-0 podman[209482]: 2025-10-02 11:55:57.142857093 +0000 UTC m=+0.059768563 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, config_id=iscsid, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, managed_by=edpm_ansible, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, tcib_managed=true)
Oct 02 11:55:57 compute-0 sudo[209548]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-djjbooqzkbappryevdszyodcncfihkpm ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406156.888537-2524-34546778454763/AnsiballZ_podman_container_info.py'
Oct 02 11:55:57 compute-0 sudo[209548]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:57 compute-0 python3.9[209550]: ansible-containers.podman.podman_container_info Invoked with name=['ovn_metadata_agent'] executable=podman
Oct 02 11:55:57 compute-0 sudo[209548]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:57 compute-0 podman[209687]: 2025-10-02 11:55:57.951948681 +0000 UTC m=+0.057571853 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']})
Oct 02 11:55:57 compute-0 sudo[209727]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-crfbeoejpekwytpzivjltlrsmjxvseff ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406157.6578398-2532-267860208549544/AnsiballZ_podman_container_exec.py'
Oct 02 11:55:57 compute-0 sudo[209727]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:58 compute-0 python3.9[209739]: ansible-containers.podman.podman_container_exec Invoked with command=id -u name=ovn_metadata_agent detach=False executable=podman privileged=False tty=False argv=None env=None user=None workdir=None
Oct 02 11:55:58 compute-0 systemd[1]: Started libpod-conmon-02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3.scope.
Oct 02 11:55:58 compute-0 podman[209740]: 2025-10-02 11:55:58.248296792 +0000 UTC m=+0.084988622 container exec 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, container_name=ovn_metadata_agent, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', 
'/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 11:55:58 compute-0 podman[209740]: 2025-10-02 11:55:58.281424896 +0000 UTC m=+0.118116736 container exec_died 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_metadata_agent, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, managed_by=edpm_ansible, 
tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS)
Oct 02 11:55:58 compute-0 systemd[1]: libpod-conmon-02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3.scope: Deactivated successfully.
Oct 02 11:55:58 compute-0 sudo[209727]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:58 compute-0 sudo[209919]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nxphhwhdpkbknhgwawysmedlczgkeotn ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406158.4949512-2540-277710699308475/AnsiballZ_podman_container_exec.py'
Oct 02 11:55:58 compute-0 sudo[209919]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:58 compute-0 python3.9[209921]: ansible-containers.podman.podman_container_exec Invoked with command=id -g name=ovn_metadata_agent detach=False executable=podman privileged=False tty=False argv=None env=None user=None workdir=None
Oct 02 11:55:59 compute-0 systemd[1]: Started libpod-conmon-02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3.scope.
Oct 02 11:55:59 compute-0 podman[209922]: 2025-10-02 11:55:59.035055691 +0000 UTC m=+0.069225322 container exec 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, container_name=ovn_metadata_agent, org.label-schema.build-date=20251001, 
org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 11:55:59 compute-0 podman[209922]: 2025-10-02 11:55:59.063830026 +0000 UTC m=+0.097999617 container exec_died 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=ovn_metadata_agent, org.label-schema.license=GPLv2, container_name=ovn_metadata_agent, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', 
'/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, managed_by=edpm_ansible)
Oct 02 11:55:59 compute-0 systemd[1]: libpod-conmon-02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3.scope: Deactivated successfully.
Oct 02 11:55:59 compute-0 sudo[209919]: pam_unix(sudo:session): session closed for user root
Oct 02 11:55:59 compute-0 sudo[210105]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xaftncmgpzsdolqxggynpapdlpsdweut ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406159.2383454-2548-42819673589830/AnsiballZ_file.py'
Oct 02 11:55:59 compute-0 sudo[210105]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:55:59 compute-0 python3.9[210107]: ansible-ansible.builtin.file Invoked with group=0 mode=0700 owner=0 path=/var/lib/openstack/healthchecks/ovn_metadata_agent recurse=True state=directory force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:55:59 compute-0 sudo[210105]: pam_unix(sudo:session): session closed for user root
Oct 02 11:56:00 compute-0 sudo[210257]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hoceyzifjczqgglbhrijljxkqexcoerk ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406159.9129298-2557-243160864387917/AnsiballZ_podman_container_info.py'
Oct 02 11:56:00 compute-0 sudo[210257]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:56:00 compute-0 python3.9[210259]: ansible-containers.podman.podman_container_info Invoked with name=['iscsid'] executable=podman
Oct 02 11:56:00 compute-0 sudo[210257]: pam_unix(sudo:session): session closed for user root
Oct 02 11:56:00 compute-0 sudo[210422]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-soaqarpqfqycmbozpjktlbsazyztuzyv ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406160.6153357-2565-138759327766844/AnsiballZ_podman_container_exec.py'
Oct 02 11:56:00 compute-0 sudo[210422]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:56:01 compute-0 python3.9[210424]: ansible-containers.podman.podman_container_exec Invoked with command=id -u name=iscsid detach=False executable=podman privileged=False tty=False argv=None env=None user=None workdir=None
Oct 02 11:56:01 compute-0 systemd[1]: Started libpod-conmon-d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818.scope.
Oct 02 11:56:01 compute-0 podman[210425]: 2025-10-02 11:56:01.147808799 +0000 UTC m=+0.082264907 container exec d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, container_name=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, config_id=iscsid, io.buildah.version=1.41.3, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 11:56:01 compute-0 podman[210445]: 2025-10-02 11:56:01.225196471 +0000 UTC m=+0.059432903 container exec_died d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, tcib_managed=true, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, container_name=iscsid)
Oct 02 11:56:01 compute-0 podman[210425]: 2025-10-02 11:56:01.25628089 +0000 UTC m=+0.190736988 container exec_died d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, config_id=iscsid, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, io.buildah.version=1.41.3)
Oct 02 11:56:01 compute-0 systemd[1]: libpod-conmon-d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818.scope: Deactivated successfully.
Oct 02 11:56:01 compute-0 sudo[210422]: pam_unix(sudo:session): session closed for user root
Oct 02 11:56:01 compute-0 sudo[210607]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-uzihirvfmgqstlxpsyqhbkxppdgbultm ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406161.5172892-2573-140286141166823/AnsiballZ_podman_container_exec.py'
Oct 02 11:56:01 compute-0 sudo[210607]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:56:02 compute-0 python3.9[210609]: ansible-containers.podman.podman_container_exec Invoked with command=id -g name=iscsid detach=False executable=podman privileged=False tty=False argv=None env=None user=None workdir=None
Oct 02 11:56:02 compute-0 systemd[1]: Started libpod-conmon-d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818.scope.
Oct 02 11:56:02 compute-0 podman[210610]: 2025-10-02 11:56:02.109284527 +0000 UTC m=+0.075960304 container exec d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, config_id=iscsid, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 11:56:02 compute-0 podman[210610]: 2025-10-02 11:56:02.144339944 +0000 UTC m=+0.111015701 container exec_died d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, container_name=iscsid, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, config_id=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']})
Oct 02 11:56:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:56:02.196 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 11:56:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:56:02.199 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.002s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 11:56:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:56:02.199 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 11:56:02 compute-0 systemd[1]: libpod-conmon-d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818.scope: Deactivated successfully.
Oct 02 11:56:02 compute-0 sudo[210607]: pam_unix(sudo:session): session closed for user root
Oct 02 11:56:02 compute-0 sudo[210791]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-krkzuxdrhhosssnhqqtxoykpvetxxxrx ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406162.4152923-2581-170800192463732/AnsiballZ_file.py'
Oct 02 11:56:02 compute-0 sudo[210791]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:56:02 compute-0 python3.9[210793]: ansible-ansible.builtin.file Invoked with group=0 mode=0700 owner=0 path=/var/lib/openstack/healthchecks/iscsid recurse=True state=directory force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:56:02 compute-0 sudo[210791]: pam_unix(sudo:session): session closed for user root
Oct 02 11:56:02 compute-0 nova_compute[192079]: 2025-10-02 11:56:02.985 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:56:03 compute-0 nova_compute[192079]: 2025-10-02 11:56:03.003 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:56:03 compute-0 nova_compute[192079]: 2025-10-02 11:56:03.003 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 11:56:03 compute-0 nova_compute[192079]: 2025-10-02 11:56:03.003 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 11:56:03 compute-0 nova_compute[192079]: 2025-10-02 11:56:03.028 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 11:56:03 compute-0 nova_compute[192079]: 2025-10-02 11:56:03.029 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:56:03 compute-0 nova_compute[192079]: 2025-10-02 11:56:03.029 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:56:03 compute-0 nova_compute[192079]: 2025-10-02 11:56:03.029 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:56:03 compute-0 nova_compute[192079]: 2025-10-02 11:56:03.030 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:56:03 compute-0 nova_compute[192079]: 2025-10-02 11:56:03.030 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:56:03 compute-0 nova_compute[192079]: 2025-10-02 11:56:03.030 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 11:56:03 compute-0 nova_compute[192079]: 2025-10-02 11:56:03.030 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:56:03 compute-0 nova_compute[192079]: 2025-10-02 11:56:03.067 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 11:56:03 compute-0 nova_compute[192079]: 2025-10-02 11:56:03.067 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 11:56:03 compute-0 nova_compute[192079]: 2025-10-02 11:56:03.068 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 11:56:03 compute-0 nova_compute[192079]: 2025-10-02 11:56:03.068 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 11:56:03 compute-0 nova_compute[192079]: 2025-10-02 11:56:03.220 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 11:56:03 compute-0 nova_compute[192079]: 2025-10-02 11:56:03.221 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5961MB free_disk=73.49939346313477GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 11:56:03 compute-0 nova_compute[192079]: 2025-10-02 11:56:03.222 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 11:56:03 compute-0 nova_compute[192079]: 2025-10-02 11:56:03.222 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 11:56:03 compute-0 nova_compute[192079]: 2025-10-02 11:56:03.354 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 11:56:03 compute-0 nova_compute[192079]: 2025-10-02 11:56:03.354 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 11:56:03 compute-0 nova_compute[192079]: 2025-10-02 11:56:03.380 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 11:56:03 compute-0 sudo[210943]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ezbeeavrnhkwidhlppnwikxmmpvuwuux ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406163.0953305-2590-133298268264087/AnsiballZ_podman_container_info.py'
Oct 02 11:56:03 compute-0 sudo[210943]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:56:03 compute-0 nova_compute[192079]: 2025-10-02 11:56:03.397 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 0, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 11:56:03 compute-0 nova_compute[192079]: 2025-10-02 11:56:03.400 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 11:56:03 compute-0 nova_compute[192079]: 2025-10-02 11:56:03.400 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.178s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 11:56:03 compute-0 python3.9[210945]: ansible-containers.podman.podman_container_info Invoked with name=['multipathd'] executable=podman
Oct 02 11:56:03 compute-0 sudo[210943]: pam_unix(sudo:session): session closed for user root
Oct 02 11:56:04 compute-0 sudo[211107]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-oxeymlrhwszprabxvmzpqihmnhbbfzla ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406163.9284198-2598-174838116083332/AnsiballZ_podman_container_exec.py'
Oct 02 11:56:04 compute-0 sudo[211107]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:56:04 compute-0 python3.9[211109]: ansible-containers.podman.podman_container_exec Invoked with command=id -u name=multipathd detach=False executable=podman privileged=False tty=False argv=None env=None user=None workdir=None
Oct 02 11:56:04 compute-0 systemd[1]: Started libpod-conmon-8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26.scope.
Oct 02 11:56:04 compute-0 podman[211110]: 2025-10-02 11:56:04.87967865 +0000 UTC m=+0.252760361 container exec 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, container_name=multipathd, org.label-schema.name=CentOS Stream 9 Base Image, managed_by=edpm_ansible, config_id=multipathd, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 11:56:05 compute-0 podman[211129]: 2025-10-02 11:56:05.028290917 +0000 UTC m=+0.135364746 container exec_died 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_managed=true, config_id=multipathd, container_name=multipathd, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']})
Oct 02 11:56:05 compute-0 nova_compute[192079]: 2025-10-02 11:56:05.036 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:56:05 compute-0 nova_compute[192079]: 2025-10-02 11:56:05.037 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:56:05 compute-0 podman[211110]: 2025-10-02 11:56:05.052480768 +0000 UTC m=+0.425562489 container exec_died 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, container_name=multipathd, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_id=multipathd, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']})
Oct 02 11:56:05 compute-0 systemd[1]: libpod-conmon-8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26.scope: Deactivated successfully.
Oct 02 11:56:05 compute-0 sudo[211107]: pam_unix(sudo:session): session closed for user root
Oct 02 11:56:05 compute-0 sudo[211291]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-udqicunzigdcwjtlabpkzvlfqdihuaun ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406165.4727025-2606-110047322491511/AnsiballZ_podman_container_exec.py'
Oct 02 11:56:05 compute-0 sudo[211291]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:56:06 compute-0 python3.9[211293]: ansible-containers.podman.podman_container_exec Invoked with command=id -g name=multipathd detach=False executable=podman privileged=False tty=False argv=None env=None user=None workdir=None
Oct 02 11:56:06 compute-0 systemd[1]: Started libpod-conmon-8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26.scope.
Oct 02 11:56:06 compute-0 podman[211294]: 2025-10-02 11:56:06.438758642 +0000 UTC m=+0.309819188 container exec 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=multipathd, container_name=multipathd, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2)
Oct 02 11:56:06 compute-0 podman[211294]: 2025-10-02 11:56:06.475323572 +0000 UTC m=+0.346384118 container exec_died 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, config_id=multipathd, tcib_managed=true, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, container_name=multipathd, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']})
Oct 02 11:56:06 compute-0 sudo[211291]: pam_unix(sudo:session): session closed for user root
Oct 02 11:56:06 compute-0 systemd[1]: libpod-conmon-8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26.scope: Deactivated successfully.
Oct 02 11:56:06 compute-0 podman[211335]: 2025-10-02 11:56:06.937706945 +0000 UTC m=+0.082488724 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, container_name=ovn_controller, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller)
Oct 02 11:56:07 compute-0 sudo[211506]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jfxyybiukxmukajvcirembkyvykffeaj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406166.9809017-2614-18265349299509/AnsiballZ_file.py'
Oct 02 11:56:07 compute-0 sudo[211506]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:56:07 compute-0 python3.9[211508]: ansible-ansible.builtin.file Invoked with group=0 mode=0700 owner=0 path=/var/lib/openstack/healthchecks/multipathd recurse=True state=directory force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:56:07 compute-0 sudo[211506]: pam_unix(sudo:session): session closed for user root
Oct 02 11:56:08 compute-0 sudo[211658]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mmnaudwpejkoxputyipyqfevcbzkmlci ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406167.7230678-2623-1901633749720/AnsiballZ_podman_container_info.py'
Oct 02 11:56:08 compute-0 sudo[211658]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:56:08 compute-0 python3.9[211660]: ansible-containers.podman.podman_container_info Invoked with name=['ceilometer_agent_compute'] executable=podman
Oct 02 11:56:08 compute-0 sudo[211658]: pam_unix(sudo:session): session closed for user root
Oct 02 11:56:09 compute-0 sudo[211823]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vtfdoevgkuumdyrjqcmqtdxsouolsnbc ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406168.7716284-2631-53859025878726/AnsiballZ_podman_container_exec.py'
Oct 02 11:56:09 compute-0 sudo[211823]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:56:09 compute-0 python3.9[211825]: ansible-containers.podman.podman_container_exec Invoked with command=id -u name=ceilometer_agent_compute detach=False executable=podman privileged=False tty=False argv=None env=None user=None workdir=None
Oct 02 11:56:09 compute-0 systemd[1]: Started libpod-conmon-48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be.scope.
Oct 02 11:56:09 compute-0 podman[211826]: 2025-10-02 11:56:09.703386549 +0000 UTC m=+0.272609184 container exec 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm, container_name=ceilometer_agent_compute, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes 
Operator team)
Oct 02 11:56:09 compute-0 podman[211848]: 2025-10-02 11:56:09.90452663 +0000 UTC m=+0.186989306 container exec_died 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, config_id=edpm, container_name=ceilometer_agent_compute, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, 
io.buildah.version=1.41.3)
Oct 02 11:56:09 compute-0 podman[211826]: 2025-10-02 11:56:09.989421177 +0000 UTC m=+0.558643892 container exec_died 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible, 
tcib_managed=true)
Oct 02 11:56:09 compute-0 podman[211843]: 2025-10-02 11:56:09.992340067 +0000 UTC m=+0.281981580 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, config_id=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_managed=true, container_name=ovn_metadata_agent, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, io.buildah.version=1.41.3)
Oct 02 11:56:09 compute-0 systemd[1]: libpod-conmon-48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be.scope: Deactivated successfully.
Oct 02 11:56:10 compute-0 sudo[211823]: pam_unix(sudo:session): session closed for user root
Oct 02 11:56:10 compute-0 podman[211845]: 2025-10-02 11:56:10.313760692 +0000 UTC m=+0.597633557 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 11:56:10 compute-0 sudo[212052]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lntltmtwchyaedbumofedppmaqpjlbtw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406170.4145157-2639-111170091292594/AnsiballZ_podman_container_exec.py'
Oct 02 11:56:10 compute-0 sudo[212052]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:56:11 compute-0 python3.9[212054]: ansible-containers.podman.podman_container_exec Invoked with command=id -g name=ceilometer_agent_compute detach=False executable=podman privileged=False tty=False argv=None env=None user=None workdir=None
Oct 02 11:56:11 compute-0 systemd[1]: Started libpod-conmon-48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be.scope.
Oct 02 11:56:11 compute-0 podman[212055]: 2025-10-02 11:56:11.444687957 +0000 UTC m=+0.364872193 container exec 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, config_id=edpm, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, 
tcib_managed=true)
Oct 02 11:56:11 compute-0 podman[212073]: 2025-10-02 11:56:11.532225437 +0000 UTC m=+0.063974328 container exec_died 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, container_name=ceilometer_agent_compute, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_id=edpm, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack 
Kubernetes Operator team)
Oct 02 11:56:11 compute-0 podman[212055]: 2025-10-02 11:56:11.629617595 +0000 UTC m=+0.549801831 container exec_died 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, config_id=edpm, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ceilometer_agent_compute, managed_by=edpm_ansible, 
tcib_managed=true)
Oct 02 11:56:11 compute-0 systemd[1]: libpod-conmon-48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be.scope: Deactivated successfully.
Oct 02 11:56:11 compute-0 sudo[212052]: pam_unix(sudo:session): session closed for user root
Oct 02 11:56:12 compute-0 sudo[212235]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dhzhsknkrnkzyaubmsafsvbledtahwji ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406171.9280832-2647-151309134154779/AnsiballZ_file.py'
Oct 02 11:56:12 compute-0 sudo[212235]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:56:12 compute-0 python3.9[212237]: ansible-ansible.builtin.file Invoked with group=42405 mode=0700 owner=42405 path=/var/lib/openstack/healthchecks/ceilometer_agent_compute recurse=True state=directory force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:56:12 compute-0 sudo[212235]: pam_unix(sudo:session): session closed for user root
Oct 02 11:56:13 compute-0 sudo[212387]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pkvtrcnhlvwsfunzgwfqfujoornhaqvp ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406172.8285205-2656-170540091546429/AnsiballZ_podman_container_info.py'
Oct 02 11:56:13 compute-0 sudo[212387]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:56:13 compute-0 python3.9[212389]: ansible-containers.podman.podman_container_info Invoked with name=['node_exporter'] executable=podman
Oct 02 11:56:13 compute-0 sudo[212387]: pam_unix(sudo:session): session closed for user root
Oct 02 11:56:13 compute-0 sudo[212551]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xhhlzoejzmdajtzfeupucryblyifqcps ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406173.6520507-2664-92593674698638/AnsiballZ_podman_container_exec.py'
Oct 02 11:56:13 compute-0 sudo[212551]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:56:14 compute-0 python3.9[212553]: ansible-containers.podman.podman_container_exec Invoked with command=id -u name=node_exporter detach=False executable=podman privileged=False tty=False argv=None env=None user=None workdir=None
Oct 02 11:56:14 compute-0 systemd[1]: Started libpod-conmon-cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2.scope.
Oct 02 11:56:14 compute-0 podman[212554]: 2025-10-02 11:56:14.324536338 +0000 UTC m=+0.197932865 container exec cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 11:56:14 compute-0 podman[212574]: 2025-10-02 11:56:14.422192884 +0000 UTC m=+0.086493072 container exec_died cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>)
Oct 02 11:56:14 compute-0 podman[212554]: 2025-10-02 11:56:14.560349976 +0000 UTC m=+0.433746503 container exec_died cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']})
Oct 02 11:56:14 compute-0 systemd[1]: libpod-conmon-cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2.scope: Deactivated successfully.
Oct 02 11:56:14 compute-0 sudo[212551]: pam_unix(sudo:session): session closed for user root
Oct 02 11:56:15 compute-0 sudo[212736]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qghlfyavpswmwavfuflgsdtyudlwbqgo ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406174.8286176-2672-259663973193734/AnsiballZ_podman_container_exec.py'
Oct 02 11:56:15 compute-0 sudo[212736]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:56:15 compute-0 python3.9[212738]: ansible-containers.podman.podman_container_exec Invoked with command=id -g name=node_exporter detach=False executable=podman privileged=False tty=False argv=None env=None user=None workdir=None
Oct 02 11:56:15 compute-0 systemd[1]: Started libpod-conmon-cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2.scope.
Oct 02 11:56:15 compute-0 podman[212739]: 2025-10-02 11:56:15.616453767 +0000 UTC m=+0.130997458 container exec cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>)
Oct 02 11:56:15 compute-0 podman[212758]: 2025-10-02 11:56:15.687232189 +0000 UTC m=+0.059433483 container exec_died cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>)
Oct 02 11:56:15 compute-0 podman[212739]: 2025-10-02 11:56:15.717080354 +0000 UTC m=+0.231624025 container exec_died cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']})
Oct 02 11:56:15 compute-0 systemd[1]: libpod-conmon-cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2.scope: Deactivated successfully.
Oct 02 11:56:15 compute-0 sudo[212736]: pam_unix(sudo:session): session closed for user root
Oct 02 11:56:16 compute-0 sudo[212934]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-aexzgyjhsliydtqdgmgoeaokuvddtnoy ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406176.057695-2680-95782511671977/AnsiballZ_file.py'
Oct 02 11:56:16 compute-0 sudo[212934]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:56:16 compute-0 podman[212895]: 2025-10-02 11:56:16.332812813 +0000 UTC m=+0.080022445 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, 
container_name=ceilometer_agent_compute, org.label-schema.vendor=CentOS, tcib_managed=true, io.buildah.version=1.41.3)
Oct 02 11:56:16 compute-0 python3.9[212942]: ansible-ansible.builtin.file Invoked with group=0 mode=0700 owner=0 path=/var/lib/openstack/healthchecks/node_exporter recurse=True state=directory force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:56:16 compute-0 sudo[212934]: pam_unix(sudo:session): session closed for user root
Oct 02 11:56:17 compute-0 sudo[213093]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zzdwmctsnkdpuprmxbtcovfgbtsdwlgu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406176.7887921-2689-139491639174709/AnsiballZ_podman_container_info.py'
Oct 02 11:56:17 compute-0 sudo[213093]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:56:17 compute-0 python3.9[213095]: ansible-containers.podman.podman_container_info Invoked with name=['podman_exporter'] executable=podman
Oct 02 11:56:17 compute-0 sudo[213093]: pam_unix(sudo:session): session closed for user root
Oct 02 11:56:18 compute-0 sudo[213258]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vhpcuhlffckxgqjzcgzgeemoihtaqnsz ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406177.9566498-2697-272694207526213/AnsiballZ_podman_container_exec.py'
Oct 02 11:56:18 compute-0 sudo[213258]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:56:18 compute-0 python3.9[213260]: ansible-containers.podman.podman_container_exec Invoked with command=id -u name=podman_exporter detach=False executable=podman privileged=False tty=False argv=None env=None user=None workdir=None
Oct 02 11:56:18 compute-0 systemd[1]: Started libpod-conmon-c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e.scope.
Oct 02 11:56:18 compute-0 podman[213261]: 2025-10-02 11:56:18.899741822 +0000 UTC m=+0.397686459 container exec c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 11:56:19 compute-0 podman[213281]: 2025-10-02 11:56:19.029102983 +0000 UTC m=+0.114694182 container exec_died c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>)
Oct 02 11:56:19 compute-0 podman[213261]: 2025-10-02 11:56:19.129434192 +0000 UTC m=+0.627378729 container exec_died c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']})
Oct 02 11:56:19 compute-0 systemd[1]: libpod-conmon-c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e.scope: Deactivated successfully.
Oct 02 11:56:19 compute-0 sudo[213258]: pam_unix(sudo:session): session closed for user root
Oct 02 11:56:20 compute-0 sudo[213443]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kufhapfussuxhdnayytdulqtrvkoxvbf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406179.811005-2705-43305451748151/AnsiballZ_podman_container_exec.py'
Oct 02 11:56:20 compute-0 sudo[213443]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:56:20 compute-0 python3.9[213445]: ansible-containers.podman.podman_container_exec Invoked with command=id -g name=podman_exporter detach=False executable=podman privileged=False tty=False argv=None env=None user=None workdir=None
Oct 02 11:56:21 compute-0 systemd[1]: Started libpod-conmon-c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e.scope.
Oct 02 11:56:21 compute-0 podman[213446]: 2025-10-02 11:56:21.060708547 +0000 UTC m=+0.689056613 container exec c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>)
Oct 02 11:56:21 compute-0 podman[213466]: 2025-10-02 11:56:21.185187934 +0000 UTC m=+0.110957279 container exec_died c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 11:56:21 compute-0 podman[213446]: 2025-10-02 11:56:21.272313004 +0000 UTC m=+0.900661010 container exec_died c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 11:56:21 compute-0 systemd[1]: libpod-conmon-c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e.scope: Deactivated successfully.
Oct 02 11:56:21 compute-0 podman[213463]: 2025-10-02 11:56:21.646645163 +0000 UTC m=+0.579697407 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.schema-version=1.0, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible, org.label-schema.build-date=20251001, config_id=multipathd, container_name=multipathd, io.buildah.version=1.41.3)
Oct 02 11:56:21 compute-0 sudo[213443]: pam_unix(sudo:session): session closed for user root
Oct 02 11:56:22 compute-0 sudo[213645]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fetkvgbdrbmreuwkzqghxpapoledugzt ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406181.8624914-2713-101198405856371/AnsiballZ_file.py'
Oct 02 11:56:22 compute-0 sudo[213645]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:56:22 compute-0 python3.9[213647]: ansible-ansible.builtin.file Invoked with group=0 mode=0700 owner=0 path=/var/lib/openstack/healthchecks/podman_exporter recurse=True state=directory force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:56:22 compute-0 sudo[213645]: pam_unix(sudo:session): session closed for user root
Oct 02 11:56:22 compute-0 sudo[213809]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-uvetqihtwcskxawymxqxrzollhnhqzcf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406182.5876162-2722-241308422509727/AnsiballZ_podman_container_info.py'
Oct 02 11:56:22 compute-0 sudo[213809]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:56:22 compute-0 podman[213771]: 2025-10-02 11:56:22.880648392 +0000 UTC m=+0.069857068 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., build-date=2025-08-20T13:12:41, io.openshift.tags=minimal rhel9, managed_by=edpm_ansible, distribution-scope=public, vendor=Red Hat, Inc., version=9.6, vcs-type=git, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, io.openshift.expose-services=, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.buildah.version=1.33.7, maintainer=Red Hat, Inc., architecture=x86_64, release=1755695350, url=https://catalog.redhat.com/en/search?searchType=containers, config_id=edpm, container_name=openstack_network_exporter, com.redhat.component=ubi9-minimal-container, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., name=ubi9-minimal, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b)
Oct 02 11:56:23 compute-0 python3.9[213818]: ansible-containers.podman.podman_container_info Invoked with name=['openstack_network_exporter'] executable=podman
Oct 02 11:56:23 compute-0 sudo[213809]: pam_unix(sudo:session): session closed for user root
Oct 02 11:56:23 compute-0 sudo[213981]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hsbhxfjemlccldlmltflequjfmlbvung ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406183.3254158-2730-30387461659319/AnsiballZ_podman_container_exec.py'
Oct 02 11:56:23 compute-0 sudo[213981]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:56:23 compute-0 python3.9[213983]: ansible-containers.podman.podman_container_exec Invoked with command=id -u name=openstack_network_exporter detach=False executable=podman privileged=False tty=False argv=None env=None user=None workdir=None
Oct 02 11:56:23 compute-0 systemd[1]: Started libpod-conmon-468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00.scope.
Oct 02 11:56:23 compute-0 podman[213984]: 2025-10-02 11:56:23.953677416 +0000 UTC m=+0.178152865 container exec 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, distribution-scope=public, release=1755695350, com.redhat.component=ubi9-minimal-container, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.expose-services=, maintainer=Red Hat, Inc., managed_by=edpm_ansible, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., version=9.6, vcs-type=git, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, architecture=x86_64, build-date=2025-08-20T13:12:41, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, config_id=edpm, io.openshift.tags=minimal rhel9, name=ubi9-minimal, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, url=https://catalog.redhat.com/en/search?searchType=containers, container_name=openstack_network_exporter, io.buildah.version=1.33.7, vendor=Red Hat, Inc.)
Oct 02 11:56:23 compute-0 podman[213984]: 2025-10-02 11:56:23.98421552 +0000 UTC m=+0.208690949 container exec_died 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, distribution-scope=public, release=1755695350, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., architecture=x86_64, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, vcs-type=git, name=ubi9-minimal, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., container_name=openstack_network_exporter, io.openshift.expose-services=, version=9.6, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., build-date=2025-08-20T13:12:41, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, config_id=edpm, vendor=Red Hat, Inc., maintainer=Red Hat, Inc., managed_by=edpm_ansible, com.redhat.component=ubi9-minimal-container, io.openshift.tags=minimal rhel9, url=https://catalog.redhat.com/en/search?searchType=containers, io.buildah.version=1.33.7)
Oct 02 11:56:24 compute-0 sudo[213981]: pam_unix(sudo:session): session closed for user root
Oct 02 11:56:24 compute-0 systemd[1]: libpod-conmon-468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00.scope: Deactivated successfully.
Oct 02 11:56:24 compute-0 sudo[214167]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-biynjnafpnlrhpuetakkcaonlndeossh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406184.2748706-2738-69726058226128/AnsiballZ_podman_container_exec.py'
Oct 02 11:56:24 compute-0 sudo[214167]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:56:24 compute-0 python3.9[214169]: ansible-containers.podman.podman_container_exec Invoked with command=id -g name=openstack_network_exporter detach=False executable=podman privileged=False tty=False argv=None env=None user=None workdir=None
Oct 02 11:56:24 compute-0 systemd[1]: Started libpod-conmon-468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00.scope.
Oct 02 11:56:24 compute-0 podman[214170]: 2025-10-02 11:56:24.877820305 +0000 UTC m=+0.132671003 container exec 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, config_id=edpm, release=1755695350, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., vendor=Red Hat, Inc., managed_by=edpm_ansible, version=9.6, vcs-type=git, architecture=x86_64, container_name=openstack_network_exporter, io.openshift.tags=minimal rhel9, name=ubi9-minimal, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, maintainer=Red Hat, Inc., distribution-scope=public, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.expose-services=, url=https://catalog.redhat.com/en/search?searchType=containers, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.buildah.version=1.33.7, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., build-date=2025-08-20T13:12:41, com.redhat.component=ubi9-minimal-container)
Oct 02 11:56:24 compute-0 podman[214190]: 2025-10-02 11:56:24.944176946 +0000 UTC m=+0.053768729 container exec_died 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, maintainer=Red Hat, Inc., vcs-type=git, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, distribution-scope=public, config_id=edpm, url=https://catalog.redhat.com/en/search?searchType=containers, build-date=2025-08-20T13:12:41, io.openshift.tags=minimal rhel9, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, managed_by=edpm_ansible, com.redhat.component=ubi9-minimal-container, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, release=1755695350, 
io.buildah.version=1.33.7, name=ubi9-minimal, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., io.openshift.expose-services=, version=9.6, container_name=openstack_network_exporter, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, vendor=Red Hat, Inc., architecture=x86_64, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly.)
Oct 02 11:56:24 compute-0 podman[214170]: 2025-10-02 11:56:24.999203829 +0000 UTC m=+0.254054507 container exec_died 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, distribution-scope=public, url=https://catalog.redhat.com/en/search?searchType=containers, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.openshift.tags=minimal rhel9, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., com.redhat.component=ubi9-minimal-container, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, name=ubi9-minimal, io.buildah.version=1.33.7, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, maintainer=Red Hat, Inc., vcs-type=git, architecture=x86_64, build-date=2025-08-20T13:12:41, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, container_name=openstack_network_exporter, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. 
This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.expose-services=, managed_by=edpm_ansible, config_id=edpm, release=1755695350, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., vendor=Red Hat, Inc., version=9.6)
Oct 02 11:56:25 compute-0 systemd[1]: libpod-conmon-468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00.scope: Deactivated successfully.
Oct 02 11:56:25 compute-0 sudo[214167]: pam_unix(sudo:session): session closed for user root
Oct 02 11:56:25 compute-0 sudo[214350]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cysnefglhuhxmvyfnpuikbzzsbhemclx ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406185.2427843-2746-168643846946200/AnsiballZ_file.py'
Oct 02 11:56:25 compute-0 sudo[214350]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:56:25 compute-0 python3.9[214352]: ansible-ansible.builtin.file Invoked with group=0 mode=0700 owner=0 path=/var/lib/openstack/healthchecks/openstack_network_exporter recurse=True state=directory force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:56:25 compute-0 sudo[214350]: pam_unix(sudo:session): session closed for user root
Oct 02 11:56:28 compute-0 podman[214378]: 2025-10-02 11:56:28.160814992 +0000 UTC m=+0.074509645 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, container_name=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, tcib_managed=true)
Oct 02 11:56:28 compute-0 podman[214377]: 2025-10-02 11:56:28.184146059 +0000 UTC m=+0.098017646 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']})
Oct 02 11:56:37 compute-0 podman[214422]: 2025-10-02 11:56:37.184932841 +0000 UTC m=+0.100592278 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller, container_name=ovn_controller, io.buildah.version=1.41.3, managed_by=edpm_ansible, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0)
Oct 02 11:56:40 compute-0 podman[214451]: 2025-10-02 11:56:40.134824005 +0000 UTC m=+0.054993924 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, org.label-schema.build-date=20251001, 
org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, config_id=ovn_metadata_agent, container_name=ovn_metadata_agent)
Oct 02 11:56:41 compute-0 podman[214470]: 2025-10-02 11:56:41.157834054 +0000 UTC m=+0.077057087 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 11:56:47 compute-0 podman[214494]: 2025-10-02 11:56:47.15128696 +0000 UTC m=+0.059256260 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, org.label-schema.vendor=CentOS, container_name=ceilometer_agent_compute, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=edpm, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, 
org.label-schema.schema-version=1.0, tcib_managed=true, io.buildah.version=1.41.3)
Oct 02 11:56:52 compute-0 podman[214514]: 2025-10-02 11:56:52.152582381 +0000 UTC m=+0.066603461 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=multipathd, tcib_managed=true, container_name=multipathd)
Oct 02 11:56:53 compute-0 podman[214534]: 2025-10-02 11:56:53.136952214 +0000 UTC m=+0.054502360 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.expose-services=, name=ubi9-minimal, vendor=Red Hat, Inc., config_id=edpm, version=9.6, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, container_name=openstack_network_exporter, vcs-type=git, managed_by=edpm_ansible, url=https://catalog.redhat.com/en/search?searchType=containers, maintainer=Red Hat, Inc., build-date=2025-08-20T13:12:41, com.redhat.component=ubi9-minimal-container, io.buildah.version=1.33.7, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., io.openshift.tags=minimal rhel9, release=1755695350, architecture=x86_64, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, distribution-scope=public, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9.)
Oct 02 11:56:59 compute-0 podman[214558]: 2025-10-02 11:56:59.140846506 +0000 UTC m=+0.055386413 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, config_id=iscsid, container_name=iscsid, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 11:56:59 compute-0 podman[214557]: 2025-10-02 11:56:59.15925112 +0000 UTC m=+0.068318027 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter)
Oct 02 11:57:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:57:02.197 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 11:57:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:57:02.198 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 11:57:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:57:02.198 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 11:57:02 compute-0 nova_compute[192079]: 2025-10-02 11:57:02.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:57:02 compute-0 nova_compute[192079]: 2025-10-02 11:57:02.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 11:57:02 compute-0 nova_compute[192079]: 2025-10-02 11:57:02.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 11:57:02 compute-0 nova_compute[192079]: 2025-10-02 11:57:02.685 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 11:57:02 compute-0 nova_compute[192079]: 2025-10-02 11:57:02.685 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:57:03 compute-0 nova_compute[192079]: 2025-10-02 11:57:03.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:57:03 compute-0 nova_compute[192079]: 2025-10-02 11:57:03.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:57:03 compute-0 nova_compute[192079]: 2025-10-02 11:57:03.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 11:57:04 compute-0 nova_compute[192079]: 2025-10-02 11:57:04.662 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:57:04 compute-0 nova_compute[192079]: 2025-10-02 11:57:04.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:57:04 compute-0 nova_compute[192079]: 2025-10-02 11:57:04.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:57:04 compute-0 nova_compute[192079]: 2025-10-02 11:57:04.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:57:04 compute-0 nova_compute[192079]: 2025-10-02 11:57:04.710 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 11:57:04 compute-0 nova_compute[192079]: 2025-10-02 11:57:04.711 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 11:57:04 compute-0 nova_compute[192079]: 2025-10-02 11:57:04.711 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 11:57:04 compute-0 nova_compute[192079]: 2025-10-02 11:57:04.712 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 11:57:04 compute-0 nova_compute[192079]: 2025-10-02 11:57:04.917 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 11:57:04 compute-0 nova_compute[192079]: 2025-10-02 11:57:04.918 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=6033MB free_disk=73.49939346313477GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 11:57:04 compute-0 nova_compute[192079]: 2025-10-02 11:57:04.918 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 11:57:04 compute-0 nova_compute[192079]: 2025-10-02 11:57:04.918 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 11:57:04 compute-0 nova_compute[192079]: 2025-10-02 11:57:04.969 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 11:57:04 compute-0 nova_compute[192079]: 2025-10-02 11:57:04.969 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 11:57:04 compute-0 nova_compute[192079]: 2025-10-02 11:57:04.989 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 11:57:05 compute-0 nova_compute[192079]: 2025-10-02 11:57:05.002 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 0, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 11:57:05 compute-0 nova_compute[192079]: 2025-10-02 11:57:05.004 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 11:57:05 compute-0 nova_compute[192079]: 2025-10-02 11:57:05.004 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.086s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 11:57:06 compute-0 nova_compute[192079]: 2025-10-02 11:57:06.004 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:57:08 compute-0 podman[214600]: 2025-10-02 11:57:08.173062245 +0000 UTC m=+0.088418466 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, config_id=ovn_controller, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3)
Oct 02 11:57:11 compute-0 podman[214628]: 2025-10-02 11:57:11.134956677 +0000 UTC m=+0.047393976 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, config_id=ovn_metadata_agent, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']})
Oct 02 11:57:12 compute-0 podman[214647]: 2025-10-02 11:57:12.158547142 +0000 UTC m=+0.061833460 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 11:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:57:17.096 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:57:17.096 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:57:17.096 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:57:17.096 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:57:17.096 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:57:17.096 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:57:17.096 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:57:17.097 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:57:17.097 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:57:17.097 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:57:17.097 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.iops, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:57:17.097 12 DEBUG ceilometer.polling.manager [-] Skip pollster cpu, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:57:17.097 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:57:17.097 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:57:17.097 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.allocation, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:57:17.097 12 DEBUG ceilometer.polling.manager [-] Skip pollster memory.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:57:17.098 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:57:17.098 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:57:17.098 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:57:17.098 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:57:17.098 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:57:17.098 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:57:17.098 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.capacity, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:57:17.098 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:57:17.098 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:57:17 compute-0 sudo[214811]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ttjadmzdbdxqorvjsjjbtlprbkvhgxxq ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406237.5260124-3285-94396910816846/AnsiballZ_file.py'
Oct 02 11:57:17 compute-0 sudo[214811]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:57:17 compute-0 podman[214770]: 2025-10-02 11:57:17.789869174 +0000 UTC m=+0.048786333 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=edpm, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, tcib_managed=true, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, 
tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team)
Oct 02 11:57:18 compute-0 python3.9[214818]: ansible-ansible.builtin.file Invoked with group=root mode=0750 owner=root path=/var/lib/edpm-config/firewall/ state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:57:18 compute-0 sudo[214811]: pam_unix(sudo:session): session closed for user root
Oct 02 11:57:18 compute-0 sudo[214968]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-suonmqeudyenkulbcekzhosxkcveiqfw ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406238.2748652-3309-177010950839469/AnsiballZ_stat.py'
Oct 02 11:57:18 compute-0 sudo[214968]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:57:18 compute-0 python3.9[214970]: ansible-ansible.legacy.stat Invoked with path=/var/lib/edpm-config/firewall/telemetry.yaml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:57:18 compute-0 sudo[214968]: pam_unix(sudo:session): session closed for user root
Oct 02 11:57:19 compute-0 sudo[215091]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xmlxgqoyeqihjaxdlhytoakwgpmgnveu ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406238.2748652-3309-177010950839469/AnsiballZ_copy.py'
Oct 02 11:57:19 compute-0 sudo[215091]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:57:19 compute-0 python3.9[215093]: ansible-ansible.legacy.copy Invoked with dest=/var/lib/edpm-config/firewall/telemetry.yaml mode=0640 src=/home/zuul/.ansible/tmp/ansible-tmp-1759406238.2748652-3309-177010950839469/.source.yaml follow=False _original_basename=firewall.yaml.j2 checksum=d942d984493b214bda2913f753ff68cdcedff00e backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:57:19 compute-0 sudo[215091]: pam_unix(sudo:session): session closed for user root
Oct 02 11:57:20 compute-0 sudo[215243]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-uqqwwciyvpcmjzytxzgxwympyvhaoclt ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406239.879071-3357-247257292126512/AnsiballZ_file.py'
Oct 02 11:57:20 compute-0 sudo[215243]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:57:20 compute-0 python3.9[215245]: ansible-ansible.builtin.file Invoked with group=root mode=0750 owner=root path=/var/lib/edpm-config/firewall state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:57:20 compute-0 sudo[215243]: pam_unix(sudo:session): session closed for user root
Oct 02 11:57:21 compute-0 sudo[215395]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jotrvqtafidmdtwryfnuxkzohllpcnbs ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406240.8248491-3381-274331017378009/AnsiballZ_stat.py'
Oct 02 11:57:21 compute-0 sudo[215395]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:57:21 compute-0 python3.9[215397]: ansible-ansible.legacy.stat Invoked with path=/var/lib/edpm-config/firewall/edpm-nftables-base.yaml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:57:21 compute-0 sudo[215395]: pam_unix(sudo:session): session closed for user root
Oct 02 11:57:21 compute-0 sudo[215473]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-inudoxilnlgfgkdxelthegnsczotknap ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406240.8248491-3381-274331017378009/AnsiballZ_file.py'
Oct 02 11:57:21 compute-0 sudo[215473]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:57:22 compute-0 python3.9[215475]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/var/lib/edpm-config/firewall/edpm-nftables-base.yaml _original_basename=base-rules.yaml.j2 recurse=False state=file path=/var/lib/edpm-config/firewall/edpm-nftables-base.yaml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:57:22 compute-0 sudo[215473]: pam_unix(sudo:session): session closed for user root
Oct 02 11:57:22 compute-0 sudo[215642]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pghyyxqmsqtpqwmsgknakcmdtvcsnxei ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406242.4084797-3417-117042767691377/AnsiballZ_stat.py'
Oct 02 11:57:22 compute-0 sudo[215642]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:57:22 compute-0 podman[215599]: 2025-10-02 11:57:22.711648972 +0000 UTC m=+0.055862968 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, container_name=multipathd, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=multipathd, maintainer=OpenStack Kubernetes Operator team)
Oct 02 11:57:22 compute-0 python3.9[215647]: ansible-ansible.legacy.stat Invoked with path=/var/lib/edpm-config/firewall/edpm-nftables-user-rules.yaml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:57:22 compute-0 sudo[215642]: pam_unix(sudo:session): session closed for user root
Oct 02 11:57:23 compute-0 sudo[215740]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vcopsfqmqhgosiuptxcafirffzzeaemh ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406242.4084797-3417-117042767691377/AnsiballZ_file.py'
Oct 02 11:57:23 compute-0 sudo[215740]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:57:23 compute-0 podman[215697]: 2025-10-02 11:57:23.264376213 +0000 UTC m=+0.070626801 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, architecture=x86_64, com.redhat.component=ubi9-minimal-container, version=9.6, config_id=edpm, release=1755695350, vendor=Red Hat, Inc., description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., io.openshift.expose-services=, io.openshift.tags=minimal rhel9, managed_by=edpm_ansible, io.buildah.version=1.33.7, build-date=2025-08-20T13:12:41, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, container_name=openstack_network_exporter, name=ubi9-minimal, url=https://catalog.redhat.com/en/search?searchType=containers, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, vcs-type=git, distribution-scope=public, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, maintainer=Red Hat, Inc., summary=Provides the latest release of the minimal Red Hat Universal Base Image 9.)
Oct 02 11:57:23 compute-0 python3.9[215746]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/var/lib/edpm-config/firewall/edpm-nftables-user-rules.yaml _original_basename=.kduk9dt8 recurse=False state=file path=/var/lib/edpm-config/firewall/edpm-nftables-user-rules.yaml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:57:23 compute-0 sudo[215740]: pam_unix(sudo:session): session closed for user root
Oct 02 11:57:24 compute-0 sudo[215896]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xeinqiftylptbiupdzyhuiwyqveswkpe ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406243.764637-3453-224327988782568/AnsiballZ_stat.py'
Oct 02 11:57:24 compute-0 sudo[215896]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:57:24 compute-0 python3.9[215898]: ansible-ansible.legacy.stat Invoked with path=/etc/nftables/iptables.nft follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:57:24 compute-0 sudo[215896]: pam_unix(sudo:session): session closed for user root
Oct 02 11:57:24 compute-0 sudo[215974]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vdbncmbjsuvrkpwsviwgziisekkpuxkx ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406243.764637-3453-224327988782568/AnsiballZ_file.py'
Oct 02 11:57:24 compute-0 sudo[215974]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:57:24 compute-0 python3.9[215976]: ansible-ansible.legacy.file Invoked with group=root mode=0600 owner=root dest=/etc/nftables/iptables.nft _original_basename=iptables.nft recurse=False state=file path=/etc/nftables/iptables.nft force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:57:24 compute-0 sudo[215974]: pam_unix(sudo:session): session closed for user root
Oct 02 11:57:25 compute-0 sudo[216126]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jzstvjikqupuyuvuyfofcyqeeyugbqea ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406245.3090904-3492-132697440029004/AnsiballZ_command.py'
Oct 02 11:57:25 compute-0 sudo[216126]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:57:25 compute-0 python3.9[216128]: ansible-ansible.legacy.command Invoked with _raw_params=nft -j list ruleset _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:57:25 compute-0 sudo[216126]: pam_unix(sudo:session): session closed for user root
Oct 02 11:57:26 compute-0 sudo[216279]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ghzihknyzrbsuxrowezwldisdnhyjxyc ; /usr/bin/python3 /home/zuul/.ansible/tmp/ansible-tmp-1759406246.1916878-3516-45082675207271/AnsiballZ_edpm_nftables_from_files.py'
Oct 02 11:57:26 compute-0 sudo[216279]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:57:27 compute-0 python3[216281]: ansible-edpm_nftables_from_files Invoked with src=/var/lib/edpm-config/firewall
Oct 02 11:57:27 compute-0 sudo[216279]: pam_unix(sudo:session): session closed for user root
Oct 02 11:57:27 compute-0 sudo[216431]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kyulhamvpzgyuabxfvvervhzkcfkntbe ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406247.2585492-3540-63403226819263/AnsiballZ_stat.py'
Oct 02 11:57:27 compute-0 sudo[216431]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:57:27 compute-0 python3.9[216433]: ansible-ansible.legacy.stat Invoked with path=/etc/nftables/edpm-jumps.nft follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:57:27 compute-0 sudo[216431]: pam_unix(sudo:session): session closed for user root
Oct 02 11:57:28 compute-0 sudo[216509]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-uhvrgpxdzunyjgafwhgklcypqkfymtld ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406247.2585492-3540-63403226819263/AnsiballZ_file.py'
Oct 02 11:57:28 compute-0 sudo[216509]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:57:28 compute-0 python3.9[216511]: ansible-ansible.legacy.file Invoked with group=root mode=0600 owner=root dest=/etc/nftables/edpm-jumps.nft _original_basename=jump-chain.j2 recurse=False state=file path=/etc/nftables/edpm-jumps.nft force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:57:28 compute-0 sudo[216509]: pam_unix(sudo:session): session closed for user root
Oct 02 11:57:29 compute-0 sudo[216661]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sleiuwsuzhjredouevzittmpmpxkjtsm ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406248.7144756-3576-248109174884385/AnsiballZ_stat.py'
Oct 02 11:57:29 compute-0 sudo[216661]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:57:29 compute-0 python3.9[216663]: ansible-ansible.legacy.stat Invoked with path=/etc/nftables/edpm-update-jumps.nft follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:57:29 compute-0 sudo[216661]: pam_unix(sudo:session): session closed for user root
Oct 02 11:57:29 compute-0 sudo[216767]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-izchfthjvnyixutwjlhmdnulcsnnholf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406248.7144756-3576-248109174884385/AnsiballZ_file.py'
Oct 02 11:57:29 compute-0 sudo[216767]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:57:29 compute-0 podman[216714]: 2025-10-02 11:57:29.632379663 +0000 UTC m=+0.052222368 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, config_id=iscsid, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001)
Oct 02 11:57:29 compute-0 podman[216713]: 2025-10-02 11:57:29.638094559 +0000 UTC m=+0.057839431 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>)
Oct 02 11:57:29 compute-0 python3.9[216778]: ansible-ansible.legacy.file Invoked with group=root mode=0600 owner=root dest=/etc/nftables/edpm-update-jumps.nft _original_basename=jump-chain.j2 recurse=False state=file path=/etc/nftables/edpm-update-jumps.nft force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:57:29 compute-0 sudo[216767]: pam_unix(sudo:session): session closed for user root
Oct 02 11:57:30 compute-0 sudo[216935]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-uougyebrucqbsktvkjxwzckmbvftnpoi ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406250.083873-3612-207730635732501/AnsiballZ_stat.py'
Oct 02 11:57:30 compute-0 sudo[216935]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:57:30 compute-0 python3.9[216937]: ansible-ansible.legacy.stat Invoked with path=/etc/nftables/edpm-flushes.nft follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:57:30 compute-0 sudo[216935]: pam_unix(sudo:session): session closed for user root
Oct 02 11:57:31 compute-0 sudo[217013]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rteonuthvqfzkigyespsfpshllcsstbn ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406250.083873-3612-207730635732501/AnsiballZ_file.py'
Oct 02 11:57:31 compute-0 sudo[217013]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:57:31 compute-0 python3.9[217015]: ansible-ansible.legacy.file Invoked with group=root mode=0600 owner=root dest=/etc/nftables/edpm-flushes.nft _original_basename=flush-chain.j2 recurse=False state=file path=/etc/nftables/edpm-flushes.nft force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:57:31 compute-0 sudo[217013]: pam_unix(sudo:session): session closed for user root
Oct 02 11:57:31 compute-0 sudo[217165]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dytrahzgcrwpvhyhojcimxbuewpyyfrs ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406251.4669416-3648-67914371906621/AnsiballZ_stat.py'
Oct 02 11:57:31 compute-0 sudo[217165]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:57:32 compute-0 python3.9[217167]: ansible-ansible.legacy.stat Invoked with path=/etc/nftables/edpm-chains.nft follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:57:32 compute-0 sudo[217165]: pam_unix(sudo:session): session closed for user root
Oct 02 11:57:32 compute-0 sudo[217243]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-njqugbfjzgvmqxdamyeocpsgcqcorwdj ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406251.4669416-3648-67914371906621/AnsiballZ_file.py'
Oct 02 11:57:32 compute-0 sudo[217243]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:57:32 compute-0 python3.9[217245]: ansible-ansible.legacy.file Invoked with group=root mode=0600 owner=root dest=/etc/nftables/edpm-chains.nft _original_basename=chains.j2 recurse=False state=file path=/etc/nftables/edpm-chains.nft force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:57:32 compute-0 sudo[217243]: pam_unix(sudo:session): session closed for user root
Oct 02 11:57:33 compute-0 sudo[217395]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cuwmndiejfpfoimdrtgajcbqzsiziqzy ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406252.7954457-3684-237641567214756/AnsiballZ_stat.py'
Oct 02 11:57:33 compute-0 sudo[217395]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:57:33 compute-0 python3.9[217397]: ansible-ansible.legacy.stat Invoked with path=/etc/nftables/edpm-rules.nft follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 02 11:57:33 compute-0 sudo[217395]: pam_unix(sudo:session): session closed for user root
Oct 02 11:57:33 compute-0 sudo[217520]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lquhkinyfvbfztauaiodohnxpaksvnsm ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406252.7954457-3684-237641567214756/AnsiballZ_copy.py'
Oct 02 11:57:33 compute-0 sudo[217520]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:57:33 compute-0 python3.9[217522]: ansible-ansible.legacy.copy Invoked with dest=/etc/nftables/edpm-rules.nft group=root mode=0600 owner=root src=/home/zuul/.ansible/tmp/ansible-tmp-1759406252.7954457-3684-237641567214756/.source.nft follow=False _original_basename=ruleset.j2 checksum=fb3275eced3a2e06312143189928124e1b2df34a backup=False force=True remote_src=False unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:57:33 compute-0 sudo[217520]: pam_unix(sudo:session): session closed for user root
Oct 02 11:57:34 compute-0 sudo[217672]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qmckanwoadvmwhlqhbgebdnfekihiwqc ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406254.3045325-3729-44259335341387/AnsiballZ_file.py'
Oct 02 11:57:34 compute-0 sudo[217672]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:57:34 compute-0 python3.9[217674]: ansible-ansible.builtin.file Invoked with group=root mode=0600 owner=root path=/etc/nftables/edpm-rules.nft.changed state=touch recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:57:34 compute-0 sudo[217672]: pam_unix(sudo:session): session closed for user root
Oct 02 11:57:35 compute-0 sudo[217824]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ysllrzgmievxtflusrlncxpkrkqumxot ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406254.9998493-3753-19937487245627/AnsiballZ_command.py'
Oct 02 11:57:35 compute-0 sudo[217824]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:57:35 compute-0 python3.9[217826]: ansible-ansible.legacy.command Invoked with _raw_params=set -o pipefail; cat /etc/nftables/edpm-chains.nft /etc/nftables/edpm-flushes.nft /etc/nftables/edpm-rules.nft /etc/nftables/edpm-update-jumps.nft /etc/nftables/edpm-jumps.nft | nft -c -f - _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:57:35 compute-0 sudo[217824]: pam_unix(sudo:session): session closed for user root
Oct 02 11:57:36 compute-0 sudo[217979]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ebrykgwodzkcqmrzxjyklhqekgzcknaf ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406255.8169067-3777-265080971560126/AnsiballZ_blockinfile.py'
Oct 02 11:57:36 compute-0 sudo[217979]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:57:36 compute-0 python3.9[217981]: ansible-ansible.builtin.blockinfile Invoked with backup=False block=include "/etc/nftables/iptables.nft"
                                             include "/etc/nftables/edpm-chains.nft"
                                             include "/etc/nftables/edpm-rules.nft"
                                             include "/etc/nftables/edpm-jumps.nft"
                                              path=/etc/sysconfig/nftables.conf validate=nft -c -f %s state=present marker=# {mark} ANSIBLE MANAGED BLOCK create=False marker_begin=BEGIN marker_end=END append_newline=False prepend_newline=False unsafe_writes=False insertafter=None insertbefore=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:57:36 compute-0 sudo[217979]: pam_unix(sudo:session): session closed for user root
Oct 02 11:57:37 compute-0 sudo[218131]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bdsuntjycjtyqaarfcabsbxtgsfoljqr ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406256.9264238-3804-67501571862393/AnsiballZ_command.py'
Oct 02 11:57:37 compute-0 sudo[218131]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:57:37 compute-0 python3.9[218133]: ansible-ansible.legacy.command Invoked with _raw_params=nft -f /etc/nftables/edpm-chains.nft _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:57:37 compute-0 sudo[218131]: pam_unix(sudo:session): session closed for user root
Oct 02 11:57:38 compute-0 sudo[218284]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xwsgyeufglabwwwipjgwouozienuhjas ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406257.728417-3828-2908342437152/AnsiballZ_stat.py'
Oct 02 11:57:38 compute-0 sudo[218284]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:57:38 compute-0 python3.9[218286]: ansible-ansible.builtin.stat Invoked with path=/etc/nftables/edpm-rules.nft.changed follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 02 11:57:38 compute-0 sudo[218284]: pam_unix(sudo:session): session closed for user root
Oct 02 11:57:38 compute-0 PackageKit[132561]: daemon quit
Oct 02 11:57:38 compute-0 systemd[1]: packagekit.service: Deactivated successfully.
Oct 02 11:57:38 compute-0 podman[218313]: 2025-10-02 11:57:38.4408312 +0000 UTC m=+0.089775458 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.license=GPLv2, container_name=ovn_controller, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=ovn_controller)
Oct 02 11:57:38 compute-0 sudo[218464]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ljympvjlvlojwyokfmtwatuofeicdszn ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406258.428161-3852-260417655277168/AnsiballZ_command.py'
Oct 02 11:57:38 compute-0 sudo[218464]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:57:38 compute-0 python3.9[218466]: ansible-ansible.legacy.command Invoked with _raw_params=set -o pipefail; cat /etc/nftables/edpm-flushes.nft /etc/nftables/edpm-rules.nft /etc/nftables/edpm-update-jumps.nft | nft -f - _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True cmd=None argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 02 11:57:38 compute-0 sudo[218464]: pam_unix(sudo:session): session closed for user root
Oct 02 11:57:39 compute-0 sudo[218619]:     zuul : TTY=pts/0 ; PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nxghtzdoeiqdkqpdcprhgztgembqyexb ; /usr/bin/python3.9 /home/zuul/.ansible/tmp/ansible-tmp-1759406259.381339-3876-23481161189930/AnsiballZ_file.py'
Oct 02 11:57:39 compute-0 sudo[218619]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 11:57:39 compute-0 python3.9[218622]: ansible-ansible.builtin.file Invoked with path=/etc/nftables/edpm-rules.nft.changed state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 02 11:57:39 compute-0 sudo[218619]: pam_unix(sudo:session): session closed for user root
Oct 02 11:57:40 compute-0 sshd-session[192450]: Connection closed by 192.168.122.30 port 42376
Oct 02 11:57:40 compute-0 sshd-session[192447]: pam_unix(sshd:session): session closed for user zuul
Oct 02 11:57:40 compute-0 systemd[1]: session-27.scope: Deactivated successfully.
Oct 02 11:57:40 compute-0 systemd[1]: session-27.scope: Consumed 1min 39.943s CPU time.
Oct 02 11:57:40 compute-0 systemd-logind[827]: Session 27 logged out. Waiting for processes to exit.
Oct 02 11:57:40 compute-0 systemd-logind[827]: Removed session 27.
Oct 02 11:57:42 compute-0 podman[218647]: 2025-10-02 11:57:42.133199693 +0000 UTC m=+0.050267573 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, container_name=ovn_metadata_agent, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, tcib_managed=true, 
org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS)
Oct 02 11:57:43 compute-0 podman[218668]: 2025-10-02 11:57:43.161759732 +0000 UTC m=+0.073154352 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 11:57:48 compute-0 podman[218692]: 2025-10-02 11:57:48.204258761 +0000 UTC m=+0.116017399 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, 
tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=edpm, container_name=ceilometer_agent_compute)
Oct 02 11:57:53 compute-0 podman[218714]: 2025-10-02 11:57:53.151764519 +0000 UTC m=+0.066853827 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_id=multipathd, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 11:57:54 compute-0 podman[218734]: 2025-10-02 11:57:54.132410252 +0000 UTC m=+0.051922977 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, url=https://catalog.redhat.com/en/search?searchType=containers, name=ubi9-minimal, io.openshift.expose-services=, com.redhat.component=ubi9-minimal-container, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.tags=minimal rhel9, maintainer=Red Hat, Inc., config_id=edpm, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, distribution-scope=public, managed_by=edpm_ansible, release=1755695350, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', 
'/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., vcs-type=git, architecture=x86_64, io.buildah.version=1.33.7, vendor=Red Hat, Inc., container_name=openstack_network_exporter, version=9.6, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, build-date=2025-08-20T13:12:41)
Oct 02 11:58:00 compute-0 podman[218756]: 2025-10-02 11:58:00.137381074 +0000 UTC m=+0.054236011 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, container_name=iscsid, managed_by=edpm_ansible, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=iscsid)
Oct 02 11:58:00 compute-0 podman[218755]: 2025-10-02 11:58:00.174114754 +0000 UTC m=+0.089667186 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 11:58:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:58:02.199 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 11:58:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:58:02.199 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 11:58:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:58:02.199 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 11:58:02 compute-0 nova_compute[192079]: 2025-10-02 11:58:02.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:58:02 compute-0 nova_compute[192079]: 2025-10-02 11:58:02.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 11:58:02 compute-0 nova_compute[192079]: 2025-10-02 11:58:02.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 11:58:02 compute-0 nova_compute[192079]: 2025-10-02 11:58:02.689 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 11:58:03 compute-0 nova_compute[192079]: 2025-10-02 11:58:03.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:58:03 compute-0 nova_compute[192079]: 2025-10-02 11:58:03.680 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:58:04 compute-0 nova_compute[192079]: 2025-10-02 11:58:04.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:58:04 compute-0 nova_compute[192079]: 2025-10-02 11:58:04.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:58:04 compute-0 nova_compute[192079]: 2025-10-02 11:58:04.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:58:04 compute-0 nova_compute[192079]: 2025-10-02 11:58:04.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:58:04 compute-0 nova_compute[192079]: 2025-10-02 11:58:04.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 11:58:05 compute-0 nova_compute[192079]: 2025-10-02 11:58:05.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:58:06 compute-0 nova_compute[192079]: 2025-10-02 11:58:06.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:58:06 compute-0 nova_compute[192079]: 2025-10-02 11:58:06.692 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 11:58:06 compute-0 nova_compute[192079]: 2025-10-02 11:58:06.692 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 11:58:06 compute-0 nova_compute[192079]: 2025-10-02 11:58:06.692 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 11:58:06 compute-0 nova_compute[192079]: 2025-10-02 11:58:06.693 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 11:58:06 compute-0 nova_compute[192079]: 2025-10-02 11:58:06.932 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 11:58:06 compute-0 nova_compute[192079]: 2025-10-02 11:58:06.934 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=6059MB free_disk=73.50228500366211GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 11:58:06 compute-0 nova_compute[192079]: 2025-10-02 11:58:06.935 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 11:58:06 compute-0 nova_compute[192079]: 2025-10-02 11:58:06.935 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 11:58:06 compute-0 nova_compute[192079]: 2025-10-02 11:58:06.997 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 11:58:06 compute-0 nova_compute[192079]: 2025-10-02 11:58:06.998 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 11:58:07 compute-0 nova_compute[192079]: 2025-10-02 11:58:07.036 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 11:58:07 compute-0 nova_compute[192079]: 2025-10-02 11:58:07.058 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 0, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 11:58:07 compute-0 nova_compute[192079]: 2025-10-02 11:58:07.060 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 11:58:07 compute-0 nova_compute[192079]: 2025-10-02 11:58:07.060 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.125s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 11:58:08 compute-0 nova_compute[192079]: 2025-10-02 11:58:08.060 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:58:09 compute-0 podman[218800]: 2025-10-02 11:58:09.166217294 +0000 UTC m=+0.080356780 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, container_name=ovn_controller, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller, managed_by=edpm_ansible, org.label-schema.license=GPLv2)
Oct 02 11:58:13 compute-0 podman[218826]: 2025-10-02 11:58:13.172159624 +0000 UTC m=+0.075942588 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', 
'/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_managed=true, config_id=ovn_metadata_agent)
Oct 02 11:58:13 compute-0 podman[218846]: 2025-10-02 11:58:13.296853912 +0000 UTC m=+0.083040294 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 11:58:19 compute-0 podman[218870]: 2025-10-02 11:58:19.19037713 +0000 UTC m=+0.093043058 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, container_name=ceilometer_agent_compute, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm, managed_by=edpm_ansible, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, 
org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 11:58:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:58:19.896 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=2, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=1) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 11:58:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:58:19.897 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 0 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 11:58:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:58:19.898 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '2'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 11:58:24 compute-0 podman[218890]: 2025-10-02 11:58:24.130231589 +0000 UTC m=+0.049132942 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, config_id=multipathd, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, container_name=multipathd, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS)
Oct 02 11:58:25 compute-0 podman[218912]: 2025-10-02 11:58:25.152410193 +0000 UTC m=+0.069141562 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, vcs-type=git, io.openshift.expose-services=, architecture=x86_64, maintainer=Red Hat, Inc., build-date=2025-08-20T13:12:41, container_name=openstack_network_exporter, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, com.redhat.component=ubi9-minimal-container, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., version=9.6, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., managed_by=edpm_ansible, io.openshift.tags=minimal rhel9, io.buildah.version=1.33.7, name=ubi9-minimal, url=https://catalog.redhat.com/en/search?searchType=containers, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, config_id=edpm, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, release=1755695350, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, vendor=Red Hat, Inc., distribution-scope=public)
Oct 02 11:58:31 compute-0 podman[218933]: 2025-10-02 11:58:31.137394145 +0000 UTC m=+0.050360455 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 11:58:31 compute-0 podman[218934]: 2025-10-02 11:58:31.146230668 +0000 UTC m=+0.051124326 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=iscsid, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, container_name=iscsid, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 11:58:40 compute-0 podman[218977]: 2025-10-02 11:58:40.214889663 +0000 UTC m=+0.114549769 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 11:58:44 compute-0 podman[219005]: 2025-10-02 11:58:44.160214874 +0000 UTC m=+0.070677053 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.build-date=20251001, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, managed_by=edpm_ansible)
Oct 02 11:58:44 compute-0 podman[219006]: 2025-10-02 11:58:44.165456908 +0000 UTC m=+0.071271419 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 11:58:50 compute-0 podman[219047]: 2025-10-02 11:58:50.194516963 +0000 UTC m=+0.106121515 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, 
org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true)
Oct 02 11:58:55 compute-0 podman[219068]: 2025-10-02 11:58:55.207451181 +0000 UTC m=+0.116383466 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, io.buildah.version=1.41.3, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_id=multipathd, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 11:58:55 compute-0 podman[219088]: 2025-10-02 11:58:55.290421879 +0000 UTC m=+0.071196856 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, release=1755695350, vendor=Red Hat, Inc., build-date=2025-08-20T13:12:41, com.redhat.component=ubi9-minimal-container, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, vcs-type=git, container_name=openstack_network_exporter, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, managed_by=edpm_ansible, architecture=x86_64, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., io.openshift.tags=minimal rhel9, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.buildah.version=1.33.7, name=ubi9-minimal, config_id=edpm, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., url=https://catalog.redhat.com/en/search?searchType=containers, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, maintainer=Red Hat, Inc., distribution-scope=public, io.openshift.expose-services=, version=9.6)
Oct 02 11:59:02 compute-0 podman[219109]: 2025-10-02 11:59:02.172592065 +0000 UTC m=+0.080129500 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter)
Oct 02 11:59:02 compute-0 podman[219110]: 2025-10-02 11:59:02.177714886 +0000 UTC m=+0.081813817 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, config_id=iscsid, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.license=GPLv2)
Oct 02 11:59:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:59:02.199 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 11:59:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:59:02.200 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 11:59:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:59:02.200 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 11:59:02 compute-0 nova_compute[192079]: 2025-10-02 11:59:02.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._run_pending_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:59:02 compute-0 nova_compute[192079]: 2025-10-02 11:59:02.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Cleaning up deleted instances _run_pending_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:11145
Oct 02 11:59:02 compute-0 nova_compute[192079]: 2025-10-02 11:59:02.887 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] There are 0 instances to clean _run_pending_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:11154
Oct 02 11:59:02 compute-0 nova_compute[192079]: 2025-10-02 11:59:02.888 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_incomplete_migrations run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:59:02 compute-0 nova_compute[192079]: 2025-10-02 11:59:02.888 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Cleaning up deleted instances with incomplete migration  _cleanup_incomplete_migrations /usr/lib/python3.9/site-packages/nova/compute/manager.py:11183
Oct 02 11:59:02 compute-0 nova_compute[192079]: 2025-10-02 11:59:02.916 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:59:03 compute-0 nova_compute[192079]: 2025-10-02 11:59:03.944 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:59:04 compute-0 nova_compute[192079]: 2025-10-02 11:59:04.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:59:04 compute-0 nova_compute[192079]: 2025-10-02 11:59:04.667 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 11:59:04 compute-0 nova_compute[192079]: 2025-10-02 11:59:04.667 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 11:59:04 compute-0 nova_compute[192079]: 2025-10-02 11:59:04.711 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 11:59:04 compute-0 nova_compute[192079]: 2025-10-02 11:59:04.712 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:59:05 compute-0 nova_compute[192079]: 2025-10-02 11:59:05.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:59:06 compute-0 nova_compute[192079]: 2025-10-02 11:59:06.660 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:59:06 compute-0 nova_compute[192079]: 2025-10-02 11:59:06.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:59:06 compute-0 nova_compute[192079]: 2025-10-02 11:59:06.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:59:06 compute-0 nova_compute[192079]: 2025-10-02 11:59:06.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 11:59:06 compute-0 nova_compute[192079]: 2025-10-02 11:59:06.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:59:06 compute-0 nova_compute[192079]: 2025-10-02 11:59:06.891 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 11:59:06 compute-0 nova_compute[192079]: 2025-10-02 11:59:06.891 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 11:59:06 compute-0 nova_compute[192079]: 2025-10-02 11:59:06.891 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 11:59:06 compute-0 nova_compute[192079]: 2025-10-02 11:59:06.891 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 11:59:07 compute-0 nova_compute[192079]: 2025-10-02 11:59:07.073 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 11:59:07 compute-0 nova_compute[192079]: 2025-10-02 11:59:07.074 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=6072MB free_disk=73.50237274169922GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 11:59:07 compute-0 nova_compute[192079]: 2025-10-02 11:59:07.074 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 11:59:07 compute-0 nova_compute[192079]: 2025-10-02 11:59:07.074 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 11:59:07 compute-0 nova_compute[192079]: 2025-10-02 11:59:07.189 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 11:59:07 compute-0 nova_compute[192079]: 2025-10-02 11:59:07.190 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 11:59:07 compute-0 nova_compute[192079]: 2025-10-02 11:59:07.220 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 11:59:07 compute-0 nova_compute[192079]: 2025-10-02 11:59:07.240 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 0, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 11:59:07 compute-0 nova_compute[192079]: 2025-10-02 11:59:07.243 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 11:59:07 compute-0 nova_compute[192079]: 2025-10-02 11:59:07.243 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.169s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 11:59:08 compute-0 nova_compute[192079]: 2025-10-02 11:59:08.245 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 11:59:11 compute-0 podman[219152]: 2025-10-02 11:59:11.181076342 +0000 UTC m=+0.091795521 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001)
Oct 02 11:59:15 compute-0 podman[219179]: 2025-10-02 11:59:15.171721104 +0000 UTC m=+0.074307011 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, managed_by=edpm_ansible, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', 
'/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, config_id=ovn_metadata_agent, container_name=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2)
Oct 02 11:59:15 compute-0 podman[219180]: 2025-10-02 11:59:15.190750377 +0000 UTC m=+0.086405913 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 11:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:59:17.097 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:59:17.097 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:59:17.097 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:59:17.098 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:59:17.098 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:59:17.098 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:59:17.098 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:59:17.098 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:59:17.099 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.iops, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:59:17.099 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.capacity, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:59:17.099 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:59:17.099 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:59:17.099 12 DEBUG ceilometer.polling.manager [-] Skip pollster memory.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:59:17.099 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:59:17.100 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:59:17.100 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:59:17.100 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:59:17.100 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:59:17.100 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:59:17.101 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.allocation, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:59:17.101 12 DEBUG ceilometer.polling.manager [-] Skip pollster cpu, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:59:17.101 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:59:17.101 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:59:17.101 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 11:59:17.101 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 11:59:21 compute-0 podman[219220]: 2025-10-02 11:59:21.167410033 +0000 UTC m=+0.074496387 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=edpm, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, 
tcib_managed=true, container_name=ceilometer_agent_compute, managed_by=edpm_ansible)
Oct 02 11:59:26 compute-0 podman[219241]: 2025-10-02 11:59:26.175081408 +0000 UTC m=+0.090372753 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, config_id=multipathd, org.label-schema.schema-version=1.0)
Oct 02 11:59:26 compute-0 podman[219240]: 2025-10-02 11:59:26.186653536 +0000 UTC m=+0.095904355 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, vcs-type=git, release=1755695350, name=ubi9-minimal, maintainer=Red Hat, Inc., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, vendor=Red Hat, Inc., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, managed_by=edpm_ansible, build-date=2025-08-20T13:12:41, distribution-scope=public, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., config_id=edpm, version=9.6, io.buildah.version=1.33.7, io.openshift.expose-services=, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, container_name=openstack_network_exporter, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package 
manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., com.redhat.component=ubi9-minimal-container, io.openshift.tags=minimal rhel9, url=https://catalog.redhat.com/en/search?searchType=containers, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., architecture=x86_64)
Oct 02 11:59:33 compute-0 podman[219281]: 2025-10-02 11:59:33.133371714 +0000 UTC m=+0.048504992 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 11:59:33 compute-0 podman[219282]: 2025-10-02 11:59:33.133371674 +0000 UTC m=+0.047200906 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, config_id=iscsid, container_name=iscsid, io.buildah.version=1.41.3, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible)
Oct 02 11:59:42 compute-0 podman[219322]: 2025-10-02 11:59:42.197920659 +0000 UTC m=+0.117385203 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=ovn_controller, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, container_name=ovn_controller)
Oct 02 11:59:46 compute-0 podman[219350]: 2025-10-02 11:59:46.177091545 +0000 UTC m=+0.077069566 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, container_name=ovn_metadata_agent, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, 
io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 11:59:46 compute-0 podman[219351]: 2025-10-02 11:59:46.195371635 +0000 UTC m=+0.086871039 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 11:59:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:59:51.473 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=3, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=2) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 11:59:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:59:51.474 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 8 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 11:59:52 compute-0 podman[219393]: 2025-10-02 11:59:52.147582142 +0000 UTC m=+0.066713656 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, container_name=ceilometer_agent_compute, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, config_id=edpm, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', 
'/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']})
Oct 02 11:59:57 compute-0 podman[219414]: 2025-10-02 11:59:57.164908453 +0000 UTC m=+0.073929698 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., vendor=Red Hat, Inc., managed_by=edpm_ansible, io.openshift.expose-services=, distribution-scope=public, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, vcs-type=git, build-date=2025-08-20T13:12:41, io.buildah.version=1.33.7, io.openshift.tags=minimal rhel9, maintainer=Red Hat, Inc., name=ubi9-minimal, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', 
'/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, config_id=edpm, container_name=openstack_network_exporter, version=9.6, url=https://catalog.redhat.com/en/search?searchType=containers, architecture=x86_64, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., release=1755695350, com.redhat.component=ubi9-minimal-container, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b)
Oct 02 11:59:57 compute-0 podman[219415]: 2025-10-02 11:59:57.196120215 +0000 UTC m=+0.093176665 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=multipathd, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, container_name=multipathd, io.buildah.version=1.41.3, org.label-schema.build-date=20251001)
Oct 02 11:59:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 11:59:59.476 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '3'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:00:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:02.200 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:00:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:02.200 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:00:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:02.201 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:00:04 compute-0 podman[219456]: 2025-10-02 12:00:04.134459389 +0000 UTC m=+0.052569340 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:00:04 compute-0 podman[219457]: 2025-10-02 12:00:04.149498749 +0000 UTC m=+0.064213206 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, config_id=iscsid, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, container_name=iscsid, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, org.label-schema.license=GPLv2, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0)
Oct 02 12:00:04 compute-0 nova_compute[192079]: 2025-10-02 12:00:04.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:00:05 compute-0 nova_compute[192079]: 2025-10-02 12:00:05.660 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:00:05 compute-0 nova_compute[192079]: 2025-10-02 12:00:05.718 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:00:05 compute-0 nova_compute[192079]: 2025-10-02 12:00:05.718 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:00:06 compute-0 nova_compute[192079]: 2025-10-02 12:00:06.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:00:06 compute-0 nova_compute[192079]: 2025-10-02 12:00:06.667 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:00:06 compute-0 nova_compute[192079]: 2025-10-02 12:00:06.668 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:00:06 compute-0 nova_compute[192079]: 2025-10-02 12:00:06.668 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:00:06 compute-0 nova_compute[192079]: 2025-10-02 12:00:06.689 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:00:06 compute-0 nova_compute[192079]: 2025-10-02 12:00:06.689 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:00:07 compute-0 nova_compute[192079]: 2025-10-02 12:00:07.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:00:07 compute-0 nova_compute[192079]: 2025-10-02 12:00:07.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:00:08 compute-0 nova_compute[192079]: 2025-10-02 12:00:08.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:00:08 compute-0 nova_compute[192079]: 2025-10-02 12:00:08.688 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:00:08 compute-0 nova_compute[192079]: 2025-10-02 12:00:08.688 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:00:08 compute-0 nova_compute[192079]: 2025-10-02 12:00:08.689 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:00:08 compute-0 nova_compute[192079]: 2025-10-02 12:00:08.689 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:00:08 compute-0 nova_compute[192079]: 2025-10-02 12:00:08.818 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:00:08 compute-0 nova_compute[192079]: 2025-10-02 12:00:08.818 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=6090MB free_disk=73.50342178344727GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:00:08 compute-0 nova_compute[192079]: 2025-10-02 12:00:08.819 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:00:08 compute-0 nova_compute[192079]: 2025-10-02 12:00:08.819 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:00:08 compute-0 nova_compute[192079]: 2025-10-02 12:00:08.888 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:00:08 compute-0 nova_compute[192079]: 2025-10-02 12:00:08.888 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:00:09 compute-0 nova_compute[192079]: 2025-10-02 12:00:09.103 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing inventories for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708 _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:804
Oct 02 12:00:09 compute-0 nova_compute[192079]: 2025-10-02 12:00:09.131 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Updating ProviderTree inventory for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 from _refresh_and_get_inventory using data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 0, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} _refresh_and_get_inventory /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:768
Oct 02 12:00:09 compute-0 nova_compute[192079]: 2025-10-02 12:00:09.131 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Updating inventory in ProviderTree for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 with inventory: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 0, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:176
Oct 02 12:00:09 compute-0 nova_compute[192079]: 2025-10-02 12:00:09.156 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing aggregate associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, aggregates: None _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:813
Oct 02 12:00:09 compute-0 nova_compute[192079]: 2025-10-02 12:00:09.184 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing trait associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, traits: COMPUTE_SECURITY_UEFI_SECURE_BOOT,COMPUTE_VIOMMU_MODEL_VIRTIO,COMPUTE_VIOMMU_MODEL_AUTO,COMPUTE_IMAGE_TYPE_AKI,COMPUTE_GRAPHICS_MODEL_VIRTIO,COMPUTE_NET_VIF_MODEL_PCNET,HW_CPU_X86_SSE42,COMPUTE_RESCUE_BFV,COMPUTE_VOLUME_EXTEND,COMPUTE_IMAGE_TYPE_QCOW2,COMPUTE_TRUSTED_CERTS,COMPUTE_SOCKET_PCI_NUMA_AFFINITY,COMPUTE_GRAPHICS_MODEL_CIRRUS,HW_CPU_X86_MMX,COMPUTE_STORAGE_BUS_VIRTIO,COMPUTE_NET_ATTACH_INTERFACE_WITH_TAG,COMPUTE_STORAGE_BUS_FDC,COMPUTE_STORAGE_BUS_USB,COMPUTE_NODE,HW_CPU_X86_SSSE3,HW_CPU_X86_SSE2,COMPUTE_GRAPHICS_MODEL_BOCHS,COMPUTE_NET_VIF_MODEL_E1000E,COMPUTE_IMAGE_TYPE_RAW,COMPUTE_NET_VIF_MODEL_NE2K_PCI,COMPUTE_IMAGE_TYPE_AMI,COMPUTE_VIOMMU_MODEL_INTEL,COMPUTE_SECURITY_TPM_2_0,COMPUTE_STORAGE_BUS_SCSI,COMPUTE_IMAGE_TYPE_ARI,COMPUTE_NET_VIF_MODEL_VMXNET3,COMPUTE_SECURITY_TPM_1_2,COMPUTE_NET_VIF_MODEL_E1000,HW_CPU_X86_SSE,COMPUTE_VOLUME_MULTI_ATTACH,COMPUTE_STORAGE_BUS_IDE,COMPUTE_GRAPHICS_MODEL_NONE,COMPUTE_VOLUME_ATTACH_WITH_TAG,COMPUTE_NET_VIF_MODEL_VIRTIO,HW_CPU_X86_SSE41,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_DEVICE_TAGGING,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_ACCELERATORS,COMPUTE_NET_VIF_MODEL_RTL8139,COMPUTE_GRAPHICS_MODEL_VGA,COMPUTE_STORAGE_BUS_SATA,COMPUTE_NET_VIF_MODEL_SPAPR_VLAN _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:825
Oct 02 12:00:09 compute-0 nova_compute[192079]: 2025-10-02 12:00:09.222 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:00:09 compute-0 nova_compute[192079]: 2025-10-02 12:00:09.240 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 0, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:00:09 compute-0 nova_compute[192079]: 2025-10-02 12:00:09.241 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:00:09 compute-0 nova_compute[192079]: 2025-10-02 12:00:09.241 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.423s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:00:10 compute-0 nova_compute[192079]: 2025-10-02 12:00:10.243 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:00:11 compute-0 nova_compute[192079]: 2025-10-02 12:00:11.207 2 DEBUG oslo_concurrency.lockutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Acquiring lock "54199f32-2d2a-4c54-a6bd-31d2d5675a46" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:00:11 compute-0 nova_compute[192079]: 2025-10-02 12:00:11.208 2 DEBUG oslo_concurrency.lockutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Lock "54199f32-2d2a-4c54-a6bd-31d2d5675a46" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:00:11 compute-0 nova_compute[192079]: 2025-10-02 12:00:11.233 2 DEBUG nova.compute.manager [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:00:11 compute-0 nova_compute[192079]: 2025-10-02 12:00:11.563 2 DEBUG oslo_concurrency.lockutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:00:11 compute-0 nova_compute[192079]: 2025-10-02 12:00:11.563 2 DEBUG oslo_concurrency.lockutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:00:11 compute-0 nova_compute[192079]: 2025-10-02 12:00:11.583 2 DEBUG nova.virt.hardware [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:00:11 compute-0 nova_compute[192079]: 2025-10-02 12:00:11.583 2 INFO nova.compute.claims [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:00:11 compute-0 nova_compute[192079]: 2025-10-02 12:00:11.920 2 DEBUG nova.compute.provider_tree [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:00:11 compute-0 nova_compute[192079]: 2025-10-02 12:00:11.935 2 DEBUG nova.scheduler.client.report [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 0, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:00:11 compute-0 nova_compute[192079]: 2025-10-02 12:00:11.967 2 DEBUG oslo_concurrency.lockutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.403s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:00:11 compute-0 nova_compute[192079]: 2025-10-02 12:00:11.968 2 DEBUG nova.compute.manager [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:00:12 compute-0 nova_compute[192079]: 2025-10-02 12:00:12.070 2 DEBUG nova.compute.manager [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:00:12 compute-0 nova_compute[192079]: 2025-10-02 12:00:12.070 2 DEBUG nova.network.neutron [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:00:12 compute-0 nova_compute[192079]: 2025-10-02 12:00:12.112 2 INFO nova.virt.libvirt.driver [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:00:12 compute-0 nova_compute[192079]: 2025-10-02 12:00:12.216 2 DEBUG nova.compute.manager [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:00:12 compute-0 nova_compute[192079]: 2025-10-02 12:00:12.415 2 DEBUG nova.compute.manager [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:00:12 compute-0 nova_compute[192079]: 2025-10-02 12:00:12.416 2 DEBUG nova.virt.libvirt.driver [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:00:12 compute-0 nova_compute[192079]: 2025-10-02 12:00:12.417 2 INFO nova.virt.libvirt.driver [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Creating image(s)
Oct 02 12:00:12 compute-0 nova_compute[192079]: 2025-10-02 12:00:12.418 2 DEBUG oslo_concurrency.lockutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Acquiring lock "/var/lib/nova/instances/54199f32-2d2a-4c54-a6bd-31d2d5675a46/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:00:12 compute-0 nova_compute[192079]: 2025-10-02 12:00:12.418 2 DEBUG oslo_concurrency.lockutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Lock "/var/lib/nova/instances/54199f32-2d2a-4c54-a6bd-31d2d5675a46/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:00:12 compute-0 nova_compute[192079]: 2025-10-02 12:00:12.418 2 DEBUG oslo_concurrency.lockutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Lock "/var/lib/nova/instances/54199f32-2d2a-4c54-a6bd-31d2d5675a46/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:00:12 compute-0 nova_compute[192079]: 2025-10-02 12:00:12.419 2 DEBUG oslo_concurrency.lockutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Image.cache.<locals>.fetch_func_sync" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:00:12 compute-0 nova_compute[192079]: 2025-10-02 12:00:12.419 2 DEBUG oslo_concurrency.lockutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Image.cache.<locals>.fetch_func_sync" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:00:13 compute-0 podman[219501]: 2025-10-02 12:00:13.157978475 +0000 UTC m=+0.078867766 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller, managed_by=edpm_ansible, org.label-schema.license=GPLv2, tcib_managed=true, container_name=ovn_controller, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:00:14 compute-0 nova_compute[192079]: 2025-10-02 12:00:14.284 2 DEBUG nova.network.neutron [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Automatically allocating a network for project 23de7e9a877e477cb52ac4d4c1410e0d. _auto_allocate_network /usr/lib/python3.9/site-packages/nova/network/neutron.py:2460
Oct 02 12:00:14 compute-0 nova_compute[192079]: 2025-10-02 12:00:14.966 2 DEBUG oslo_concurrency.processutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955.part --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:00:15 compute-0 nova_compute[192079]: 2025-10-02 12:00:15.025 2 DEBUG oslo_concurrency.processutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955.part --force-share --output=json" returned: 0 in 0.059s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:00:15 compute-0 nova_compute[192079]: 2025-10-02 12:00:15.027 2 DEBUG nova.virt.images [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] cf60d86d-f1d5-4be4-976e-7488dbdcf0b2 was qcow2, converting to raw fetch_to_raw /usr/lib/python3.9/site-packages/nova/virt/images.py:242
Oct 02 12:00:15 compute-0 nova_compute[192079]: 2025-10-02 12:00:15.028 2 DEBUG nova.privsep.utils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Path '/var/lib/nova/instances' supports direct I/O supports_direct_io /usr/lib/python3.9/site-packages/nova/privsep/utils.py:63
Oct 02 12:00:15 compute-0 nova_compute[192079]: 2025-10-02 12:00:15.029 2 DEBUG oslo_concurrency.processutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Running cmd (subprocess): qemu-img convert -t none -O raw -f qcow2 /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955.part /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955.converted execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:00:15 compute-0 nova_compute[192079]: 2025-10-02 12:00:15.254 2 DEBUG oslo_concurrency.processutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] CMD "qemu-img convert -t none -O raw -f qcow2 /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955.part /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955.converted" returned: 0 in 0.224s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:00:15 compute-0 nova_compute[192079]: 2025-10-02 12:00:15.258 2 DEBUG oslo_concurrency.processutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955.converted --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:00:15 compute-0 nova_compute[192079]: 2025-10-02 12:00:15.309 2 DEBUG oslo_concurrency.processutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955.converted --force-share --output=json" returned: 0 in 0.051s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:00:15 compute-0 nova_compute[192079]: 2025-10-02 12:00:15.311 2 DEBUG oslo_concurrency.lockutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Image.cache.<locals>.fetch_func_sync" :: held 2.891s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:00:15 compute-0 nova_compute[192079]: 2025-10-02 12:00:15.327 2 INFO oslo.privsep.daemon [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Running privsep helper: ['sudo', 'nova-rootwrap', '/etc/nova/rootwrap.conf', 'privsep-helper', '--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-compute.conf', '--config-dir', '/etc/nova/nova.conf.d', '--privsep_context', 'nova.privsep.sys_admin_pctxt', '--privsep_sock_path', '/tmp/tmpap4hsrhy/privsep.sock']
Oct 02 12:00:15 compute-0 nova_compute[192079]: 2025-10-02 12:00:15.954 2 INFO oslo.privsep.daemon [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Spawned new privsep daemon via rootwrap
Oct 02 12:00:15 compute-0 nova_compute[192079]: 2025-10-02 12:00:15.830 55 INFO oslo.privsep.daemon [-] privsep daemon starting
Oct 02 12:00:15 compute-0 nova_compute[192079]: 2025-10-02 12:00:15.835 55 INFO oslo.privsep.daemon [-] privsep process running with uid/gid: 0/0
Oct 02 12:00:15 compute-0 nova_compute[192079]: 2025-10-02 12:00:15.838 55 INFO oslo.privsep.daemon [-] privsep process running with capabilities (eff/prm/inh): CAP_CHOWN|CAP_DAC_OVERRIDE|CAP_DAC_READ_SEARCH|CAP_FOWNER|CAP_NET_ADMIN|CAP_SYS_ADMIN/CAP_CHOWN|CAP_DAC_OVERRIDE|CAP_DAC_READ_SEARCH|CAP_FOWNER|CAP_NET_ADMIN|CAP_SYS_ADMIN/none
Oct 02 12:00:15 compute-0 nova_compute[192079]: 2025-10-02 12:00:15.838 55 INFO oslo.privsep.daemon [-] privsep daemon running as pid 55
Oct 02 12:00:16 compute-0 nova_compute[192079]: 2025-10-02 12:00:16.046 2 DEBUG oslo_concurrency.processutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:00:16 compute-0 nova_compute[192079]: 2025-10-02 12:00:16.095 2 DEBUG oslo_concurrency.processutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.049s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:00:16 compute-0 nova_compute[192079]: 2025-10-02 12:00:16.096 2 DEBUG oslo_concurrency.lockutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:00:16 compute-0 nova_compute[192079]: 2025-10-02 12:00:16.096 2 DEBUG oslo_concurrency.lockutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:00:16 compute-0 nova_compute[192079]: 2025-10-02 12:00:16.106 2 DEBUG oslo_concurrency.processutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:00:16 compute-0 nova_compute[192079]: 2025-10-02 12:00:16.193 2 DEBUG oslo_concurrency.processutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.087s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:00:16 compute-0 nova_compute[192079]: 2025-10-02 12:00:16.195 2 DEBUG oslo_concurrency.processutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/54199f32-2d2a-4c54-a6bd-31d2d5675a46/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:00:16 compute-0 nova_compute[192079]: 2025-10-02 12:00:16.237 2 DEBUG oslo_concurrency.processutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/54199f32-2d2a-4c54-a6bd-31d2d5675a46/disk 1073741824" returned: 0 in 0.042s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:00:16 compute-0 nova_compute[192079]: 2025-10-02 12:00:16.239 2 DEBUG oslo_concurrency.lockutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.143s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:00:16 compute-0 nova_compute[192079]: 2025-10-02 12:00:16.240 2 DEBUG oslo_concurrency.processutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:00:16 compute-0 nova_compute[192079]: 2025-10-02 12:00:16.307 2 DEBUG oslo_concurrency.processutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.067s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:00:16 compute-0 nova_compute[192079]: 2025-10-02 12:00:16.308 2 DEBUG nova.virt.disk.api [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Checking if we can resize image /var/lib/nova/instances/54199f32-2d2a-4c54-a6bd-31d2d5675a46/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:00:16 compute-0 nova_compute[192079]: 2025-10-02 12:00:16.308 2 DEBUG oslo_concurrency.processutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/54199f32-2d2a-4c54-a6bd-31d2d5675a46/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:00:16 compute-0 nova_compute[192079]: 2025-10-02 12:00:16.357 2 DEBUG oslo_concurrency.processutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/54199f32-2d2a-4c54-a6bd-31d2d5675a46/disk --force-share --output=json" returned: 0 in 0.048s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:00:16 compute-0 nova_compute[192079]: 2025-10-02 12:00:16.358 2 DEBUG nova.virt.disk.api [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Cannot resize image /var/lib/nova/instances/54199f32-2d2a-4c54-a6bd-31d2d5675a46/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:00:16 compute-0 nova_compute[192079]: 2025-10-02 12:00:16.358 2 DEBUG nova.objects.instance [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Lazy-loading 'migration_context' on Instance uuid 54199f32-2d2a-4c54-a6bd-31d2d5675a46 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:00:16 compute-0 nova_compute[192079]: 2025-10-02 12:00:16.386 2 DEBUG nova.virt.libvirt.driver [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:00:16 compute-0 nova_compute[192079]: 2025-10-02 12:00:16.387 2 DEBUG nova.virt.libvirt.driver [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Ensure instance console log exists: /var/lib/nova/instances/54199f32-2d2a-4c54-a6bd-31d2d5675a46/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:00:16 compute-0 nova_compute[192079]: 2025-10-02 12:00:16.387 2 DEBUG oslo_concurrency.lockutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:00:16 compute-0 nova_compute[192079]: 2025-10-02 12:00:16.387 2 DEBUG oslo_concurrency.lockutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:00:16 compute-0 nova_compute[192079]: 2025-10-02 12:00:16.388 2 DEBUG oslo_concurrency.lockutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:00:17 compute-0 podman[219563]: 2025-10-02 12:00:17.162151583 +0000 UTC m=+0.071733696 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, io.buildah.version=1.41.3, managed_by=edpm_ansible, tcib_managed=true, container_name=ovn_metadata_agent)
Oct 02 12:00:17 compute-0 podman[219564]: 2025-10-02 12:00:17.172186344 +0000 UTC m=+0.076220862 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:00:23 compute-0 podman[219606]: 2025-10-02 12:00:23.16876201 +0000 UTC m=+0.075577854 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ceilometer_agent_compute, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, 
org.label-schema.build-date=20251001, tcib_managed=true, config_id=edpm, io.buildah.version=1.41.3)
Oct 02 12:00:28 compute-0 podman[219627]: 2025-10-02 12:00:28.14384451 +0000 UTC m=+0.059406472 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, vendor=Red Hat, Inc., com.redhat.component=ubi9-minimal-container, url=https://catalog.redhat.com/en/search?searchType=containers, architecture=x86_64, vcs-type=git, version=9.6, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., io.buildah.version=1.33.7, io.openshift.tags=minimal rhel9, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, managed_by=edpm_ansible, config_id=edpm, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., name=ubi9-minimal, release=1755695350, maintainer=Red Hat, Inc., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, build-date=2025-08-20T13:12:41, io.openshift.expose-services=, distribution-scope=public, container_name=openstack_network_exporter, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9.)
Oct 02 12:00:28 compute-0 podman[219628]: 2025-10-02 12:00:28.15778484 +0000 UTC m=+0.067083437 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, managed_by=edpm_ansible, container_name=multipathd, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0)
Oct 02 12:00:32 compute-0 nova_compute[192079]: 2025-10-02 12:00:32.739 2 DEBUG oslo_concurrency.lockutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Acquiring lock "a3d563c1-37ae-41be-a49b-ee6efeccfc94" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:00:32 compute-0 nova_compute[192079]: 2025-10-02 12:00:32.739 2 DEBUG oslo_concurrency.lockutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "a3d563c1-37ae-41be-a49b-ee6efeccfc94" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:00:32 compute-0 nova_compute[192079]: 2025-10-02 12:00:32.774 2 DEBUG nova.compute.manager [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:00:32 compute-0 nova_compute[192079]: 2025-10-02 12:00:32.932 2 DEBUG oslo_concurrency.lockutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:00:32 compute-0 nova_compute[192079]: 2025-10-02 12:00:32.932 2 DEBUG oslo_concurrency.lockutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:00:32 compute-0 nova_compute[192079]: 2025-10-02 12:00:32.941 2 DEBUG nova.virt.hardware [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:00:32 compute-0 nova_compute[192079]: 2025-10-02 12:00:32.941 2 INFO nova.compute.claims [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:00:33 compute-0 nova_compute[192079]: 2025-10-02 12:00:33.203 2 DEBUG nova.compute.provider_tree [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Updating inventory in ProviderTree for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 with inventory: {'MEMORY_MB': {'total': 7679, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0, 'reserved': 512}, 'VCPU': {'total': 8, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0, 'reserved': 0}, 'DISK_GB': {'total': 79, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9, 'reserved': 1}} update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:176
Oct 02 12:00:33 compute-0 nova_compute[192079]: 2025-10-02 12:00:33.260 2 ERROR nova.scheduler.client.report [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [req-194c9970-ee4d-456d-872b-69ce42671ea4] Failed to update inventory to [{'MEMORY_MB': {'total': 7679, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0, 'reserved': 512}, 'VCPU': {'total': 8, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0, 'reserved': 0}, 'DISK_GB': {'total': 79, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9, 'reserved': 1}}] for resource provider with UUID 55f2ae21-42ea-47d7-8c73-c3134981d708.  Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict  ", "code": "placement.concurrent_update", "request_id": "req-194c9970-ee4d-456d-872b-69ce42671ea4"}]}
Oct 02 12:00:33 compute-0 nova_compute[192079]: 2025-10-02 12:00:33.277 2 DEBUG nova.scheduler.client.report [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Refreshing inventories for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708 _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:804
Oct 02 12:00:33 compute-0 nova_compute[192079]: 2025-10-02 12:00:33.300 2 DEBUG nova.scheduler.client.report [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Updating ProviderTree inventory for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 from _refresh_and_get_inventory using data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 0, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} _refresh_and_get_inventory /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:768
Oct 02 12:00:33 compute-0 nova_compute[192079]: 2025-10-02 12:00:33.300 2 DEBUG nova.compute.provider_tree [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Updating inventory in ProviderTree for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 with inventory: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 0, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:176
Oct 02 12:00:33 compute-0 nova_compute[192079]: 2025-10-02 12:00:33.316 2 DEBUG nova.scheduler.client.report [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Refreshing aggregate associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, aggregates: None _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:813
Oct 02 12:00:33 compute-0 nova_compute[192079]: 2025-10-02 12:00:33.337 2 DEBUG nova.network.neutron [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Automatically allocated network: {'id': '0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6', 'name': 'auto_allocated_network', 'tenant_id': '23de7e9a877e477cb52ac4d4c1410e0d', 'admin_state_up': True, 'mtu': 1442, 'status': 'ACTIVE', 'subnets': ['6a2058e4-dc89-48d3-88fc-bc95dba8da8b', 'd2e1858b-8344-4341-91f5-cb724ceffc0a'], 'shared': False, 'availability_zone_hints': [], 'availability_zones': [], 'ipv4_address_scope': None, 'ipv6_address_scope': None, 'router:external': False, 'description': '', 'qos_policy_id': None, 'port_security_enabled': True, 'dns_domain': '', 'l2_adjacency': True, 'tags': [], 'created_at': '2025-10-02T12:00:14Z', 'updated_at': '2025-10-02T12:00:26Z', 'revision_number': 4, 'project_id': '23de7e9a877e477cb52ac4d4c1410e0d'} _auto_allocate_network /usr/lib/python3.9/site-packages/nova/network/neutron.py:2478
Oct 02 12:00:33 compute-0 nova_compute[192079]: 2025-10-02 12:00:33.356 2 WARNING oslo_policy.policy [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] JSON formatted policy_file support is deprecated since Victoria release. You need to use YAML format which will be default in future. You can use ``oslopolicy-convert-json-to-yaml`` tool to convert existing JSON-formatted policy file to YAML-formatted in backward compatible way: https://docs.openstack.org/oslo.policy/latest/cli/oslopolicy-convert-json-to-yaml.html.
Oct 02 12:00:33 compute-0 nova_compute[192079]: 2025-10-02 12:00:33.357 2 WARNING oslo_policy.policy [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] JSON formatted policy_file support is deprecated since Victoria release. You need to use YAML format which will be default in future. You can use ``oslopolicy-convert-json-to-yaml`` tool to convert existing JSON-formatted policy file to YAML-formatted in backward compatible way: https://docs.openstack.org/oslo.policy/latest/cli/oslopolicy-convert-json-to-yaml.html.
Oct 02 12:00:33 compute-0 nova_compute[192079]: 2025-10-02 12:00:33.361 2 DEBUG nova.policy [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4e1cdf41d58b4774b94da988b9e8db73', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '23de7e9a877e477cb52ac4d4c1410e0d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:00:33 compute-0 nova_compute[192079]: 2025-10-02 12:00:33.366 2 DEBUG nova.scheduler.client.report [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Refreshing trait associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, traits: COMPUTE_SECURITY_UEFI_SECURE_BOOT,COMPUTE_VIOMMU_MODEL_VIRTIO,COMPUTE_VIOMMU_MODEL_AUTO,COMPUTE_IMAGE_TYPE_AKI,COMPUTE_GRAPHICS_MODEL_VIRTIO,COMPUTE_NET_VIF_MODEL_PCNET,HW_CPU_X86_SSE42,COMPUTE_RESCUE_BFV,COMPUTE_VOLUME_EXTEND,COMPUTE_IMAGE_TYPE_QCOW2,COMPUTE_TRUSTED_CERTS,COMPUTE_SOCKET_PCI_NUMA_AFFINITY,COMPUTE_GRAPHICS_MODEL_CIRRUS,HW_CPU_X86_MMX,COMPUTE_STORAGE_BUS_VIRTIO,COMPUTE_NET_ATTACH_INTERFACE_WITH_TAG,COMPUTE_STORAGE_BUS_FDC,COMPUTE_STORAGE_BUS_USB,COMPUTE_NODE,HW_CPU_X86_SSSE3,HW_CPU_X86_SSE2,COMPUTE_GRAPHICS_MODEL_BOCHS,COMPUTE_NET_VIF_MODEL_E1000E,COMPUTE_IMAGE_TYPE_RAW,COMPUTE_NET_VIF_MODEL_NE2K_PCI,COMPUTE_IMAGE_TYPE_AMI,COMPUTE_VIOMMU_MODEL_INTEL,COMPUTE_SECURITY_TPM_2_0,COMPUTE_STORAGE_BUS_SCSI,COMPUTE_IMAGE_TYPE_ARI,COMPUTE_NET_VIF_MODEL_VMXNET3,COMPUTE_SECURITY_TPM_1_2,COMPUTE_NET_VIF_MODEL_E1000,HW_CPU_X86_SSE,COMPUTE_VOLUME_MULTI_ATTACH,COMPUTE_STORAGE_BUS_IDE,COMPUTE_GRAPHICS_MODEL_NONE,COMPUTE_VOLUME_ATTACH_WITH_TAG,COMPUTE_NET_VIF_MODEL_VIRTIO,HW_CPU_X86_SSE41,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_DEVICE_TAGGING,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_ACCELERATORS,COMPUTE_NET_VIF_MODEL_RTL8139,COMPUTE_GRAPHICS_MODEL_VGA,COMPUTE_STORAGE_BUS_SATA,COMPUTE_NET_VIF_MODEL_SPAPR_VLAN _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:825
Oct 02 12:00:33 compute-0 nova_compute[192079]: 2025-10-02 12:00:33.457 2 DEBUG nova.compute.provider_tree [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Updating inventory in ProviderTree for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 with inventory: {'MEMORY_MB': {'total': 7679, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0, 'reserved': 512}, 'VCPU': {'total': 8, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0, 'reserved': 0}, 'DISK_GB': {'total': 79, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9, 'reserved': 1}} update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:176
Oct 02 12:00:33 compute-0 nova_compute[192079]: 2025-10-02 12:00:33.565 2 DEBUG nova.scheduler.client.report [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Updated inventory for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 with generation 8 in Placement from set_inventory_for_provider using data: {'MEMORY_MB': {'total': 7679, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0, 'reserved': 512}, 'VCPU': {'total': 8, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0, 'reserved': 0}, 'DISK_GB': {'total': 79, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9, 'reserved': 1}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:957
Oct 02 12:00:33 compute-0 nova_compute[192079]: 2025-10-02 12:00:33.566 2 DEBUG nova.compute.provider_tree [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Updating resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708 generation from 8 to 9 during operation: update_inventory _update_generation /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:164
Oct 02 12:00:33 compute-0 nova_compute[192079]: 2025-10-02 12:00:33.567 2 DEBUG nova.compute.provider_tree [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Updating inventory in ProviderTree for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 with inventory: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:176
Oct 02 12:00:33 compute-0 nova_compute[192079]: 2025-10-02 12:00:33.612 2 DEBUG oslo_concurrency.lockutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.680s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:00:33 compute-0 nova_compute[192079]: 2025-10-02 12:00:33.614 2 DEBUG nova.compute.manager [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:00:33 compute-0 nova_compute[192079]: 2025-10-02 12:00:33.729 2 DEBUG nova.compute.manager [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:00:33 compute-0 nova_compute[192079]: 2025-10-02 12:00:33.729 2 DEBUG nova.network.neutron [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:00:33 compute-0 nova_compute[192079]: 2025-10-02 12:00:33.756 2 INFO nova.virt.libvirt.driver [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:00:33 compute-0 nova_compute[192079]: 2025-10-02 12:00:33.789 2 DEBUG nova.compute.manager [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:00:33 compute-0 nova_compute[192079]: 2025-10-02 12:00:33.952 2 DEBUG nova.compute.manager [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:00:33 compute-0 nova_compute[192079]: 2025-10-02 12:00:33.953 2 DEBUG nova.virt.libvirt.driver [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:00:33 compute-0 nova_compute[192079]: 2025-10-02 12:00:33.953 2 INFO nova.virt.libvirt.driver [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Creating image(s)
Oct 02 12:00:33 compute-0 nova_compute[192079]: 2025-10-02 12:00:33.954 2 DEBUG oslo_concurrency.lockutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Acquiring lock "/var/lib/nova/instances/a3d563c1-37ae-41be-a49b-ee6efeccfc94/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:00:33 compute-0 nova_compute[192079]: 2025-10-02 12:00:33.954 2 DEBUG oslo_concurrency.lockutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "/var/lib/nova/instances/a3d563c1-37ae-41be-a49b-ee6efeccfc94/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:00:33 compute-0 nova_compute[192079]: 2025-10-02 12:00:33.954 2 DEBUG oslo_concurrency.lockutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "/var/lib/nova/instances/a3d563c1-37ae-41be-a49b-ee6efeccfc94/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:00:33 compute-0 nova_compute[192079]: 2025-10-02 12:00:33.967 2 DEBUG oslo_concurrency.processutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:00:34 compute-0 nova_compute[192079]: 2025-10-02 12:00:34.027 2 DEBUG oslo_concurrency.processutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.060s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:00:34 compute-0 nova_compute[192079]: 2025-10-02 12:00:34.028 2 DEBUG oslo_concurrency.lockutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:00:34 compute-0 nova_compute[192079]: 2025-10-02 12:00:34.028 2 DEBUG oslo_concurrency.lockutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:00:34 compute-0 nova_compute[192079]: 2025-10-02 12:00:34.038 2 DEBUG oslo_concurrency.processutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:00:34 compute-0 nova_compute[192079]: 2025-10-02 12:00:34.101 2 DEBUG oslo_concurrency.processutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.063s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:00:34 compute-0 nova_compute[192079]: 2025-10-02 12:00:34.102 2 DEBUG oslo_concurrency.processutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/a3d563c1-37ae-41be-a49b-ee6efeccfc94/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:00:34 compute-0 nova_compute[192079]: 2025-10-02 12:00:34.134 2 DEBUG oslo_concurrency.processutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/a3d563c1-37ae-41be-a49b-ee6efeccfc94/disk 1073741824" returned: 0 in 0.032s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:00:34 compute-0 nova_compute[192079]: 2025-10-02 12:00:34.135 2 DEBUG oslo_concurrency.lockutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.107s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:00:34 compute-0 nova_compute[192079]: 2025-10-02 12:00:34.136 2 DEBUG oslo_concurrency.processutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:00:34 compute-0 nova_compute[192079]: 2025-10-02 12:00:34.183 2 DEBUG nova.policy [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '67132a26bb4c454aa5ed0e4b8fee032c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '302a9c83c3eb43818ce6284e9ddb73be', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:00:34 compute-0 nova_compute[192079]: 2025-10-02 12:00:34.190 2 DEBUG oslo_concurrency.processutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:00:34 compute-0 nova_compute[192079]: 2025-10-02 12:00:34.191 2 DEBUG nova.virt.disk.api [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Checking if we can resize image /var/lib/nova/instances/a3d563c1-37ae-41be-a49b-ee6efeccfc94/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:00:34 compute-0 nova_compute[192079]: 2025-10-02 12:00:34.191 2 DEBUG oslo_concurrency.processutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a3d563c1-37ae-41be-a49b-ee6efeccfc94/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:00:34 compute-0 nova_compute[192079]: 2025-10-02 12:00:34.244 2 DEBUG oslo_concurrency.processutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a3d563c1-37ae-41be-a49b-ee6efeccfc94/disk --force-share --output=json" returned: 0 in 0.052s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:00:34 compute-0 nova_compute[192079]: 2025-10-02 12:00:34.245 2 DEBUG nova.virt.disk.api [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Cannot resize image /var/lib/nova/instances/a3d563c1-37ae-41be-a49b-ee6efeccfc94/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:00:34 compute-0 nova_compute[192079]: 2025-10-02 12:00:34.245 2 DEBUG nova.objects.instance [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lazy-loading 'migration_context' on Instance uuid a3d563c1-37ae-41be-a49b-ee6efeccfc94 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:00:34 compute-0 nova_compute[192079]: 2025-10-02 12:00:34.269 2 DEBUG nova.virt.libvirt.driver [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:00:34 compute-0 nova_compute[192079]: 2025-10-02 12:00:34.269 2 DEBUG nova.virt.libvirt.driver [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Ensure instance console log exists: /var/lib/nova/instances/a3d563c1-37ae-41be-a49b-ee6efeccfc94/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:00:34 compute-0 nova_compute[192079]: 2025-10-02 12:00:34.270 2 DEBUG oslo_concurrency.lockutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:00:34 compute-0 nova_compute[192079]: 2025-10-02 12:00:34.270 2 DEBUG oslo_concurrency.lockutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:00:34 compute-0 nova_compute[192079]: 2025-10-02 12:00:34.271 2 DEBUG oslo_concurrency.lockutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:00:35 compute-0 podman[219683]: 2025-10-02 12:00:35.166338716 +0000 UTC m=+0.072060126 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']})
Oct 02 12:00:35 compute-0 podman[219684]: 2025-10-02 12:00:35.181713636 +0000 UTC m=+0.083340092 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, config_id=iscsid, container_name=iscsid, tcib_managed=true, managed_by=edpm_ansible, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.build-date=20251001)
Oct 02 12:00:35 compute-0 nova_compute[192079]: 2025-10-02 12:00:35.336 2 DEBUG nova.network.neutron [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Successfully created port: 92d67693-7b14-496d-85fc-00362ed0e9f5 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:00:36 compute-0 nova_compute[192079]: 2025-10-02 12:00:36.927 2 DEBUG nova.network.neutron [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Successfully created port: d92bbd66-2dd9-44e3-a834-a92797ae8d1f _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:00:37 compute-0 nova_compute[192079]: 2025-10-02 12:00:37.107 2 DEBUG nova.network.neutron [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Successfully updated port: 92d67693-7b14-496d-85fc-00362ed0e9f5 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:00:37 compute-0 nova_compute[192079]: 2025-10-02 12:00:37.134 2 DEBUG oslo_concurrency.lockutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Acquiring lock "refresh_cache-54199f32-2d2a-4c54-a6bd-31d2d5675a46" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:00:37 compute-0 nova_compute[192079]: 2025-10-02 12:00:37.135 2 DEBUG oslo_concurrency.lockutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Acquired lock "refresh_cache-54199f32-2d2a-4c54-a6bd-31d2d5675a46" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:00:37 compute-0 nova_compute[192079]: 2025-10-02 12:00:37.135 2 DEBUG nova.network.neutron [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:00:37 compute-0 nova_compute[192079]: 2025-10-02 12:00:37.594 2 DEBUG nova.network.neutron [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:00:37 compute-0 nova_compute[192079]: 2025-10-02 12:00:37.932 2 DEBUG nova.compute.manager [req-0e60c3a7-4d7a-4453-b590-91ad6289f707 req-b16bc9ea-e9e7-4847-a99b-d20df11fb21a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Received event network-changed-92d67693-7b14-496d-85fc-00362ed0e9f5 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:00:37 compute-0 nova_compute[192079]: 2025-10-02 12:00:37.932 2 DEBUG nova.compute.manager [req-0e60c3a7-4d7a-4453-b590-91ad6289f707 req-b16bc9ea-e9e7-4847-a99b-d20df11fb21a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Refreshing instance network info cache due to event network-changed-92d67693-7b14-496d-85fc-00362ed0e9f5. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:00:37 compute-0 nova_compute[192079]: 2025-10-02 12:00:37.932 2 DEBUG oslo_concurrency.lockutils [req-0e60c3a7-4d7a-4453-b590-91ad6289f707 req-b16bc9ea-e9e7-4847-a99b-d20df11fb21a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-54199f32-2d2a-4c54-a6bd-31d2d5675a46" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.250 2 DEBUG nova.network.neutron [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Successfully updated port: d92bbd66-2dd9-44e3-a834-a92797ae8d1f _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.266 2 DEBUG oslo_concurrency.lockutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Acquiring lock "refresh_cache-a3d563c1-37ae-41be-a49b-ee6efeccfc94" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.267 2 DEBUG oslo_concurrency.lockutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Acquired lock "refresh_cache-a3d563c1-37ae-41be-a49b-ee6efeccfc94" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.267 2 DEBUG nova.network.neutron [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.751 2 DEBUG nova.network.neutron [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.815 2 DEBUG nova.compute.manager [req-402d8053-77f4-4e66-a568-e63455947141 req-4cb97c95-f4a7-4304-9f97-228d2c4743ec 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Received event network-changed-d92bbd66-2dd9-44e3-a834-a92797ae8d1f external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.816 2 DEBUG nova.compute.manager [req-402d8053-77f4-4e66-a568-e63455947141 req-4cb97c95-f4a7-4304-9f97-228d2c4743ec 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Refreshing instance network info cache due to event network-changed-d92bbd66-2dd9-44e3-a834-a92797ae8d1f. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.816 2 DEBUG oslo_concurrency.lockutils [req-402d8053-77f4-4e66-a568-e63455947141 req-4cb97c95-f4a7-4304-9f97-228d2c4743ec 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-a3d563c1-37ae-41be-a49b-ee6efeccfc94" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.880 2 DEBUG nova.network.neutron [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Updating instance_info_cache with network_info: [{"id": "92d67693-7b14-496d-85fc-00362ed0e9f5", "address": "fa:16:3e:5f:29:fc", "network": {"id": "0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6", "bridge": "br-int", "label": "auto_allocated_network", "subnets": [{"cidr": "10.1.0.0/26", "dns": [], "gateway": {"address": "10.1.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.1.0.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}, {"cidr": "fdfe:381f:8400::/64", "dns": [], "gateway": {"address": "fdfe:381f:8400::1", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "fdfe:381f:8400::287", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "23de7e9a877e477cb52ac4d4c1410e0d", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92d67693-7b", "ovs_interfaceid": "92d67693-7b14-496d-85fc-00362ed0e9f5", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.913 2 DEBUG oslo_concurrency.lockutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Releasing lock "refresh_cache-54199f32-2d2a-4c54-a6bd-31d2d5675a46" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.913 2 DEBUG nova.compute.manager [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Instance network_info: |[{"id": "92d67693-7b14-496d-85fc-00362ed0e9f5", "address": "fa:16:3e:5f:29:fc", "network": {"id": "0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6", "bridge": "br-int", "label": "auto_allocated_network", "subnets": [{"cidr": "10.1.0.0/26", "dns": [], "gateway": {"address": "10.1.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.1.0.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}, {"cidr": "fdfe:381f:8400::/64", "dns": [], "gateway": {"address": "fdfe:381f:8400::1", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "fdfe:381f:8400::287", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "23de7e9a877e477cb52ac4d4c1410e0d", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92d67693-7b", "ovs_interfaceid": "92d67693-7b14-496d-85fc-00362ed0e9f5", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.914 2 DEBUG oslo_concurrency.lockutils [req-0e60c3a7-4d7a-4453-b590-91ad6289f707 req-b16bc9ea-e9e7-4847-a99b-d20df11fb21a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-54199f32-2d2a-4c54-a6bd-31d2d5675a46" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.915 2 DEBUG nova.network.neutron [req-0e60c3a7-4d7a-4453-b590-91ad6289f707 req-b16bc9ea-e9e7-4847-a99b-d20df11fb21a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Refreshing network info cache for port 92d67693-7b14-496d-85fc-00362ed0e9f5 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.921 2 DEBUG nova.virt.libvirt.driver [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Start _get_guest_xml network_info=[{"id": "92d67693-7b14-496d-85fc-00362ed0e9f5", "address": "fa:16:3e:5f:29:fc", "network": {"id": "0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6", "bridge": "br-int", "label": "auto_allocated_network", "subnets": [{"cidr": "10.1.0.0/26", "dns": [], "gateway": {"address": "10.1.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.1.0.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}, {"cidr": "fdfe:381f:8400::/64", "dns": [], "gateway": {"address": "fdfe:381f:8400::1", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "fdfe:381f:8400::287", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "23de7e9a877e477cb52ac4d4c1410e0d", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92d67693-7b", "ovs_interfaceid": "92d67693-7b14-496d-85fc-00362ed0e9f5", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.926 2 WARNING nova.virt.libvirt.driver [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.934 2 DEBUG nova.virt.libvirt.host [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.935 2 DEBUG nova.virt.libvirt.host [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.942 2 DEBUG nova.virt.libvirt.host [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.943 2 DEBUG nova.virt.libvirt.host [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.944 2 DEBUG nova.virt.libvirt.driver [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.945 2 DEBUG nova.virt.hardware [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.945 2 DEBUG nova.virt.hardware [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.946 2 DEBUG nova.virt.hardware [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.946 2 DEBUG nova.virt.hardware [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.946 2 DEBUG nova.virt.hardware [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.947 2 DEBUG nova.virt.hardware [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.947 2 DEBUG nova.virt.hardware [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.948 2 DEBUG nova.virt.hardware [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.948 2 DEBUG nova.virt.hardware [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.948 2 DEBUG nova.virt.hardware [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.949 2 DEBUG nova.virt.hardware [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.953 2 DEBUG nova.privsep.utils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Path '/var/lib/nova/instances' supports direct I/O supports_direct_io /usr/lib/python3.9/site-packages/nova/privsep/utils.py:63
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.954 2 DEBUG nova.virt.libvirt.vif [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:00:09Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description=None,display_name='tempest-tempest.common.compute-instance-549213814-2',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-tempest-common-compute-instance-549213814-2',id=3,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=1,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='23de7e9a877e477cb52ac4d4c1410e0d',ramdisk_id='',reservation_id='r-2zs6ym1o',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-AutoAllocateNetworkTest-1436985778',owner_user_name='tempest-AutoAllocateNetworkTest-1436985778-project-member'},tags=TagLis
t,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:00:12Z,user_data=None,user_id='4e1cdf41d58b4774b94da988b9e8db73',uuid=54199f32-2d2a-4c54-a6bd-31d2d5675a46,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "92d67693-7b14-496d-85fc-00362ed0e9f5", "address": "fa:16:3e:5f:29:fc", "network": {"id": "0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6", "bridge": "br-int", "label": "auto_allocated_network", "subnets": [{"cidr": "10.1.0.0/26", "dns": [], "gateway": {"address": "10.1.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.1.0.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}, {"cidr": "fdfe:381f:8400::/64", "dns": [], "gateway": {"address": "fdfe:381f:8400::1", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "fdfe:381f:8400::287", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "23de7e9a877e477cb52ac4d4c1410e0d", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92d67693-7b", "ovs_interfaceid": "92d67693-7b14-496d-85fc-00362ed0e9f5", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.954 2 DEBUG nova.network.os_vif_util [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Converting VIF {"id": "92d67693-7b14-496d-85fc-00362ed0e9f5", "address": "fa:16:3e:5f:29:fc", "network": {"id": "0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6", "bridge": "br-int", "label": "auto_allocated_network", "subnets": [{"cidr": "10.1.0.0/26", "dns": [], "gateway": {"address": "10.1.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.1.0.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}, {"cidr": "fdfe:381f:8400::/64", "dns": [], "gateway": {"address": "fdfe:381f:8400::1", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "fdfe:381f:8400::287", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "23de7e9a877e477cb52ac4d4c1410e0d", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92d67693-7b", "ovs_interfaceid": "92d67693-7b14-496d-85fc-00362ed0e9f5", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.956 2 DEBUG nova.network.os_vif_util [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:5f:29:fc,bridge_name='br-int',has_traffic_filtering=True,id=92d67693-7b14-496d-85fc-00362ed0e9f5,network=Network(0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap92d67693-7b') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.957 2 DEBUG nova.objects.instance [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Lazy-loading 'pci_devices' on Instance uuid 54199f32-2d2a-4c54-a6bd-31d2d5675a46 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.971 2 DEBUG nova.virt.libvirt.driver [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:00:39 compute-0 nova_compute[192079]:   <uuid>54199f32-2d2a-4c54-a6bd-31d2d5675a46</uuid>
Oct 02 12:00:39 compute-0 nova_compute[192079]:   <name>instance-00000003</name>
Oct 02 12:00:39 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:00:39 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:00:39 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:00:39 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:       <nova:name>tempest-tempest.common.compute-instance-549213814-2</nova:name>
Oct 02 12:00:39 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:00:39</nova:creationTime>
Oct 02 12:00:39 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:00:39 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:00:39 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:00:39 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:00:39 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:00:39 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:00:39 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:00:39 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:00:39 compute-0 nova_compute[192079]:         <nova:user uuid="4e1cdf41d58b4774b94da988b9e8db73">tempest-AutoAllocateNetworkTest-1436985778-project-member</nova:user>
Oct 02 12:00:39 compute-0 nova_compute[192079]:         <nova:project uuid="23de7e9a877e477cb52ac4d4c1410e0d">tempest-AutoAllocateNetworkTest-1436985778</nova:project>
Oct 02 12:00:39 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:00:39 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:00:39 compute-0 nova_compute[192079]:         <nova:port uuid="92d67693-7b14-496d-85fc-00362ed0e9f5">
Oct 02 12:00:39 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.1.0.48" ipVersion="4"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="fdfe:381f:8400::287" ipVersion="6"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:00:39 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:00:39 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:00:39 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <system>
Oct 02 12:00:39 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:00:39 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:00:39 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:00:39 compute-0 nova_compute[192079]:       <entry name="serial">54199f32-2d2a-4c54-a6bd-31d2d5675a46</entry>
Oct 02 12:00:39 compute-0 nova_compute[192079]:       <entry name="uuid">54199f32-2d2a-4c54-a6bd-31d2d5675a46</entry>
Oct 02 12:00:39 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     </system>
Oct 02 12:00:39 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:00:39 compute-0 nova_compute[192079]:   <os>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:   </os>
Oct 02 12:00:39 compute-0 nova_compute[192079]:   <features>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:   </features>
Oct 02 12:00:39 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:00:39 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:00:39 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:00:39 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/54199f32-2d2a-4c54-a6bd-31d2d5675a46/disk"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:00:39 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/54199f32-2d2a-4c54-a6bd-31d2d5675a46/disk.config"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:00:39 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:5f:29:fc"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:       <target dev="tap92d67693-7b"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:00:39 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/54199f32-2d2a-4c54-a6bd-31d2d5675a46/console.log" append="off"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <video>
Oct 02 12:00:39 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     </video>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:00:39 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:00:39 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:00:39 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:00:39 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:00:39 compute-0 nova_compute[192079]: </domain>
Oct 02 12:00:39 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.972 2 DEBUG nova.compute.manager [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Preparing to wait for external event network-vif-plugged-92d67693-7b14-496d-85fc-00362ed0e9f5 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.972 2 DEBUG oslo_concurrency.lockutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Acquiring lock "54199f32-2d2a-4c54-a6bd-31d2d5675a46-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.973 2 DEBUG oslo_concurrency.lockutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Lock "54199f32-2d2a-4c54-a6bd-31d2d5675a46-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.973 2 DEBUG oslo_concurrency.lockutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Lock "54199f32-2d2a-4c54-a6bd-31d2d5675a46-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.974 2 DEBUG nova.virt.libvirt.vif [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:00:09Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description=None,display_name='tempest-tempest.common.compute-instance-549213814-2',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-tempest-common-compute-instance-549213814-2',id=3,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=1,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='23de7e9a877e477cb52ac4d4c1410e0d',ramdisk_id='',reservation_id='r-2zs6ym1o',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-AutoAllocateNetworkTest-1436985778',owner_user_name='tempest-AutoAllocateNetworkTest-1436985778-project-member'},t
ags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:00:12Z,user_data=None,user_id='4e1cdf41d58b4774b94da988b9e8db73',uuid=54199f32-2d2a-4c54-a6bd-31d2d5675a46,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "92d67693-7b14-496d-85fc-00362ed0e9f5", "address": "fa:16:3e:5f:29:fc", "network": {"id": "0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6", "bridge": "br-int", "label": "auto_allocated_network", "subnets": [{"cidr": "10.1.0.0/26", "dns": [], "gateway": {"address": "10.1.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.1.0.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}, {"cidr": "fdfe:381f:8400::/64", "dns": [], "gateway": {"address": "fdfe:381f:8400::1", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "fdfe:381f:8400::287", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "23de7e9a877e477cb52ac4d4c1410e0d", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92d67693-7b", "ovs_interfaceid": "92d67693-7b14-496d-85fc-00362ed0e9f5", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.974 2 DEBUG nova.network.os_vif_util [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Converting VIF {"id": "92d67693-7b14-496d-85fc-00362ed0e9f5", "address": "fa:16:3e:5f:29:fc", "network": {"id": "0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6", "bridge": "br-int", "label": "auto_allocated_network", "subnets": [{"cidr": "10.1.0.0/26", "dns": [], "gateway": {"address": "10.1.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.1.0.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}, {"cidr": "fdfe:381f:8400::/64", "dns": [], "gateway": {"address": "fdfe:381f:8400::1", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "fdfe:381f:8400::287", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "23de7e9a877e477cb52ac4d4c1410e0d", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92d67693-7b", "ovs_interfaceid": "92d67693-7b14-496d-85fc-00362ed0e9f5", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.974 2 DEBUG nova.network.os_vif_util [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:5f:29:fc,bridge_name='br-int',has_traffic_filtering=True,id=92d67693-7b14-496d-85fc-00362ed0e9f5,network=Network(0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap92d67693-7b') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:00:39 compute-0 nova_compute[192079]: 2025-10-02 12:00:39.975 2 DEBUG os_vif [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:5f:29:fc,bridge_name='br-int',has_traffic_filtering=True,id=92d67693-7b14-496d-85fc-00362ed0e9f5,network=Network(0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap92d67693-7b') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:00:40 compute-0 nova_compute[192079]: 2025-10-02 12:00:40.005 2 DEBUG ovsdbapp.backend.ovs_idl [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Created schema index Interface.name autocreate_indices /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/__init__.py:106
Oct 02 12:00:40 compute-0 nova_compute[192079]: 2025-10-02 12:00:40.006 2 DEBUG ovsdbapp.backend.ovs_idl [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Created schema index Port.name autocreate_indices /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/__init__.py:106
Oct 02 12:00:40 compute-0 nova_compute[192079]: 2025-10-02 12:00:40.006 2 DEBUG ovsdbapp.backend.ovs_idl [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Created schema index Bridge.name autocreate_indices /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/__init__.py:106
Oct 02 12:00:40 compute-0 nova_compute[192079]: 2025-10-02 12:00:40.006 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] tcp:127.0.0.1:6640: entering CONNECTING _transition /usr/lib64/python3.9/site-packages/ovs/reconnect.py:519
Oct 02 12:00:40 compute-0 nova_compute[192079]: 2025-10-02 12:00:40.009 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [POLLOUT] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:40 compute-0 nova_compute[192079]: 2025-10-02 12:00:40.010 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] tcp:127.0.0.1:6640: entering ACTIVE _transition /usr/lib64/python3.9/site-packages/ovs/reconnect.py:519
Oct 02 12:00:40 compute-0 nova_compute[192079]: 2025-10-02 12:00:40.010 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:40 compute-0 nova_compute[192079]: 2025-10-02 12:00:40.012 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:40 compute-0 nova_compute[192079]: 2025-10-02 12:00:40.015 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:40 compute-0 nova_compute[192079]: 2025-10-02 12:00:40.025 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:40 compute-0 nova_compute[192079]: 2025-10-02 12:00:40.025 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:00:40 compute-0 nova_compute[192079]: 2025-10-02 12:00:40.025 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:00:40 compute-0 nova_compute[192079]: 2025-10-02 12:00:40.026 2 INFO oslo.privsep.daemon [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Running privsep helper: ['sudo', 'nova-rootwrap', '/etc/nova/rootwrap.conf', 'privsep-helper', '--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-compute.conf', '--config-dir', '/etc/nova/nova.conf.d', '--privsep_context', 'vif_plug_ovs.privsep.vif_plug', '--privsep_sock_path', '/tmp/tmp8k5kljk7/privsep.sock']
Oct 02 12:00:40 compute-0 nova_compute[192079]: 2025-10-02 12:00:40.705 2 INFO oslo.privsep.daemon [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Spawned new privsep daemon via rootwrap
Oct 02 12:00:40 compute-0 nova_compute[192079]: 2025-10-02 12:00:40.580 91 INFO oslo.privsep.daemon [-] privsep daemon starting
Oct 02 12:00:40 compute-0 nova_compute[192079]: 2025-10-02 12:00:40.587 91 INFO oslo.privsep.daemon [-] privsep process running with uid/gid: 0/0
Oct 02 12:00:40 compute-0 nova_compute[192079]: 2025-10-02 12:00:40.590 91 INFO oslo.privsep.daemon [-] privsep process running with capabilities (eff/prm/inh): CAP_DAC_OVERRIDE|CAP_NET_ADMIN/CAP_DAC_OVERRIDE|CAP_NET_ADMIN/none
Oct 02 12:00:40 compute-0 nova_compute[192079]: 2025-10-02 12:00:40.591 91 INFO oslo.privsep.daemon [-] privsep daemon running as pid 91
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.026 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.026 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap92d67693-7b, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.027 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap92d67693-7b, col_values=(('external_ids', {'iface-id': '92d67693-7b14-496d-85fc-00362ed0e9f5', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:5f:29:fc', 'vm-uuid': '54199f32-2d2a-4c54-a6bd-31d2d5675a46'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.076 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:41 compute-0 NetworkManager[51160]: <info>  [1759406441.0773] manager: (tap92d67693-7b): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/23)
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.080 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.083 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.085 2 INFO os_vif [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:5f:29:fc,bridge_name='br-int',has_traffic_filtering=True,id=92d67693-7b14-496d-85fc-00362ed0e9f5,network=Network(0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap92d67693-7b')
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.139 2 DEBUG nova.virt.libvirt.driver [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.140 2 DEBUG nova.virt.libvirt.driver [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.140 2 DEBUG nova.virt.libvirt.driver [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] No VIF found with MAC fa:16:3e:5f:29:fc, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.141 2 INFO nova.virt.libvirt.driver [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Using config drive
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.443 2 DEBUG nova.network.neutron [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Updating instance_info_cache with network_info: [{"id": "d92bbd66-2dd9-44e3-a834-a92797ae8d1f", "address": "fa:16:3e:8c:7f:ac", "network": {"id": "0432e6a2-e111-484d-b6cf-d32d9fc846c9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1078640656-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "302a9c83c3eb43818ce6284e9ddb73be", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd92bbd66-2d", "ovs_interfaceid": "d92bbd66-2dd9-44e3-a834-a92797ae8d1f", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.481 2 DEBUG oslo_concurrency.lockutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Releasing lock "refresh_cache-a3d563c1-37ae-41be-a49b-ee6efeccfc94" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.482 2 DEBUG nova.compute.manager [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Instance network_info: |[{"id": "d92bbd66-2dd9-44e3-a834-a92797ae8d1f", "address": "fa:16:3e:8c:7f:ac", "network": {"id": "0432e6a2-e111-484d-b6cf-d32d9fc846c9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1078640656-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "302a9c83c3eb43818ce6284e9ddb73be", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd92bbd66-2d", "ovs_interfaceid": "d92bbd66-2dd9-44e3-a834-a92797ae8d1f", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.482 2 DEBUG oslo_concurrency.lockutils [req-402d8053-77f4-4e66-a568-e63455947141 req-4cb97c95-f4a7-4304-9f97-228d2c4743ec 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-a3d563c1-37ae-41be-a49b-ee6efeccfc94" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.482 2 DEBUG nova.network.neutron [req-402d8053-77f4-4e66-a568-e63455947141 req-4cb97c95-f4a7-4304-9f97-228d2c4743ec 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Refreshing network info cache for port d92bbd66-2dd9-44e3-a834-a92797ae8d1f _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.488 2 DEBUG nova.virt.libvirt.driver [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Start _get_guest_xml network_info=[{"id": "d92bbd66-2dd9-44e3-a834-a92797ae8d1f", "address": "fa:16:3e:8c:7f:ac", "network": {"id": "0432e6a2-e111-484d-b6cf-d32d9fc846c9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1078640656-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "302a9c83c3eb43818ce6284e9ddb73be", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd92bbd66-2d", "ovs_interfaceid": "d92bbd66-2dd9-44e3-a834-a92797ae8d1f", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.493 2 WARNING nova.virt.libvirt.driver [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.500 2 DEBUG nova.virt.libvirt.host [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.501 2 DEBUG nova.virt.libvirt.host [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.511 2 DEBUG nova.virt.libvirt.host [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.512 2 DEBUG nova.virt.libvirt.host [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.513 2 DEBUG nova.virt.libvirt.driver [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.514 2 DEBUG nova.virt.hardware [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T12:00:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1285819904',id=8,is_public=True,memory_mb=128,name='tempest-flavor_with_ephemeral_0-768942469',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.514 2 DEBUG nova.virt.hardware [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.515 2 DEBUG nova.virt.hardware [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.515 2 DEBUG nova.virt.hardware [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.515 2 DEBUG nova.virt.hardware [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.516 2 DEBUG nova.virt.hardware [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.516 2 DEBUG nova.virt.hardware [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.516 2 DEBUG nova.virt.hardware [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.517 2 DEBUG nova.virt.hardware [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.517 2 DEBUG nova.virt.hardware [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.518 2 DEBUG nova.virt.hardware [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.523 2 DEBUG nova.virt.libvirt.vif [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:00:31Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ServersWithSpecificFlavorTestJSON-server-305259138',display_name='tempest-ServersWithSpecificFlavorTestJSON-server-305259138',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(8),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverswithspecificflavortestjson-server-305259138',id=5,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=8,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBLQAR6rn15gxsCt5BVT9ZeXnbqUta2pJ91YMBkT9rHUc9ZBtTK728XqHiZfyDrBlMAbpgHvu/gvYEjRf3OvnLlEsO2AY8MfRajDqsbCXPjRzSoO5eacsxtVMw0D5LoybNA==',key_name='tempest-keypair-283845564',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='302a9c83c3eb43818ce6284e9ddb73be',ramdisk_id='',reservation_id='r-afr0o08k',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServersWithSpecificFlavorTestJSON-1100192498',owner_user_name='tempest-ServersWithSpecificFlavorTestJSON-1100192498-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:00:33Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='67132a26bb4c454aa5ed0e4b8fee032c',uuid=a3d563c1-37ae-41be-a49b-ee6efeccfc94,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "d92bbd66-2dd9-44e3-a834-a92797ae8d1f", "address": "fa:16:3e:8c:7f:ac", "network": {"id": "0432e6a2-e111-484d-b6cf-d32d9fc846c9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1078640656-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "302a9c83c3eb43818ce6284e9ddb73be", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd92bbd66-2d", "ovs_interfaceid": "d92bbd66-2dd9-44e3-a834-a92797ae8d1f", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.523 2 DEBUG nova.network.os_vif_util [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Converting VIF {"id": "d92bbd66-2dd9-44e3-a834-a92797ae8d1f", "address": "fa:16:3e:8c:7f:ac", "network": {"id": "0432e6a2-e111-484d-b6cf-d32d9fc846c9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1078640656-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "302a9c83c3eb43818ce6284e9ddb73be", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd92bbd66-2d", "ovs_interfaceid": "d92bbd66-2dd9-44e3-a834-a92797ae8d1f", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.524 2 DEBUG nova.network.os_vif_util [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:8c:7f:ac,bridge_name='br-int',has_traffic_filtering=True,id=d92bbd66-2dd9-44e3-a834-a92797ae8d1f,network=Network(0432e6a2-e111-484d-b6cf-d32d9fc846c9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd92bbd66-2d') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.526 2 DEBUG nova.objects.instance [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lazy-loading 'pci_devices' on Instance uuid a3d563c1-37ae-41be-a49b-ee6efeccfc94 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.541 2 DEBUG nova.virt.libvirt.driver [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:00:41 compute-0 nova_compute[192079]:   <uuid>a3d563c1-37ae-41be-a49b-ee6efeccfc94</uuid>
Oct 02 12:00:41 compute-0 nova_compute[192079]:   <name>instance-00000005</name>
Oct 02 12:00:41 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:00:41 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:00:41 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:00:41 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:       <nova:name>tempest-ServersWithSpecificFlavorTestJSON-server-305259138</nova:name>
Oct 02 12:00:41 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:00:41</nova:creationTime>
Oct 02 12:00:41 compute-0 nova_compute[192079]:       <nova:flavor name="tempest-flavor_with_ephemeral_0-768942469">
Oct 02 12:00:41 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:00:41 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:00:41 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:00:41 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:00:41 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:00:41 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:00:41 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:00:41 compute-0 nova_compute[192079]:         <nova:user uuid="67132a26bb4c454aa5ed0e4b8fee032c">tempest-ServersWithSpecificFlavorTestJSON-1100192498-project-member</nova:user>
Oct 02 12:00:41 compute-0 nova_compute[192079]:         <nova:project uuid="302a9c83c3eb43818ce6284e9ddb73be">tempest-ServersWithSpecificFlavorTestJSON-1100192498</nova:project>
Oct 02 12:00:41 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:00:41 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:00:41 compute-0 nova_compute[192079]:         <nova:port uuid="d92bbd66-2dd9-44e3-a834-a92797ae8d1f">
Oct 02 12:00:41 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.3" ipVersion="4"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:00:41 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:00:41 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:00:41 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <system>
Oct 02 12:00:41 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:00:41 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:00:41 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:00:41 compute-0 nova_compute[192079]:       <entry name="serial">a3d563c1-37ae-41be-a49b-ee6efeccfc94</entry>
Oct 02 12:00:41 compute-0 nova_compute[192079]:       <entry name="uuid">a3d563c1-37ae-41be-a49b-ee6efeccfc94</entry>
Oct 02 12:00:41 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     </system>
Oct 02 12:00:41 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:00:41 compute-0 nova_compute[192079]:   <os>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:   </os>
Oct 02 12:00:41 compute-0 nova_compute[192079]:   <features>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:   </features>
Oct 02 12:00:41 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:00:41 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:00:41 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:00:41 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/a3d563c1-37ae-41be-a49b-ee6efeccfc94/disk"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:00:41 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/a3d563c1-37ae-41be-a49b-ee6efeccfc94/disk.config"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:00:41 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:8c:7f:ac"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:       <target dev="tapd92bbd66-2d"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:00:41 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/a3d563c1-37ae-41be-a49b-ee6efeccfc94/console.log" append="off"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <video>
Oct 02 12:00:41 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     </video>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:00:41 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:00:41 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:00:41 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:00:41 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:00:41 compute-0 nova_compute[192079]: </domain>
Oct 02 12:00:41 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.541 2 DEBUG nova.compute.manager [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Preparing to wait for external event network-vif-plugged-d92bbd66-2dd9-44e3-a834-a92797ae8d1f prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.542 2 DEBUG oslo_concurrency.lockutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Acquiring lock "a3d563c1-37ae-41be-a49b-ee6efeccfc94-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.542 2 DEBUG oslo_concurrency.lockutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "a3d563c1-37ae-41be-a49b-ee6efeccfc94-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.543 2 DEBUG oslo_concurrency.lockutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "a3d563c1-37ae-41be-a49b-ee6efeccfc94-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.544 2 DEBUG nova.virt.libvirt.vif [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:00:31Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ServersWithSpecificFlavorTestJSON-server-305259138',display_name='tempest-ServersWithSpecificFlavorTestJSON-server-305259138',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(8),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverswithspecificflavortestjson-server-305259138',id=5,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=8,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBLQAR6rn15gxsCt5BVT9ZeXnbqUta2pJ91YMBkT9rHUc9ZBtTK728XqHiZfyDrBlMAbpgHvu/gvYEjRf3OvnLlEsO2AY8MfRajDqsbCXPjRzSoO5eacsxtVMw0D5LoybNA==',key_name='tempest-keypair-283845564',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='302a9c83c3eb43818ce6284e9ddb73be',ramdisk_id='',reservation_id='r-afr0o08k',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServersWithSpecificFlavorTestJSON-1100192498',owner_user_name='tempest-ServersWithSpecificFlavorTestJSON-1100192498-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:00:33Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='67132a26bb4c454aa5ed0e4b8fee032c',uuid=a3d563c1-37ae-41be-a49b-ee6efeccfc94,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "d92bbd66-2dd9-44e3-a834-a92797ae8d1f", "address": "fa:16:3e:8c:7f:ac", "network": {"id": "0432e6a2-e111-484d-b6cf-d32d9fc846c9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1078640656-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, 
"ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "302a9c83c3eb43818ce6284e9ddb73be", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd92bbd66-2d", "ovs_interfaceid": "d92bbd66-2dd9-44e3-a834-a92797ae8d1f", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.544 2 DEBUG nova.network.os_vif_util [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Converting VIF {"id": "d92bbd66-2dd9-44e3-a834-a92797ae8d1f", "address": "fa:16:3e:8c:7f:ac", "network": {"id": "0432e6a2-e111-484d-b6cf-d32d9fc846c9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1078640656-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "302a9c83c3eb43818ce6284e9ddb73be", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd92bbd66-2d", "ovs_interfaceid": "d92bbd66-2dd9-44e3-a834-a92797ae8d1f", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.545 2 DEBUG nova.network.os_vif_util [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:8c:7f:ac,bridge_name='br-int',has_traffic_filtering=True,id=d92bbd66-2dd9-44e3-a834-a92797ae8d1f,network=Network(0432e6a2-e111-484d-b6cf-d32d9fc846c9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd92bbd66-2d') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.546 2 DEBUG os_vif [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:8c:7f:ac,bridge_name='br-int',has_traffic_filtering=True,id=d92bbd66-2dd9-44e3-a834-a92797ae8d1f,network=Network(0432e6a2-e111-484d-b6cf-d32d9fc846c9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd92bbd66-2d') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.546 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.547 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.547 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.550 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.551 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapd92bbd66-2d, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.551 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapd92bbd66-2d, col_values=(('external_ids', {'iface-id': 'd92bbd66-2dd9-44e3-a834-a92797ae8d1f', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:8c:7f:ac', 'vm-uuid': 'a3d563c1-37ae-41be-a49b-ee6efeccfc94'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.553 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:41 compute-0 NetworkManager[51160]: <info>  [1759406441.5549] manager: (tapd92bbd66-2d): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/24)
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.557 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.565 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.566 2 INFO os_vif [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:8c:7f:ac,bridge_name='br-int',has_traffic_filtering=True,id=d92bbd66-2dd9-44e3-a834-a92797ae8d1f,network=Network(0432e6a2-e111-484d-b6cf-d32d9fc846c9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd92bbd66-2d')
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.580 2 INFO nova.virt.libvirt.driver [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Creating config drive at /var/lib/nova/instances/54199f32-2d2a-4c54-a6bd-31d2d5675a46/disk.config
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.592 2 DEBUG oslo_concurrency.processutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/54199f32-2d2a-4c54-a6bd-31d2d5675a46/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpimxjg4zj execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.668 2 DEBUG nova.virt.libvirt.driver [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.669 2 DEBUG nova.virt.libvirt.driver [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.669 2 DEBUG nova.virt.libvirt.driver [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] No VIF found with MAC fa:16:3e:8c:7f:ac, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.670 2 INFO nova.virt.libvirt.driver [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Using config drive
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.727 2 DEBUG oslo_concurrency.processutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/54199f32-2d2a-4c54-a6bd-31d2d5675a46/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpimxjg4zj" returned: 0 in 0.135s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:00:41 compute-0 kernel: tun: Universal TUN/TAP device driver, 1.6
Oct 02 12:00:41 compute-0 NetworkManager[51160]: <info>  [1759406441.8086] manager: (tap92d67693-7b): new Tun device (/org/freedesktop/NetworkManager/Devices/25)
Oct 02 12:00:41 compute-0 kernel: tap92d67693-7b: entered promiscuous mode
Oct 02 12:00:41 compute-0 ovn_controller[94336]: 2025-10-02T12:00:41Z|00027|binding|INFO|Claiming lport 92d67693-7b14-496d-85fc-00362ed0e9f5 for this chassis.
Oct 02 12:00:41 compute-0 ovn_controller[94336]: 2025-10-02T12:00:41Z|00028|binding|INFO|92d67693-7b14-496d-85fc-00362ed0e9f5: Claiming fa:16:3e:5f:29:fc 10.1.0.48 fdfe:381f:8400::287
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.817 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.824 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:41 compute-0 systemd-udevd[219759]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:00:41 compute-0 NetworkManager[51160]: <info>  [1759406441.8710] device (tap92d67693-7b): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:00:41 compute-0 NetworkManager[51160]: <info>  [1759406441.8725] device (tap92d67693-7b): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:00:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:41.881 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:5f:29:fc 10.1.0.48 fdfe:381f:8400::287'], port_security=['fa:16:3e:5f:29:fc 10.1.0.48 fdfe:381f:8400::287'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.1.0.48/26 fdfe:381f:8400::287/64', 'neutron:device_id': '54199f32-2d2a-4c54-a6bd-31d2d5675a46', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '23de7e9a877e477cb52ac4d4c1410e0d', 'neutron:revision_number': '2', 'neutron:security_group_ids': '6166ab66-e763-4e6f-ba6d-1725486f45f7', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=9cab3463-7636-46ad-b75d-f72d7d1739eb, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=5, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=92d67693-7b14-496d-85fc-00362ed0e9f5) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:00:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:41.882 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 92d67693-7b14-496d-85fc-00362ed0e9f5 in datapath 0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6 bound to our chassis
Oct 02 12:00:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:41.884 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6
Oct 02 12:00:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:41.886 103294 INFO oslo.privsep.daemon [-] Running privsep helper: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'privsep-helper', '--config-file', '/etc/neutron/neutron.conf', '--config-dir', '/etc/neutron.conf.d', '--privsep_context', 'neutron.privileged.default', '--privsep_sock_path', '/tmp/tmpg7qkqayj/privsep.sock']
Oct 02 12:00:41 compute-0 systemd-machined[152150]: New machine qemu-1-instance-00000003.
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.914 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:41 compute-0 systemd[1]: Started Virtual Machine qemu-1-instance-00000003.
Oct 02 12:00:41 compute-0 ovn_controller[94336]: 2025-10-02T12:00:41Z|00029|binding|INFO|Setting lport 92d67693-7b14-496d-85fc-00362ed0e9f5 ovn-installed in OVS
Oct 02 12:00:41 compute-0 ovn_controller[94336]: 2025-10-02T12:00:41Z|00030|binding|INFO|Setting lport 92d67693-7b14-496d-85fc-00362ed0e9f5 up in Southbound
Oct 02 12:00:41 compute-0 nova_compute[192079]: 2025-10-02 12:00:41.925 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:42 compute-0 nova_compute[192079]: 2025-10-02 12:00:42.261 2 INFO nova.virt.libvirt.driver [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Creating config drive at /var/lib/nova/instances/a3d563c1-37ae-41be-a49b-ee6efeccfc94/disk.config
Oct 02 12:00:42 compute-0 nova_compute[192079]: 2025-10-02 12:00:42.272 2 DEBUG oslo_concurrency.processutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/a3d563c1-37ae-41be-a49b-ee6efeccfc94/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpbldiu4aa execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:00:42 compute-0 nova_compute[192079]: 2025-10-02 12:00:42.417 2 DEBUG oslo_concurrency.processutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/a3d563c1-37ae-41be-a49b-ee6efeccfc94/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpbldiu4aa" returned: 0 in 0.145s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:00:42 compute-0 systemd-udevd[219762]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:00:42 compute-0 NetworkManager[51160]: <info>  [1759406442.4884] manager: (tapd92bbd66-2d): new Tun device (/org/freedesktop/NetworkManager/Devices/26)
Oct 02 12:00:42 compute-0 kernel: tapd92bbd66-2d: entered promiscuous mode
Oct 02 12:00:42 compute-0 NetworkManager[51160]: <info>  [1759406442.5193] device (tapd92bbd66-2d): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:00:42 compute-0 NetworkManager[51160]: <info>  [1759406442.5206] device (tapd92bbd66-2d): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:00:42 compute-0 nova_compute[192079]: 2025-10-02 12:00:42.520 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:42 compute-0 ovn_controller[94336]: 2025-10-02T12:00:42Z|00031|binding|INFO|Claiming lport d92bbd66-2dd9-44e3-a834-a92797ae8d1f for this chassis.
Oct 02 12:00:42 compute-0 ovn_controller[94336]: 2025-10-02T12:00:42Z|00032|binding|INFO|d92bbd66-2dd9-44e3-a834-a92797ae8d1f: Claiming fa:16:3e:8c:7f:ac 10.100.0.3
Oct 02 12:00:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:42.541 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:8c:7f:ac 10.100.0.3'], port_security=['fa:16:3e:8c:7f:ac 10.100.0.3'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.3/28', 'neutron:device_id': 'a3d563c1-37ae-41be-a49b-ee6efeccfc94', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-0432e6a2-e111-484d-b6cf-d32d9fc846c9', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '302a9c83c3eb43818ce6284e9ddb73be', 'neutron:revision_number': '2', 'neutron:security_group_ids': 'afed868a-564b-4ceb-947a-806e11012ac0', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=b550d36f-725b-4b76-9c4e-aa36183370a9, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=d92bbd66-2dd9-44e3-a834-a92797ae8d1f) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:00:42 compute-0 systemd-machined[152150]: New machine qemu-2-instance-00000005.
Oct 02 12:00:42 compute-0 ovn_controller[94336]: 2025-10-02T12:00:42Z|00033|binding|INFO|Setting lport d92bbd66-2dd9-44e3-a834-a92797ae8d1f ovn-installed in OVS
Oct 02 12:00:42 compute-0 ovn_controller[94336]: 2025-10-02T12:00:42Z|00034|binding|INFO|Setting lport d92bbd66-2dd9-44e3-a834-a92797ae8d1f up in Southbound
Oct 02 12:00:42 compute-0 nova_compute[192079]: 2025-10-02 12:00:42.577 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:42 compute-0 systemd[1]: Started Virtual Machine qemu-2-instance-00000005.
Oct 02 12:00:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:42.602 103294 INFO oslo.privsep.daemon [-] Spawned new privsep daemon via rootwrap
Oct 02 12:00:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:42.603 103294 DEBUG oslo.privsep.daemon [-] Accepted privsep connection to /tmp/tmpg7qkqayj/privsep.sock __init__ /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:362
Oct 02 12:00:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:42.462 219793 INFO oslo.privsep.daemon [-] privsep daemon starting
Oct 02 12:00:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:42.469 219793 INFO oslo.privsep.daemon [-] privsep process running with uid/gid: 0/0
Oct 02 12:00:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:42.473 219793 INFO oslo.privsep.daemon [-] privsep process running with capabilities (eff/prm/inh): CAP_DAC_OVERRIDE|CAP_DAC_READ_SEARCH|CAP_NET_ADMIN|CAP_SYS_ADMIN|CAP_SYS_PTRACE/CAP_DAC_OVERRIDE|CAP_DAC_READ_SEARCH|CAP_NET_ADMIN|CAP_SYS_ADMIN|CAP_SYS_PTRACE/none
Oct 02 12:00:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:42.474 219793 INFO oslo.privsep.daemon [-] privsep daemon running as pid 219793
Oct 02 12:00:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:42.606 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d21bb2bc-090b-4a49-a548-ac2f3f35adb1]: (2,) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.007 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406443.006851, 54199f32-2d2a-4c54-a6bd-31d2d5675a46 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.009 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] VM Started (Lifecycle Event)
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.032 2 DEBUG nova.compute.manager [req-cd85b8c5-6727-49ff-ab42-ca0fc25435d6 req-e3671e87-357b-4c81-8a1c-5dfa923e4624 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Received event network-vif-plugged-92d67693-7b14-496d-85fc-00362ed0e9f5 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.032 2 DEBUG oslo_concurrency.lockutils [req-cd85b8c5-6727-49ff-ab42-ca0fc25435d6 req-e3671e87-357b-4c81-8a1c-5dfa923e4624 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "54199f32-2d2a-4c54-a6bd-31d2d5675a46-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.033 2 DEBUG oslo_concurrency.lockutils [req-cd85b8c5-6727-49ff-ab42-ca0fc25435d6 req-e3671e87-357b-4c81-8a1c-5dfa923e4624 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "54199f32-2d2a-4c54-a6bd-31d2d5675a46-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.033 2 DEBUG oslo_concurrency.lockutils [req-cd85b8c5-6727-49ff-ab42-ca0fc25435d6 req-e3671e87-357b-4c81-8a1c-5dfa923e4624 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "54199f32-2d2a-4c54-a6bd-31d2d5675a46-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.033 2 DEBUG nova.compute.manager [req-cd85b8c5-6727-49ff-ab42-ca0fc25435d6 req-e3671e87-357b-4c81-8a1c-5dfa923e4624 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Processing event network-vif-plugged-92d67693-7b14-496d-85fc-00362ed0e9f5 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.035 2 DEBUG nova.compute.manager [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.045 2 DEBUG nova.virt.libvirt.driver [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.048 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.052 2 INFO nova.virt.libvirt.driver [-] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Instance spawned successfully.
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.053 2 DEBUG nova.virt.libvirt.driver [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.055 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:00:43 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:43.084 219793 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "context-manager" by "neutron_lib.db.api._create_context_manager" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:00:43 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:43.085 219793 DEBUG oslo_concurrency.lockutils [-] Lock "context-manager" acquired by "neutron_lib.db.api._create_context_manager" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:00:43 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:43.085 219793 DEBUG oslo_concurrency.lockutils [-] Lock "context-manager" "released" by "neutron_lib.db.api._create_context_manager" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.091 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.091 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406443.0082657, 54199f32-2d2a-4c54-a6bd-31d2d5675a46 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.092 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] VM Paused (Lifecycle Event)
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.096 2 DEBUG nova.virt.libvirt.driver [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.096 2 DEBUG nova.virt.libvirt.driver [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.097 2 DEBUG nova.virt.libvirt.driver [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.097 2 DEBUG nova.virt.libvirt.driver [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.098 2 DEBUG nova.virt.libvirt.driver [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.098 2 DEBUG nova.virt.libvirt.driver [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.126 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.131 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406443.0380561, 54199f32-2d2a-4c54-a6bd-31d2d5675a46 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.132 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] VM Resumed (Lifecycle Event)
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.167 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.170 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.206 2 DEBUG nova.network.neutron [req-0e60c3a7-4d7a-4453-b590-91ad6289f707 req-b16bc9ea-e9e7-4847-a99b-d20df11fb21a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Updated VIF entry in instance network info cache for port 92d67693-7b14-496d-85fc-00362ed0e9f5. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.206 2 DEBUG nova.network.neutron [req-0e60c3a7-4d7a-4453-b590-91ad6289f707 req-b16bc9ea-e9e7-4847-a99b-d20df11fb21a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Updating instance_info_cache with network_info: [{"id": "92d67693-7b14-496d-85fc-00362ed0e9f5", "address": "fa:16:3e:5f:29:fc", "network": {"id": "0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6", "bridge": "br-int", "label": "auto_allocated_network", "subnets": [{"cidr": "10.1.0.0/26", "dns": [], "gateway": {"address": "10.1.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.1.0.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}, {"cidr": "fdfe:381f:8400::/64", "dns": [], "gateway": {"address": "fdfe:381f:8400::1", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "fdfe:381f:8400::287", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "23de7e9a877e477cb52ac4d4c1410e0d", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92d67693-7b", "ovs_interfaceid": "92d67693-7b14-496d-85fc-00362ed0e9f5", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.215 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.218 2 INFO nova.compute.manager [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Took 30.80 seconds to spawn the instance on the hypervisor.
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.218 2 DEBUG nova.compute.manager [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.229 2 DEBUG oslo_concurrency.lockutils [req-0e60c3a7-4d7a-4453-b590-91ad6289f707 req-b16bc9ea-e9e7-4847-a99b-d20df11fb21a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-54199f32-2d2a-4c54-a6bd-31d2d5675a46" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.267 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406443.26733, a3d563c1-37ae-41be-a49b-ee6efeccfc94 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.268 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] VM Started (Lifecycle Event)
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.328 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.337 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406443.2674851, a3d563c1-37ae-41be-a49b-ee6efeccfc94 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.337 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] VM Paused (Lifecycle Event)
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.360 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.363 2 INFO nova.compute.manager [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Took 32.06 seconds to build instance.
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.365 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.396 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.421 2 DEBUG oslo_concurrency.lockutils [None req-35f6bb1e-0275-4338-8408-4659dad7719f 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Lock "54199f32-2d2a-4c54-a6bd-31d2d5675a46" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 32.213s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.541 2 DEBUG nova.network.neutron [req-402d8053-77f4-4e66-a568-e63455947141 req-4cb97c95-f4a7-4304-9f97-228d2c4743ec 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Updated VIF entry in instance network info cache for port d92bbd66-2dd9-44e3-a834-a92797ae8d1f. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.542 2 DEBUG nova.network.neutron [req-402d8053-77f4-4e66-a568-e63455947141 req-4cb97c95-f4a7-4304-9f97-228d2c4743ec 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Updating instance_info_cache with network_info: [{"id": "d92bbd66-2dd9-44e3-a834-a92797ae8d1f", "address": "fa:16:3e:8c:7f:ac", "network": {"id": "0432e6a2-e111-484d-b6cf-d32d9fc846c9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1078640656-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "302a9c83c3eb43818ce6284e9ddb73be", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd92bbd66-2d", "ovs_interfaceid": "d92bbd66-2dd9-44e3-a834-a92797ae8d1f", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:00:43 compute-0 nova_compute[192079]: 2025-10-02 12:00:43.587 2 DEBUG oslo_concurrency.lockutils [req-402d8053-77f4-4e66-a568-e63455947141 req-4cb97c95-f4a7-4304-9f97-228d2c4743ec 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-a3d563c1-37ae-41be-a49b-ee6efeccfc94" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:00:43 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:43.638 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[00d14637-7c4b-4f49-9a19-ce709de4d62c]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:43 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:43.639 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap0e6cbdbf-b1 in ovnmeta-0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:00:43 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:43.641 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap0e6cbdbf-b0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:00:43 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:43.641 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a32304ff-4b90-4f73-a8ee-6a860e6a0c83]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:43 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:43.645 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[756b3503-62d5-4608-bd3c-3360a391854f]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:43 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:43.679 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[e81b2bf3-00d9-4c7f-b84d-5ea4e759f9cb]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:43 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:43.711 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[09899a8c-800a-4c45-8bf0-19cae1378b4d]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:43 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:43.714 103294 INFO oslo.privsep.daemon [-] Running privsep helper: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'privsep-helper', '--config-file', '/etc/neutron/neutron.conf', '--config-dir', '/etc/neutron.conf.d', '--privsep_context', 'neutron.privileged.link_cmd', '--privsep_sock_path', '/tmp/tmpu_79flww/privsep.sock']
Oct 02 12:00:43 compute-0 podman[219828]: 2025-10-02 12:00:43.80383266 +0000 UTC m=+0.088180936 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, config_id=ovn_controller, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2)
Oct 02 12:00:44 compute-0 nova_compute[192079]: 2025-10-02 12:00:44.045 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:44.361 103294 INFO oslo.privsep.daemon [-] Spawned new privsep daemon via rootwrap
Oct 02 12:00:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:44.363 103294 DEBUG oslo.privsep.daemon [-] Accepted privsep connection to /tmp/tmpu_79flww/privsep.sock __init__ /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:362
Oct 02 12:00:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:44.254 219859 INFO oslo.privsep.daemon [-] privsep daemon starting
Oct 02 12:00:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:44.260 219859 INFO oslo.privsep.daemon [-] privsep process running with uid/gid: 0/0
Oct 02 12:00:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:44.262 219859 INFO oslo.privsep.daemon [-] privsep process running with capabilities (eff/prm/inh): CAP_NET_ADMIN|CAP_SYS_ADMIN/CAP_NET_ADMIN|CAP_SYS_ADMIN/none
Oct 02 12:00:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:44.262 219859 INFO oslo.privsep.daemon [-] privsep daemon running as pid 219859
Oct 02 12:00:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:44.367 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[11626232-e361-401c-b25d-5cfb2a91acda]: (2,) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:44 compute-0 nova_compute[192079]: 2025-10-02 12:00:44.822 2 DEBUG nova.compute.manager [req-130b83c1-ee03-4a8a-b941-add0a7cdc04b req-d0226f52-72ed-4359-837a-42b9047c1a05 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Received event network-vif-plugged-d92bbd66-2dd9-44e3-a834-a92797ae8d1f external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:00:44 compute-0 nova_compute[192079]: 2025-10-02 12:00:44.822 2 DEBUG oslo_concurrency.lockutils [req-130b83c1-ee03-4a8a-b941-add0a7cdc04b req-d0226f52-72ed-4359-837a-42b9047c1a05 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "a3d563c1-37ae-41be-a49b-ee6efeccfc94-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:00:44 compute-0 nova_compute[192079]: 2025-10-02 12:00:44.823 2 DEBUG oslo_concurrency.lockutils [req-130b83c1-ee03-4a8a-b941-add0a7cdc04b req-d0226f52-72ed-4359-837a-42b9047c1a05 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a3d563c1-37ae-41be-a49b-ee6efeccfc94-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:00:44 compute-0 nova_compute[192079]: 2025-10-02 12:00:44.823 2 DEBUG oslo_concurrency.lockutils [req-130b83c1-ee03-4a8a-b941-add0a7cdc04b req-d0226f52-72ed-4359-837a-42b9047c1a05 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a3d563c1-37ae-41be-a49b-ee6efeccfc94-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:00:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:44.822 219859 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "context-manager" by "neutron_lib.db.api._create_context_manager" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:00:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:44.822 219859 DEBUG oslo_concurrency.lockutils [-] Lock "context-manager" acquired by "neutron_lib.db.api._create_context_manager" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:00:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:44.822 219859 DEBUG oslo_concurrency.lockutils [-] Lock "context-manager" "released" by "neutron_lib.db.api._create_context_manager" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:00:44 compute-0 nova_compute[192079]: 2025-10-02 12:00:44.823 2 DEBUG nova.compute.manager [req-130b83c1-ee03-4a8a-b941-add0a7cdc04b req-d0226f52-72ed-4359-837a-42b9047c1a05 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Processing event network-vif-plugged-d92bbd66-2dd9-44e3-a834-a92797ae8d1f _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:00:44 compute-0 nova_compute[192079]: 2025-10-02 12:00:44.824 2 DEBUG nova.compute.manager [req-130b83c1-ee03-4a8a-b941-add0a7cdc04b req-d0226f52-72ed-4359-837a-42b9047c1a05 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Received event network-vif-plugged-d92bbd66-2dd9-44e3-a834-a92797ae8d1f external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:00:44 compute-0 nova_compute[192079]: 2025-10-02 12:00:44.824 2 DEBUG oslo_concurrency.lockutils [req-130b83c1-ee03-4a8a-b941-add0a7cdc04b req-d0226f52-72ed-4359-837a-42b9047c1a05 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "a3d563c1-37ae-41be-a49b-ee6efeccfc94-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:00:44 compute-0 nova_compute[192079]: 2025-10-02 12:00:44.824 2 DEBUG oslo_concurrency.lockutils [req-130b83c1-ee03-4a8a-b941-add0a7cdc04b req-d0226f52-72ed-4359-837a-42b9047c1a05 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a3d563c1-37ae-41be-a49b-ee6efeccfc94-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:00:44 compute-0 nova_compute[192079]: 2025-10-02 12:00:44.824 2 DEBUG oslo_concurrency.lockutils [req-130b83c1-ee03-4a8a-b941-add0a7cdc04b req-d0226f52-72ed-4359-837a-42b9047c1a05 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a3d563c1-37ae-41be-a49b-ee6efeccfc94-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:00:44 compute-0 nova_compute[192079]: 2025-10-02 12:00:44.825 2 DEBUG nova.compute.manager [req-130b83c1-ee03-4a8a-b941-add0a7cdc04b req-d0226f52-72ed-4359-837a-42b9047c1a05 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] No waiting events found dispatching network-vif-plugged-d92bbd66-2dd9-44e3-a834-a92797ae8d1f pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:00:44 compute-0 nova_compute[192079]: 2025-10-02 12:00:44.825 2 WARNING nova.compute.manager [req-130b83c1-ee03-4a8a-b941-add0a7cdc04b req-d0226f52-72ed-4359-837a-42b9047c1a05 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Received unexpected event network-vif-plugged-d92bbd66-2dd9-44e3-a834-a92797ae8d1f for instance with vm_state building and task_state spawning.
Oct 02 12:00:44 compute-0 nova_compute[192079]: 2025-10-02 12:00:44.825 2 DEBUG nova.compute.manager [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Instance event wait completed in 1 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:00:44 compute-0 nova_compute[192079]: 2025-10-02 12:00:44.830 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406444.828764, a3d563c1-37ae-41be-a49b-ee6efeccfc94 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:00:44 compute-0 nova_compute[192079]: 2025-10-02 12:00:44.830 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] VM Resumed (Lifecycle Event)
Oct 02 12:00:44 compute-0 nova_compute[192079]: 2025-10-02 12:00:44.832 2 DEBUG nova.virt.libvirt.driver [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:00:44 compute-0 nova_compute[192079]: 2025-10-02 12:00:44.835 2 INFO nova.virt.libvirt.driver [-] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Instance spawned successfully.
Oct 02 12:00:44 compute-0 nova_compute[192079]: 2025-10-02 12:00:44.836 2 DEBUG nova.virt.libvirt.driver [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:00:44 compute-0 nova_compute[192079]: 2025-10-02 12:00:44.904 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:00:44 compute-0 nova_compute[192079]: 2025-10-02 12:00:44.907 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:00:44 compute-0 nova_compute[192079]: 2025-10-02 12:00:44.915 2 DEBUG nova.virt.libvirt.driver [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:00:44 compute-0 nova_compute[192079]: 2025-10-02 12:00:44.915 2 DEBUG nova.virt.libvirt.driver [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:00:44 compute-0 nova_compute[192079]: 2025-10-02 12:00:44.916 2 DEBUG nova.virt.libvirt.driver [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:00:44 compute-0 nova_compute[192079]: 2025-10-02 12:00:44.916 2 DEBUG nova.virt.libvirt.driver [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:00:44 compute-0 nova_compute[192079]: 2025-10-02 12:00:44.916 2 DEBUG nova.virt.libvirt.driver [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:00:44 compute-0 nova_compute[192079]: 2025-10-02 12:00:44.917 2 DEBUG nova.virt.libvirt.driver [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:00:44 compute-0 nova_compute[192079]: 2025-10-02 12:00:44.957 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:00:45 compute-0 nova_compute[192079]: 2025-10-02 12:00:45.117 2 INFO nova.compute.manager [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Took 11.16 seconds to spawn the instance on the hypervisor.
Oct 02 12:00:45 compute-0 nova_compute[192079]: 2025-10-02 12:00:45.117 2 DEBUG nova.compute.manager [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:00:45 compute-0 nova_compute[192079]: 2025-10-02 12:00:45.175 2 DEBUG nova.compute.manager [req-a14d04f0-cf8d-4b2b-b549-b666eb1c6237 req-be726696-54fe-4a24-8bfd-b03f27f46fc9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Received event network-vif-plugged-92d67693-7b14-496d-85fc-00362ed0e9f5 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:00:45 compute-0 nova_compute[192079]: 2025-10-02 12:00:45.175 2 DEBUG oslo_concurrency.lockutils [req-a14d04f0-cf8d-4b2b-b549-b666eb1c6237 req-be726696-54fe-4a24-8bfd-b03f27f46fc9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "54199f32-2d2a-4c54-a6bd-31d2d5675a46-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:00:45 compute-0 nova_compute[192079]: 2025-10-02 12:00:45.176 2 DEBUG oslo_concurrency.lockutils [req-a14d04f0-cf8d-4b2b-b549-b666eb1c6237 req-be726696-54fe-4a24-8bfd-b03f27f46fc9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "54199f32-2d2a-4c54-a6bd-31d2d5675a46-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:00:45 compute-0 nova_compute[192079]: 2025-10-02 12:00:45.176 2 DEBUG oslo_concurrency.lockutils [req-a14d04f0-cf8d-4b2b-b549-b666eb1c6237 req-be726696-54fe-4a24-8bfd-b03f27f46fc9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "54199f32-2d2a-4c54-a6bd-31d2d5675a46-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:00:45 compute-0 nova_compute[192079]: 2025-10-02 12:00:45.176 2 DEBUG nova.compute.manager [req-a14d04f0-cf8d-4b2b-b549-b666eb1c6237 req-be726696-54fe-4a24-8bfd-b03f27f46fc9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] No waiting events found dispatching network-vif-plugged-92d67693-7b14-496d-85fc-00362ed0e9f5 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:00:45 compute-0 nova_compute[192079]: 2025-10-02 12:00:45.176 2 WARNING nova.compute.manager [req-a14d04f0-cf8d-4b2b-b549-b666eb1c6237 req-be726696-54fe-4a24-8bfd-b03f27f46fc9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Received unexpected event network-vif-plugged-92d67693-7b14-496d-85fc-00362ed0e9f5 for instance with vm_state active and task_state None.
Oct 02 12:00:45 compute-0 nova_compute[192079]: 2025-10-02 12:00:45.292 2 INFO nova.compute.manager [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Took 12.43 seconds to build instance.
Oct 02 12:00:45 compute-0 nova_compute[192079]: 2025-10-02 12:00:45.331 2 DEBUG oslo_concurrency.lockutils [None req-b7fdd069-f063-48f6-bb1d-5ae86ff297bf 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "a3d563c1-37ae-41be-a49b-ee6efeccfc94" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 12.592s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:45.444 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[c83761a7-5b8a-47dc-89fd-36e35b6ea69c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:45.450 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2a8306ca-3559-4a17-bbbf-009c96be8a1c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:45 compute-0 NetworkManager[51160]: <info>  [1759406445.4514] manager: (tap0e6cbdbf-b0): new Veth device (/org/freedesktop/NetworkManager/Devices/27)
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:45.480 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[178325d3-2440-4b9d-a588-b5bc41260203]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:45.483 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[ec056041-9c12-42a7-8912-ed3e60976036]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:45 compute-0 systemd-udevd[219869]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:00:45 compute-0 NetworkManager[51160]: <info>  [1759406445.5126] device (tap0e6cbdbf-b0): carrier: link connected
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:45.517 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[9cf65b04-0d54-4f8d-a9f0-7cb5222896f0]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:45.538 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c0910847-2084-4273-b6cf-9bf142934651]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap0e6cbdbf-b1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:44:05:20'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 15], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 444314, 'reachable_time': 38356, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 219888, 'error': None, 'target': 'ovnmeta-0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:45.561 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7757136d-5887-47ae-8d1c-b73251379c1a]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe44:520'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 444314, 'tstamp': 444314}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 219889, 'error': None, 'target': 'ovnmeta-0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:45.581 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[fba29810-0c52-482f-b207-96ca5ff0d476]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap0e6cbdbf-b1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:44:05:20'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 15], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 444314, 'reachable_time': 38356, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 219890, 'error': None, 'target': 'ovnmeta-0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:45.614 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[82ac9b94-b293-4e28-8319-40c2292dac8a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:45.670 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e1dffb57-374f-4b6e-b8a7-af73cff6da57]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:45.672 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap0e6cbdbf-b0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:45.672 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:45.672 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap0e6cbdbf-b0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:00:45 compute-0 kernel: tap0e6cbdbf-b0: entered promiscuous mode
Oct 02 12:00:45 compute-0 NetworkManager[51160]: <info>  [1759406445.6751] manager: (tap0e6cbdbf-b0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/28)
Oct 02 12:00:45 compute-0 nova_compute[192079]: 2025-10-02 12:00:45.674 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:45 compute-0 nova_compute[192079]: 2025-10-02 12:00:45.676 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:45.677 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap0e6cbdbf-b0, col_values=(('external_ids', {'iface-id': '0e5a8941-b399-4368-aa52-d99cb4bfefe5'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:00:45 compute-0 nova_compute[192079]: 2025-10-02 12:00:45.678 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:45 compute-0 ovn_controller[94336]: 2025-10-02T12:00:45Z|00035|binding|INFO|Releasing lport 0e5a8941-b399-4368-aa52-d99cb4bfefe5 from this chassis (sb_readonly=0)
Oct 02 12:00:45 compute-0 nova_compute[192079]: 2025-10-02 12:00:45.680 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:45.681 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:45.691 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ed36fe7e-d3d7-401e-bce7-2eaf2f510f07]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:45.692 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6.pid.haproxy
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:00:45 compute-0 nova_compute[192079]: 2025-10-02 12:00:45.692 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:00:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:45.695 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6', 'env', 'PROCESS_TAG=haproxy-0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:00:46 compute-0 podman[219923]: 2025-10-02 12:00:46.073455659 +0000 UTC m=+0.053117916 container create d4e81cac257c0b12537cd3ef5c121e1e69375954e1d21d6ebd85ea3e6b5d2b7d (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001)
Oct 02 12:00:46 compute-0 systemd[1]: Started libpod-conmon-d4e81cac257c0b12537cd3ef5c121e1e69375954e1d21d6ebd85ea3e6b5d2b7d.scope.
Oct 02 12:00:46 compute-0 podman[219923]: 2025-10-02 12:00:46.042564065 +0000 UTC m=+0.022226342 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:00:46 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:00:46 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/57b34cdda5bcf70df3ee3a5422ce1a2a4ad13df232e54fde3dbfe5204726f733/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:00:46 compute-0 podman[219923]: 2025-10-02 12:00:46.155235725 +0000 UTC m=+0.134898012 container init d4e81cac257c0b12537cd3ef5c121e1e69375954e1d21d6ebd85ea3e6b5d2b7d (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0)
Oct 02 12:00:46 compute-0 podman[219923]: 2025-10-02 12:00:46.163366622 +0000 UTC m=+0.143028879 container start d4e81cac257c0b12537cd3ef5c121e1e69375954e1d21d6ebd85ea3e6b5d2b7d (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001)
Oct 02 12:00:46 compute-0 neutron-haproxy-ovnmeta-0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6[219938]: [NOTICE]   (219942) : New worker (219944) forked
Oct 02 12:00:46 compute-0 neutron-haproxy-ovnmeta-0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6[219938]: [NOTICE]   (219942) : Loading success.
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:46.210 103294 INFO neutron.agent.ovn.metadata.agent [-] Port d92bbd66-2dd9-44e3-a834-a92797ae8d1f in datapath 0432e6a2-e111-484d-b6cf-d32d9fc846c9 unbound from our chassis
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:46.213 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 0432e6a2-e111-484d-b6cf-d32d9fc846c9
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:46.223 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5d9663f4-92e7-4637-9bc3-816386a07e2f]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:46.224 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap0432e6a2-e1 in ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:46.228 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap0432e6a2-e0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:46.228 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[33a1d4e6-dc85-44f3-b6c6-7545314b47a7]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:46.229 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[34673631-e9a0-4935-a538-867df860f1b9]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:46.248 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[79a551f2-78a2-4c75-9224-8625e905c55a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:46.274 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0e49ce7e-13a3-4e92-b397-211a09e3cd8c]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:46.298 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[4e82487c-465d-4b9f-ad4b-2929ee48652e]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:46.303 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[cc8fd4b8-c8e8-48bd-8af7-f4e8a810d7f9]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:46 compute-0 systemd-udevd[219874]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:00:46 compute-0 NetworkManager[51160]: <info>  [1759406446.3048] manager: (tap0432e6a2-e0): new Veth device (/org/freedesktop/NetworkManager/Devices/29)
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:46.332 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[74833ff3-b7cc-49c0-ba7e-0e27c2fbc52f]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:46.336 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[0058fd72-7fa9-4182-8173-df58b5eeb563]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:46 compute-0 NetworkManager[51160]: <info>  [1759406446.3573] device (tap0432e6a2-e0): carrier: link connected
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:46.362 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[d47402b5-641f-45d3-bddd-8671ac85cb93]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:46.395 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4c61e1a4-9b84-4346-b7d0-2b67666ce244]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap0432e6a2-e1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:54:ae:53'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 16], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 444398, 'reachable_time': 23624, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 219964, 'error': None, 'target': 'ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:46.414 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[28400659-e824-4303-a215-8b5c33b7ac56]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe54:ae53'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 444398, 'tstamp': 444398}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 219965, 'error': None, 'target': 'ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:46.432 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[eedc6858-e740-4be9-91a3-fc6676adfaf2]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap0432e6a2-e1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:54:ae:53'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 2, 'rx_bytes': 110, 'tx_bytes': 180, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 2, 'rx_bytes': 110, 'tx_bytes': 180, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 16], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 444398, 'reachable_time': 23624, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 2, 'outoctets': 152, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 2, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 152, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 2, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 219966, 'error': None, 'target': 'ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:46.456 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[210ff535-bdef-41e7-99d4-29a38fb734d6]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:46.505 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c5813289-8913-437e-a5ca-4d6fcbb157b0]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:46.506 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap0432e6a2-e0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:46.507 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:46.507 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap0432e6a2-e0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:00:46 compute-0 nova_compute[192079]: 2025-10-02 12:00:46.509 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:46 compute-0 kernel: tap0432e6a2-e0: entered promiscuous mode
Oct 02 12:00:46 compute-0 NetworkManager[51160]: <info>  [1759406446.5100] manager: (tap0432e6a2-e0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/30)
Oct 02 12:00:46 compute-0 nova_compute[192079]: 2025-10-02 12:00:46.514 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:46.515 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap0432e6a2-e0, col_values=(('external_ids', {'iface-id': '0fd0c84b-50f0-4eec-8552-49dfdd682f27'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:00:46 compute-0 nova_compute[192079]: 2025-10-02 12:00:46.516 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:46 compute-0 ovn_controller[94336]: 2025-10-02T12:00:46Z|00036|binding|INFO|Releasing lport 0fd0c84b-50f0-4eec-8552-49dfdd682f27 from this chassis (sb_readonly=0)
Oct 02 12:00:46 compute-0 nova_compute[192079]: 2025-10-02 12:00:46.529 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:46.530 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/0432e6a2-e111-484d-b6cf-d32d9fc846c9.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/0432e6a2-e111-484d-b6cf-d32d9fc846c9.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:46.531 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a8002e10-4271-4e3e-865f-95dfe0c8fac1]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:46.532 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-0432e6a2-e111-484d-b6cf-d32d9fc846c9
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/0432e6a2-e111-484d-b6cf-d32d9fc846c9.pid.haproxy
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 0432e6a2-e111-484d-b6cf-d32d9fc846c9
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:00:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:46.533 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9', 'env', 'PROCESS_TAG=haproxy-0432e6a2-e111-484d-b6cf-d32d9fc846c9', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/0432e6a2-e111-484d-b6cf-d32d9fc846c9.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:00:46 compute-0 nova_compute[192079]: 2025-10-02 12:00:46.554 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:46 compute-0 podman[219999]: 2025-10-02 12:00:46.884016088 +0000 UTC m=+0.072531739 container create 5571e08a9cb0a356552bd8b855cb2791f85c3949f01ea3f1ec5d54c4676f2d8c (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:00:46 compute-0 podman[219999]: 2025-10-02 12:00:46.831851419 +0000 UTC m=+0.020367100 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:00:46 compute-0 systemd[1]: Started libpod-conmon-5571e08a9cb0a356552bd8b855cb2791f85c3949f01ea3f1ec5d54c4676f2d8c.scope.
Oct 02 12:00:46 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:00:46 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/a6d0dc01f39dcf26a76b29bc901aba26a0fdec290e7b1e2bfc8e8f0b049bc946/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:00:47 compute-0 podman[219999]: 2025-10-02 12:00:47.009070814 +0000 UTC m=+0.197586455 container init 5571e08a9cb0a356552bd8b855cb2791f85c3949f01ea3f1ec5d54c4676f2d8c (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, tcib_managed=true)
Oct 02 12:00:47 compute-0 podman[219999]: 2025-10-02 12:00:47.023481147 +0000 UTC m=+0.211996798 container start 5571e08a9cb0a356552bd8b855cb2791f85c3949f01ea3f1ec5d54c4676f2d8c (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9, io.buildah.version=1.41.3, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:00:47 compute-0 neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9[220014]: [NOTICE]   (220018) : New worker (220020) forked
Oct 02 12:00:47 compute-0 neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9[220014]: [NOTICE]   (220018) : Loading success.
Oct 02 12:00:48 compute-0 podman[220029]: 2025-10-02 12:00:48.142718695 +0000 UTC m=+0.058234869 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_id=ovn_metadata_agent, container_name=ovn_metadata_agent, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, io.buildah.version=1.41.3)
Oct 02 12:00:48 compute-0 podman[220030]: 2025-10-02 12:00:48.155181284 +0000 UTC m=+0.064087033 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:00:48 compute-0 nova_compute[192079]: 2025-10-02 12:00:48.582 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:48 compute-0 NetworkManager[51160]: <info>  [1759406448.5837] manager: (patch-br-int-to-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d): new Open vSwitch Interface device (/org/freedesktop/NetworkManager/Devices/31)
Oct 02 12:00:48 compute-0 NetworkManager[51160]: <info>  [1759406448.5844] device (patch-br-int-to-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d)[Open vSwitch Interface]: state change: unmanaged -> unavailable (reason 'managed', managed-type: 'external')
Oct 02 12:00:48 compute-0 NetworkManager[51160]: <info>  [1759406448.5858] manager: (patch-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d-to-br-int): new Open vSwitch Interface device (/org/freedesktop/NetworkManager/Devices/32)
Oct 02 12:00:48 compute-0 NetworkManager[51160]: <info>  [1759406448.5864] device (patch-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d-to-br-int)[Open vSwitch Interface]: state change: unmanaged -> unavailable (reason 'managed', managed-type: 'external')
Oct 02 12:00:48 compute-0 NetworkManager[51160]: <info>  [1759406448.5875] manager: (patch-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d-to-br-int): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/33)
Oct 02 12:00:48 compute-0 NetworkManager[51160]: <info>  [1759406448.5882] manager: (patch-br-int-to-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/34)
Oct 02 12:00:48 compute-0 NetworkManager[51160]: <info>  [1759406448.5887] device (patch-br-int-to-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d)[Open vSwitch Interface]: state change: unavailable -> disconnected (reason 'none', managed-type: 'full')
Oct 02 12:00:48 compute-0 NetworkManager[51160]: <info>  [1759406448.5891] device (patch-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d-to-br-int)[Open vSwitch Interface]: state change: unavailable -> disconnected (reason 'none', managed-type: 'full')
Oct 02 12:00:48 compute-0 ovn_controller[94336]: 2025-10-02T12:00:48Z|00037|binding|INFO|Releasing lport 0e5a8941-b399-4368-aa52-d99cb4bfefe5 from this chassis (sb_readonly=0)
Oct 02 12:00:48 compute-0 ovn_controller[94336]: 2025-10-02T12:00:48Z|00038|binding|INFO|Releasing lport 0fd0c84b-50f0-4eec-8552-49dfdd682f27 from this chassis (sb_readonly=0)
Oct 02 12:00:48 compute-0 ovn_controller[94336]: 2025-10-02T12:00:48Z|00039|binding|INFO|Releasing lport 0e5a8941-b399-4368-aa52-d99cb4bfefe5 from this chassis (sb_readonly=0)
Oct 02 12:00:48 compute-0 ovn_controller[94336]: 2025-10-02T12:00:48Z|00040|binding|INFO|Releasing lport 0fd0c84b-50f0-4eec-8552-49dfdd682f27 from this chassis (sb_readonly=0)
Oct 02 12:00:48 compute-0 nova_compute[192079]: 2025-10-02 12:00:48.734 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:49 compute-0 nova_compute[192079]: 2025-10-02 12:00:49.048 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:49 compute-0 nova_compute[192079]: 2025-10-02 12:00:49.163 2 DEBUG nova.compute.manager [req-c1bd2396-3781-4ba6-a66c-8c5b8bbcb44a req-5c70c87a-4ce5-4e54-9f13-f16ed8ce36c3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Received event network-changed-d92bbd66-2dd9-44e3-a834-a92797ae8d1f external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:00:49 compute-0 nova_compute[192079]: 2025-10-02 12:00:49.163 2 DEBUG nova.compute.manager [req-c1bd2396-3781-4ba6-a66c-8c5b8bbcb44a req-5c70c87a-4ce5-4e54-9f13-f16ed8ce36c3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Refreshing instance network info cache due to event network-changed-d92bbd66-2dd9-44e3-a834-a92797ae8d1f. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:00:49 compute-0 nova_compute[192079]: 2025-10-02 12:00:49.163 2 DEBUG oslo_concurrency.lockutils [req-c1bd2396-3781-4ba6-a66c-8c5b8bbcb44a req-5c70c87a-4ce5-4e54-9f13-f16ed8ce36c3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-a3d563c1-37ae-41be-a49b-ee6efeccfc94" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:00:49 compute-0 nova_compute[192079]: 2025-10-02 12:00:49.164 2 DEBUG oslo_concurrency.lockutils [req-c1bd2396-3781-4ba6-a66c-8c5b8bbcb44a req-5c70c87a-4ce5-4e54-9f13-f16ed8ce36c3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-a3d563c1-37ae-41be-a49b-ee6efeccfc94" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:00:49 compute-0 nova_compute[192079]: 2025-10-02 12:00:49.164 2 DEBUG nova.network.neutron [req-c1bd2396-3781-4ba6-a66c-8c5b8bbcb44a req-5c70c87a-4ce5-4e54-9f13-f16ed8ce36c3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Refreshing network info cache for port d92bbd66-2dd9-44e3-a834-a92797ae8d1f _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:00:49 compute-0 nova_compute[192079]: 2025-10-02 12:00:49.914 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:50 compute-0 nova_compute[192079]: 2025-10-02 12:00:50.655 2 DEBUG nova.network.neutron [req-c1bd2396-3781-4ba6-a66c-8c5b8bbcb44a req-5c70c87a-4ce5-4e54-9f13-f16ed8ce36c3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Updated VIF entry in instance network info cache for port d92bbd66-2dd9-44e3-a834-a92797ae8d1f. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:00:50 compute-0 nova_compute[192079]: 2025-10-02 12:00:50.656 2 DEBUG nova.network.neutron [req-c1bd2396-3781-4ba6-a66c-8c5b8bbcb44a req-5c70c87a-4ce5-4e54-9f13-f16ed8ce36c3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Updating instance_info_cache with network_info: [{"id": "d92bbd66-2dd9-44e3-a834-a92797ae8d1f", "address": "fa:16:3e:8c:7f:ac", "network": {"id": "0432e6a2-e111-484d-b6cf-d32d9fc846c9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1078640656-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.218", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "302a9c83c3eb43818ce6284e9ddb73be", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd92bbd66-2d", "ovs_interfaceid": "d92bbd66-2dd9-44e3-a834-a92797ae8d1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:00:50 compute-0 nova_compute[192079]: 2025-10-02 12:00:50.791 2 DEBUG oslo_concurrency.lockutils [req-c1bd2396-3781-4ba6-a66c-8c5b8bbcb44a req-5c70c87a-4ce5-4e54-9f13-f16ed8ce36c3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-a3d563c1-37ae-41be-a49b-ee6efeccfc94" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:00:51 compute-0 nova_compute[192079]: 2025-10-02 12:00:51.556 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:54 compute-0 nova_compute[192079]: 2025-10-02 12:00:54.050 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:54 compute-0 podman[220077]: 2025-10-02 12:00:54.144749295 +0000 UTC m=+0.062873559 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, container_name=ceilometer_agent_compute, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, 
io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, config_id=edpm)
Oct 02 12:00:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:54.416 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=4, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=3) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:00:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:54.417 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 0 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:00:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:54.418 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '4'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:00:54 compute-0 nova_compute[192079]: 2025-10-02 12:00:54.421 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:54 compute-0 nova_compute[192079]: 2025-10-02 12:00:54.966 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:55 compute-0 ovn_controller[94336]: 2025-10-02T12:00:55Z|00004|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:5f:29:fc 10.1.0.48
Oct 02 12:00:55 compute-0 ovn_controller[94336]: 2025-10-02T12:00:55Z|00005|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:5f:29:fc 10.1.0.48
Oct 02 12:00:55 compute-0 nova_compute[192079]: 2025-10-02 12:00:55.798 2 DEBUG oslo_concurrency.lockutils [None req-8b46f1c7-a927-4e30-a470-4dbae8044938 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Acquiring lock "54199f32-2d2a-4c54-a6bd-31d2d5675a46" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:00:55 compute-0 nova_compute[192079]: 2025-10-02 12:00:55.799 2 DEBUG oslo_concurrency.lockutils [None req-8b46f1c7-a927-4e30-a470-4dbae8044938 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Lock "54199f32-2d2a-4c54-a6bd-31d2d5675a46" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:00:55 compute-0 nova_compute[192079]: 2025-10-02 12:00:55.799 2 DEBUG oslo_concurrency.lockutils [None req-8b46f1c7-a927-4e30-a470-4dbae8044938 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Acquiring lock "54199f32-2d2a-4c54-a6bd-31d2d5675a46-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:00:55 compute-0 nova_compute[192079]: 2025-10-02 12:00:55.799 2 DEBUG oslo_concurrency.lockutils [None req-8b46f1c7-a927-4e30-a470-4dbae8044938 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Lock "54199f32-2d2a-4c54-a6bd-31d2d5675a46-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:00:55 compute-0 nova_compute[192079]: 2025-10-02 12:00:55.800 2 DEBUG oslo_concurrency.lockutils [None req-8b46f1c7-a927-4e30-a470-4dbae8044938 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Lock "54199f32-2d2a-4c54-a6bd-31d2d5675a46-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:00:55 compute-0 nova_compute[192079]: 2025-10-02 12:00:55.809 2 INFO nova.compute.manager [None req-8b46f1c7-a927-4e30-a470-4dbae8044938 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Terminating instance
Oct 02 12:00:55 compute-0 nova_compute[192079]: 2025-10-02 12:00:55.820 2 DEBUG nova.compute.manager [None req-8b46f1c7-a927-4e30-a470-4dbae8044938 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:00:55 compute-0 kernel: tap92d67693-7b (unregistering): left promiscuous mode
Oct 02 12:00:55 compute-0 NetworkManager[51160]: <info>  [1759406455.8493] device (tap92d67693-7b): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:00:55 compute-0 ovn_controller[94336]: 2025-10-02T12:00:55Z|00041|binding|INFO|Releasing lport 92d67693-7b14-496d-85fc-00362ed0e9f5 from this chassis (sb_readonly=0)
Oct 02 12:00:55 compute-0 ovn_controller[94336]: 2025-10-02T12:00:55Z|00042|binding|INFO|Setting lport 92d67693-7b14-496d-85fc-00362ed0e9f5 down in Southbound
Oct 02 12:00:55 compute-0 nova_compute[192079]: 2025-10-02 12:00:55.857 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:55 compute-0 ovn_controller[94336]: 2025-10-02T12:00:55Z|00043|binding|INFO|Removing iface tap92d67693-7b ovn-installed in OVS
Oct 02 12:00:55 compute-0 nova_compute[192079]: 2025-10-02 12:00:55.858 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:55 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:55.865 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:5f:29:fc 10.1.0.48 fdfe:381f:8400::287'], port_security=['fa:16:3e:5f:29:fc 10.1.0.48 fdfe:381f:8400::287'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.1.0.48/26 fdfe:381f:8400::287/64', 'neutron:device_id': '54199f32-2d2a-4c54-a6bd-31d2d5675a46', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '23de7e9a877e477cb52ac4d4c1410e0d', 'neutron:revision_number': '4', 'neutron:security_group_ids': '6166ab66-e763-4e6f-ba6d-1725486f45f7', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=9cab3463-7636-46ad-b75d-f72d7d1739eb, chassis=[], tunnel_key=5, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=92d67693-7b14-496d-85fc-00362ed0e9f5) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:00:55 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:55.866 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 92d67693-7b14-496d-85fc-00362ed0e9f5 in datapath 0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6 unbound from our chassis
Oct 02 12:00:55 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:55.867 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:00:55 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:55.868 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2a444bb3-fe50-45fd-8587-a8b04d3701e2]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:55 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:55.869 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6 namespace which is not needed anymore
Oct 02 12:00:55 compute-0 nova_compute[192079]: 2025-10-02 12:00:55.871 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:55 compute-0 systemd[1]: machine-qemu\x2d1\x2dinstance\x2d00000003.scope: Deactivated successfully.
Oct 02 12:00:55 compute-0 systemd[1]: machine-qemu\x2d1\x2dinstance\x2d00000003.scope: Consumed 13.221s CPU time.
Oct 02 12:00:55 compute-0 systemd-machined[152150]: Machine qemu-1-instance-00000003 terminated.
Oct 02 12:00:55 compute-0 neutron-haproxy-ovnmeta-0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6[219938]: [NOTICE]   (219942) : haproxy version is 2.8.14-c23fe91
Oct 02 12:00:55 compute-0 neutron-haproxy-ovnmeta-0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6[219938]: [NOTICE]   (219942) : path to executable is /usr/sbin/haproxy
Oct 02 12:00:55 compute-0 neutron-haproxy-ovnmeta-0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6[219938]: [WARNING]  (219942) : Exiting Master process...
Oct 02 12:00:55 compute-0 neutron-haproxy-ovnmeta-0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6[219938]: [ALERT]    (219942) : Current worker (219944) exited with code 143 (Terminated)
Oct 02 12:00:55 compute-0 neutron-haproxy-ovnmeta-0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6[219938]: [WARNING]  (219942) : All workers exited. Exiting... (0)
Oct 02 12:00:55 compute-0 systemd[1]: libpod-d4e81cac257c0b12537cd3ef5c121e1e69375954e1d21d6ebd85ea3e6b5d2b7d.scope: Deactivated successfully.
Oct 02 12:00:55 compute-0 conmon[219938]: conmon d4e81cac257c0b12537c <nwarn>: Failed to open cgroups file: /sys/fs/cgroup/machine.slice/libpod-d4e81cac257c0b12537cd3ef5c121e1e69375954e1d21d6ebd85ea3e6b5d2b7d.scope/container/memory.events
Oct 02 12:00:55 compute-0 podman[220141]: 2025-10-02 12:00:55.997852943 +0000 UTC m=+0.052553496 container died d4e81cac257c0b12537cd3ef5c121e1e69375954e1d21d6ebd85ea3e6b5d2b7d (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3)
Oct 02 12:00:56 compute-0 nova_compute[192079]: 2025-10-02 12:00:56.085 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:56 compute-0 nova_compute[192079]: 2025-10-02 12:00:56.097 2 DEBUG nova.compute.manager [req-7de8a6d2-0eac-40d0-86be-58e6ac7fbc2c req-fc7ce7ab-2c55-4d5d-b11d-b525e1954ec3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Received event network-vif-unplugged-92d67693-7b14-496d-85fc-00362ed0e9f5 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:00:56 compute-0 nova_compute[192079]: 2025-10-02 12:00:56.097 2 DEBUG oslo_concurrency.lockutils [req-7de8a6d2-0eac-40d0-86be-58e6ac7fbc2c req-fc7ce7ab-2c55-4d5d-b11d-b525e1954ec3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "54199f32-2d2a-4c54-a6bd-31d2d5675a46-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:00:56 compute-0 nova_compute[192079]: 2025-10-02 12:00:56.097 2 DEBUG oslo_concurrency.lockutils [req-7de8a6d2-0eac-40d0-86be-58e6ac7fbc2c req-fc7ce7ab-2c55-4d5d-b11d-b525e1954ec3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "54199f32-2d2a-4c54-a6bd-31d2d5675a46-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:00:56 compute-0 nova_compute[192079]: 2025-10-02 12:00:56.097 2 DEBUG oslo_concurrency.lockutils [req-7de8a6d2-0eac-40d0-86be-58e6ac7fbc2c req-fc7ce7ab-2c55-4d5d-b11d-b525e1954ec3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "54199f32-2d2a-4c54-a6bd-31d2d5675a46-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:00:56 compute-0 nova_compute[192079]: 2025-10-02 12:00:56.098 2 DEBUG nova.compute.manager [req-7de8a6d2-0eac-40d0-86be-58e6ac7fbc2c req-fc7ce7ab-2c55-4d5d-b11d-b525e1954ec3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] No waiting events found dispatching network-vif-unplugged-92d67693-7b14-496d-85fc-00362ed0e9f5 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:00:56 compute-0 nova_compute[192079]: 2025-10-02 12:00:56.098 2 DEBUG nova.compute.manager [req-7de8a6d2-0eac-40d0-86be-58e6ac7fbc2c req-fc7ce7ab-2c55-4d5d-b11d-b525e1954ec3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Received event network-vif-unplugged-92d67693-7b14-496d-85fc-00362ed0e9f5 for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:00:56 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-d4e81cac257c0b12537cd3ef5c121e1e69375954e1d21d6ebd85ea3e6b5d2b7d-userdata-shm.mount: Deactivated successfully.
Oct 02 12:00:56 compute-0 systemd[1]: var-lib-containers-storage-overlay-57b34cdda5bcf70df3ee3a5422ce1a2a4ad13df232e54fde3dbfe5204726f733-merged.mount: Deactivated successfully.
Oct 02 12:00:56 compute-0 nova_compute[192079]: 2025-10-02 12:00:56.119 2 INFO nova.virt.libvirt.driver [-] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Instance destroyed successfully.
Oct 02 12:00:56 compute-0 nova_compute[192079]: 2025-10-02 12:00:56.120 2 DEBUG nova.objects.instance [None req-8b46f1c7-a927-4e30-a470-4dbae8044938 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Lazy-loading 'resources' on Instance uuid 54199f32-2d2a-4c54-a6bd-31d2d5675a46 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:00:56 compute-0 nova_compute[192079]: 2025-10-02 12:00:56.136 2 DEBUG nova.virt.libvirt.vif [None req-8b46f1c7-a927-4e30-a470-4dbae8044938 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:00:09Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description=None,display_name='tempest-tempest.common.compute-instance-549213814-2',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-tempest-common-compute-instance-549213814-2',id=3,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=1,launched_at=2025-10-02T12:00:43Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='23de7e9a877e477cb52ac4d4c1410e0d',ramdisk_id='',reservation_id='r-2zs6ym1o',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_n
ame='tempest-AutoAllocateNetworkTest-1436985778',owner_user_name='tempest-AutoAllocateNetworkTest-1436985778-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:00:43Z,user_data=None,user_id='4e1cdf41d58b4774b94da988b9e8db73',uuid=54199f32-2d2a-4c54-a6bd-31d2d5675a46,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "92d67693-7b14-496d-85fc-00362ed0e9f5", "address": "fa:16:3e:5f:29:fc", "network": {"id": "0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6", "bridge": "br-int", "label": "auto_allocated_network", "subnets": [{"cidr": "10.1.0.0/26", "dns": [], "gateway": {"address": "10.1.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.1.0.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}, {"cidr": "fdfe:381f:8400::/64", "dns": [], "gateway": {"address": "fdfe:381f:8400::1", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "fdfe:381f:8400::287", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "23de7e9a877e477cb52ac4d4c1410e0d", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92d67693-7b", "ovs_interfaceid": "92d67693-7b14-496d-85fc-00362ed0e9f5", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:00:56 compute-0 nova_compute[192079]: 2025-10-02 12:00:56.137 2 DEBUG nova.network.os_vif_util [None req-8b46f1c7-a927-4e30-a470-4dbae8044938 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Converting VIF {"id": "92d67693-7b14-496d-85fc-00362ed0e9f5", "address": "fa:16:3e:5f:29:fc", "network": {"id": "0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6", "bridge": "br-int", "label": "auto_allocated_network", "subnets": [{"cidr": "10.1.0.0/26", "dns": [], "gateway": {"address": "10.1.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.1.0.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}, {"cidr": "fdfe:381f:8400::/64", "dns": [], "gateway": {"address": "fdfe:381f:8400::1", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "fdfe:381f:8400::287", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "23de7e9a877e477cb52ac4d4c1410e0d", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92d67693-7b", "ovs_interfaceid": "92d67693-7b14-496d-85fc-00362ed0e9f5", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:00:56 compute-0 nova_compute[192079]: 2025-10-02 12:00:56.137 2 DEBUG nova.network.os_vif_util [None req-8b46f1c7-a927-4e30-a470-4dbae8044938 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:5f:29:fc,bridge_name='br-int',has_traffic_filtering=True,id=92d67693-7b14-496d-85fc-00362ed0e9f5,network=Network(0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap92d67693-7b') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:00:56 compute-0 nova_compute[192079]: 2025-10-02 12:00:56.137 2 DEBUG os_vif [None req-8b46f1c7-a927-4e30-a470-4dbae8044938 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:5f:29:fc,bridge_name='br-int',has_traffic_filtering=True,id=92d67693-7b14-496d-85fc-00362ed0e9f5,network=Network(0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap92d67693-7b') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:00:56 compute-0 nova_compute[192079]: 2025-10-02 12:00:56.139 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:56 compute-0 nova_compute[192079]: 2025-10-02 12:00:56.139 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap92d67693-7b, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:00:56 compute-0 nova_compute[192079]: 2025-10-02 12:00:56.141 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:56 compute-0 nova_compute[192079]: 2025-10-02 12:00:56.143 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:56 compute-0 nova_compute[192079]: 2025-10-02 12:00:56.145 2 INFO os_vif [None req-8b46f1c7-a927-4e30-a470-4dbae8044938 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:5f:29:fc,bridge_name='br-int',has_traffic_filtering=True,id=92d67693-7b14-496d-85fc-00362ed0e9f5,network=Network(0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap92d67693-7b')
Oct 02 12:00:56 compute-0 nova_compute[192079]: 2025-10-02 12:00:56.145 2 INFO nova.virt.libvirt.driver [None req-8b46f1c7-a927-4e30-a470-4dbae8044938 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Deleting instance files /var/lib/nova/instances/54199f32-2d2a-4c54-a6bd-31d2d5675a46_del
Oct 02 12:00:56 compute-0 nova_compute[192079]: 2025-10-02 12:00:56.146 2 INFO nova.virt.libvirt.driver [None req-8b46f1c7-a927-4e30-a470-4dbae8044938 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Deletion of /var/lib/nova/instances/54199f32-2d2a-4c54-a6bd-31d2d5675a46_del complete
Oct 02 12:00:56 compute-0 podman[220141]: 2025-10-02 12:00:56.150265986 +0000 UTC m=+0.204966539 container cleanup d4e81cac257c0b12537cd3ef5c121e1e69375954e1d21d6ebd85ea3e6b5d2b7d (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:00:56 compute-0 systemd[1]: libpod-conmon-d4e81cac257c0b12537cd3ef5c121e1e69375954e1d21d6ebd85ea3e6b5d2b7d.scope: Deactivated successfully.
Oct 02 12:00:56 compute-0 nova_compute[192079]: 2025-10-02 12:00:56.219 2 DEBUG nova.virt.libvirt.host [None req-8b46f1c7-a927-4e30-a470-4dbae8044938 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Checking UEFI support for host arch (x86_64) supports_uefi /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1754
Oct 02 12:00:56 compute-0 nova_compute[192079]: 2025-10-02 12:00:56.220 2 INFO nova.virt.libvirt.host [None req-8b46f1c7-a927-4e30-a470-4dbae8044938 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] UEFI support detected
Oct 02 12:00:56 compute-0 nova_compute[192079]: 2025-10-02 12:00:56.221 2 INFO nova.compute.manager [None req-8b46f1c7-a927-4e30-a470-4dbae8044938 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Took 0.40 seconds to destroy the instance on the hypervisor.
Oct 02 12:00:56 compute-0 nova_compute[192079]: 2025-10-02 12:00:56.221 2 DEBUG oslo.service.loopingcall [None req-8b46f1c7-a927-4e30-a470-4dbae8044938 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:00:56 compute-0 nova_compute[192079]: 2025-10-02 12:00:56.221 2 DEBUG nova.compute.manager [-] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:00:56 compute-0 nova_compute[192079]: 2025-10-02 12:00:56.222 2 DEBUG nova.network.neutron [-] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:00:56 compute-0 podman[220186]: 2025-10-02 12:00:56.25659198 +0000 UTC m=+0.087470351 container remove d4e81cac257c0b12537cd3ef5c121e1e69375954e1d21d6ebd85ea3e6b5d2b7d (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, tcib_managed=true, org.label-schema.schema-version=1.0)
Oct 02 12:00:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:56.261 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[aa80d75b-7d1a-45ba-88e3-8d8192687765]: (4, ('Thu Oct  2 12:00:55 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6 (d4e81cac257c0b12537cd3ef5c121e1e69375954e1d21d6ebd85ea3e6b5d2b7d)\nd4e81cac257c0b12537cd3ef5c121e1e69375954e1d21d6ebd85ea3e6b5d2b7d\nThu Oct  2 12:00:56 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6 (d4e81cac257c0b12537cd3ef5c121e1e69375954e1d21d6ebd85ea3e6b5d2b7d)\nd4e81cac257c0b12537cd3ef5c121e1e69375954e1d21d6ebd85ea3e6b5d2b7d\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:56.262 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[593440d2-da6c-4fea-84a4-d21439be4623]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:56.263 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap0e6cbdbf-b0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:00:56 compute-0 nova_compute[192079]: 2025-10-02 12:00:56.264 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:56 compute-0 kernel: tap0e6cbdbf-b0: left promiscuous mode
Oct 02 12:00:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:56.268 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a9de93aa-90cc-4c75-9691-6e855f884346]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:56 compute-0 nova_compute[192079]: 2025-10-02 12:00:56.277 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:56.297 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5bb3a552-3e1c-401f-8b99-3e9ab18a43d5]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:56.298 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a7f961fa-90ec-42bf-81ee-c1c6a7aa51e0]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:56.310 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[82d5854c-80e8-4abd-8d0c-00aa733d840c]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 444306, 'reachable_time': 40040, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 220201, 'error': None, 'target': 'ovnmeta-0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:56 compute-0 systemd[1]: run-netns-ovnmeta\x2d0e6cbdbf\x2db727\x2d48dc\x2d82d1\x2df7af5e6b3fc6.mount: Deactivated successfully.
Oct 02 12:00:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:56.319 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-0e6cbdbf-b727-48dc-82d1-f7af5e6b3fc6 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:00:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:00:56.320 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[6c0e2cdf-f6f8-4d6d-8343-68600865fc39]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:00:56 compute-0 nova_compute[192079]: 2025-10-02 12:00:56.865 2 DEBUG nova.network.neutron [-] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:00:57 compute-0 nova_compute[192079]: 2025-10-02 12:00:57.006 2 INFO nova.compute.manager [-] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Took 0.78 seconds to deallocate network for instance.
Oct 02 12:00:57 compute-0 nova_compute[192079]: 2025-10-02 12:00:57.080 2 DEBUG oslo_concurrency.lockutils [None req-8b46f1c7-a927-4e30-a470-4dbae8044938 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:00:57 compute-0 nova_compute[192079]: 2025-10-02 12:00:57.080 2 DEBUG oslo_concurrency.lockutils [None req-8b46f1c7-a927-4e30-a470-4dbae8044938 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:00:57 compute-0 nova_compute[192079]: 2025-10-02 12:00:57.158 2 DEBUG nova.compute.provider_tree [None req-8b46f1c7-a927-4e30-a470-4dbae8044938 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:00:57 compute-0 nova_compute[192079]: 2025-10-02 12:00:57.177 2 DEBUG nova.scheduler.client.report [None req-8b46f1c7-a927-4e30-a470-4dbae8044938 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:00:57 compute-0 nova_compute[192079]: 2025-10-02 12:00:57.208 2 DEBUG oslo_concurrency.lockutils [None req-8b46f1c7-a927-4e30-a470-4dbae8044938 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.127s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:00:57 compute-0 nova_compute[192079]: 2025-10-02 12:00:57.228 2 INFO nova.scheduler.client.report [None req-8b46f1c7-a927-4e30-a470-4dbae8044938 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Deleted allocations for instance 54199f32-2d2a-4c54-a6bd-31d2d5675a46
Oct 02 12:00:57 compute-0 nova_compute[192079]: 2025-10-02 12:00:57.320 2 DEBUG oslo_concurrency.lockutils [None req-8b46f1c7-a927-4e30-a470-4dbae8044938 4e1cdf41d58b4774b94da988b9e8db73 23de7e9a877e477cb52ac4d4c1410e0d - - default default] Lock "54199f32-2d2a-4c54-a6bd-31d2d5675a46" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.521s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:00:58 compute-0 ovn_controller[94336]: 2025-10-02T12:00:58Z|00006|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:8c:7f:ac 10.100.0.3
Oct 02 12:00:58 compute-0 ovn_controller[94336]: 2025-10-02T12:00:58Z|00007|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:8c:7f:ac 10.100.0.3
Oct 02 12:00:58 compute-0 nova_compute[192079]: 2025-10-02 12:00:58.199 2 DEBUG nova.compute.manager [req-81526533-b4d3-432e-b80f-1d3d65b450db req-fa033c61-d86f-4217-b64b-47c3d459104a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Received event network-vif-plugged-92d67693-7b14-496d-85fc-00362ed0e9f5 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:00:58 compute-0 nova_compute[192079]: 2025-10-02 12:00:58.199 2 DEBUG oslo_concurrency.lockutils [req-81526533-b4d3-432e-b80f-1d3d65b450db req-fa033c61-d86f-4217-b64b-47c3d459104a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "54199f32-2d2a-4c54-a6bd-31d2d5675a46-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:00:58 compute-0 nova_compute[192079]: 2025-10-02 12:00:58.200 2 DEBUG oslo_concurrency.lockutils [req-81526533-b4d3-432e-b80f-1d3d65b450db req-fa033c61-d86f-4217-b64b-47c3d459104a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "54199f32-2d2a-4c54-a6bd-31d2d5675a46-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:00:58 compute-0 nova_compute[192079]: 2025-10-02 12:00:58.200 2 DEBUG oslo_concurrency.lockutils [req-81526533-b4d3-432e-b80f-1d3d65b450db req-fa033c61-d86f-4217-b64b-47c3d459104a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "54199f32-2d2a-4c54-a6bd-31d2d5675a46-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:00:58 compute-0 nova_compute[192079]: 2025-10-02 12:00:58.201 2 DEBUG nova.compute.manager [req-81526533-b4d3-432e-b80f-1d3d65b450db req-fa033c61-d86f-4217-b64b-47c3d459104a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] No waiting events found dispatching network-vif-plugged-92d67693-7b14-496d-85fc-00362ed0e9f5 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:00:58 compute-0 nova_compute[192079]: 2025-10-02 12:00:58.201 2 WARNING nova.compute.manager [req-81526533-b4d3-432e-b80f-1d3d65b450db req-fa033c61-d86f-4217-b64b-47c3d459104a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Received unexpected event network-vif-plugged-92d67693-7b14-496d-85fc-00362ed0e9f5 for instance with vm_state deleted and task_state None.
Oct 02 12:00:58 compute-0 nova_compute[192079]: 2025-10-02 12:00:58.772 2 DEBUG nova.compute.manager [req-5e943290-0669-4090-9ba8-928d511098ec req-83947c60-d7f7-4bd5-ad2f-1db16592444b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Received event network-vif-deleted-92d67693-7b14-496d-85fc-00362ed0e9f5 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:00:59 compute-0 nova_compute[192079]: 2025-10-02 12:00:59.052 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:00:59 compute-0 podman[220210]: 2025-10-02 12:00:59.154518932 +0000 UTC m=+0.058534900 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, config_id=multipathd, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:00:59 compute-0 podman[220209]: 2025-10-02 12:00:59.154912903 +0000 UTC m=+0.062591290 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.openshift.tags=minimal rhel9, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, build-date=2025-08-20T13:12:41, com.redhat.component=ubi9-minimal-container, container_name=openstack_network_exporter, vendor=Red Hat, Inc., io.openshift.expose-services=, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., architecture=x86_64, url=https://catalog.redhat.com/en/search?searchType=containers, vcs-type=git, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, config_id=edpm, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', 
'/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, distribution-scope=public, name=ubi9-minimal, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, version=9.6, release=1755695350, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., maintainer=Red Hat, Inc., io.buildah.version=1.33.7, managed_by=edpm_ansible, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9.)
Oct 02 12:01:01 compute-0 nova_compute[192079]: 2025-10-02 12:01:01.143 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:01 compute-0 anacron[1094]: Job `cron.weekly' started
Oct 02 12:01:01 compute-0 anacron[1094]: Job `cron.weekly' terminated
Oct 02 12:01:01 compute-0 CROND[220253]: (root) CMD (run-parts /etc/cron.hourly)
Oct 02 12:01:01 compute-0 run-parts[220256]: (/etc/cron.hourly) starting 0anacron
Oct 02 12:01:01 compute-0 run-parts[220262]: (/etc/cron.hourly) finished 0anacron
Oct 02 12:01:01 compute-0 CROND[220252]: (root) CMDEND (run-parts /etc/cron.hourly)
Oct 02 12:01:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:02.201 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:02.201 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:02.202 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:04 compute-0 nova_compute[192079]: 2025-10-02 12:01:04.055 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:04 compute-0 ovn_controller[94336]: 2025-10-02T12:01:04Z|00044|binding|INFO|Releasing lport 0fd0c84b-50f0-4eec-8552-49dfdd682f27 from this chassis (sb_readonly=0)
Oct 02 12:01:04 compute-0 nova_compute[192079]: 2025-10-02 12:01:04.925 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:05 compute-0 nova_compute[192079]: 2025-10-02 12:01:05.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:01:05 compute-0 nova_compute[192079]: 2025-10-02 12:01:05.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:01:06 compute-0 nova_compute[192079]: 2025-10-02 12:01:06.144 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:06 compute-0 podman[220263]: 2025-10-02 12:01:06.14891542 +0000 UTC m=+0.053366808 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:01:06 compute-0 podman[220264]: 2025-10-02 12:01:06.163694934 +0000 UTC m=+0.060017720 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, container_name=iscsid, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid, tcib_managed=true)
Oct 02 12:01:06 compute-0 nova_compute[192079]: 2025-10-02 12:01:06.522 2 DEBUG oslo_concurrency.lockutils [None req-66ffcc42-737a-484f-b5c3-ef806a4eeb94 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Acquiring lock "a3d563c1-37ae-41be-a49b-ee6efeccfc94" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:06 compute-0 nova_compute[192079]: 2025-10-02 12:01:06.522 2 DEBUG oslo_concurrency.lockutils [None req-66ffcc42-737a-484f-b5c3-ef806a4eeb94 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "a3d563c1-37ae-41be-a49b-ee6efeccfc94" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:06 compute-0 nova_compute[192079]: 2025-10-02 12:01:06.523 2 DEBUG oslo_concurrency.lockutils [None req-66ffcc42-737a-484f-b5c3-ef806a4eeb94 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Acquiring lock "a3d563c1-37ae-41be-a49b-ee6efeccfc94-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:06 compute-0 nova_compute[192079]: 2025-10-02 12:01:06.523 2 DEBUG oslo_concurrency.lockutils [None req-66ffcc42-737a-484f-b5c3-ef806a4eeb94 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "a3d563c1-37ae-41be-a49b-ee6efeccfc94-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:06 compute-0 nova_compute[192079]: 2025-10-02 12:01:06.523 2 DEBUG oslo_concurrency.lockutils [None req-66ffcc42-737a-484f-b5c3-ef806a4eeb94 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "a3d563c1-37ae-41be-a49b-ee6efeccfc94-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:06 compute-0 nova_compute[192079]: 2025-10-02 12:01:06.918 2 INFO nova.compute.manager [None req-66ffcc42-737a-484f-b5c3-ef806a4eeb94 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Terminating instance
Oct 02 12:01:07 compute-0 nova_compute[192079]: 2025-10-02 12:01:07.335 2 DEBUG nova.compute.manager [None req-66ffcc42-737a-484f-b5c3-ef806a4eeb94 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:01:07 compute-0 kernel: tapd92bbd66-2d (unregistering): left promiscuous mode
Oct 02 12:01:07 compute-0 NetworkManager[51160]: <info>  [1759406467.3593] device (tapd92bbd66-2d): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:01:07 compute-0 nova_compute[192079]: 2025-10-02 12:01:07.369 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:07 compute-0 ovn_controller[94336]: 2025-10-02T12:01:07Z|00045|binding|INFO|Releasing lport d92bbd66-2dd9-44e3-a834-a92797ae8d1f from this chassis (sb_readonly=0)
Oct 02 12:01:07 compute-0 ovn_controller[94336]: 2025-10-02T12:01:07Z|00046|binding|INFO|Setting lport d92bbd66-2dd9-44e3-a834-a92797ae8d1f down in Southbound
Oct 02 12:01:07 compute-0 ovn_controller[94336]: 2025-10-02T12:01:07Z|00047|binding|INFO|Removing iface tapd92bbd66-2d ovn-installed in OVS
Oct 02 12:01:07 compute-0 nova_compute[192079]: 2025-10-02 12:01:07.374 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:07 compute-0 nova_compute[192079]: 2025-10-02 12:01:07.401 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:07 compute-0 systemd[1]: machine-qemu\x2d2\x2dinstance\x2d00000005.scope: Deactivated successfully.
Oct 02 12:01:07 compute-0 systemd[1]: machine-qemu\x2d2\x2dinstance\x2d00000005.scope: Consumed 12.971s CPU time.
Oct 02 12:01:07 compute-0 systemd-machined[152150]: Machine qemu-2-instance-00000005 terminated.
Oct 02 12:01:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:07.540 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:8c:7f:ac 10.100.0.3'], port_security=['fa:16:3e:8c:7f:ac 10.100.0.3'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.3/28', 'neutron:device_id': 'a3d563c1-37ae-41be-a49b-ee6efeccfc94', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-0432e6a2-e111-484d-b6cf-d32d9fc846c9', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '302a9c83c3eb43818ce6284e9ddb73be', 'neutron:revision_number': '4', 'neutron:security_group_ids': 'afed868a-564b-4ceb-947a-806e11012ac0', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com', 'neutron:port_fip': '192.168.122.218'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=b550d36f-725b-4b76-9c4e-aa36183370a9, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=d92bbd66-2dd9-44e3-a834-a92797ae8d1f) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:01:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:07.542 103294 INFO neutron.agent.ovn.metadata.agent [-] Port d92bbd66-2dd9-44e3-a834-a92797ae8d1f in datapath 0432e6a2-e111-484d-b6cf-d32d9fc846c9 unbound from our chassis
Oct 02 12:01:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:07.543 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 0432e6a2-e111-484d-b6cf-d32d9fc846c9, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:01:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:07.544 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5139083d-b46c-4709-97df-5ca4e0a10961]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:07.545 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9 namespace which is not needed anymore
Oct 02 12:01:07 compute-0 nova_compute[192079]: 2025-10-02 12:01:07.598 2 INFO nova.virt.libvirt.driver [-] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Instance destroyed successfully.
Oct 02 12:01:07 compute-0 nova_compute[192079]: 2025-10-02 12:01:07.599 2 DEBUG nova.objects.instance [None req-66ffcc42-737a-484f-b5c3-ef806a4eeb94 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lazy-loading 'resources' on Instance uuid a3d563c1-37ae-41be-a49b-ee6efeccfc94 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:01:07 compute-0 nova_compute[192079]: 2025-10-02 12:01:07.660 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:01:07 compute-0 nova_compute[192079]: 2025-10-02 12:01:07.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:01:07 compute-0 nova_compute[192079]: 2025-10-02 12:01:07.664 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:01:07 compute-0 nova_compute[192079]: 2025-10-02 12:01:07.664 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:01:07 compute-0 neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9[220014]: [NOTICE]   (220018) : haproxy version is 2.8.14-c23fe91
Oct 02 12:01:07 compute-0 neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9[220014]: [NOTICE]   (220018) : path to executable is /usr/sbin/haproxy
Oct 02 12:01:07 compute-0 neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9[220014]: [WARNING]  (220018) : Exiting Master process...
Oct 02 12:01:07 compute-0 neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9[220014]: [ALERT]    (220018) : Current worker (220020) exited with code 143 (Terminated)
Oct 02 12:01:07 compute-0 neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9[220014]: [WARNING]  (220018) : All workers exited. Exiting... (0)
Oct 02 12:01:07 compute-0 systemd[1]: libpod-5571e08a9cb0a356552bd8b855cb2791f85c3949f01ea3f1ec5d54c4676f2d8c.scope: Deactivated successfully.
Oct 02 12:01:07 compute-0 podman[220349]: 2025-10-02 12:01:07.683252353 +0000 UTC m=+0.049819441 container died 5571e08a9cb0a356552bd8b855cb2791f85c3949f01ea3f1ec5d54c4676f2d8c (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 12:01:07 compute-0 nova_compute[192079]: 2025-10-02 12:01:07.706 2 DEBUG nova.virt.libvirt.vif [None req-66ffcc42-737a-484f-b5c3-ef806a4eeb94 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:00:31Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServersWithSpecificFlavorTestJSON-server-305259138',display_name='tempest-ServersWithSpecificFlavorTestJSON-server-305259138',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(8),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverswithspecificflavortestjson-server-305259138',id=5,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=8,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBLQAR6rn15gxsCt5BVT9ZeXnbqUta2pJ91YMBkT9rHUc9ZBtTK728XqHiZfyDrBlMAbpgHvu/gvYEjRf3OvnLlEsO2AY8MfRajDqsbCXPjRzSoO5eacsxtVMw0D5LoybNA==',key_name='tempest-keypair-283845564',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:00:45Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='302a9c83c3eb43818ce6284e9ddb73be',ramdisk_id='',reservation_id='r-afr0o08k',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-ServersWithSpecificFlavorTestJSON-1100192498',owner_user_name='tempest-ServersWithSpecificFlavorTestJSON-1100192498-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:00:45Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='67132a26bb4c454aa5ed0e4b8fee032c',uuid=a3d563c1-37ae-41be-a49b-ee6efeccfc94,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "d92bbd66-2dd9-44e3-a834-a92797ae8d1f", "address": "fa:16:3e:8c:7f:ac", "network": {"id": "0432e6a2-e111-484d-b6cf-d32d9fc846c9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1078640656-network", "subnets": [{"cidr": 
"10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.218", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "302a9c83c3eb43818ce6284e9ddb73be", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd92bbd66-2d", "ovs_interfaceid": "d92bbd66-2dd9-44e3-a834-a92797ae8d1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:01:07 compute-0 nova_compute[192079]: 2025-10-02 12:01:07.706 2 DEBUG nova.network.os_vif_util [None req-66ffcc42-737a-484f-b5c3-ef806a4eeb94 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Converting VIF {"id": "d92bbd66-2dd9-44e3-a834-a92797ae8d1f", "address": "fa:16:3e:8c:7f:ac", "network": {"id": "0432e6a2-e111-484d-b6cf-d32d9fc846c9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1078640656-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.218", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "302a9c83c3eb43818ce6284e9ddb73be", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd92bbd66-2d", "ovs_interfaceid": "d92bbd66-2dd9-44e3-a834-a92797ae8d1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:01:07 compute-0 nova_compute[192079]: 2025-10-02 12:01:07.707 2 DEBUG nova.network.os_vif_util [None req-66ffcc42-737a-484f-b5c3-ef806a4eeb94 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:8c:7f:ac,bridge_name='br-int',has_traffic_filtering=True,id=d92bbd66-2dd9-44e3-a834-a92797ae8d1f,network=Network(0432e6a2-e111-484d-b6cf-d32d9fc846c9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd92bbd66-2d') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:01:07 compute-0 nova_compute[192079]: 2025-10-02 12:01:07.707 2 DEBUG os_vif [None req-66ffcc42-737a-484f-b5c3-ef806a4eeb94 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:8c:7f:ac,bridge_name='br-int',has_traffic_filtering=True,id=d92bbd66-2dd9-44e3-a834-a92797ae8d1f,network=Network(0432e6a2-e111-484d-b6cf-d32d9fc846c9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd92bbd66-2d') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:01:07 compute-0 nova_compute[192079]: 2025-10-02 12:01:07.709 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:07 compute-0 nova_compute[192079]: 2025-10-02 12:01:07.709 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapd92bbd66-2d, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:01:07 compute-0 nova_compute[192079]: 2025-10-02 12:01:07.710 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:07 compute-0 nova_compute[192079]: 2025-10-02 12:01:07.713 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:01:07 compute-0 nova_compute[192079]: 2025-10-02 12:01:07.715 2 INFO os_vif [None req-66ffcc42-737a-484f-b5c3-ef806a4eeb94 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:8c:7f:ac,bridge_name='br-int',has_traffic_filtering=True,id=d92bbd66-2dd9-44e3-a834-a92797ae8d1f,network=Network(0432e6a2-e111-484d-b6cf-d32d9fc846c9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd92bbd66-2d')
Oct 02 12:01:07 compute-0 nova_compute[192079]: 2025-10-02 12:01:07.715 2 INFO nova.virt.libvirt.driver [None req-66ffcc42-737a-484f-b5c3-ef806a4eeb94 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Deleting instance files /var/lib/nova/instances/a3d563c1-37ae-41be-a49b-ee6efeccfc94_del
Oct 02 12:01:07 compute-0 nova_compute[192079]: 2025-10-02 12:01:07.716 2 INFO nova.virt.libvirt.driver [None req-66ffcc42-737a-484f-b5c3-ef806a4eeb94 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Deletion of /var/lib/nova/instances/a3d563c1-37ae-41be-a49b-ee6efeccfc94_del complete
Oct 02 12:01:07 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-5571e08a9cb0a356552bd8b855cb2791f85c3949f01ea3f1ec5d54c4676f2d8c-userdata-shm.mount: Deactivated successfully.
Oct 02 12:01:07 compute-0 systemd[1]: var-lib-containers-storage-overlay-a6d0dc01f39dcf26a76b29bc901aba26a0fdec290e7b1e2bfc8e8f0b049bc946-merged.mount: Deactivated successfully.
Oct 02 12:01:07 compute-0 nova_compute[192079]: 2025-10-02 12:01:07.779 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Skipping network cache update for instance because it is being deleted. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9875
Oct 02 12:01:07 compute-0 nova_compute[192079]: 2025-10-02 12:01:07.780 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:01:07 compute-0 nova_compute[192079]: 2025-10-02 12:01:07.780 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:01:07 compute-0 nova_compute[192079]: 2025-10-02 12:01:07.781 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:01:07 compute-0 podman[220349]: 2025-10-02 12:01:07.87061972 +0000 UTC m=+0.237186808 container cleanup 5571e08a9cb0a356552bd8b855cb2791f85c3949f01ea3f1ec5d54c4676f2d8c (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true)
Oct 02 12:01:07 compute-0 systemd[1]: libpod-conmon-5571e08a9cb0a356552bd8b855cb2791f85c3949f01ea3f1ec5d54c4676f2d8c.scope: Deactivated successfully.
Oct 02 12:01:07 compute-0 podman[220377]: 2025-10-02 12:01:07.963719383 +0000 UTC m=+0.073447558 container remove 5571e08a9cb0a356552bd8b855cb2791f85c3949f01ea3f1ec5d54c4676f2d8c (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS)
Oct 02 12:01:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:07.968 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[df83d401-eec1-4ed7-8ba0-b3606819e863]: (4, ('Thu Oct  2 12:01:07 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9 (5571e08a9cb0a356552bd8b855cb2791f85c3949f01ea3f1ec5d54c4676f2d8c)\n5571e08a9cb0a356552bd8b855cb2791f85c3949f01ea3f1ec5d54c4676f2d8c\nThu Oct  2 12:01:07 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9 (5571e08a9cb0a356552bd8b855cb2791f85c3949f01ea3f1ec5d54c4676f2d8c)\n5571e08a9cb0a356552bd8b855cb2791f85c3949f01ea3f1ec5d54c4676f2d8c\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:07.970 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[126c0df9-42b5-4fba-8d38-a84cd683b262]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:07.970 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap0432e6a2-e0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:01:07 compute-0 kernel: tap0432e6a2-e0: left promiscuous mode
Oct 02 12:01:07 compute-0 nova_compute[192079]: 2025-10-02 12:01:07.972 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:07 compute-0 nova_compute[192079]: 2025-10-02 12:01:07.984 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:07.988 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[07618e3f-73c4-493c-b0f8-6068b5ce0280]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:08.025 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0d1a17cc-576e-4c2b-9481-cca8d0790c62]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:08.026 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[872340a2-7c23-4ef6-9f63-eda20eb80036]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:08.041 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[64bc84e7-9320-415e-b5cb-ee601cfa781e]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 444392, 'reachable_time': 31448, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 220392, 'error': None, 'target': 'ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:08.043 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:01:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:08.043 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[2d638b62-228b-403f-bd5a-4ffabfa5d0b5]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:08 compute-0 systemd[1]: run-netns-ovnmeta\x2d0432e6a2\x2de111\x2d484d\x2db6cf\x2dd32d9fc846c9.mount: Deactivated successfully.
Oct 02 12:01:08 compute-0 nova_compute[192079]: 2025-10-02 12:01:08.252 2 INFO nova.compute.manager [None req-66ffcc42-737a-484f-b5c3-ef806a4eeb94 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Took 0.92 seconds to destroy the instance on the hypervisor.
Oct 02 12:01:08 compute-0 nova_compute[192079]: 2025-10-02 12:01:08.253 2 DEBUG oslo.service.loopingcall [None req-66ffcc42-737a-484f-b5c3-ef806a4eeb94 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:01:08 compute-0 nova_compute[192079]: 2025-10-02 12:01:08.254 2 DEBUG nova.compute.manager [-] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:01:08 compute-0 nova_compute[192079]: 2025-10-02 12:01:08.254 2 DEBUG nova.network.neutron [-] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:01:08 compute-0 nova_compute[192079]: 2025-10-02 12:01:08.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:01:08 compute-0 nova_compute[192079]: 2025-10-02 12:01:08.664 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:01:08 compute-0 nova_compute[192079]: 2025-10-02 12:01:08.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:01:08 compute-0 nova_compute[192079]: 2025-10-02 12:01:08.870 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:08 compute-0 nova_compute[192079]: 2025-10-02 12:01:08.871 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:08 compute-0 nova_compute[192079]: 2025-10-02 12:01:08.871 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:08 compute-0 nova_compute[192079]: 2025-10-02 12:01:08.872 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:01:09 compute-0 nova_compute[192079]: 2025-10-02 12:01:09.040 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:01:09 compute-0 nova_compute[192079]: 2025-10-02 12:01:09.041 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5764MB free_disk=73.46717834472656GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:01:09 compute-0 nova_compute[192079]: 2025-10-02 12:01:09.042 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:09 compute-0 nova_compute[192079]: 2025-10-02 12:01:09.042 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:09 compute-0 nova_compute[192079]: 2025-10-02 12:01:09.099 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:09 compute-0 nova_compute[192079]: 2025-10-02 12:01:09.484 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance a3d563c1-37ae-41be-a49b-ee6efeccfc94 actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:01:09 compute-0 nova_compute[192079]: 2025-10-02 12:01:09.485 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 1 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:01:09 compute-0 nova_compute[192079]: 2025-10-02 12:01:09.486 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=640MB phys_disk=79GB used_disk=1GB total_vcpus=8 used_vcpus=1 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:01:09 compute-0 nova_compute[192079]: 2025-10-02 12:01:09.606 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:01:09 compute-0 nova_compute[192079]: 2025-10-02 12:01:09.790 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:01:10 compute-0 nova_compute[192079]: 2025-10-02 12:01:10.362 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:01:10 compute-0 nova_compute[192079]: 2025-10-02 12:01:10.363 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.321s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:11 compute-0 nova_compute[192079]: 2025-10-02 12:01:11.117 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759406456.1169274, 54199f32-2d2a-4c54-a6bd-31d2d5675a46 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:01:11 compute-0 nova_compute[192079]: 2025-10-02 12:01:11.118 2 INFO nova.compute.manager [-] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] VM Stopped (Lifecycle Event)
Oct 02 12:01:11 compute-0 nova_compute[192079]: 2025-10-02 12:01:11.296 2 DEBUG nova.compute.manager [None req-6af049e8-ea65-4cf5-9500-3c214e85e7f4 - - - - - -] [instance: 54199f32-2d2a-4c54-a6bd-31d2d5675a46] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:01:11 compute-0 nova_compute[192079]: 2025-10-02 12:01:11.375 2 DEBUG nova.compute.manager [req-54c9e5d0-5d12-46fa-8cbe-d7d9ee2ea8e6 req-3ec0202d-ab8b-4ecd-850b-4dbcfe5bb6e4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Received event network-vif-unplugged-d92bbd66-2dd9-44e3-a834-a92797ae8d1f external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:01:11 compute-0 nova_compute[192079]: 2025-10-02 12:01:11.375 2 DEBUG oslo_concurrency.lockutils [req-54c9e5d0-5d12-46fa-8cbe-d7d9ee2ea8e6 req-3ec0202d-ab8b-4ecd-850b-4dbcfe5bb6e4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "a3d563c1-37ae-41be-a49b-ee6efeccfc94-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:11 compute-0 nova_compute[192079]: 2025-10-02 12:01:11.375 2 DEBUG oslo_concurrency.lockutils [req-54c9e5d0-5d12-46fa-8cbe-d7d9ee2ea8e6 req-3ec0202d-ab8b-4ecd-850b-4dbcfe5bb6e4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a3d563c1-37ae-41be-a49b-ee6efeccfc94-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:11 compute-0 nova_compute[192079]: 2025-10-02 12:01:11.376 2 DEBUG oslo_concurrency.lockutils [req-54c9e5d0-5d12-46fa-8cbe-d7d9ee2ea8e6 req-3ec0202d-ab8b-4ecd-850b-4dbcfe5bb6e4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a3d563c1-37ae-41be-a49b-ee6efeccfc94-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:11 compute-0 nova_compute[192079]: 2025-10-02 12:01:11.376 2 DEBUG nova.compute.manager [req-54c9e5d0-5d12-46fa-8cbe-d7d9ee2ea8e6 req-3ec0202d-ab8b-4ecd-850b-4dbcfe5bb6e4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] No waiting events found dispatching network-vif-unplugged-d92bbd66-2dd9-44e3-a834-a92797ae8d1f pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:01:11 compute-0 nova_compute[192079]: 2025-10-02 12:01:11.376 2 DEBUG nova.compute.manager [req-54c9e5d0-5d12-46fa-8cbe-d7d9ee2ea8e6 req-3ec0202d-ab8b-4ecd-850b-4dbcfe5bb6e4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Received event network-vif-unplugged-d92bbd66-2dd9-44e3-a834-a92797ae8d1f for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:01:11 compute-0 nova_compute[192079]: 2025-10-02 12:01:11.420 2 DEBUG nova.network.neutron [-] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:01:11 compute-0 nova_compute[192079]: 2025-10-02 12:01:11.706 2 INFO nova.compute.manager [-] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Took 3.45 seconds to deallocate network for instance.
Oct 02 12:01:12 compute-0 nova_compute[192079]: 2025-10-02 12:01:12.364 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:01:12 compute-0 nova_compute[192079]: 2025-10-02 12:01:12.712 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:12 compute-0 nova_compute[192079]: 2025-10-02 12:01:12.989 2 DEBUG oslo_concurrency.lockutils [None req-66ffcc42-737a-484f-b5c3-ef806a4eeb94 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:12 compute-0 nova_compute[192079]: 2025-10-02 12:01:12.990 2 DEBUG oslo_concurrency.lockutils [None req-66ffcc42-737a-484f-b5c3-ef806a4eeb94 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:13 compute-0 nova_compute[192079]: 2025-10-02 12:01:13.030 2 DEBUG nova.compute.provider_tree [None req-66ffcc42-737a-484f-b5c3-ef806a4eeb94 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:01:13 compute-0 nova_compute[192079]: 2025-10-02 12:01:13.109 2 DEBUG nova.scheduler.client.report [None req-66ffcc42-737a-484f-b5c3-ef806a4eeb94 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:01:13 compute-0 nova_compute[192079]: 2025-10-02 12:01:13.332 2 DEBUG oslo_concurrency.lockutils [None req-66ffcc42-737a-484f-b5c3-ef806a4eeb94 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.343s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:13 compute-0 nova_compute[192079]: 2025-10-02 12:01:13.487 2 INFO nova.scheduler.client.report [None req-66ffcc42-737a-484f-b5c3-ef806a4eeb94 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Deleted allocations for instance a3d563c1-37ae-41be-a49b-ee6efeccfc94
Oct 02 12:01:13 compute-0 nova_compute[192079]: 2025-10-02 12:01:13.577 2 DEBUG nova.compute.manager [req-7d12e6cd-b1fe-40b2-9326-6671d0f9fedc req-4882a9ab-3ae0-4f78-92e8-0e8b40fcc892 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Received event network-vif-plugged-d92bbd66-2dd9-44e3-a834-a92797ae8d1f external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:01:13 compute-0 nova_compute[192079]: 2025-10-02 12:01:13.577 2 DEBUG oslo_concurrency.lockutils [req-7d12e6cd-b1fe-40b2-9326-6671d0f9fedc req-4882a9ab-3ae0-4f78-92e8-0e8b40fcc892 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "a3d563c1-37ae-41be-a49b-ee6efeccfc94-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:13 compute-0 nova_compute[192079]: 2025-10-02 12:01:13.578 2 DEBUG oslo_concurrency.lockutils [req-7d12e6cd-b1fe-40b2-9326-6671d0f9fedc req-4882a9ab-3ae0-4f78-92e8-0e8b40fcc892 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a3d563c1-37ae-41be-a49b-ee6efeccfc94-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:13 compute-0 nova_compute[192079]: 2025-10-02 12:01:13.578 2 DEBUG oslo_concurrency.lockutils [req-7d12e6cd-b1fe-40b2-9326-6671d0f9fedc req-4882a9ab-3ae0-4f78-92e8-0e8b40fcc892 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a3d563c1-37ae-41be-a49b-ee6efeccfc94-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:13 compute-0 nova_compute[192079]: 2025-10-02 12:01:13.578 2 DEBUG nova.compute.manager [req-7d12e6cd-b1fe-40b2-9326-6671d0f9fedc req-4882a9ab-3ae0-4f78-92e8-0e8b40fcc892 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] No waiting events found dispatching network-vif-plugged-d92bbd66-2dd9-44e3-a834-a92797ae8d1f pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:01:13 compute-0 nova_compute[192079]: 2025-10-02 12:01:13.578 2 WARNING nova.compute.manager [req-7d12e6cd-b1fe-40b2-9326-6671d0f9fedc req-4882a9ab-3ae0-4f78-92e8-0e8b40fcc892 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Received unexpected event network-vif-plugged-d92bbd66-2dd9-44e3-a834-a92797ae8d1f for instance with vm_state deleted and task_state None.
Oct 02 12:01:13 compute-0 nova_compute[192079]: 2025-10-02 12:01:13.579 2 DEBUG nova.compute.manager [req-7d12e6cd-b1fe-40b2-9326-6671d0f9fedc req-4882a9ab-3ae0-4f78-92e8-0e8b40fcc892 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Received event network-vif-deleted-d92bbd66-2dd9-44e3-a834-a92797ae8d1f external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:01:14 compute-0 nova_compute[192079]: 2025-10-02 12:01:14.143 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:14 compute-0 podman[220394]: 2025-10-02 12:01:14.195157775 +0000 UTC m=+0.113834820 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_controller, io.buildah.version=1.41.3, tcib_managed=true, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_id=ovn_controller)
Oct 02 12:01:14 compute-0 nova_compute[192079]: 2025-10-02 12:01:14.464 2 DEBUG oslo_concurrency.lockutils [None req-66ffcc42-737a-484f-b5c3-ef806a4eeb94 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "a3d563c1-37ae-41be-a49b-ee6efeccfc94" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 7.941s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:01:17.097 12 DEBUG ceilometer.polling.manager [-] Skip pollster memory.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:01:17.098 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:01:17.098 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:01:17.098 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:01:17.098 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:01:17.099 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:01:17.099 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:01:17.099 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:01:17.099 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:01:17.099 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.allocation, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:01:17.099 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.iops, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:01:17.099 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:01:17.099 12 DEBUG ceilometer.polling.manager [-] Skip pollster cpu, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:01:17.099 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:01:17.099 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:01:17.100 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.capacity, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:01:17.100 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:01:17.100 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:01:17.100 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:01:17.100 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:01:17.100 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:01:17.100 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:01:17.100 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:01:17.100 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:01:17.100 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:01:17 compute-0 nova_compute[192079]: 2025-10-02 12:01:17.715 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:19 compute-0 nova_compute[192079]: 2025-10-02 12:01:19.143 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:19 compute-0 podman[220421]: 2025-10-02 12:01:19.173552027 +0000 UTC m=+0.072372157 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 12:01:19 compute-0 podman[220420]: 2025-10-02 12:01:19.180032533 +0000 UTC m=+0.084233960 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, container_name=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator 
team, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, config_id=ovn_metadata_agent, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_managed=true)
Oct 02 12:01:21 compute-0 nova_compute[192079]: 2025-10-02 12:01:21.381 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Acquiring lock "eba3fb05-4dd5-4f34-9cb5-c932a86f4c53" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:21 compute-0 nova_compute[192079]: 2025-10-02 12:01:21.382 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "eba3fb05-4dd5-4f34-9cb5-c932a86f4c53" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:21 compute-0 nova_compute[192079]: 2025-10-02 12:01:21.836 2 DEBUG nova.compute.manager [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:01:22 compute-0 nova_compute[192079]: 2025-10-02 12:01:22.016 2 DEBUG nova.virt.libvirt.driver [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Creating tmpfile /var/lib/nova/instances/tmp0qszz7w0 to notify to other compute nodes that they should mount the same storage. _create_shared_storage_test_file /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10041
Oct 02 12:01:22 compute-0 nova_compute[192079]: 2025-10-02 12:01:22.201 2 DEBUG nova.compute.manager [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] destination check data is LibvirtLiveMigrateData(bdms=<?>,block_migration=<?>,disk_available_mb=74752,disk_over_commit=<?>,dst_numa_info=<?>,dst_supports_numa_live_migration=<?>,dst_wants_file_backed_memory=False,file_backed_memory_discard=<?>,filename='tmp0qszz7w0',graphics_listen_addr_spice=127.0.0.1,graphics_listen_addr_vnc=::,image_type='qcow2',instance_relative_path=<?>,is_shared_block_storage=<?>,is_shared_instance_path=<?>,is_volume_backed=<?>,migration=<?>,old_vol_attachment_ids=<?>,serial_listen_addr=None,serial_listen_ports=<?>,src_supports_native_luks=<?>,src_supports_numa_live_migration=<?>,supported_perf_events=<?>,target_connect_addr=<?>,vifs=[VIFMigrateData],wait_for_vif_plugged=<?>) check_can_live_migrate_destination /usr/lib/python3.9/site-packages/nova/compute/manager.py:8476
Oct 02 12:01:22 compute-0 nova_compute[192079]: 2025-10-02 12:01:22.243 2 DEBUG oslo_concurrency.lockutils [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Acquiring lock "compute-rpcapi-router" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:01:22 compute-0 nova_compute[192079]: 2025-10-02 12:01:22.244 2 DEBUG oslo_concurrency.lockutils [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Acquired lock "compute-rpcapi-router" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:01:22 compute-0 nova_compute[192079]: 2025-10-02 12:01:22.314 2 INFO nova.compute.rpcapi [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Automatically selected compute RPC version 6.2 from minimum service version 66
Oct 02 12:01:22 compute-0 nova_compute[192079]: 2025-10-02 12:01:22.315 2 DEBUG oslo_concurrency.lockutils [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Releasing lock "compute-rpcapi-router" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:01:22 compute-0 nova_compute[192079]: 2025-10-02 12:01:22.529 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:22 compute-0 nova_compute[192079]: 2025-10-02 12:01:22.529 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:22 compute-0 nova_compute[192079]: 2025-10-02 12:01:22.536 2 DEBUG nova.virt.hardware [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:01:22 compute-0 nova_compute[192079]: 2025-10-02 12:01:22.536 2 INFO nova.compute.claims [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:01:22 compute-0 nova_compute[192079]: 2025-10-02 12:01:22.597 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759406467.5962353, a3d563c1-37ae-41be-a49b-ee6efeccfc94 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:01:22 compute-0 nova_compute[192079]: 2025-10-02 12:01:22.597 2 INFO nova.compute.manager [-] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] VM Stopped (Lifecycle Event)
Oct 02 12:01:22 compute-0 nova_compute[192079]: 2025-10-02 12:01:22.670 2 DEBUG nova.compute.manager [None req-3248645c-ecce-4b4c-bb83-bbc75ad9f7b2 - - - - - -] [instance: a3d563c1-37ae-41be-a49b-ee6efeccfc94] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:01:22 compute-0 nova_compute[192079]: 2025-10-02 12:01:22.717 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:23 compute-0 nova_compute[192079]: 2025-10-02 12:01:23.057 2 DEBUG nova.compute.provider_tree [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:01:23 compute-0 nova_compute[192079]: 2025-10-02 12:01:23.130 2 DEBUG nova.scheduler.client.report [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:01:23 compute-0 nova_compute[192079]: 2025-10-02 12:01:23.267 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.737s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:23 compute-0 nova_compute[192079]: 2025-10-02 12:01:23.268 2 DEBUG nova.compute.manager [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:01:23 compute-0 nova_compute[192079]: 2025-10-02 12:01:23.803 2 DEBUG nova.compute.manager [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:01:23 compute-0 nova_compute[192079]: 2025-10-02 12:01:23.804 2 DEBUG nova.network.neutron [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:01:23 compute-0 nova_compute[192079]: 2025-10-02 12:01:23.891 2 INFO nova.virt.libvirt.driver [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:01:24 compute-0 nova_compute[192079]: 2025-10-02 12:01:24.019 2 DEBUG nova.compute.manager [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:01:24 compute-0 nova_compute[192079]: 2025-10-02 12:01:24.183 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:24 compute-0 nova_compute[192079]: 2025-10-02 12:01:24.412 2 DEBUG nova.compute.manager [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:01:24 compute-0 nova_compute[192079]: 2025-10-02 12:01:24.414 2 DEBUG nova.virt.libvirt.driver [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:01:24 compute-0 nova_compute[192079]: 2025-10-02 12:01:24.414 2 INFO nova.virt.libvirt.driver [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Creating image(s)
Oct 02 12:01:24 compute-0 nova_compute[192079]: 2025-10-02 12:01:24.414 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Acquiring lock "/var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:24 compute-0 nova_compute[192079]: 2025-10-02 12:01:24.415 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "/var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:24 compute-0 nova_compute[192079]: 2025-10-02 12:01:24.415 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "/var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:24 compute-0 nova_compute[192079]: 2025-10-02 12:01:24.432 2 DEBUG oslo_concurrency.processutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:01:24 compute-0 nova_compute[192079]: 2025-10-02 12:01:24.451 2 DEBUG nova.policy [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '67132a26bb4c454aa5ed0e4b8fee032c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '302a9c83c3eb43818ce6284e9ddb73be', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:01:24 compute-0 nova_compute[192079]: 2025-10-02 12:01:24.487 2 DEBUG oslo_concurrency.processutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:01:24 compute-0 nova_compute[192079]: 2025-10-02 12:01:24.488 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:24 compute-0 nova_compute[192079]: 2025-10-02 12:01:24.488 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:24 compute-0 nova_compute[192079]: 2025-10-02 12:01:24.500 2 DEBUG oslo_concurrency.processutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:01:24 compute-0 nova_compute[192079]: 2025-10-02 12:01:24.519 2 DEBUG nova.compute.manager [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] pre_live_migration data is LibvirtLiveMigrateData(bdms=<?>,block_migration=True,disk_available_mb=74752,disk_over_commit=<?>,dst_numa_info=<?>,dst_supports_numa_live_migration=<?>,dst_wants_file_backed_memory=False,file_backed_memory_discard=<?>,filename='tmp0qszz7w0',graphics_listen_addr_spice=127.0.0.1,graphics_listen_addr_vnc=::,image_type='qcow2',instance_relative_path='a20c354d-a1af-4fad-958f-59623ebe4437',is_shared_block_storage=False,is_shared_instance_path=False,is_volume_backed=False,migration=<?>,old_vol_attachment_ids=<?>,serial_listen_addr=None,serial_listen_ports=<?>,src_supports_native_luks=<?>,src_supports_numa_live_migration=<?>,supported_perf_events=<?>,target_connect_addr=<?>,vifs=[VIFMigrateData],wait_for_vif_plugged=<?>) pre_live_migration /usr/lib/python3.9/site-packages/nova/compute/manager.py:8604
Oct 02 12:01:24 compute-0 nova_compute[192079]: 2025-10-02 12:01:24.556 2 DEBUG oslo_concurrency.processutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:01:24 compute-0 nova_compute[192079]: 2025-10-02 12:01:24.557 2 DEBUG oslo_concurrency.processutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:01:24 compute-0 nova_compute[192079]: 2025-10-02 12:01:24.619 2 DEBUG oslo_concurrency.lockutils [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Acquiring lock "refresh_cache-a20c354d-a1af-4fad-958f-59623ebe4437" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:01:24 compute-0 nova_compute[192079]: 2025-10-02 12:01:24.620 2 DEBUG oslo_concurrency.lockutils [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Acquired lock "refresh_cache-a20c354d-a1af-4fad-958f-59623ebe4437" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:01:24 compute-0 nova_compute[192079]: 2025-10-02 12:01:24.620 2 DEBUG nova.network.neutron [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:01:24 compute-0 nova_compute[192079]: 2025-10-02 12:01:24.701 2 DEBUG oslo_concurrency.processutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk 1073741824" returned: 0 in 0.144s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:01:24 compute-0 nova_compute[192079]: 2025-10-02 12:01:24.701 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.213s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:24 compute-0 nova_compute[192079]: 2025-10-02 12:01:24.702 2 DEBUG oslo_concurrency.processutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:01:24 compute-0 nova_compute[192079]: 2025-10-02 12:01:24.751 2 DEBUG oslo_concurrency.processutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.049s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:01:24 compute-0 nova_compute[192079]: 2025-10-02 12:01:24.752 2 DEBUG nova.virt.disk.api [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Checking if we can resize image /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:01:24 compute-0 nova_compute[192079]: 2025-10-02 12:01:24.752 2 DEBUG oslo_concurrency.processutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:01:24 compute-0 nova_compute[192079]: 2025-10-02 12:01:24.816 2 DEBUG oslo_concurrency.processutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk --force-share --output=json" returned: 0 in 0.064s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:01:24 compute-0 nova_compute[192079]: 2025-10-02 12:01:24.817 2 DEBUG nova.virt.disk.api [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Cannot resize image /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:01:24 compute-0 nova_compute[192079]: 2025-10-02 12:01:24.817 2 DEBUG nova.objects.instance [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lazy-loading 'migration_context' on Instance uuid eba3fb05-4dd5-4f34-9cb5-c932a86f4c53 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:01:25 compute-0 nova_compute[192079]: 2025-10-02 12:01:25.029 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Acquiring lock "/var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:25 compute-0 nova_compute[192079]: 2025-10-02 12:01:25.029 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "/var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:25 compute-0 nova_compute[192079]: 2025-10-02 12:01:25.030 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "/var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:25 compute-0 nova_compute[192079]: 2025-10-02 12:01:25.030 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Acquiring lock "ephemeral_1_0706d66" by "nova.virt.libvirt.imagebackend.Image.cache.<locals>.fetch_func_sync" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:25 compute-0 nova_compute[192079]: 2025-10-02 12:01:25.031 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "ephemeral_1_0706d66" acquired by "nova.virt.libvirt.imagebackend.Image.cache.<locals>.fetch_func_sync" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:25 compute-0 nova_compute[192079]: 2025-10-02 12:01:25.031 2 DEBUG oslo_concurrency.processutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f raw /var/lib/nova/instances/_base/ephemeral_1_0706d66 1G execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:01:25 compute-0 nova_compute[192079]: 2025-10-02 12:01:25.067 2 DEBUG oslo_concurrency.processutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f raw /var/lib/nova/instances/_base/ephemeral_1_0706d66 1G" returned: 0 in 0.036s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:01:25 compute-0 nova_compute[192079]: 2025-10-02 12:01:25.068 2 DEBUG oslo_concurrency.processutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Running cmd (subprocess): mkfs -t vfat -n ephemeral0 /var/lib/nova/instances/_base/ephemeral_1_0706d66 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:01:25 compute-0 nova_compute[192079]: 2025-10-02 12:01:25.133 2 DEBUG oslo_concurrency.processutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] CMD "mkfs -t vfat -n ephemeral0 /var/lib/nova/instances/_base/ephemeral_1_0706d66" returned: 0 in 0.065s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:01:25 compute-0 nova_compute[192079]: 2025-10-02 12:01:25.134 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "ephemeral_1_0706d66" "released" by "nova.virt.libvirt.imagebackend.Image.cache.<locals>.fetch_func_sync" :: held 0.104s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:25 compute-0 podman[220480]: 2025-10-02 12:01:25.138965454 +0000 UTC m=+0.055610310 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, 
managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true)
Oct 02 12:01:25 compute-0 nova_compute[192079]: 2025-10-02 12:01:25.148 2 DEBUG oslo_concurrency.processutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/ephemeral_1_0706d66 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:01:25 compute-0 nova_compute[192079]: 2025-10-02 12:01:25.196 2 DEBUG oslo_concurrency.processutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/ephemeral_1_0706d66 --force-share --output=json" returned: 0 in 0.049s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:01:25 compute-0 nova_compute[192079]: 2025-10-02 12:01:25.197 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Acquiring lock "ephemeral_1_0706d66" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:25 compute-0 nova_compute[192079]: 2025-10-02 12:01:25.198 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "ephemeral_1_0706d66" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:25 compute-0 nova_compute[192079]: 2025-10-02 12:01:25.208 2 DEBUG oslo_concurrency.processutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/ephemeral_1_0706d66 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:01:25 compute-0 nova_compute[192079]: 2025-10-02 12:01:25.258 2 DEBUG oslo_concurrency.processutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/ephemeral_1_0706d66 --force-share --output=json" returned: 0 in 0.050s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:01:25 compute-0 nova_compute[192079]: 2025-10-02 12:01:25.259 2 DEBUG oslo_concurrency.processutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/ephemeral_1_0706d66,backing_fmt=raw /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk.eph0 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:01:25 compute-0 nova_compute[192079]: 2025-10-02 12:01:25.313 2 DEBUG oslo_concurrency.processutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/ephemeral_1_0706d66,backing_fmt=raw /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk.eph0 1073741824" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:01:25 compute-0 nova_compute[192079]: 2025-10-02 12:01:25.315 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "ephemeral_1_0706d66" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.117s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:25 compute-0 nova_compute[192079]: 2025-10-02 12:01:25.316 2 DEBUG oslo_concurrency.processutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/ephemeral_1_0706d66 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:01:25 compute-0 nova_compute[192079]: 2025-10-02 12:01:25.395 2 DEBUG oslo_concurrency.processutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/ephemeral_1_0706d66 --force-share --output=json" returned: 0 in 0.079s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:01:25 compute-0 nova_compute[192079]: 2025-10-02 12:01:25.397 2 DEBUG nova.virt.libvirt.driver [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:01:25 compute-0 nova_compute[192079]: 2025-10-02 12:01:25.397 2 DEBUG nova.virt.libvirt.driver [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Ensure instance console log exists: /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:01:25 compute-0 nova_compute[192079]: 2025-10-02 12:01:25.398 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:25 compute-0 nova_compute[192079]: 2025-10-02 12:01:25.399 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:25 compute-0 nova_compute[192079]: 2025-10-02 12:01:25.399 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:26 compute-0 nova_compute[192079]: 2025-10-02 12:01:26.389 2 DEBUG nova.network.neutron [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Successfully created port: dc3331f3-a49a-4c18-98e4-476f3e2c97d4 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:01:26 compute-0 nova_compute[192079]: 2025-10-02 12:01:26.601 2 DEBUG nova.network.neutron [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Updating instance_info_cache with network_info: [{"id": "5562a861-2a3e-4411-8aaa-be6dde7a658a", "address": "fa:16:3e:09:db:7c", "network": {"id": "020b4768-a07a-4769-8636-455566c87083", "bridge": "br-int", "label": "tempest-LiveAutoBlockMigrationV225Test-804372870-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "5cc73d75e0864e838eefa90cb33b7e01", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5562a861-2a", "ovs_interfaceid": "5562a861-2a3e-4411-8aaa-be6dde7a658a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:01:26 compute-0 nova_compute[192079]: 2025-10-02 12:01:26.657 2 DEBUG oslo_concurrency.lockutils [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Releasing lock "refresh_cache-a20c354d-a1af-4fad-958f-59623ebe4437" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:01:26 compute-0 nova_compute[192079]: 2025-10-02 12:01:26.702 2 DEBUG nova.virt.libvirt.driver [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] migrate_data in pre_live_migration: LibvirtLiveMigrateData(bdms=<?>,block_migration=True,disk_available_mb=74752,disk_over_commit=<?>,dst_numa_info=<?>,dst_supports_numa_live_migration=<?>,dst_wants_file_backed_memory=False,file_backed_memory_discard=<?>,filename='tmp0qszz7w0',graphics_listen_addr_spice=127.0.0.1,graphics_listen_addr_vnc=::,image_type='qcow2',instance_relative_path='a20c354d-a1af-4fad-958f-59623ebe4437',is_shared_block_storage=False,is_shared_instance_path=False,is_volume_backed=False,migration=<?>,old_vol_attachment_ids={},serial_listen_addr=None,serial_listen_ports=<?>,src_supports_native_luks=<?>,src_supports_numa_live_migration=<?>,supported_perf_events=<?>,target_connect_addr=<?>,vifs=[VIFMigrateData],wait_for_vif_plugged=<?>) pre_live_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10827
Oct 02 12:01:26 compute-0 nova_compute[192079]: 2025-10-02 12:01:26.704 2 DEBUG nova.virt.libvirt.driver [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Creating instance directory: /var/lib/nova/instances/a20c354d-a1af-4fad-958f-59623ebe4437 pre_live_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10840
Oct 02 12:01:26 compute-0 nova_compute[192079]: 2025-10-02 12:01:26.704 2 DEBUG nova.virt.libvirt.driver [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Creating disk.info with the contents: {'/var/lib/nova/instances/a20c354d-a1af-4fad-958f-59623ebe4437/disk': 'qcow2', '/var/lib/nova/instances/a20c354d-a1af-4fad-958f-59623ebe4437/disk.config': 'raw'} pre_live_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10854
Oct 02 12:01:26 compute-0 nova_compute[192079]: 2025-10-02 12:01:26.705 2 DEBUG nova.virt.libvirt.driver [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Checking to make sure images and backing files are present before live migration. pre_live_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10864
Oct 02 12:01:26 compute-0 nova_compute[192079]: 2025-10-02 12:01:26.706 2 DEBUG nova.objects.instance [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Lazy-loading 'trusted_certs' on Instance uuid a20c354d-a1af-4fad-958f-59623ebe4437 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:01:26 compute-0 nova_compute[192079]: 2025-10-02 12:01:26.783 2 DEBUG oslo_concurrency.processutils [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:01:26 compute-0 nova_compute[192079]: 2025-10-02 12:01:26.855 2 DEBUG oslo_concurrency.processutils [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.072s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:01:26 compute-0 nova_compute[192079]: 2025-10-02 12:01:26.856 2 DEBUG oslo_concurrency.lockutils [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:26 compute-0 nova_compute[192079]: 2025-10-02 12:01:26.857 2 DEBUG oslo_concurrency.lockutils [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:26 compute-0 nova_compute[192079]: 2025-10-02 12:01:26.874 2 DEBUG oslo_concurrency.processutils [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:01:26 compute-0 nova_compute[192079]: 2025-10-02 12:01:26.940 2 DEBUG oslo_concurrency.processutils [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.066s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:01:26 compute-0 nova_compute[192079]: 2025-10-02 12:01:26.941 2 DEBUG oslo_concurrency.processutils [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/a20c354d-a1af-4fad-958f-59623ebe4437/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:01:26 compute-0 nova_compute[192079]: 2025-10-02 12:01:26.973 2 DEBUG oslo_concurrency.processutils [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/a20c354d-a1af-4fad-958f-59623ebe4437/disk 1073741824" returned: 0 in 0.031s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:01:26 compute-0 nova_compute[192079]: 2025-10-02 12:01:26.974 2 DEBUG oslo_concurrency.lockutils [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.117s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:26 compute-0 nova_compute[192079]: 2025-10-02 12:01:26.974 2 DEBUG oslo_concurrency.processutils [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.037 2 DEBUG oslo_concurrency.processutils [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.063s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.039 2 DEBUG nova.virt.disk.api [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Checking if we can resize image /var/lib/nova/instances/a20c354d-a1af-4fad-958f-59623ebe4437/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.039 2 DEBUG oslo_concurrency.processutils [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a20c354d-a1af-4fad-958f-59623ebe4437/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.094 2 DEBUG oslo_concurrency.processutils [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a20c354d-a1af-4fad-958f-59623ebe4437/disk --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.095 2 DEBUG nova.virt.disk.api [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Cannot resize image /var/lib/nova/instances/a20c354d-a1af-4fad-958f-59623ebe4437/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.095 2 DEBUG nova.objects.instance [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Lazy-loading 'migration_context' on Instance uuid a20c354d-a1af-4fad-958f-59623ebe4437 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.137 2 DEBUG oslo_concurrency.processutils [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f raw /var/lib/nova/instances/a20c354d-a1af-4fad-958f-59623ebe4437/disk.config 485376 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.165 2 DEBUG oslo_concurrency.processutils [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f raw /var/lib/nova/instances/a20c354d-a1af-4fad-958f-59623ebe4437/disk.config 485376" returned: 0 in 0.028s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.166 2 DEBUG nova.virt.libvirt.volume.remotefs [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Copying file compute-1.ctlplane.example.com:/var/lib/nova/instances/a20c354d-a1af-4fad-958f-59623ebe4437/disk.config to /var/lib/nova/instances/a20c354d-a1af-4fad-958f-59623ebe4437 copy_file /usr/lib/python3.9/site-packages/nova/virt/libvirt/volume/remotefs.py:103
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.167 2 DEBUG oslo_concurrency.processutils [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Running cmd (subprocess): scp -C -r compute-1.ctlplane.example.com:/var/lib/nova/instances/a20c354d-a1af-4fad-958f-59623ebe4437/disk.config /var/lib/nova/instances/a20c354d-a1af-4fad-958f-59623ebe4437 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.299 2 DEBUG nova.network.neutron [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Successfully updated port: dc3331f3-a49a-4c18-98e4-476f3e2c97d4 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.353 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Acquiring lock "refresh_cache-eba3fb05-4dd5-4f34-9cb5-c932a86f4c53" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.354 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Acquired lock "refresh_cache-eba3fb05-4dd5-4f34-9cb5-c932a86f4c53" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.354 2 DEBUG nova.network.neutron [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.485 2 DEBUG nova.compute.manager [req-23a10cea-4164-41f0-87f0-aecc9499612d req-afaeb675-fdcb-498e-a44b-b3ec0429e7d5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Received event network-changed-dc3331f3-a49a-4c18-98e4-476f3e2c97d4 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.486 2 DEBUG nova.compute.manager [req-23a10cea-4164-41f0-87f0-aecc9499612d req-afaeb675-fdcb-498e-a44b-b3ec0429e7d5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Refreshing instance network info cache due to event network-changed-dc3331f3-a49a-4c18-98e4-476f3e2c97d4. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.486 2 DEBUG oslo_concurrency.lockutils [req-23a10cea-4164-41f0-87f0-aecc9499612d req-afaeb675-fdcb-498e-a44b-b3ec0429e7d5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-eba3fb05-4dd5-4f34-9cb5-c932a86f4c53" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.598 2 DEBUG nova.network.neutron [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.622 2 DEBUG oslo_concurrency.processutils [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] CMD "scp -C -r compute-1.ctlplane.example.com:/var/lib/nova/instances/a20c354d-a1af-4fad-958f-59623ebe4437/disk.config /var/lib/nova/instances/a20c354d-a1af-4fad-958f-59623ebe4437" returned: 0 in 0.455s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.622 2 DEBUG nova.virt.libvirt.driver [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Plugging VIFs using destination host port bindings before live migration. _pre_live_migration_plug_vifs /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10794
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.624 2 DEBUG nova.virt.libvirt.vif [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:00:57Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description=None,display_name='tempest-LiveAutoBlockMigrationV225Test-server-1982637812',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-1.ctlplane.example.com',hostname='tempest-liveautoblockmigrationv225test-server-1982637812',id=7,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:01:06Z,launched_on='compute-1.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-1.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='5cc73d75e0864e838eefa90cb33b7e01',ramdisk_id='',reservation_id='r-bvhrjcj5',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_
min_ram='0',owner_project_name='tempest-LiveAutoBlockMigrationV225Test-984573444',owner_user_name='tempest-LiveAutoBlockMigrationV225Test-984573444-project-member'},tags=<?>,task_state='migrating',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:01:06Z,user_data=None,user_id='59e8135d73ee43e088ba5ee7d9bd84b1',uuid=a20c354d-a1af-4fad-958f-59623ebe4437,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "5562a861-2a3e-4411-8aaa-be6dde7a658a", "address": "fa:16:3e:09:db:7c", "network": {"id": "020b4768-a07a-4769-8636-455566c87083", "bridge": "br-int", "label": "tempest-LiveAutoBlockMigrationV225Test-804372870-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "5cc73d75e0864e838eefa90cb33b7e01", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system"}, "devname": "tap5562a861-2a", "ovs_interfaceid": "5562a861-2a3e-4411-8aaa-be6dde7a658a", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {"os_vif_delegation": true}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.624 2 DEBUG nova.network.os_vif_util [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Converting VIF {"id": "5562a861-2a3e-4411-8aaa-be6dde7a658a", "address": "fa:16:3e:09:db:7c", "network": {"id": "020b4768-a07a-4769-8636-455566c87083", "bridge": "br-int", "label": "tempest-LiveAutoBlockMigrationV225Test-804372870-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "5cc73d75e0864e838eefa90cb33b7e01", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system"}, "devname": "tap5562a861-2a", "ovs_interfaceid": "5562a861-2a3e-4411-8aaa-be6dde7a658a", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {"os_vif_delegation": true}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.625 2 DEBUG nova.network.os_vif_util [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:09:db:7c,bridge_name='br-int',has_traffic_filtering=True,id=5562a861-2a3e-4411-8aaa-be6dde7a658a,network=Network(020b4768-a07a-4769-8636-455566c87083),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5562a861-2a') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.625 2 DEBUG os_vif [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:09:db:7c,bridge_name='br-int',has_traffic_filtering=True,id=5562a861-2a3e-4411-8aaa-be6dde7a658a,network=Network(020b4768-a07a-4769-8636-455566c87083),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5562a861-2a') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.626 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.627 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.627 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.631 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.631 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap5562a861-2a, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.631 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap5562a861-2a, col_values=(('external_ids', {'iface-id': '5562a861-2a3e-4411-8aaa-be6dde7a658a', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:09:db:7c', 'vm-uuid': 'a20c354d-a1af-4fad-958f-59623ebe4437'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.633 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:27 compute-0 NetworkManager[51160]: <info>  [1759406487.6347] manager: (tap5562a861-2a): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/35)
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.636 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.638 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.639 2 INFO os_vif [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:09:db:7c,bridge_name='br-int',has_traffic_filtering=True,id=5562a861-2a3e-4411-8aaa-be6dde7a658a,network=Network(020b4768-a07a-4769-8636-455566c87083),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5562a861-2a')
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.639 2 DEBUG nova.virt.libvirt.driver [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] No dst_numa_info in migrate_data, no cores to power up in pre_live_migration. pre_live_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10954
Oct 02 12:01:27 compute-0 nova_compute[192079]: 2025-10-02 12:01:27.639 2 DEBUG nova.compute.manager [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] driver pre_live_migration data is LibvirtLiveMigrateData(bdms=[],block_migration=True,disk_available_mb=74752,disk_over_commit=<?>,dst_numa_info=<?>,dst_supports_numa_live_migration=<?>,dst_wants_file_backed_memory=False,file_backed_memory_discard=<?>,filename='tmp0qszz7w0',graphics_listen_addr_spice=127.0.0.1,graphics_listen_addr_vnc=::,image_type='qcow2',instance_relative_path='a20c354d-a1af-4fad-958f-59623ebe4437',is_shared_block_storage=False,is_shared_instance_path=False,is_volume_backed=False,migration=<?>,old_vol_attachment_ids={},serial_listen_addr=None,serial_listen_ports=[],src_supports_native_luks=<?>,src_supports_numa_live_migration=<?>,supported_perf_events=[],target_connect_addr=None,vifs=[VIFMigrateData],wait_for_vif_plugged=<?>) pre_live_migration /usr/lib/python3.9/site-packages/nova/compute/manager.py:8668
Oct 02 12:01:28 compute-0 nova_compute[192079]: 2025-10-02 12:01:28.843 2 DEBUG nova.network.neutron [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Updating instance_info_cache with network_info: [{"id": "dc3331f3-a49a-4c18-98e4-476f3e2c97d4", "address": "fa:16:3e:2e:71:61", "network": {"id": "0432e6a2-e111-484d-b6cf-d32d9fc846c9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1078640656-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "302a9c83c3eb43818ce6284e9ddb73be", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapdc3331f3-a4", "ovs_interfaceid": "dc3331f3-a49a-4c18-98e4-476f3e2c97d4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:01:28 compute-0 nova_compute[192079]: 2025-10-02 12:01:28.929 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Releasing lock "refresh_cache-eba3fb05-4dd5-4f34-9cb5-c932a86f4c53" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:01:28 compute-0 nova_compute[192079]: 2025-10-02 12:01:28.929 2 DEBUG nova.compute.manager [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Instance network_info: |[{"id": "dc3331f3-a49a-4c18-98e4-476f3e2c97d4", "address": "fa:16:3e:2e:71:61", "network": {"id": "0432e6a2-e111-484d-b6cf-d32d9fc846c9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1078640656-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "302a9c83c3eb43818ce6284e9ddb73be", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapdc3331f3-a4", "ovs_interfaceid": "dc3331f3-a49a-4c18-98e4-476f3e2c97d4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:01:28 compute-0 nova_compute[192079]: 2025-10-02 12:01:28.930 2 DEBUG oslo_concurrency.lockutils [req-23a10cea-4164-41f0-87f0-aecc9499612d req-afaeb675-fdcb-498e-a44b-b3ec0429e7d5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-eba3fb05-4dd5-4f34-9cb5-c932a86f4c53" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:01:28 compute-0 nova_compute[192079]: 2025-10-02 12:01:28.930 2 DEBUG nova.network.neutron [req-23a10cea-4164-41f0-87f0-aecc9499612d req-afaeb675-fdcb-498e-a44b-b3ec0429e7d5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Refreshing network info cache for port dc3331f3-a49a-4c18-98e4-476f3e2c97d4 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:01:28 compute-0 nova_compute[192079]: 2025-10-02 12:01:28.933 2 DEBUG nova.virt.libvirt.driver [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Start _get_guest_xml network_info=[{"id": "dc3331f3-a49a-4c18-98e4-476f3e2c97d4", "address": "fa:16:3e:2e:71:61", "network": {"id": "0432e6a2-e111-484d-b6cf-d32d9fc846c9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1078640656-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "302a9c83c3eb43818ce6284e9ddb73be", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapdc3331f3-a4", "ovs_interfaceid": "dc3331f3-a49a-4c18-98e4-476f3e2c97d4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.eph0': {'bus': 'virtio', 'dev': 'vdb', 'type': 'disk'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [{'guest_format': None, 'size': 1, 'encrypted': False, 'device_name': '/dev/vdb', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'encryption_secret_uuid': None}], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:01:28 compute-0 nova_compute[192079]: 2025-10-02 12:01:28.939 2 WARNING nova.virt.libvirt.driver [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:01:28 compute-0 nova_compute[192079]: 2025-10-02 12:01:28.948 2 DEBUG nova.virt.libvirt.host [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:01:28 compute-0 nova_compute[192079]: 2025-10-02 12:01:28.948 2 DEBUG nova.virt.libvirt.host [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:01:28 compute-0 nova_compute[192079]: 2025-10-02 12:01:28.953 2 DEBUG nova.virt.libvirt.host [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:01:28 compute-0 nova_compute[192079]: 2025-10-02 12:01:28.953 2 DEBUG nova.virt.libvirt.host [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:01:28 compute-0 nova_compute[192079]: 2025-10-02 12:01:28.955 2 DEBUG nova.virt.libvirt.driver [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:01:28 compute-0 nova_compute[192079]: 2025-10-02 12:01:28.955 2 DEBUG nova.virt.hardware [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T12:00:18Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='990647346',id=6,is_public=True,memory_mb=128,name='tempest-flavor_with_ephemeral_1-66771630',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:01:28 compute-0 nova_compute[192079]: 2025-10-02 12:01:28.955 2 DEBUG nova.virt.hardware [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:01:28 compute-0 nova_compute[192079]: 2025-10-02 12:01:28.956 2 DEBUG nova.virt.hardware [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:01:28 compute-0 nova_compute[192079]: 2025-10-02 12:01:28.956 2 DEBUG nova.virt.hardware [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:01:28 compute-0 nova_compute[192079]: 2025-10-02 12:01:28.956 2 DEBUG nova.virt.hardware [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:01:28 compute-0 nova_compute[192079]: 2025-10-02 12:01:28.956 2 DEBUG nova.virt.hardware [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:01:28 compute-0 nova_compute[192079]: 2025-10-02 12:01:28.956 2 DEBUG nova.virt.hardware [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:01:28 compute-0 nova_compute[192079]: 2025-10-02 12:01:28.957 2 DEBUG nova.virt.hardware [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:01:28 compute-0 nova_compute[192079]: 2025-10-02 12:01:28.957 2 DEBUG nova.virt.hardware [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:01:28 compute-0 nova_compute[192079]: 2025-10-02 12:01:28.957 2 DEBUG nova.virt.hardware [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:01:28 compute-0 nova_compute[192079]: 2025-10-02 12:01:28.957 2 DEBUG nova.virt.hardware [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:01:28 compute-0 nova_compute[192079]: 2025-10-02 12:01:28.961 2 DEBUG nova.virt.libvirt.vif [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:01:11Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ServersWithSpecificFlavorTestJSON-server-2025065881',display_name='tempest-ServersWithSpecificFlavorTestJSON-server-2025065881',ec2_ids=EC2Ids,ephemeral_gb=1,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(6),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverswithspecificflavortestjson-server-2025065881',id=8,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=6,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBLQAR6rn15gxsCt5BVT9ZeXnbqUta2pJ91YMBkT9rHUc9ZBtTK728XqHiZfyDrBlMAbpgHvu/gvYEjRf3OvnLlEsO2AY8MfRajDqsbCXPjRzSoO5eacsxtVMw0D5LoybNA==',key_name='tempest-keypair-283845564',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='302a9c83c3eb43818ce6284e9ddb73be',ramdisk_id='',reservation_id='r-p5h5ztvr',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServersWithSpecificFlavorTestJSON-1100192498',owner_user_name='tempest-ServersWithSpecificFlavorTestJSON-1100192498-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:01:24Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='67132a26bb4c454aa5ed0e4b8fee032c',uuid=eba3fb05-4dd5-4f34-9cb5-c932a86f4c53,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "dc3331f3-a49a-4c18-98e4-476f3e2c97d4", "address": "fa:16:3e:2e:71:61", "network": {"id": "0432e6a2-e111-484d-b6cf-d32d9fc846c9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1078640656-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "302a9c83c3eb43818ce6284e9ddb73be", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapdc3331f3-a4", "ovs_interfaceid": "dc3331f3-a49a-4c18-98e4-476f3e2c97d4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:01:28 compute-0 nova_compute[192079]: 2025-10-02 12:01:28.961 2 DEBUG nova.network.os_vif_util [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Converting VIF {"id": "dc3331f3-a49a-4c18-98e4-476f3e2c97d4", "address": "fa:16:3e:2e:71:61", "network": {"id": "0432e6a2-e111-484d-b6cf-d32d9fc846c9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1078640656-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "302a9c83c3eb43818ce6284e9ddb73be", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapdc3331f3-a4", "ovs_interfaceid": "dc3331f3-a49a-4c18-98e4-476f3e2c97d4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:01:28 compute-0 nova_compute[192079]: 2025-10-02 12:01:28.962 2 DEBUG nova.network.os_vif_util [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:2e:71:61,bridge_name='br-int',has_traffic_filtering=True,id=dc3331f3-a49a-4c18-98e4-476f3e2c97d4,network=Network(0432e6a2-e111-484d-b6cf-d32d9fc846c9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapdc3331f3-a4') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:01:28 compute-0 nova_compute[192079]: 2025-10-02 12:01:28.963 2 DEBUG nova.objects.instance [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lazy-loading 'pci_devices' on Instance uuid eba3fb05-4dd5-4f34-9cb5-c932a86f4c53 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.022 2 DEBUG nova.network.neutron [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Port 5562a861-2a3e-4411-8aaa-be6dde7a658a updated with migration profile {'migrating_to': 'compute-0.ctlplane.example.com'} successfully _setup_migration_port_profile /usr/lib/python3.9/site-packages/nova/network/neutron.py:354
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.079 2 DEBUG nova.compute.manager [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] pre_live_migration result data is LibvirtLiveMigrateData(bdms=[],block_migration=True,disk_available_mb=74752,disk_over_commit=<?>,dst_numa_info=<?>,dst_supports_numa_live_migration=<?>,dst_wants_file_backed_memory=False,file_backed_memory_discard=<?>,filename='tmp0qszz7w0',graphics_listen_addr_spice=127.0.0.1,graphics_listen_addr_vnc=::,image_type='qcow2',instance_relative_path='a20c354d-a1af-4fad-958f-59623ebe4437',is_shared_block_storage=False,is_shared_instance_path=False,is_volume_backed=False,migration=<?>,old_vol_attachment_ids={},serial_listen_addr=None,serial_listen_ports=[],src_supports_native_luks=<?>,src_supports_numa_live_migration=<?>,supported_perf_events=[],target_connect_addr=None,vifs=[VIFMigrateData],wait_for_vif_plugged=True) pre_live_migration /usr/lib/python3.9/site-packages/nova/compute/manager.py:8723
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.119 2 DEBUG nova.virt.libvirt.driver [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:01:29 compute-0 nova_compute[192079]:   <uuid>eba3fb05-4dd5-4f34-9cb5-c932a86f4c53</uuid>
Oct 02 12:01:29 compute-0 nova_compute[192079]:   <name>instance-00000008</name>
Oct 02 12:01:29 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:01:29 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:01:29 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <nova:name>tempest-ServersWithSpecificFlavorTestJSON-server-2025065881</nova:name>
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:01:28</nova:creationTime>
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <nova:flavor name="tempest-flavor_with_ephemeral_1-66771630">
Oct 02 12:01:29 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:01:29 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:01:29 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:01:29 compute-0 nova_compute[192079]:         <nova:ephemeral>1</nova:ephemeral>
Oct 02 12:01:29 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:01:29 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:01:29 compute-0 nova_compute[192079]:         <nova:user uuid="67132a26bb4c454aa5ed0e4b8fee032c">tempest-ServersWithSpecificFlavorTestJSON-1100192498-project-member</nova:user>
Oct 02 12:01:29 compute-0 nova_compute[192079]:         <nova:project uuid="302a9c83c3eb43818ce6284e9ddb73be">tempest-ServersWithSpecificFlavorTestJSON-1100192498</nova:project>
Oct 02 12:01:29 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:01:29 compute-0 nova_compute[192079]:         <nova:port uuid="dc3331f3-a49a-4c18-98e4-476f3e2c97d4">
Oct 02 12:01:29 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.14" ipVersion="4"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:01:29 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:01:29 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:01:29 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <system>
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <entry name="serial">eba3fb05-4dd5-4f34-9cb5-c932a86f4c53</entry>
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <entry name="uuid">eba3fb05-4dd5-4f34-9cb5-c932a86f4c53</entry>
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     </system>
Oct 02 12:01:29 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:01:29 compute-0 nova_compute[192079]:   <os>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:   </os>
Oct 02 12:01:29 compute-0 nova_compute[192079]:   <features>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:   </features>
Oct 02 12:01:29 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:01:29 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:01:29 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk.eph0"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <target dev="vdb" bus="virtio"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk.config"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:2e:71:61"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <target dev="tapdc3331f3-a4"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/console.log" append="off"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <video>
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     </video>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:01:29 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:01:29 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:01:29 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:01:29 compute-0 nova_compute[192079]: </domain>
Oct 02 12:01:29 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.121 2 DEBUG nova.compute.manager [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Preparing to wait for external event network-vif-plugged-dc3331f3-a49a-4c18-98e4-476f3e2c97d4 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.122 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Acquiring lock "eba3fb05-4dd5-4f34-9cb5-c932a86f4c53-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.122 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "eba3fb05-4dd5-4f34-9cb5-c932a86f4c53-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.123 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "eba3fb05-4dd5-4f34-9cb5-c932a86f4c53-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.124 2 DEBUG nova.virt.libvirt.vif [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:01:11Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ServersWithSpecificFlavorTestJSON-server-2025065881',display_name='tempest-ServersWithSpecificFlavorTestJSON-server-2025065881',ec2_ids=EC2Ids,ephemeral_gb=1,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(6),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverswithspecificflavortestjson-server-2025065881',id=8,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=6,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBLQAR6rn15gxsCt5BVT9ZeXnbqUta2pJ91YMBkT9rHUc9ZBtTK728XqHiZfyDrBlMAbpgHvu/gvYEjRf3OvnLlEsO2AY8MfRajDqsbCXPjRzSoO5eacsxtVMw0D5LoybNA==',key_name='tempest-keypair-283845564',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='302a9c83c3eb43818ce6284e9ddb73be',ramdisk_id='',reservation_id='r-p5h5ztvr',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServersWithSpecificFlavorTestJSON-1100192498',owner_user_name='tempest-ServersWithSpecificFlavorTestJSON-1100192498-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:01:24Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='67132a26bb4c454aa5ed0e4b8fee032c',uuid=eba3fb05-4dd5-4f34-9cb5-c932a86f4c53,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "dc3331f3-a49a-4c18-98e4-476f3e2c97d4", "address": "fa:16:3e:2e:71:61", "network": {"id": "0432e6a2-e111-484d-b6cf-d32d9fc846c9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1078640656-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, 
"ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "302a9c83c3eb43818ce6284e9ddb73be", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapdc3331f3-a4", "ovs_interfaceid": "dc3331f3-a49a-4c18-98e4-476f3e2c97d4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.125 2 DEBUG nova.network.os_vif_util [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Converting VIF {"id": "dc3331f3-a49a-4c18-98e4-476f3e2c97d4", "address": "fa:16:3e:2e:71:61", "network": {"id": "0432e6a2-e111-484d-b6cf-d32d9fc846c9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1078640656-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "302a9c83c3eb43818ce6284e9ddb73be", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapdc3331f3-a4", "ovs_interfaceid": "dc3331f3-a49a-4c18-98e4-476f3e2c97d4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.126 2 DEBUG nova.network.os_vif_util [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:2e:71:61,bridge_name='br-int',has_traffic_filtering=True,id=dc3331f3-a49a-4c18-98e4-476f3e2c97d4,network=Network(0432e6a2-e111-484d-b6cf-d32d9fc846c9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapdc3331f3-a4') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.127 2 DEBUG os_vif [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:2e:71:61,bridge_name='br-int',has_traffic_filtering=True,id=dc3331f3-a49a-4c18-98e4-476f3e2c97d4,network=Network(0432e6a2-e111-484d-b6cf-d32d9fc846c9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapdc3331f3-a4') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.128 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.128 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.129 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.133 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.133 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapdc3331f3-a4, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.134 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapdc3331f3-a4, col_values=(('external_ids', {'iface-id': 'dc3331f3-a49a-4c18-98e4-476f3e2c97d4', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:2e:71:61', 'vm-uuid': 'eba3fb05-4dd5-4f34-9cb5-c932a86f4c53'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:01:29 compute-0 NetworkManager[51160]: <info>  [1759406489.1771] manager: (tapdc3331f3-a4): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/36)
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.176 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.179 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.185 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.186 2 INFO os_vif [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:2e:71:61,bridge_name='br-int',has_traffic_filtering=True,id=dc3331f3-a49a-4c18-98e4-476f3e2c97d4,network=Network(0432e6a2-e111-484d-b6cf-d32d9fc846c9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapdc3331f3-a4')
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.188 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.290 2 DEBUG nova.virt.libvirt.driver [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.291 2 DEBUG nova.virt.libvirt.driver [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] No BDM found with device name vdb, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.291 2 DEBUG nova.virt.libvirt.driver [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.292 2 DEBUG nova.virt.libvirt.driver [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] No VIF found with MAC fa:16:3e:2e:71:61, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.292 2 INFO nova.virt.libvirt.driver [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Using config drive
Oct 02 12:01:29 compute-0 systemd[1]: Starting libvirt proxy daemon...
Oct 02 12:01:29 compute-0 systemd[1]: Started libvirt proxy daemon.
Oct 02 12:01:29 compute-0 podman[220540]: 2025-10-02 12:01:29.482090254 +0000 UTC m=+0.076233132 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, build-date=2025-08-20T13:12:41, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., vendor=Red Hat, Inc., io.buildah.version=1.33.7, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., distribution-scope=public, version=9.6, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, name=ubi9-minimal, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, vcs-type=git, config_id=edpm, 
container_name=openstack_network_exporter, release=1755695350, architecture=x86_64, com.redhat.component=ubi9-minimal-container, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.expose-services=, managed_by=edpm_ansible, maintainer=Red Hat, Inc., url=https://catalog.redhat.com/en/search?searchType=containers, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.openshift.tags=minimal rhel9)
Oct 02 12:01:29 compute-0 podman[220541]: 2025-10-02 12:01:29.500066315 +0000 UTC m=+0.080479148 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible)
Oct 02 12:01:29 compute-0 kernel: tap5562a861-2a: entered promiscuous mode
Oct 02 12:01:29 compute-0 NetworkManager[51160]: <info>  [1759406489.5790] manager: (tap5562a861-2a): new Tun device (/org/freedesktop/NetworkManager/Devices/37)
Oct 02 12:01:29 compute-0 ovn_controller[94336]: 2025-10-02T12:01:29Z|00048|binding|INFO|Claiming lport 5562a861-2a3e-4411-8aaa-be6dde7a658a for this additional chassis.
Oct 02 12:01:29 compute-0 ovn_controller[94336]: 2025-10-02T12:01:29Z|00049|binding|INFO|5562a861-2a3e-4411-8aaa-be6dde7a658a: Claiming fa:16:3e:09:db:7c 10.100.0.13
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.588 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:29 compute-0 ovn_controller[94336]: 2025-10-02T12:01:29Z|00050|binding|INFO|Setting lport 5562a861-2a3e-4411-8aaa-be6dde7a658a ovn-installed in OVS
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.612 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:29 compute-0 nova_compute[192079]: 2025-10-02 12:01:29.617 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:29 compute-0 systemd-udevd[220611]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:01:29 compute-0 systemd-machined[152150]: New machine qemu-3-instance-00000007.
Oct 02 12:01:29 compute-0 systemd[1]: Started Virtual Machine qemu-3-instance-00000007.
Oct 02 12:01:29 compute-0 NetworkManager[51160]: <info>  [1759406489.6394] device (tap5562a861-2a): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:01:29 compute-0 NetworkManager[51160]: <info>  [1759406489.6402] device (tap5562a861-2a): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:01:31 compute-0 nova_compute[192079]: 2025-10-02 12:01:31.190 2 INFO nova.virt.libvirt.driver [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Creating config drive at /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk.config
Oct 02 12:01:31 compute-0 nova_compute[192079]: 2025-10-02 12:01:31.196 2 DEBUG oslo_concurrency.processutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpogngqjff execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:01:31 compute-0 nova_compute[192079]: 2025-10-02 12:01:31.330 2 DEBUG oslo_concurrency.processutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpogngqjff" returned: 0 in 0.134s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:01:31 compute-0 nova_compute[192079]: 2025-10-02 12:01:31.373 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406491.3729692, a20c354d-a1af-4fad-958f-59623ebe4437 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:01:31 compute-0 nova_compute[192079]: 2025-10-02 12:01:31.374 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] VM Started (Lifecycle Event)
Oct 02 12:01:31 compute-0 kernel: tapdc3331f3-a4: entered promiscuous mode
Oct 02 12:01:31 compute-0 NetworkManager[51160]: <info>  [1759406491.3962] manager: (tapdc3331f3-a4): new Tun device (/org/freedesktop/NetworkManager/Devices/38)
Oct 02 12:01:31 compute-0 NetworkManager[51160]: <info>  [1759406491.4077] device (tapdc3331f3-a4): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:01:31 compute-0 NetworkManager[51160]: <info>  [1759406491.4086] device (tapdc3331f3-a4): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:01:31 compute-0 ovn_controller[94336]: 2025-10-02T12:01:31Z|00051|binding|INFO|Claiming lport dc3331f3-a49a-4c18-98e4-476f3e2c97d4 for this chassis.
Oct 02 12:01:31 compute-0 ovn_controller[94336]: 2025-10-02T12:01:31Z|00052|binding|INFO|dc3331f3-a49a-4c18-98e4-476f3e2c97d4: Claiming fa:16:3e:2e:71:61 10.100.0.14
Oct 02 12:01:31 compute-0 nova_compute[192079]: 2025-10-02 12:01:31.436 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:31 compute-0 ovn_controller[94336]: 2025-10-02T12:01:31Z|00053|binding|INFO|Setting lport dc3331f3-a49a-4c18-98e4-476f3e2c97d4 ovn-installed in OVS
Oct 02 12:01:31 compute-0 nova_compute[192079]: 2025-10-02 12:01:31.456 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:31 compute-0 systemd-machined[152150]: New machine qemu-4-instance-00000008.
Oct 02 12:01:31 compute-0 systemd[1]: Started Virtual Machine qemu-4-instance-00000008.
Oct 02 12:01:31 compute-0 ovn_controller[94336]: 2025-10-02T12:01:31Z|00054|binding|INFO|Setting lport dc3331f3-a49a-4c18-98e4-476f3e2c97d4 up in Southbound
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:31.551 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:2e:71:61 10.100.0.14'], port_security=['fa:16:3e:2e:71:61 10.100.0.14'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.14/28', 'neutron:device_id': 'eba3fb05-4dd5-4f34-9cb5-c932a86f4c53', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-0432e6a2-e111-484d-b6cf-d32d9fc846c9', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '302a9c83c3eb43818ce6284e9ddb73be', 'neutron:revision_number': '2', 'neutron:security_group_ids': 'afed868a-564b-4ceb-947a-806e11012ac0', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=b550d36f-725b-4b76-9c4e-aa36183370a9, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=dc3331f3-a49a-4c18-98e4-476f3e2c97d4) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:31.553 103294 INFO neutron.agent.ovn.metadata.agent [-] Port dc3331f3-a49a-4c18-98e4-476f3e2c97d4 in datapath 0432e6a2-e111-484d-b6cf-d32d9fc846c9 bound to our chassis
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:31.554 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 0432e6a2-e111-484d-b6cf-d32d9fc846c9
Oct 02 12:01:31 compute-0 nova_compute[192079]: 2025-10-02 12:01:31.566 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:31.568 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[25b5d691-c550-4d09-a951-2b34df0101df]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:31.569 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap0432e6a2-e1 in ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:31.572 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap0432e6a2-e0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:31.573 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6c671c49-cca2-413e-a8e2-26a1a80cf320]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:31.574 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d59fb996-2d7b-41ef-b232-6d56746a308c]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:31.589 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[2cd6c42b-7ae0-41f1-977d-9ecc294b72e9]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:31.613 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[cf47cdf5-5371-42a8-af1d-b6bd3210db28]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:31.649 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[7686cb17-788e-4f20-8235-52fb875ee1d9]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:31 compute-0 NetworkManager[51160]: <info>  [1759406491.6552] manager: (tap0432e6a2-e0): new Veth device (/org/freedesktop/NetworkManager/Devices/39)
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:31.654 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7ca7db27-9749-48f5-bfb2-12442816bbb1]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:31.686 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[ba8c61bc-4e8c-4b86-b8af-e409db90965f]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:31.689 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[6ac700b3-4249-4484-aaaa-35388d613a41]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:31 compute-0 NetworkManager[51160]: <info>  [1759406491.7102] device (tap0432e6a2-e0): carrier: link connected
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:31.714 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[ccae8066-08c0-4811-be8d-6b5062027ec8]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:31.730 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[43697493-7dcd-43cb-a795-850a8c2ccfec]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap0432e6a2-e1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:54:ae:53'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 21], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 448933, 'reachable_time': 44662, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 220697, 'error': None, 'target': 'ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:31.749 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ca94da6e-2458-4df0-9eb3-bb64181ed4bc]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe54:ae53'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 448933, 'tstamp': 448933}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 220699, 'error': None, 'target': 'ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:31.774 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[22428079-febf-4350-9e38-85a0faa27af1]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap0432e6a2-e1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:54:ae:53'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 21], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 448933, 'reachable_time': 44662, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 220703, 'error': None, 'target': 'ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:31.821 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5d329561-abef-43b9-a37a-c6706ea1be74]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:31 compute-0 nova_compute[192079]: 2025-10-02 12:01:31.852 2 DEBUG nova.network.neutron [req-23a10cea-4164-41f0-87f0-aecc9499612d req-afaeb675-fdcb-498e-a44b-b3ec0429e7d5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Updated VIF entry in instance network info cache for port dc3331f3-a49a-4c18-98e4-476f3e2c97d4. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:01:31 compute-0 nova_compute[192079]: 2025-10-02 12:01:31.852 2 DEBUG nova.network.neutron [req-23a10cea-4164-41f0-87f0-aecc9499612d req-afaeb675-fdcb-498e-a44b-b3ec0429e7d5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Updating instance_info_cache with network_info: [{"id": "dc3331f3-a49a-4c18-98e4-476f3e2c97d4", "address": "fa:16:3e:2e:71:61", "network": {"id": "0432e6a2-e111-484d-b6cf-d32d9fc846c9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1078640656-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "302a9c83c3eb43818ce6284e9ddb73be", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapdc3331f3-a4", "ovs_interfaceid": "dc3331f3-a49a-4c18-98e4-476f3e2c97d4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:31.894 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e29fe180-f9ee-4540-b7cf-0b22b1e86ae6]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:31.896 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap0432e6a2-e0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:31.897 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:31.897 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap0432e6a2-e0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:01:31 compute-0 nova_compute[192079]: 2025-10-02 12:01:31.899 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:31 compute-0 NetworkManager[51160]: <info>  [1759406491.9001] manager: (tap0432e6a2-e0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/40)
Oct 02 12:01:31 compute-0 kernel: tap0432e6a2-e0: entered promiscuous mode
Oct 02 12:01:31 compute-0 nova_compute[192079]: 2025-10-02 12:01:31.901 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:31.903 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap0432e6a2-e0, col_values=(('external_ids', {'iface-id': '0fd0c84b-50f0-4eec-8552-49dfdd682f27'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:01:31 compute-0 nova_compute[192079]: 2025-10-02 12:01:31.905 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:31 compute-0 ovn_controller[94336]: 2025-10-02T12:01:31Z|00055|binding|INFO|Releasing lport 0fd0c84b-50f0-4eec-8552-49dfdd682f27 from this chassis (sb_readonly=0)
Oct 02 12:01:31 compute-0 nova_compute[192079]: 2025-10-02 12:01:31.906 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:31.907 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/0432e6a2-e111-484d-b6cf-d32d9fc846c9.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/0432e6a2-e111-484d-b6cf-d32d9fc846c9.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:31.908 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b9fc61b0-f021-404a-91d2-f5e40767df19]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:31.909 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-0432e6a2-e111-484d-b6cf-d32d9fc846c9
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/0432e6a2-e111-484d-b6cf-d32d9fc846c9.pid.haproxy
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 0432e6a2-e111-484d-b6cf-d32d9fc846c9
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:01:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:31.910 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9', 'env', 'PROCESS_TAG=haproxy-0432e6a2-e111-484d-b6cf-d32d9fc846c9', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/0432e6a2-e111-484d-b6cf-d32d9fc846c9.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:01:31 compute-0 nova_compute[192079]: 2025-10-02 12:01:31.916 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:31 compute-0 nova_compute[192079]: 2025-10-02 12:01:31.944 2 DEBUG oslo_concurrency.lockutils [req-23a10cea-4164-41f0-87f0-aecc9499612d req-afaeb675-fdcb-498e-a44b-b3ec0429e7d5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-eba3fb05-4dd5-4f34-9cb5-c932a86f4c53" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:01:31 compute-0 nova_compute[192079]: 2025-10-02 12:01:31.987 2 DEBUG nova.compute.manager [req-a5a8e437-ba0d-4142-b719-12a8ba4bf83d req-28f5c96b-f6cc-40de-8b16-6e95ea06866f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Received event network-vif-plugged-dc3331f3-a49a-4c18-98e4-476f3e2c97d4 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:01:31 compute-0 nova_compute[192079]: 2025-10-02 12:01:31.988 2 DEBUG oslo_concurrency.lockutils [req-a5a8e437-ba0d-4142-b719-12a8ba4bf83d req-28f5c96b-f6cc-40de-8b16-6e95ea06866f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "eba3fb05-4dd5-4f34-9cb5-c932a86f4c53-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:31 compute-0 nova_compute[192079]: 2025-10-02 12:01:31.988 2 DEBUG oslo_concurrency.lockutils [req-a5a8e437-ba0d-4142-b719-12a8ba4bf83d req-28f5c96b-f6cc-40de-8b16-6e95ea06866f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "eba3fb05-4dd5-4f34-9cb5-c932a86f4c53-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:31 compute-0 nova_compute[192079]: 2025-10-02 12:01:31.989 2 DEBUG oslo_concurrency.lockutils [req-a5a8e437-ba0d-4142-b719-12a8ba4bf83d req-28f5c96b-f6cc-40de-8b16-6e95ea06866f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "eba3fb05-4dd5-4f34-9cb5-c932a86f4c53-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:31 compute-0 nova_compute[192079]: 2025-10-02 12:01:31.989 2 DEBUG nova.compute.manager [req-a5a8e437-ba0d-4142-b719-12a8ba4bf83d req-28f5c96b-f6cc-40de-8b16-6e95ea06866f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Processing event network-vif-plugged-dc3331f3-a49a-4c18-98e4-476f3e2c97d4 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:01:32 compute-0 nova_compute[192079]: 2025-10-02 12:01:32.190 2 DEBUG nova.compute.manager [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:01:32 compute-0 nova_compute[192079]: 2025-10-02 12:01:32.190 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406492.189581, eba3fb05-4dd5-4f34-9cb5-c932a86f4c53 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:01:32 compute-0 nova_compute[192079]: 2025-10-02 12:01:32.190 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] VM Started (Lifecycle Event)
Oct 02 12:01:32 compute-0 nova_compute[192079]: 2025-10-02 12:01:32.194 2 DEBUG nova.virt.libvirt.driver [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:01:32 compute-0 nova_compute[192079]: 2025-10-02 12:01:32.197 2 INFO nova.virt.libvirt.driver [-] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Instance spawned successfully.
Oct 02 12:01:32 compute-0 nova_compute[192079]: 2025-10-02 12:01:32.197 2 DEBUG nova.virt.libvirt.driver [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:01:32 compute-0 nova_compute[192079]: 2025-10-02 12:01:32.237 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:01:32 compute-0 nova_compute[192079]: 2025-10-02 12:01:32.241 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:01:32 compute-0 nova_compute[192079]: 2025-10-02 12:01:32.298 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:01:32 compute-0 nova_compute[192079]: 2025-10-02 12:01:32.298 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406492.1904345, eba3fb05-4dd5-4f34-9cb5-c932a86f4c53 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:01:32 compute-0 nova_compute[192079]: 2025-10-02 12:01:32.298 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] VM Paused (Lifecycle Event)
Oct 02 12:01:32 compute-0 nova_compute[192079]: 2025-10-02 12:01:32.302 2 DEBUG nova.virt.libvirt.driver [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:01:32 compute-0 nova_compute[192079]: 2025-10-02 12:01:32.302 2 DEBUG nova.virt.libvirt.driver [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:01:32 compute-0 nova_compute[192079]: 2025-10-02 12:01:32.303 2 DEBUG nova.virt.libvirt.driver [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:01:32 compute-0 nova_compute[192079]: 2025-10-02 12:01:32.303 2 DEBUG nova.virt.libvirt.driver [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:01:32 compute-0 nova_compute[192079]: 2025-10-02 12:01:32.304 2 DEBUG nova.virt.libvirt.driver [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:01:32 compute-0 nova_compute[192079]: 2025-10-02 12:01:32.304 2 DEBUG nova.virt.libvirt.driver [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:01:32 compute-0 podman[220737]: 2025-10-02 12:01:32.354661504 +0000 UTC m=+0.048524756 container create 3573cdfb43ade666670c40924a132cceb65a84f458ea2fd1c797283fff1e8728 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2)
Oct 02 12:01:32 compute-0 systemd[1]: Started libpod-conmon-3573cdfb43ade666670c40924a132cceb65a84f458ea2fd1c797283fff1e8728.scope.
Oct 02 12:01:32 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:01:32 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/ee3ad8522c9b7f65b1ca66e4c4b52a64dc085976c0e26d3d70af0fe5f1e87e85/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:01:32 compute-0 podman[220737]: 2025-10-02 12:01:32.327817182 +0000 UTC m=+0.021680454 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:01:32 compute-0 podman[220737]: 2025-10-02 12:01:32.423229357 +0000 UTC m=+0.117092629 container init 3573cdfb43ade666670c40924a132cceb65a84f458ea2fd1c797283fff1e8728 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true)
Oct 02 12:01:32 compute-0 podman[220737]: 2025-10-02 12:01:32.428661166 +0000 UTC m=+0.122524408 container start 3573cdfb43ade666670c40924a132cceb65a84f458ea2fd1c797283fff1e8728 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true)
Oct 02 12:01:32 compute-0 nova_compute[192079]: 2025-10-02 12:01:32.445 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:01:32 compute-0 nova_compute[192079]: 2025-10-02 12:01:32.449 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406492.1935153, eba3fb05-4dd5-4f34-9cb5-c932a86f4c53 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:01:32 compute-0 nova_compute[192079]: 2025-10-02 12:01:32.449 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] VM Resumed (Lifecycle Event)
Oct 02 12:01:32 compute-0 neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9[220752]: [NOTICE]   (220756) : New worker (220758) forked
Oct 02 12:01:32 compute-0 neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9[220752]: [NOTICE]   (220756) : Loading success.
Oct 02 12:01:32 compute-0 nova_compute[192079]: 2025-10-02 12:01:32.724 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:01:32 compute-0 nova_compute[192079]: 2025-10-02 12:01:32.727 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:01:33 compute-0 nova_compute[192079]: 2025-10-02 12:01:33.001 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:01:33 compute-0 nova_compute[192079]: 2025-10-02 12:01:33.002 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406492.2245476, a20c354d-a1af-4fad-958f-59623ebe4437 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:01:33 compute-0 nova_compute[192079]: 2025-10-02 12:01:33.002 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] VM Resumed (Lifecycle Event)
Oct 02 12:01:33 compute-0 nova_compute[192079]: 2025-10-02 12:01:33.071 2 INFO nova.compute.manager [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Took 8.66 seconds to spawn the instance on the hypervisor.
Oct 02 12:01:33 compute-0 nova_compute[192079]: 2025-10-02 12:01:33.072 2 DEBUG nova.compute.manager [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:01:33 compute-0 nova_compute[192079]: 2025-10-02 12:01:33.074 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:01:33 compute-0 nova_compute[192079]: 2025-10-02 12:01:33.082 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: active, current task_state: migrating, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:01:33 compute-0 nova_compute[192079]: 2025-10-02 12:01:33.192 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] During the sync_power process the instance has moved from host compute-1.ctlplane.example.com to host compute-0.ctlplane.example.com
Oct 02 12:01:33 compute-0 nova_compute[192079]: 2025-10-02 12:01:33.445 2 INFO nova.compute.manager [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Took 11.15 seconds to build instance.
Oct 02 12:01:33 compute-0 nova_compute[192079]: 2025-10-02 12:01:33.515 2 DEBUG oslo_concurrency.lockutils [None req-9fb9f579-04fa-403e-9e03-d0eb6c60bdd6 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "eba3fb05-4dd5-4f34-9cb5-c932a86f4c53" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 12.133s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:33 compute-0 ovn_controller[94336]: 2025-10-02T12:01:33Z|00056|binding|INFO|Claiming lport 5562a861-2a3e-4411-8aaa-be6dde7a658a for this chassis.
Oct 02 12:01:33 compute-0 ovn_controller[94336]: 2025-10-02T12:01:33Z|00057|binding|INFO|5562a861-2a3e-4411-8aaa-be6dde7a658a: Claiming fa:16:3e:09:db:7c 10.100.0.13
Oct 02 12:01:33 compute-0 ovn_controller[94336]: 2025-10-02T12:01:33Z|00058|binding|INFO|Setting lport 5562a861-2a3e-4411-8aaa-be6dde7a658a up in Southbound
Oct 02 12:01:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:33.722 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:09:db:7c 10.100.0.13'], port_security=['fa:16:3e:09:db:7c 10.100.0.13'], type=, nat_addresses=[], virtual_parent=[], up=[True], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.13/28', 'neutron:device_id': 'a20c354d-a1af-4fad-958f-59623ebe4437', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-020b4768-a07a-4769-8636-455566c87083', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '5cc73d75e0864e838eefa90cb33b7e01', 'neutron:revision_number': '11', 'neutron:security_group_ids': 'f3fadef5-4bfc-406c-93c4-14d4abd0583e', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=11c0be75-bb4b-4e01-8cfa-b9aa4fcaf0e9, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=5562a861-2a3e-4411-8aaa-be6dde7a658a) old=Port_Binding(up=[False], additional_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:01:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:33.724 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 5562a861-2a3e-4411-8aaa-be6dde7a658a in datapath 020b4768-a07a-4769-8636-455566c87083 bound to our chassis
Oct 02 12:01:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:33.726 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 020b4768-a07a-4769-8636-455566c87083
Oct 02 12:01:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:33.743 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[37acc7db-dba9-4566-b869-c4305d420f62]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:33.744 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap020b4768-a1 in ovnmeta-020b4768-a07a-4769-8636-455566c87083 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:01:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:33.746 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap020b4768-a0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:01:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:33.746 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0a073069-e8a5-4df8-9f49-3ee17535f1ff]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:33.747 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[afc9a2e7-62c6-4884-bea7-7e14ec57b5ab]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:33.766 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[405ba518-f9e8-41a3-98e8-422884f6b6c1]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:33.798 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ba880361-a4fc-47e5-aa37-88261a923265]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:33.840 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[c464c865-8fcb-49f4-b55c-d49469cf1daf]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:33 compute-0 NetworkManager[51160]: <info>  [1759406493.8692] manager: (tap020b4768-a0): new Veth device (/org/freedesktop/NetworkManager/Devices/41)
Oct 02 12:01:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:33.868 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[fe0d12db-7299-459e-910d-d9e0830fa27a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:33 compute-0 systemd-udevd[220774]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:01:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:33.914 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[e1050bff-f5eb-40e2-aca0-d3c409c80c0f]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:33.918 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[291c15f5-7426-450f-8909-787b5c54776c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:33 compute-0 NetworkManager[51160]: <info>  [1759406493.9543] device (tap020b4768-a0): carrier: link connected
Oct 02 12:01:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:33.967 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[d3377e26-3bc9-4688-b55d-799e19e0ef6b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:33.983 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a3d97e5d-2f3f-4d12-91e3-f49b99507c59]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap020b4768-a1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:62:d2:ce'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 22], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 449158, 'reachable_time': 15856, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 220793, 'error': None, 'target': 'ovnmeta-020b4768-a07a-4769-8636-455566c87083', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:33.995 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7245587a-65b0-4ab0-b3b2-543828d95ece]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe62:d2ce'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 449158, 'tstamp': 449158}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 220794, 'error': None, 'target': 'ovnmeta-020b4768-a07a-4769-8636-455566c87083', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:34.013 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d89b1734-d08e-4055-bdf8-fc96bfbfeeb9]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap020b4768-a1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:62:d2:ce'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 2, 'tx_packets': 1, 'rx_bytes': 196, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 2, 'tx_packets': 1, 'rx_bytes': 196, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 22], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 449158, 'reachable_time': 15856, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 2, 'inoctets': 168, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 2, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 168, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 2, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 220796, 'error': None, 'target': 'ovnmeta-020b4768-a07a-4769-8636-455566c87083', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:34.044 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e3deee73-557b-4708-a34c-d83bfadc8af9]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:34.122 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5ef4e55f-65ab-46cb-b38c-f476dfba163d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:34.124 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap020b4768-a0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:34.125 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:34.126 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap020b4768-a0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:01:34 compute-0 NetworkManager[51160]: <info>  [1759406494.1295] manager: (tap020b4768-a0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/42)
Oct 02 12:01:34 compute-0 kernel: tap020b4768-a0: entered promiscuous mode
Oct 02 12:01:34 compute-0 nova_compute[192079]: 2025-10-02 12:01:34.128 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:34 compute-0 nova_compute[192079]: 2025-10-02 12:01:34.131 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:34.134 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap020b4768-a0, col_values=(('external_ids', {'iface-id': '7ad14bc1-f6e9-4852-aef9-ac72c7291cba'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:01:34 compute-0 ovn_controller[94336]: 2025-10-02T12:01:34Z|00059|binding|INFO|Releasing lport 7ad14bc1-f6e9-4852-aef9-ac72c7291cba from this chassis (sb_readonly=0)
Oct 02 12:01:34 compute-0 nova_compute[192079]: 2025-10-02 12:01:34.136 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:34 compute-0 nova_compute[192079]: 2025-10-02 12:01:34.137 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:34.140 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/020b4768-a07a-4769-8636-455566c87083.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/020b4768-a07a-4769-8636-455566c87083.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:34.141 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[07cce26c-2560-4b19-aece-7e12b8a9b835]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:34.142 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-020b4768-a07a-4769-8636-455566c87083
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/020b4768-a07a-4769-8636-455566c87083.pid.haproxy
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 020b4768-a07a-4769-8636-455566c87083
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:01:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:34.143 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-020b4768-a07a-4769-8636-455566c87083', 'env', 'PROCESS_TAG=haproxy-020b4768-a07a-4769-8636-455566c87083', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/020b4768-a07a-4769-8636-455566c87083.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:01:34 compute-0 nova_compute[192079]: 2025-10-02 12:01:34.151 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:34 compute-0 nova_compute[192079]: 2025-10-02 12:01:34.166 2 DEBUG nova.compute.manager [req-d99ade87-57bf-4257-815a-67f0d084877a req-d621edaf-f0d5-43a8-82ee-c83f58d1b642 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Received event network-vif-plugged-dc3331f3-a49a-4c18-98e4-476f3e2c97d4 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:01:34 compute-0 nova_compute[192079]: 2025-10-02 12:01:34.167 2 DEBUG oslo_concurrency.lockutils [req-d99ade87-57bf-4257-815a-67f0d084877a req-d621edaf-f0d5-43a8-82ee-c83f58d1b642 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "eba3fb05-4dd5-4f34-9cb5-c932a86f4c53-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:34 compute-0 nova_compute[192079]: 2025-10-02 12:01:34.167 2 DEBUG oslo_concurrency.lockutils [req-d99ade87-57bf-4257-815a-67f0d084877a req-d621edaf-f0d5-43a8-82ee-c83f58d1b642 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "eba3fb05-4dd5-4f34-9cb5-c932a86f4c53-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:34 compute-0 nova_compute[192079]: 2025-10-02 12:01:34.167 2 DEBUG oslo_concurrency.lockutils [req-d99ade87-57bf-4257-815a-67f0d084877a req-d621edaf-f0d5-43a8-82ee-c83f58d1b642 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "eba3fb05-4dd5-4f34-9cb5-c932a86f4c53-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:34 compute-0 nova_compute[192079]: 2025-10-02 12:01:34.167 2 DEBUG nova.compute.manager [req-d99ade87-57bf-4257-815a-67f0d084877a req-d621edaf-f0d5-43a8-82ee-c83f58d1b642 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] No waiting events found dispatching network-vif-plugged-dc3331f3-a49a-4c18-98e4-476f3e2c97d4 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:01:34 compute-0 nova_compute[192079]: 2025-10-02 12:01:34.167 2 WARNING nova.compute.manager [req-d99ade87-57bf-4257-815a-67f0d084877a req-d621edaf-f0d5-43a8-82ee-c83f58d1b642 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Received unexpected event network-vif-plugged-dc3331f3-a49a-4c18-98e4-476f3e2c97d4 for instance with vm_state active and task_state None.
Oct 02 12:01:34 compute-0 nova_compute[192079]: 2025-10-02 12:01:34.175 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:34 compute-0 nova_compute[192079]: 2025-10-02 12:01:34.188 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:34 compute-0 nova_compute[192079]: 2025-10-02 12:01:34.534 2 INFO nova.compute.manager [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Post operation of migration started
Oct 02 12:01:34 compute-0 podman[220829]: 2025-10-02 12:01:34.596463169 +0000 UTC m=+0.113005368 container create aaaf3fce9d3cba20b6c5d638b12fd8f6f046753db3193b006b7818cc371f3534 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001)
Oct 02 12:01:34 compute-0 podman[220829]: 2025-10-02 12:01:34.50900527 +0000 UTC m=+0.025547519 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:01:34 compute-0 systemd[1]: Started libpod-conmon-aaaf3fce9d3cba20b6c5d638b12fd8f6f046753db3193b006b7818cc371f3534.scope.
Oct 02 12:01:34 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:01:34 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/21f2a2d335939271bb52bd87d7288d5982c677411ca25bb9a342b6b3551da1c4/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:01:34 compute-0 podman[220829]: 2025-10-02 12:01:34.693061527 +0000 UTC m=+0.209603756 container init aaaf3fce9d3cba20b6c5d638b12fd8f6f046753db3193b006b7818cc371f3534 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.vendor=CentOS)
Oct 02 12:01:34 compute-0 podman[220829]: 2025-10-02 12:01:34.698442963 +0000 UTC m=+0.214985162 container start aaaf3fce9d3cba20b6c5d638b12fd8f6f046753db3193b006b7818cc371f3534 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:01:34 compute-0 neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083[220844]: [NOTICE]   (220848) : New worker (220850) forked
Oct 02 12:01:34 compute-0 neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083[220844]: [NOTICE]   (220848) : Loading success.
Oct 02 12:01:35 compute-0 nova_compute[192079]: 2025-10-02 12:01:35.175 2 DEBUG oslo_concurrency.lockutils [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Acquiring lock "refresh_cache-a20c354d-a1af-4fad-958f-59623ebe4437" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:01:35 compute-0 nova_compute[192079]: 2025-10-02 12:01:35.175 2 DEBUG oslo_concurrency.lockutils [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Acquired lock "refresh_cache-a20c354d-a1af-4fad-958f-59623ebe4437" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:01:35 compute-0 nova_compute[192079]: 2025-10-02 12:01:35.175 2 DEBUG nova.network.neutron [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:01:36 compute-0 nova_compute[192079]: 2025-10-02 12:01:36.132 2 DEBUG nova.compute.manager [req-feda9890-15c3-4677-b46a-b4badcf1d393 req-2aa6a219-cecf-4eea-9df3-84c90cdc2a73 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Received event network-changed-dc3331f3-a49a-4c18-98e4-476f3e2c97d4 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:01:36 compute-0 nova_compute[192079]: 2025-10-02 12:01:36.133 2 DEBUG nova.compute.manager [req-feda9890-15c3-4677-b46a-b4badcf1d393 req-2aa6a219-cecf-4eea-9df3-84c90cdc2a73 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Refreshing instance network info cache due to event network-changed-dc3331f3-a49a-4c18-98e4-476f3e2c97d4. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:01:36 compute-0 nova_compute[192079]: 2025-10-02 12:01:36.133 2 DEBUG oslo_concurrency.lockutils [req-feda9890-15c3-4677-b46a-b4badcf1d393 req-2aa6a219-cecf-4eea-9df3-84c90cdc2a73 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-eba3fb05-4dd5-4f34-9cb5-c932a86f4c53" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:01:36 compute-0 nova_compute[192079]: 2025-10-02 12:01:36.133 2 DEBUG oslo_concurrency.lockutils [req-feda9890-15c3-4677-b46a-b4badcf1d393 req-2aa6a219-cecf-4eea-9df3-84c90cdc2a73 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-eba3fb05-4dd5-4f34-9cb5-c932a86f4c53" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:01:36 compute-0 nova_compute[192079]: 2025-10-02 12:01:36.133 2 DEBUG nova.network.neutron [req-feda9890-15c3-4677-b46a-b4badcf1d393 req-2aa6a219-cecf-4eea-9df3-84c90cdc2a73 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Refreshing network info cache for port dc3331f3-a49a-4c18-98e4-476f3e2c97d4 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:01:36 compute-0 nova_compute[192079]: 2025-10-02 12:01:36.459 2 DEBUG nova.network.neutron [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Updating instance_info_cache with network_info: [{"id": "5562a861-2a3e-4411-8aaa-be6dde7a658a", "address": "fa:16:3e:09:db:7c", "network": {"id": "020b4768-a07a-4769-8636-455566c87083", "bridge": "br-int", "label": "tempest-LiveAutoBlockMigrationV225Test-804372870-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "5cc73d75e0864e838eefa90cb33b7e01", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5562a861-2a", "ovs_interfaceid": "5562a861-2a3e-4411-8aaa-be6dde7a658a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {"os_vif_delegation": true}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:01:36 compute-0 nova_compute[192079]: 2025-10-02 12:01:36.522 2 DEBUG oslo_concurrency.lockutils [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Releasing lock "refresh_cache-a20c354d-a1af-4fad-958f-59623ebe4437" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:01:36 compute-0 nova_compute[192079]: 2025-10-02 12:01:36.607 2 DEBUG oslo_concurrency.lockutils [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.allocate_pci_devices_for_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:36 compute-0 nova_compute[192079]: 2025-10-02 12:01:36.608 2 DEBUG oslo_concurrency.lockutils [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.allocate_pci_devices_for_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:36 compute-0 nova_compute[192079]: 2025-10-02 12:01:36.608 2 DEBUG oslo_concurrency.lockutils [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.allocate_pci_devices_for_instance" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:36 compute-0 nova_compute[192079]: 2025-10-02 12:01:36.614 2 INFO nova.virt.libvirt.driver [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Sending announce-self command to QEMU monitor. Attempt 1 of 3
Oct 02 12:01:36 compute-0 virtqemud[191807]: Domain id=3 name='instance-00000007' uuid=a20c354d-a1af-4fad-958f-59623ebe4437 is tainted: custom-monitor
Oct 02 12:01:36 compute-0 nova_compute[192079]: 2025-10-02 12:01:36.718 2 DEBUG nova.compute.manager [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Stashing vm_state: active _prep_resize /usr/lib/python3.9/site-packages/nova/compute/manager.py:5560
Oct 02 12:01:36 compute-0 nova_compute[192079]: 2025-10-02 12:01:36.874 2 DEBUG oslo_concurrency.lockutils [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:36 compute-0 nova_compute[192079]: 2025-10-02 12:01:36.875 2 DEBUG oslo_concurrency.lockutils [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:36 compute-0 nova_compute[192079]: 2025-10-02 12:01:36.912 2 DEBUG nova.objects.instance [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Lazy-loading 'pci_requests' on Instance uuid e09de65a-0b2d-4aa5-9d9a-49f039add691 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:01:36 compute-0 nova_compute[192079]: 2025-10-02 12:01:36.926 2 DEBUG nova.virt.hardware [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:01:36 compute-0 nova_compute[192079]: 2025-10-02 12:01:36.927 2 INFO nova.compute.claims [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:01:36 compute-0 nova_compute[192079]: 2025-10-02 12:01:36.927 2 DEBUG nova.objects.instance [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Lazy-loading 'resources' on Instance uuid e09de65a-0b2d-4aa5-9d9a-49f039add691 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:01:36 compute-0 nova_compute[192079]: 2025-10-02 12:01:36.940 2 DEBUG nova.objects.instance [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Lazy-loading 'pci_devices' on Instance uuid e09de65a-0b2d-4aa5-9d9a-49f039add691 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:01:36 compute-0 nova_compute[192079]: 2025-10-02 12:01:36.986 2 INFO nova.compute.resource_tracker [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Updating resource usage from migration df4af920-0d69-485c-a689-db5f063c5cf4
Oct 02 12:01:36 compute-0 nova_compute[192079]: 2025-10-02 12:01:36.987 2 DEBUG nova.compute.resource_tracker [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Starting to track incoming migration df4af920-0d69-485c-a689-db5f063c5cf4 with flavor 9949d9da-6314-4ede-8797-6f2f0a6a64fc _update_usage_from_migration /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1431
Oct 02 12:01:37 compute-0 nova_compute[192079]: 2025-10-02 12:01:37.094 2 DEBUG nova.compute.provider_tree [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:01:37 compute-0 nova_compute[192079]: 2025-10-02 12:01:37.113 2 DEBUG nova.scheduler.client.report [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:01:37 compute-0 nova_compute[192079]: 2025-10-02 12:01:37.139 2 DEBUG oslo_concurrency.lockutils [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 0.264s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:37 compute-0 nova_compute[192079]: 2025-10-02 12:01:37.139 2 INFO nova.compute.manager [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Migrating
Oct 02 12:01:37 compute-0 podman[220859]: 2025-10-02 12:01:37.141877544 +0000 UTC m=+0.056071313 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:01:37 compute-0 podman[220860]: 2025-10-02 12:01:37.158811276 +0000 UTC m=+0.075092881 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.vendor=CentOS, tcib_managed=true, config_id=iscsid, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.license=GPLv2, container_name=iscsid, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:01:37 compute-0 nova_compute[192079]: 2025-10-02 12:01:37.625 2 INFO nova.virt.libvirt.driver [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Sending announce-self command to QEMU monitor. Attempt 2 of 3
Oct 02 12:01:37 compute-0 nova_compute[192079]: 2025-10-02 12:01:37.804 2 DEBUG nova.network.neutron [req-feda9890-15c3-4677-b46a-b4badcf1d393 req-2aa6a219-cecf-4eea-9df3-84c90cdc2a73 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Updated VIF entry in instance network info cache for port dc3331f3-a49a-4c18-98e4-476f3e2c97d4. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:01:37 compute-0 nova_compute[192079]: 2025-10-02 12:01:37.805 2 DEBUG nova.network.neutron [req-feda9890-15c3-4677-b46a-b4badcf1d393 req-2aa6a219-cecf-4eea-9df3-84c90cdc2a73 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Updating instance_info_cache with network_info: [{"id": "dc3331f3-a49a-4c18-98e4-476f3e2c97d4", "address": "fa:16:3e:2e:71:61", "network": {"id": "0432e6a2-e111-484d-b6cf-d32d9fc846c9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1078640656-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.218", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "302a9c83c3eb43818ce6284e9ddb73be", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapdc3331f3-a4", "ovs_interfaceid": "dc3331f3-a49a-4c18-98e4-476f3e2c97d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:01:37 compute-0 nova_compute[192079]: 2025-10-02 12:01:37.837 2 DEBUG oslo_concurrency.lockutils [req-feda9890-15c3-4677-b46a-b4badcf1d393 req-2aa6a219-cecf-4eea-9df3-84c90cdc2a73 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-eba3fb05-4dd5-4f34-9cb5-c932a86f4c53" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:01:37 compute-0 sshd-session[220899]: Accepted publickey for nova from 192.168.122.102 port 43812 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:01:37 compute-0 systemd[1]: Created slice User Slice of UID 42436.
Oct 02 12:01:37 compute-0 systemd[1]: Starting User Runtime Directory /run/user/42436...
Oct 02 12:01:37 compute-0 systemd-logind[827]: New session 28 of user nova.
Oct 02 12:01:37 compute-0 systemd[1]: Finished User Runtime Directory /run/user/42436.
Oct 02 12:01:37 compute-0 systemd[1]: Starting User Manager for UID 42436...
Oct 02 12:01:37 compute-0 systemd[220903]: pam_unix(systemd-user:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:01:38 compute-0 systemd[220903]: Queued start job for default target Main User Target.
Oct 02 12:01:38 compute-0 systemd[220903]: Created slice User Application Slice.
Oct 02 12:01:38 compute-0 systemd[220903]: Started Mark boot as successful after the user session has run 2 minutes.
Oct 02 12:01:38 compute-0 systemd[220903]: Started Daily Cleanup of User's Temporary Directories.
Oct 02 12:01:38 compute-0 systemd[220903]: Reached target Paths.
Oct 02 12:01:38 compute-0 systemd[220903]: Reached target Timers.
Oct 02 12:01:38 compute-0 systemd[220903]: Starting D-Bus User Message Bus Socket...
Oct 02 12:01:38 compute-0 systemd[220903]: Starting Create User's Volatile Files and Directories...
Oct 02 12:01:38 compute-0 systemd[220903]: Listening on D-Bus User Message Bus Socket.
Oct 02 12:01:38 compute-0 systemd[220903]: Reached target Sockets.
Oct 02 12:01:38 compute-0 systemd[220903]: Finished Create User's Volatile Files and Directories.
Oct 02 12:01:38 compute-0 systemd[220903]: Reached target Basic System.
Oct 02 12:01:38 compute-0 systemd[220903]: Reached target Main User Target.
Oct 02 12:01:38 compute-0 systemd[220903]: Startup finished in 148ms.
Oct 02 12:01:38 compute-0 systemd[1]: Started User Manager for UID 42436.
Oct 02 12:01:38 compute-0 systemd[1]: Started Session 28 of User nova.
Oct 02 12:01:38 compute-0 sshd-session[220899]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:01:38 compute-0 sshd-session[220918]: Received disconnect from 192.168.122.102 port 43812:11: disconnected by user
Oct 02 12:01:38 compute-0 sshd-session[220918]: Disconnected from user nova 192.168.122.102 port 43812
Oct 02 12:01:38 compute-0 sshd-session[220899]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:01:38 compute-0 systemd[1]: session-28.scope: Deactivated successfully.
Oct 02 12:01:38 compute-0 systemd-logind[827]: Session 28 logged out. Waiting for processes to exit.
Oct 02 12:01:38 compute-0 systemd-logind[827]: Removed session 28.
Oct 02 12:01:38 compute-0 sshd-session[220920]: Accepted publickey for nova from 192.168.122.102 port 43816 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:01:38 compute-0 systemd-logind[827]: New session 30 of user nova.
Oct 02 12:01:38 compute-0 systemd[1]: Started Session 30 of User nova.
Oct 02 12:01:38 compute-0 sshd-session[220920]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:01:38 compute-0 sshd-session[220923]: Received disconnect from 192.168.122.102 port 43816:11: disconnected by user
Oct 02 12:01:38 compute-0 sshd-session[220923]: Disconnected from user nova 192.168.122.102 port 43816
Oct 02 12:01:38 compute-0 sshd-session[220920]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:01:38 compute-0 systemd[1]: session-30.scope: Deactivated successfully.
Oct 02 12:01:38 compute-0 systemd-logind[827]: Session 30 logged out. Waiting for processes to exit.
Oct 02 12:01:38 compute-0 systemd-logind[827]: Removed session 30.
Oct 02 12:01:38 compute-0 nova_compute[192079]: 2025-10-02 12:01:38.632 2 INFO nova.virt.libvirt.driver [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Sending announce-self command to QEMU monitor. Attempt 3 of 3
Oct 02 12:01:38 compute-0 nova_compute[192079]: 2025-10-02 12:01:38.643 2 DEBUG nova.compute.manager [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:01:38 compute-0 nova_compute[192079]: 2025-10-02 12:01:38.671 2 DEBUG nova.objects.instance [None req-daac6ca0-12dd-4c72-b88b-364a76fca2c4 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Trying to apply a migration context that does not seem to be set for this instance apply_migration_context /usr/lib/python3.9/site-packages/nova/objects/instance.py:1032
Oct 02 12:01:39 compute-0 nova_compute[192079]: 2025-10-02 12:01:39.176 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:39 compute-0 nova_compute[192079]: 2025-10-02 12:01:39.190 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:43 compute-0 nova_compute[192079]: 2025-10-02 12:01:43.015 2 DEBUG nova.virt.libvirt.driver [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Check if temp file /var/lib/nova/instances/tmp3iu_n5cr exists to indicate shared storage is being used for migration. Exists? False _check_shared_storage_test_file /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10065
Oct 02 12:01:43 compute-0 nova_compute[192079]: 2025-10-02 12:01:43.016 2 DEBUG nova.compute.manager [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] source check data is LibvirtLiveMigrateData(bdms=<?>,block_migration=True,disk_available_mb=74752,disk_over_commit=<?>,dst_numa_info=<?>,dst_supports_numa_live_migration=<?>,dst_wants_file_backed_memory=False,file_backed_memory_discard=<?>,filename='tmp3iu_n5cr',graphics_listen_addr_spice=127.0.0.1,graphics_listen_addr_vnc=::,image_type='qcow2',instance_relative_path='a20c354d-a1af-4fad-958f-59623ebe4437',is_shared_block_storage=False,is_shared_instance_path=False,is_volume_backed=False,migration=<?>,old_vol_attachment_ids=<?>,serial_listen_addr=None,serial_listen_ports=<?>,src_supports_native_luks=<?>,src_supports_numa_live_migration=<?>,supported_perf_events=<?>,target_connect_addr=<?>,vifs=[VIFMigrateData],wait_for_vif_plugged=<?>) check_can_live_migrate_source /usr/lib/python3.9/site-packages/nova/compute/manager.py:8587
Oct 02 12:01:43 compute-0 nova_compute[192079]: 2025-10-02 12:01:43.814 2 DEBUG oslo_concurrency.processutils [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a20c354d-a1af-4fad-958f-59623ebe4437/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:01:43 compute-0 nova_compute[192079]: 2025-10-02 12:01:43.927 2 DEBUG oslo_concurrency.processutils [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a20c354d-a1af-4fad-958f-59623ebe4437/disk --force-share --output=json" returned: 0 in 0.113s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:01:43 compute-0 nova_compute[192079]: 2025-10-02 12:01:43.929 2 DEBUG oslo_concurrency.processutils [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a20c354d-a1af-4fad-958f-59623ebe4437/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:01:44 compute-0 nova_compute[192079]: 2025-10-02 12:01:44.016 2 DEBUG oslo_concurrency.processutils [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a20c354d-a1af-4fad-958f-59623ebe4437/disk --force-share --output=json" returned: 0 in 0.087s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:01:44 compute-0 nova_compute[192079]: 2025-10-02 12:01:44.178 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:44 compute-0 nova_compute[192079]: 2025-10-02 12:01:44.192 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:45 compute-0 podman[220952]: 2025-10-02 12:01:45.22958275 +0000 UTC m=+0.130666469 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller, container_name=ovn_controller, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS)
Oct 02 12:01:46 compute-0 sshd-session[220979]: Accepted publickey for nova from 192.168.122.101 port 34590 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:01:46 compute-0 systemd-logind[827]: New session 31 of user nova.
Oct 02 12:01:46 compute-0 systemd[1]: Started Session 31 of User nova.
Oct 02 12:01:46 compute-0 sshd-session[220979]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:01:46 compute-0 sshd-session[220982]: Received disconnect from 192.168.122.101 port 34590:11: disconnected by user
Oct 02 12:01:46 compute-0 sshd-session[220982]: Disconnected from user nova 192.168.122.101 port 34590
Oct 02 12:01:46 compute-0 sshd-session[220979]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:01:46 compute-0 systemd[1]: session-31.scope: Deactivated successfully.
Oct 02 12:01:46 compute-0 systemd-logind[827]: Session 31 logged out. Waiting for processes to exit.
Oct 02 12:01:46 compute-0 systemd-logind[827]: Removed session 31.
Oct 02 12:01:48 compute-0 nova_compute[192079]: 2025-10-02 12:01:48.535 2 DEBUG nova.compute.manager [req-ac86938b-b536-428e-ab3f-7899501cf756 req-2984d800-19c6-4106-b496-15abc5635fe4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Received event network-vif-unplugged-5562a861-2a3e-4411-8aaa-be6dde7a658a external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:01:48 compute-0 nova_compute[192079]: 2025-10-02 12:01:48.536 2 DEBUG oslo_concurrency.lockutils [req-ac86938b-b536-428e-ab3f-7899501cf756 req-2984d800-19c6-4106-b496-15abc5635fe4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "a20c354d-a1af-4fad-958f-59623ebe4437-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:48 compute-0 nova_compute[192079]: 2025-10-02 12:01:48.536 2 DEBUG oslo_concurrency.lockutils [req-ac86938b-b536-428e-ab3f-7899501cf756 req-2984d800-19c6-4106-b496-15abc5635fe4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a20c354d-a1af-4fad-958f-59623ebe4437-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:48 compute-0 nova_compute[192079]: 2025-10-02 12:01:48.536 2 DEBUG oslo_concurrency.lockutils [req-ac86938b-b536-428e-ab3f-7899501cf756 req-2984d800-19c6-4106-b496-15abc5635fe4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a20c354d-a1af-4fad-958f-59623ebe4437-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:48 compute-0 nova_compute[192079]: 2025-10-02 12:01:48.536 2 DEBUG nova.compute.manager [req-ac86938b-b536-428e-ab3f-7899501cf756 req-2984d800-19c6-4106-b496-15abc5635fe4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] No waiting events found dispatching network-vif-unplugged-5562a861-2a3e-4411-8aaa-be6dde7a658a pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:01:48 compute-0 nova_compute[192079]: 2025-10-02 12:01:48.536 2 DEBUG nova.compute.manager [req-ac86938b-b536-428e-ab3f-7899501cf756 req-2984d800-19c6-4106-b496-15abc5635fe4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Received event network-vif-unplugged-5562a861-2a3e-4411-8aaa-be6dde7a658a for instance with task_state migrating. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:01:48 compute-0 ovn_controller[94336]: 2025-10-02T12:01:48Z|00008|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:2e:71:61 10.100.0.14
Oct 02 12:01:48 compute-0 ovn_controller[94336]: 2025-10-02T12:01:48Z|00009|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:2e:71:61 10.100.0.14
Oct 02 12:01:48 compute-0 nova_compute[192079]: 2025-10-02 12:01:48.928 2 INFO nova.compute.manager [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Took 4.91 seconds for pre_live_migration on destination host compute-1.ctlplane.example.com.
Oct 02 12:01:48 compute-0 nova_compute[192079]: 2025-10-02 12:01:48.929 2 DEBUG nova.compute.manager [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:01:48 compute-0 nova_compute[192079]: 2025-10-02 12:01:48.955 2 DEBUG nova.compute.manager [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] live_migration data is LibvirtLiveMigrateData(bdms=[],block_migration=True,disk_available_mb=74752,disk_over_commit=<?>,dst_numa_info=<?>,dst_supports_numa_live_migration=<?>,dst_wants_file_backed_memory=False,file_backed_memory_discard=<?>,filename='tmp3iu_n5cr',graphics_listen_addr_spice=127.0.0.1,graphics_listen_addr_vnc=::,image_type='qcow2',instance_relative_path='a20c354d-a1af-4fad-958f-59623ebe4437',is_shared_block_storage=False,is_shared_instance_path=False,is_volume_backed=False,migration=Migration(35179a92-1205-4643-acbc-8771c847a215),old_vol_attachment_ids={},serial_listen_addr=None,serial_listen_ports=[],src_supports_native_luks=<?>,src_supports_numa_live_migration=<?>,supported_perf_events=[],target_connect_addr=None,vifs=[VIFMigrateData],wait_for_vif_plugged=True) _do_live_migration /usr/lib/python3.9/site-packages/nova/compute/manager.py:8939
Oct 02 12:01:48 compute-0 nova_compute[192079]: 2025-10-02 12:01:48.983 2 DEBUG nova.objects.instance [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Lazy-loading 'migration_context' on Instance uuid a20c354d-a1af-4fad-958f-59623ebe4437 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:01:48 compute-0 nova_compute[192079]: 2025-10-02 12:01:48.984 2 DEBUG nova.virt.libvirt.driver [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Starting monitoring of live migration _live_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10639
Oct 02 12:01:48 compute-0 nova_compute[192079]: 2025-10-02 12:01:48.986 2 DEBUG nova.virt.libvirt.driver [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Operation thread is still running _live_migration_monitor /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10440
Oct 02 12:01:48 compute-0 nova_compute[192079]: 2025-10-02 12:01:48.987 2 DEBUG nova.virt.libvirt.driver [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Migration not running yet _live_migration_monitor /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10449
Oct 02 12:01:49 compute-0 nova_compute[192079]: 2025-10-02 12:01:49.005 2 DEBUG nova.virt.libvirt.vif [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=True,config_drive='True',created_at=2025-10-02T12:00:57Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description=None,display_name='tempest-LiveAutoBlockMigrationV225Test-server-1982637812',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-liveautoblockmigrationv225test-server-1982637812',id=7,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:01:06Z,launched_on='compute-1.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='5cc73d75e0864e838eefa90cb33b7e01',ramdisk_id='',reservation_id='r-bvhrjcj5',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',clean_attempts='1',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_m
in_disk='1',image_min_ram='0',owner_project_name='tempest-LiveAutoBlockMigrationV225Test-984573444',owner_user_name='tempest-LiveAutoBlockMigrationV225Test-984573444-project-member'},tags=<?>,task_state='migrating',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:01:38Z,user_data=None,user_id='59e8135d73ee43e088ba5ee7d9bd84b1',uuid=a20c354d-a1af-4fad-958f-59623ebe4437,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "5562a861-2a3e-4411-8aaa-be6dde7a658a", "address": "fa:16:3e:09:db:7c", "network": {"id": "020b4768-a07a-4769-8636-455566c87083", "bridge": "br-int", "label": "tempest-LiveAutoBlockMigrationV225Test-804372870-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "5cc73d75e0864e838eefa90cb33b7e01", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system"}, "devname": "tap5562a861-2a", "ovs_interfaceid": "5562a861-2a3e-4411-8aaa-be6dde7a658a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {"os_vif_delegation": true}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:01:49 compute-0 nova_compute[192079]: 2025-10-02 12:01:49.006 2 DEBUG nova.network.os_vif_util [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Converting VIF {"id": "5562a861-2a3e-4411-8aaa-be6dde7a658a", "address": "fa:16:3e:09:db:7c", "network": {"id": "020b4768-a07a-4769-8636-455566c87083", "bridge": "br-int", "label": "tempest-LiveAutoBlockMigrationV225Test-804372870-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "5cc73d75e0864e838eefa90cb33b7e01", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system"}, "devname": "tap5562a861-2a", "ovs_interfaceid": "5562a861-2a3e-4411-8aaa-be6dde7a658a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {"os_vif_delegation": true}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:01:49 compute-0 nova_compute[192079]: 2025-10-02 12:01:49.007 2 DEBUG nova.network.os_vif_util [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:09:db:7c,bridge_name='br-int',has_traffic_filtering=True,id=5562a861-2a3e-4411-8aaa-be6dde7a658a,network=Network(020b4768-a07a-4769-8636-455566c87083),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5562a861-2a') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:01:49 compute-0 nova_compute[192079]: 2025-10-02 12:01:49.008 2 DEBUG nova.virt.libvirt.migration [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Updating guest XML with vif config: <interface type="ethernet">
Oct 02 12:01:49 compute-0 nova_compute[192079]:   <mac address="fa:16:3e:09:db:7c"/>
Oct 02 12:01:49 compute-0 nova_compute[192079]:   <model type="virtio"/>
Oct 02 12:01:49 compute-0 nova_compute[192079]:   <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:01:49 compute-0 nova_compute[192079]:   <mtu size="1442"/>
Oct 02 12:01:49 compute-0 nova_compute[192079]:   <target dev="tap5562a861-2a"/>
Oct 02 12:01:49 compute-0 nova_compute[192079]: </interface>
Oct 02 12:01:49 compute-0 nova_compute[192079]:  _update_vif_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/migration.py:388
Oct 02 12:01:49 compute-0 nova_compute[192079]: 2025-10-02 12:01:49.009 2 DEBUG nova.virt.libvirt.driver [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] About to invoke the migrate API _live_migration_operation /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10272
Oct 02 12:01:49 compute-0 nova_compute[192079]: 2025-10-02 12:01:49.185 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:49 compute-0 nova_compute[192079]: 2025-10-02 12:01:49.194 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:49 compute-0 nova_compute[192079]: 2025-10-02 12:01:49.489 2 DEBUG nova.virt.libvirt.migration [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Current None elapsed 0 steps [(0, 50), (300, 95), (600, 140), (900, 185), (1200, 230), (1500, 275), (1800, 320), (2100, 365), (2400, 410), (2700, 455), (3000, 500)] update_downtime /usr/lib/python3.9/site-packages/nova/virt/libvirt/migration.py:512
Oct 02 12:01:49 compute-0 nova_compute[192079]: 2025-10-02 12:01:49.489 2 INFO nova.virt.libvirt.migration [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Increasing downtime to 50 ms after 0 sec elapsed time
Oct 02 12:01:49 compute-0 nova_compute[192079]: 2025-10-02 12:01:49.572 2 INFO nova.virt.libvirt.driver [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Migration running for 0 secs, memory 100% remaining (bytes processed=0, remaining=0, total=0); disk 100% remaining (bytes processed=0, remaining=0, total=0).
Oct 02 12:01:49 compute-0 podman[220985]: 2025-10-02 12:01:49.659295158 +0000 UTC m=+0.051426996 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:01:49 compute-0 podman[220984]: 2025-10-02 12:01:49.685686069 +0000 UTC m=+0.084772926 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, tcib_managed=true, 
maintainer=OpenStack Kubernetes Operator team, config_id=ovn_metadata_agent, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:01:50 compute-0 nova_compute[192079]: 2025-10-02 12:01:50.075 2 DEBUG nova.virt.libvirt.migration [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Current 50 elapsed 1 steps [(0, 50), (300, 95), (600, 140), (900, 185), (1200, 230), (1500, 275), (1800, 320), (2100, 365), (2400, 410), (2700, 455), (3000, 500)] update_downtime /usr/lib/python3.9/site-packages/nova/virt/libvirt/migration.py:512
Oct 02 12:01:50 compute-0 nova_compute[192079]: 2025-10-02 12:01:50.075 2 DEBUG nova.virt.libvirt.migration [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Downtime does not need to change update_downtime /usr/lib/python3.9/site-packages/nova/virt/libvirt/migration.py:525
Oct 02 12:01:50 compute-0 nova_compute[192079]: 2025-10-02 12:01:50.577 2 DEBUG nova.virt.libvirt.migration [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Current 50 elapsed 1 steps [(0, 50), (300, 95), (600, 140), (900, 185), (1200, 230), (1500, 275), (1800, 320), (2100, 365), (2400, 410), (2700, 455), (3000, 500)] update_downtime /usr/lib/python3.9/site-packages/nova/virt/libvirt/migration.py:512
Oct 02 12:01:50 compute-0 nova_compute[192079]: 2025-10-02 12:01:50.578 2 DEBUG nova.virt.libvirt.migration [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Downtime does not need to change update_downtime /usr/lib/python3.9/site-packages/nova/virt/libvirt/migration.py:525
Oct 02 12:01:50 compute-0 nova_compute[192079]: 2025-10-02 12:01:50.734 2 DEBUG nova.compute.manager [req-f936d6b1-9fa8-44df-9c3a-6952946b4f59 req-38aea21d-2958-4542-92c5-473386a2d227 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Received event network-vif-plugged-5562a861-2a3e-4411-8aaa-be6dde7a658a external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:01:50 compute-0 nova_compute[192079]: 2025-10-02 12:01:50.734 2 DEBUG oslo_concurrency.lockutils [req-f936d6b1-9fa8-44df-9c3a-6952946b4f59 req-38aea21d-2958-4542-92c5-473386a2d227 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "a20c354d-a1af-4fad-958f-59623ebe4437-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:50 compute-0 nova_compute[192079]: 2025-10-02 12:01:50.734 2 DEBUG oslo_concurrency.lockutils [req-f936d6b1-9fa8-44df-9c3a-6952946b4f59 req-38aea21d-2958-4542-92c5-473386a2d227 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a20c354d-a1af-4fad-958f-59623ebe4437-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:50 compute-0 nova_compute[192079]: 2025-10-02 12:01:50.734 2 DEBUG oslo_concurrency.lockutils [req-f936d6b1-9fa8-44df-9c3a-6952946b4f59 req-38aea21d-2958-4542-92c5-473386a2d227 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a20c354d-a1af-4fad-958f-59623ebe4437-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:50 compute-0 nova_compute[192079]: 2025-10-02 12:01:50.735 2 DEBUG nova.compute.manager [req-f936d6b1-9fa8-44df-9c3a-6952946b4f59 req-38aea21d-2958-4542-92c5-473386a2d227 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] No waiting events found dispatching network-vif-plugged-5562a861-2a3e-4411-8aaa-be6dde7a658a pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:01:50 compute-0 nova_compute[192079]: 2025-10-02 12:01:50.735 2 WARNING nova.compute.manager [req-f936d6b1-9fa8-44df-9c3a-6952946b4f59 req-38aea21d-2958-4542-92c5-473386a2d227 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Received unexpected event network-vif-plugged-5562a861-2a3e-4411-8aaa-be6dde7a658a for instance with vm_state active and task_state migrating.
Oct 02 12:01:50 compute-0 nova_compute[192079]: 2025-10-02 12:01:50.735 2 DEBUG nova.compute.manager [req-f936d6b1-9fa8-44df-9c3a-6952946b4f59 req-38aea21d-2958-4542-92c5-473386a2d227 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Received event network-changed-5562a861-2a3e-4411-8aaa-be6dde7a658a external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:01:50 compute-0 nova_compute[192079]: 2025-10-02 12:01:50.735 2 DEBUG nova.compute.manager [req-f936d6b1-9fa8-44df-9c3a-6952946b4f59 req-38aea21d-2958-4542-92c5-473386a2d227 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Refreshing instance network info cache due to event network-changed-5562a861-2a3e-4411-8aaa-be6dde7a658a. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:01:50 compute-0 nova_compute[192079]: 2025-10-02 12:01:50.735 2 DEBUG oslo_concurrency.lockutils [req-f936d6b1-9fa8-44df-9c3a-6952946b4f59 req-38aea21d-2958-4542-92c5-473386a2d227 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-a20c354d-a1af-4fad-958f-59623ebe4437" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:01:50 compute-0 nova_compute[192079]: 2025-10-02 12:01:50.736 2 DEBUG oslo_concurrency.lockutils [req-f936d6b1-9fa8-44df-9c3a-6952946b4f59 req-38aea21d-2958-4542-92c5-473386a2d227 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-a20c354d-a1af-4fad-958f-59623ebe4437" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:01:50 compute-0 nova_compute[192079]: 2025-10-02 12:01:50.736 2 DEBUG nova.network.neutron [req-f936d6b1-9fa8-44df-9c3a-6952946b4f59 req-38aea21d-2958-4542-92c5-473386a2d227 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Refreshing network info cache for port 5562a861-2a3e-4411-8aaa-be6dde7a658a _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:01:51 compute-0 nova_compute[192079]: 2025-10-02 12:01:51.081 2 DEBUG nova.virt.libvirt.migration [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Current 50 elapsed 2 steps [(0, 50), (300, 95), (600, 140), (900, 185), (1200, 230), (1500, 275), (1800, 320), (2100, 365), (2400, 410), (2700, 455), (3000, 500)] update_downtime /usr/lib/python3.9/site-packages/nova/virt/libvirt/migration.py:512
Oct 02 12:01:51 compute-0 nova_compute[192079]: 2025-10-02 12:01:51.082 2 DEBUG nova.virt.libvirt.migration [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Downtime does not need to change update_downtime /usr/lib/python3.9/site-packages/nova/virt/libvirt/migration.py:525
Oct 02 12:01:51 compute-0 nova_compute[192079]: 2025-10-02 12:01:51.585 2 DEBUG nova.virt.libvirt.migration [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Current 50 elapsed 2 steps [(0, 50), (300, 95), (600, 140), (900, 185), (1200, 230), (1500, 275), (1800, 320), (2100, 365), (2400, 410), (2700, 455), (3000, 500)] update_downtime /usr/lib/python3.9/site-packages/nova/virt/libvirt/migration.py:512
Oct 02 12:01:51 compute-0 nova_compute[192079]: 2025-10-02 12:01:51.585 2 DEBUG nova.virt.libvirt.migration [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Downtime does not need to change update_downtime /usr/lib/python3.9/site-packages/nova/virt/libvirt/migration.py:525
Oct 02 12:01:51 compute-0 sshd-session[221035]: Accepted publickey for nova from 192.168.122.102 port 37852 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:01:51 compute-0 systemd-logind[827]: New session 32 of user nova.
Oct 02 12:01:51 compute-0 systemd[1]: Started Session 32 of User nova.
Oct 02 12:01:51 compute-0 sshd-session[221035]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:01:52 compute-0 nova_compute[192079]: 2025-10-02 12:01:52.088 2 DEBUG nova.virt.libvirt.migration [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Current 50 elapsed 3 steps [(0, 50), (300, 95), (600, 140), (900, 185), (1200, 230), (1500, 275), (1800, 320), (2100, 365), (2400, 410), (2700, 455), (3000, 500)] update_downtime /usr/lib/python3.9/site-packages/nova/virt/libvirt/migration.py:512
Oct 02 12:01:52 compute-0 nova_compute[192079]: 2025-10-02 12:01:52.089 2 DEBUG nova.virt.libvirt.migration [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Downtime does not need to change update_downtime /usr/lib/python3.9/site-packages/nova/virt/libvirt/migration.py:525
Oct 02 12:01:52 compute-0 sshd-session[221038]: Received disconnect from 192.168.122.102 port 37852:11: disconnected by user
Oct 02 12:01:52 compute-0 sshd-session[221038]: Disconnected from user nova 192.168.122.102 port 37852
Oct 02 12:01:52 compute-0 sshd-session[221035]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:01:52 compute-0 systemd[1]: session-32.scope: Deactivated successfully.
Oct 02 12:01:52 compute-0 systemd-logind[827]: Session 32 logged out. Waiting for processes to exit.
Oct 02 12:01:52 compute-0 systemd-logind[827]: Removed session 32.
Oct 02 12:01:52 compute-0 nova_compute[192079]: 2025-10-02 12:01:52.445 2 DEBUG nova.network.neutron [req-f936d6b1-9fa8-44df-9c3a-6952946b4f59 req-38aea21d-2958-4542-92c5-473386a2d227 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Updated VIF entry in instance network info cache for port 5562a861-2a3e-4411-8aaa-be6dde7a658a. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:01:52 compute-0 nova_compute[192079]: 2025-10-02 12:01:52.446 2 DEBUG nova.network.neutron [req-f936d6b1-9fa8-44df-9c3a-6952946b4f59 req-38aea21d-2958-4542-92c5-473386a2d227 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Updating instance_info_cache with network_info: [{"id": "5562a861-2a3e-4411-8aaa-be6dde7a658a", "address": "fa:16:3e:09:db:7c", "network": {"id": "020b4768-a07a-4769-8636-455566c87083", "bridge": "br-int", "label": "tempest-LiveAutoBlockMigrationV225Test-804372870-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "5cc73d75e0864e838eefa90cb33b7e01", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5562a861-2a", "ovs_interfaceid": "5562a861-2a3e-4411-8aaa-be6dde7a658a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {"os_vif_delegation": true, "migrating_to": "compute-1.ctlplane.example.com"}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:01:52 compute-0 nova_compute[192079]: 2025-10-02 12:01:52.470 2 DEBUG oslo_concurrency.lockutils [req-f936d6b1-9fa8-44df-9c3a-6952946b4f59 req-38aea21d-2958-4542-92c5-473386a2d227 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-a20c354d-a1af-4fad-958f-59623ebe4437" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:01:52 compute-0 sshd-session[221044]: Accepted publickey for nova from 192.168.122.102 port 37866 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:01:52 compute-0 systemd-logind[827]: New session 33 of user nova.
Oct 02 12:01:52 compute-0 systemd[1]: Started Session 33 of User nova.
Oct 02 12:01:52 compute-0 sshd-session[221044]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:01:52 compute-0 sshd-session[221047]: Received disconnect from 192.168.122.102 port 37866:11: disconnected by user
Oct 02 12:01:52 compute-0 sshd-session[221047]: Disconnected from user nova 192.168.122.102 port 37866
Oct 02 12:01:52 compute-0 nova_compute[192079]: 2025-10-02 12:01:52.629 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406512.6285539, a20c354d-a1af-4fad-958f-59623ebe4437 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:01:52 compute-0 nova_compute[192079]: 2025-10-02 12:01:52.629 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] VM Paused (Lifecycle Event)
Oct 02 12:01:52 compute-0 nova_compute[192079]: 2025-10-02 12:01:52.632 2 DEBUG nova.virt.libvirt.migration [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Current 50 elapsed 3 steps [(0, 50), (300, 95), (600, 140), (900, 185), (1200, 230), (1500, 275), (1800, 320), (2100, 365), (2400, 410), (2700, 455), (3000, 500)] update_downtime /usr/lib/python3.9/site-packages/nova/virt/libvirt/migration.py:512
Oct 02 12:01:52 compute-0 sshd-session[221044]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:01:52 compute-0 nova_compute[192079]: 2025-10-02 12:01:52.633 2 DEBUG nova.virt.libvirt.migration [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Downtime does not need to change update_downtime /usr/lib/python3.9/site-packages/nova/virt/libvirt/migration.py:525
Oct 02 12:01:52 compute-0 systemd[1]: session-33.scope: Deactivated successfully.
Oct 02 12:01:52 compute-0 systemd-logind[827]: Session 33 logged out. Waiting for processes to exit.
Oct 02 12:01:52 compute-0 systemd-logind[827]: Removed session 33.
Oct 02 12:01:52 compute-0 nova_compute[192079]: 2025-10-02 12:01:52.656 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:01:52 compute-0 nova_compute[192079]: 2025-10-02 12:01:52.682 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: active, current task_state: migrating, current DB power_state: 1, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:01:52 compute-0 nova_compute[192079]: 2025-10-02 12:01:52.704 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] During sync_power_state the instance has a pending task (migrating). Skip.
Oct 02 12:01:52 compute-0 sshd-session[221049]: Accepted publickey for nova from 192.168.122.102 port 37880 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:01:52 compute-0 systemd-logind[827]: New session 34 of user nova.
Oct 02 12:01:52 compute-0 systemd[1]: Started Session 34 of User nova.
Oct 02 12:01:52 compute-0 sshd-session[221049]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:01:52 compute-0 kernel: tap5562a861-2a (unregistering): left promiscuous mode
Oct 02 12:01:52 compute-0 NetworkManager[51160]: <info>  [1759406512.8484] device (tap5562a861-2a): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:01:52 compute-0 nova_compute[192079]: 2025-10-02 12:01:52.862 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:52 compute-0 ovn_controller[94336]: 2025-10-02T12:01:52Z|00060|binding|INFO|Releasing lport 5562a861-2a3e-4411-8aaa-be6dde7a658a from this chassis (sb_readonly=0)
Oct 02 12:01:52 compute-0 ovn_controller[94336]: 2025-10-02T12:01:52Z|00061|binding|INFO|Setting lport 5562a861-2a3e-4411-8aaa-be6dde7a658a down in Southbound
Oct 02 12:01:52 compute-0 ovn_controller[94336]: 2025-10-02T12:01:52Z|00062|binding|INFO|Removing iface tap5562a861-2a ovn-installed in OVS
Oct 02 12:01:52 compute-0 nova_compute[192079]: 2025-10-02 12:01:52.865 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:52 compute-0 sshd-session[221052]: Received disconnect from 192.168.122.102 port 37880:11: disconnected by user
Oct 02 12:01:52 compute-0 sshd-session[221052]: Disconnected from user nova 192.168.122.102 port 37880
Oct 02 12:01:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:52.869 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:09:db:7c 10.100.0.13'], port_security=['fa:16:3e:09:db:7c 10.100.0.13'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com,compute-1.ctlplane.example.com', 'activation-strategy': 'rarp', 'additional-chassis-activated': 'ef6a8be5-dcfe-4652-b22e-0ba81a5a76ec'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.13/28', 'neutron:device_id': 'a20c354d-a1af-4fad-958f-59623ebe4437', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-020b4768-a07a-4769-8636-455566c87083', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '5cc73d75e0864e838eefa90cb33b7e01', 'neutron:revision_number': '18', 'neutron:security_group_ids': 'f3fadef5-4bfc-406c-93c4-14d4abd0583e', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=11c0be75-bb4b-4e01-8cfa-b9aa4fcaf0e9, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=5562a861-2a3e-4411-8aaa-be6dde7a658a) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:01:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:52.870 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 5562a861-2a3e-4411-8aaa-be6dde7a658a in datapath 020b4768-a07a-4769-8636-455566c87083 unbound from our chassis
Oct 02 12:01:52 compute-0 sshd-session[221049]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:01:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:52.872 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 020b4768-a07a-4769-8636-455566c87083, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:01:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:52.873 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[121e5ae3-2b27-4204-8f70-24f54e219fd2]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:52.873 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-020b4768-a07a-4769-8636-455566c87083 namespace which is not needed anymore
Oct 02 12:01:52 compute-0 systemd[1]: session-34.scope: Deactivated successfully.
Oct 02 12:01:52 compute-0 systemd-logind[827]: Session 34 logged out. Waiting for processes to exit.
Oct 02 12:01:52 compute-0 nova_compute[192079]: 2025-10-02 12:01:52.879 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:52 compute-0 systemd-logind[827]: Removed session 34.
Oct 02 12:01:52 compute-0 systemd[1]: machine-qemu\x2d3\x2dinstance\x2d00000007.scope: Deactivated successfully.
Oct 02 12:01:52 compute-0 systemd[1]: machine-qemu\x2d3\x2dinstance\x2d00000007.scope: Consumed 3.858s CPU time.
Oct 02 12:01:52 compute-0 systemd-machined[152150]: Machine qemu-3-instance-00000007 terminated.
Oct 02 12:01:53 compute-0 neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083[220844]: [NOTICE]   (220848) : haproxy version is 2.8.14-c23fe91
Oct 02 12:01:53 compute-0 neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083[220844]: [NOTICE]   (220848) : path to executable is /usr/sbin/haproxy
Oct 02 12:01:53 compute-0 neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083[220844]: [WARNING]  (220848) : Exiting Master process...
Oct 02 12:01:53 compute-0 neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083[220844]: [ALERT]    (220848) : Current worker (220850) exited with code 143 (Terminated)
Oct 02 12:01:53 compute-0 neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083[220844]: [WARNING]  (220848) : All workers exited. Exiting... (0)
Oct 02 12:01:53 compute-0 systemd[1]: libpod-aaaf3fce9d3cba20b6c5d638b12fd8f6f046753db3193b006b7818cc371f3534.scope: Deactivated successfully.
Oct 02 12:01:53 compute-0 podman[221075]: 2025-10-02 12:01:53.012900406 +0000 UTC m=+0.048538978 container died aaaf3fce9d3cba20b6c5d638b12fd8f6f046753db3193b006b7818cc371f3534 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001)
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.053 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.060 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.095 2 DEBUG nova.virt.libvirt.driver [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Migrate API has completed _live_migration_operation /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10279
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.095 2 DEBUG nova.virt.libvirt.driver [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Migration operation thread has finished _live_migration_operation /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10327
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.095 2 DEBUG nova.virt.libvirt.driver [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Migration operation thread notification thread_finished /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10630
Oct 02 12:01:53 compute-0 systemd[1]: var-lib-containers-storage-overlay-21f2a2d335939271bb52bd87d7288d5982c677411ca25bb9a342b6b3551da1c4-merged.mount: Deactivated successfully.
Oct 02 12:01:53 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-aaaf3fce9d3cba20b6c5d638b12fd8f6f046753db3193b006b7818cc371f3534-userdata-shm.mount: Deactivated successfully.
Oct 02 12:01:53 compute-0 podman[221075]: 2025-10-02 12:01:53.112085284 +0000 UTC m=+0.147723856 container cleanup aaaf3fce9d3cba20b6c5d638b12fd8f6f046753db3193b006b7818cc371f3534 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:01:53 compute-0 systemd[1]: libpod-conmon-aaaf3fce9d3cba20b6c5d638b12fd8f6f046753db3193b006b7818cc371f3534.scope: Deactivated successfully.
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.135 2 DEBUG nova.virt.libvirt.guest [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Domain has shutdown/gone away: Domain not found: no domain with matching uuid 'a20c354d-a1af-4fad-958f-59623ebe4437' (instance-00000007) get_job_info /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:688
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.137 2 INFO nova.virt.libvirt.driver [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Migration operation has completed
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.138 2 INFO nova.compute.manager [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] _post_live_migration() is started..
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.174 2 DEBUG nova.compute.manager [req-9d2197c5-bca5-4b0f-b3a3-2b2ac9a44679 req-fe0951b7-0ffd-4a2f-a12c-40c2eb4cb060 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Received event network-vif-unplugged-5562a861-2a3e-4411-8aaa-be6dde7a658a external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.176 2 DEBUG oslo_concurrency.lockutils [req-9d2197c5-bca5-4b0f-b3a3-2b2ac9a44679 req-fe0951b7-0ffd-4a2f-a12c-40c2eb4cb060 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "a20c354d-a1af-4fad-958f-59623ebe4437-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.176 2 DEBUG oslo_concurrency.lockutils [req-9d2197c5-bca5-4b0f-b3a3-2b2ac9a44679 req-fe0951b7-0ffd-4a2f-a12c-40c2eb4cb060 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a20c354d-a1af-4fad-958f-59623ebe4437-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.176 2 DEBUG oslo_concurrency.lockutils [req-9d2197c5-bca5-4b0f-b3a3-2b2ac9a44679 req-fe0951b7-0ffd-4a2f-a12c-40c2eb4cb060 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a20c354d-a1af-4fad-958f-59623ebe4437-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.177 2 DEBUG nova.compute.manager [req-9d2197c5-bca5-4b0f-b3a3-2b2ac9a44679 req-fe0951b7-0ffd-4a2f-a12c-40c2eb4cb060 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] No waiting events found dispatching network-vif-unplugged-5562a861-2a3e-4411-8aaa-be6dde7a658a pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.177 2 DEBUG nova.compute.manager [req-9d2197c5-bca5-4b0f-b3a3-2b2ac9a44679 req-fe0951b7-0ffd-4a2f-a12c-40c2eb4cb060 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Received event network-vif-unplugged-5562a861-2a3e-4411-8aaa-be6dde7a658a for instance with task_state migrating. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:01:53 compute-0 podman[221121]: 2025-10-02 12:01:53.185117569 +0000 UTC m=+0.051293612 container remove aaaf3fce9d3cba20b6c5d638b12fd8f6f046753db3193b006b7818cc371f3534 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3)
Oct 02 12:01:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:53.192 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[50c8dc68-2fa1-4ed1-be6e-2d03ceea2e38]: (4, ('Thu Oct  2 12:01:52 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083 (aaaf3fce9d3cba20b6c5d638b12fd8f6f046753db3193b006b7818cc371f3534)\naaaf3fce9d3cba20b6c5d638b12fd8f6f046753db3193b006b7818cc371f3534\nThu Oct  2 12:01:53 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083 (aaaf3fce9d3cba20b6c5d638b12fd8f6f046753db3193b006b7818cc371f3534)\naaaf3fce9d3cba20b6c5d638b12fd8f6f046753db3193b006b7818cc371f3534\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:53.193 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b3cefda4-61b9-4e33-861f-f7084478c6cc]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:53.194 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap020b4768-a0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.197 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:53 compute-0 kernel: tap020b4768-a0: left promiscuous mode
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.223 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:53.224 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d4e92fe7-8156-43b1-b18f-892df6050217]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.262 2 DEBUG oslo_concurrency.lockutils [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Acquiring lock "refresh_cache-e09de65a-0b2d-4aa5-9d9a-49f039add691" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.262 2 DEBUG oslo_concurrency.lockutils [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Acquired lock "refresh_cache-e09de65a-0b2d-4aa5-9d9a-49f039add691" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.263 2 DEBUG nova.network.neutron [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:01:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:53.268 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a3ae1efa-ef55-455a-be23-143db92d1e5a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:53.269 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[88e43233-53a2-4cca-a7e9-a5b2bf2e0672]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:53.289 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4fb298f7-67a4-47db-b20f-a09f2a35a365]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 449146, 'reachable_time': 29982, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 221139, 'error': None, 'target': 'ovnmeta-020b4768-a07a-4769-8636-455566c87083', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:53.293 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-020b4768-a07a-4769-8636-455566c87083 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:01:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:53.293 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[0589f627-c1c9-4bda-9341-d28b9a232781]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:01:53 compute-0 systemd[1]: run-netns-ovnmeta\x2d020b4768\x2da07a\x2d4769\x2d8636\x2d455566c87083.mount: Deactivated successfully.
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.455 2 DEBUG nova.network.neutron [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.788 2 DEBUG nova.compute.manager [req-bf6f25ba-fa29-4cf5-bee0-f63ff433ee82 req-bb8d49dd-fa33-4564-a4a8-96735dd749dd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Received event network-vif-unplugged-5562a861-2a3e-4411-8aaa-be6dde7a658a external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.789 2 DEBUG oslo_concurrency.lockutils [req-bf6f25ba-fa29-4cf5-bee0-f63ff433ee82 req-bb8d49dd-fa33-4564-a4a8-96735dd749dd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "a20c354d-a1af-4fad-958f-59623ebe4437-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.790 2 DEBUG oslo_concurrency.lockutils [req-bf6f25ba-fa29-4cf5-bee0-f63ff433ee82 req-bb8d49dd-fa33-4564-a4a8-96735dd749dd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a20c354d-a1af-4fad-958f-59623ebe4437-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.790 2 DEBUG oslo_concurrency.lockutils [req-bf6f25ba-fa29-4cf5-bee0-f63ff433ee82 req-bb8d49dd-fa33-4564-a4a8-96735dd749dd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a20c354d-a1af-4fad-958f-59623ebe4437-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.790 2 DEBUG nova.compute.manager [req-bf6f25ba-fa29-4cf5-bee0-f63ff433ee82 req-bb8d49dd-fa33-4564-a4a8-96735dd749dd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] No waiting events found dispatching network-vif-unplugged-5562a861-2a3e-4411-8aaa-be6dde7a658a pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.791 2 DEBUG nova.compute.manager [req-bf6f25ba-fa29-4cf5-bee0-f63ff433ee82 req-bb8d49dd-fa33-4564-a4a8-96735dd749dd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Received event network-vif-unplugged-5562a861-2a3e-4411-8aaa-be6dde7a658a for instance with task_state migrating. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.839 2 DEBUG nova.network.neutron [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.852 2 DEBUG oslo_concurrency.lockutils [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Releasing lock "refresh_cache-e09de65a-0b2d-4aa5-9d9a-49f039add691" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.925 2 DEBUG nova.network.neutron [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Activated binding for port 5562a861-2a3e-4411-8aaa-be6dde7a658a and host compute-1.ctlplane.example.com migrate_instance_start /usr/lib/python3.9/site-packages/nova/network/neutron.py:3181
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.925 2 DEBUG nova.compute.manager [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Calling driver.post_live_migration_at_source with original source VIFs from migrate_data: [{"id": "5562a861-2a3e-4411-8aaa-be6dde7a658a", "address": "fa:16:3e:09:db:7c", "network": {"id": "020b4768-a07a-4769-8636-455566c87083", "bridge": "br-int", "label": "tempest-LiveAutoBlockMigrationV225Test-804372870-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "5cc73d75e0864e838eefa90cb33b7e01", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5562a861-2a", "ovs_interfaceid": "5562a861-2a3e-4411-8aaa-be6dde7a658a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {"os_vif_delegation": true}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] _post_live_migration /usr/lib/python3.9/site-packages/nova/compute/manager.py:9326
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.926 2 DEBUG nova.virt.libvirt.vif [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=True,config_drive='True',created_at=2025-10-02T12:00:57Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description=None,display_name='tempest-LiveAutoBlockMigrationV225Test-server-1982637812',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-liveautoblockmigrationv225test-server-1982637812',id=7,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:01:06Z,launched_on='compute-1.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='5cc73d75e0864e838eefa90cb33b7e01',ramdisk_id='',reservation_id='r-bvhrjcj5',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',clean_attempts='1',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_m
in_disk='1',image_min_ram='0',owner_project_name='tempest-LiveAutoBlockMigrationV225Test-984573444',owner_user_name='tempest-LiveAutoBlockMigrationV225Test-984573444-project-member'},tags=<?>,task_state='migrating',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:01:42Z,user_data=None,user_id='59e8135d73ee43e088ba5ee7d9bd84b1',uuid=a20c354d-a1af-4fad-958f-59623ebe4437,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "5562a861-2a3e-4411-8aaa-be6dde7a658a", "address": "fa:16:3e:09:db:7c", "network": {"id": "020b4768-a07a-4769-8636-455566c87083", "bridge": "br-int", "label": "tempest-LiveAutoBlockMigrationV225Test-804372870-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "5cc73d75e0864e838eefa90cb33b7e01", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5562a861-2a", "ovs_interfaceid": "5562a861-2a3e-4411-8aaa-be6dde7a658a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {"os_vif_delegation": true}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.927 2 DEBUG nova.network.os_vif_util [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Converting VIF {"id": "5562a861-2a3e-4411-8aaa-be6dde7a658a", "address": "fa:16:3e:09:db:7c", "network": {"id": "020b4768-a07a-4769-8636-455566c87083", "bridge": "br-int", "label": "tempest-LiveAutoBlockMigrationV225Test-804372870-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "5cc73d75e0864e838eefa90cb33b7e01", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5562a861-2a", "ovs_interfaceid": "5562a861-2a3e-4411-8aaa-be6dde7a658a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {"os_vif_delegation": true}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.927 2 DEBUG nova.network.os_vif_util [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:09:db:7c,bridge_name='br-int',has_traffic_filtering=True,id=5562a861-2a3e-4411-8aaa-be6dde7a658a,network=Network(020b4768-a07a-4769-8636-455566c87083),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5562a861-2a') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.928 2 DEBUG os_vif [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:09:db:7c,bridge_name='br-int',has_traffic_filtering=True,id=5562a861-2a3e-4411-8aaa-be6dde7a658a,network=Network(020b4768-a07a-4769-8636-455566c87083),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5562a861-2a') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.930 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.930 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap5562a861-2a, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.935 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.939 2 INFO os_vif [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:09:db:7c,bridge_name='br-int',has_traffic_filtering=True,id=5562a861-2a3e-4411-8aaa-be6dde7a658a,network=Network(020b4768-a07a-4769-8636-455566c87083),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5562a861-2a')
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.939 2 DEBUG oslo_concurrency.lockutils [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.free_pci_device_allocations_for_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.939 2 DEBUG oslo_concurrency.lockutils [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.free_pci_device_allocations_for_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.940 2 DEBUG oslo_concurrency.lockutils [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.free_pci_device_allocations_for_instance" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.940 2 DEBUG nova.compute.manager [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Calling driver.cleanup from _post_live_migration _post_live_migration /usr/lib/python3.9/site-packages/nova/compute/manager.py:9349
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.940 2 INFO nova.virt.libvirt.driver [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Deleting instance files /var/lib/nova/instances/a20c354d-a1af-4fad-958f-59623ebe4437_del
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.941 2 INFO nova.virt.libvirt.driver [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Deletion of /var/lib/nova/instances/a20c354d-a1af-4fad-958f-59623ebe4437_del complete
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.975 2 DEBUG nova.virt.libvirt.driver [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Starting finish_migration finish_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:11698
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.976 2 DEBUG nova.virt.libvirt.driver [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Instance directory exists: not creating _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4719
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.977 2 INFO nova.virt.libvirt.driver [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Creating image(s)
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.977 2 DEBUG nova.objects.instance [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Lazy-loading 'trusted_certs' on Instance uuid e09de65a-0b2d-4aa5-9d9a-49f039add691 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:01:53 compute-0 nova_compute[192079]: 2025-10-02 12:01:53.998 2 DEBUG oslo_concurrency.processutils [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.051 2 DEBUG oslo_concurrency.processutils [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.053s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.053 2 DEBUG nova.virt.disk.api [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Checking if we can resize image /var/lib/nova/instances/e09de65a-0b2d-4aa5-9d9a-49f039add691/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.053 2 DEBUG oslo_concurrency.processutils [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/e09de65a-0b2d-4aa5-9d9a-49f039add691/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.119 2 DEBUG oslo_concurrency.processutils [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/e09de65a-0b2d-4aa5-9d9a-49f039add691/disk --force-share --output=json" returned: 0 in 0.066s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.120 2 DEBUG nova.virt.disk.api [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Cannot resize image /var/lib/nova/instances/e09de65a-0b2d-4aa5-9d9a-49f039add691/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.137 2 DEBUG nova.virt.libvirt.driver [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Did not create local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4859
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.137 2 DEBUG nova.virt.libvirt.driver [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Ensure instance console log exists: /var/lib/nova/instances/e09de65a-0b2d-4aa5-9d9a-49f039add691/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.138 2 DEBUG oslo_concurrency.lockutils [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.138 2 DEBUG oslo_concurrency.lockutils [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.138 2 DEBUG oslo_concurrency.lockutils [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.141 2 DEBUG nova.virt.libvirt.driver [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Start _get_guest_xml network_info=[] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.150 2 WARNING nova.virt.libvirt.driver [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.157 2 DEBUG nova.virt.libvirt.host [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.158 2 DEBUG nova.virt.libvirt.host [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.163 2 DEBUG nova.virt.libvirt.host [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.164 2 DEBUG nova.virt.libvirt.host [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.165 2 DEBUG nova.virt.libvirt.driver [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.166 2 DEBUG nova.virt.hardware [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:25Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9949d9da-6314-4ede-8797-6f2f0a6a64fc',id=2,is_public=True,memory_mb=192,name='m1.micro',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.166 2 DEBUG nova.virt.hardware [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.166 2 DEBUG nova.virt.hardware [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.167 2 DEBUG nova.virt.hardware [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.167 2 DEBUG nova.virt.hardware [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.167 2 DEBUG nova.virt.hardware [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.167 2 DEBUG nova.virt.hardware [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.168 2 DEBUG nova.virt.hardware [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.168 2 DEBUG nova.virt.hardware [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.168 2 DEBUG nova.virt.hardware [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.168 2 DEBUG nova.virt.hardware [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.169 2 DEBUG nova.objects.instance [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Lazy-loading 'vcpu_model' on Instance uuid e09de65a-0b2d-4aa5-9d9a-49f039add691 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.182 2 DEBUG oslo_concurrency.processutils [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/e09de65a-0b2d-4aa5-9d9a-49f039add691/disk.config --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.198 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.236 2 DEBUG oslo_concurrency.processutils [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/e09de65a-0b2d-4aa5-9d9a-49f039add691/disk.config --force-share --output=json" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.237 2 DEBUG oslo_concurrency.lockutils [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Acquiring lock "/var/lib/nova/instances/e09de65a-0b2d-4aa5-9d9a-49f039add691/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.238 2 DEBUG oslo_concurrency.lockutils [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Lock "/var/lib/nova/instances/e09de65a-0b2d-4aa5-9d9a-49f039add691/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.238 2 DEBUG oslo_concurrency.lockutils [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Lock "/var/lib/nova/instances/e09de65a-0b2d-4aa5-9d9a-49f039add691/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.241 2 DEBUG nova.virt.libvirt.driver [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:01:54 compute-0 nova_compute[192079]:   <uuid>e09de65a-0b2d-4aa5-9d9a-49f039add691</uuid>
Oct 02 12:01:54 compute-0 nova_compute[192079]:   <name>instance-00000009</name>
Oct 02 12:01:54 compute-0 nova_compute[192079]:   <memory>196608</memory>
Oct 02 12:01:54 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:01:54 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:01:54 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:       <nova:name>tempest-MigrationsAdminTest-server-1510345576</nova:name>
Oct 02 12:01:54 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:01:54</nova:creationTime>
Oct 02 12:01:54 compute-0 nova_compute[192079]:       <nova:flavor name="m1.micro">
Oct 02 12:01:54 compute-0 nova_compute[192079]:         <nova:memory>192</nova:memory>
Oct 02 12:01:54 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:01:54 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:01:54 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:01:54 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:01:54 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:01:54 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:01:54 compute-0 nova_compute[192079]:         <nova:user uuid="8da35688aa864e189f10b334a21bc6c4">tempest-MigrationsAdminTest-1651504538-project-member</nova:user>
Oct 02 12:01:54 compute-0 nova_compute[192079]:         <nova:project uuid="4dcc6c51db2640cbb04083b3336de813">tempest-MigrationsAdminTest-1651504538</nova:project>
Oct 02 12:01:54 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:01:54 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:       <nova:ports/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:01:54 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:01:54 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <system>
Oct 02 12:01:54 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:01:54 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:01:54 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:01:54 compute-0 nova_compute[192079]:       <entry name="serial">e09de65a-0b2d-4aa5-9d9a-49f039add691</entry>
Oct 02 12:01:54 compute-0 nova_compute[192079]:       <entry name="uuid">e09de65a-0b2d-4aa5-9d9a-49f039add691</entry>
Oct 02 12:01:54 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     </system>
Oct 02 12:01:54 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:01:54 compute-0 nova_compute[192079]:   <os>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:   </os>
Oct 02 12:01:54 compute-0 nova_compute[192079]:   <features>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:   </features>
Oct 02 12:01:54 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:01:54 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:01:54 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:01:54 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/e09de65a-0b2d-4aa5-9d9a-49f039add691/disk"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:01:54 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/e09de65a-0b2d-4aa5-9d9a-49f039add691/disk.config"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:01:54 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/e09de65a-0b2d-4aa5-9d9a-49f039add691/console.log" append="off"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <video>
Oct 02 12:01:54 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     </video>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:01:54 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:01:54 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:01:54 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:01:54 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:01:54 compute-0 nova_compute[192079]: </domain>
Oct 02 12:01:54 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.305 2 DEBUG nova.virt.libvirt.driver [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.305 2 DEBUG nova.virt.libvirt.driver [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.306 2 INFO nova.virt.libvirt.driver [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Using config drive
Oct 02 12:01:54 compute-0 systemd-machined[152150]: New machine qemu-5-instance-00000009.
Oct 02 12:01:54 compute-0 systemd[1]: Started Virtual Machine qemu-5-instance-00000009.
Oct 02 12:01:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:54.726 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=5, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=4) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:01:54 compute-0 nova_compute[192079]: 2025-10-02 12:01:54.726 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:54.728 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 1 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.122 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406515.1221364, e09de65a-0b2d-4aa5-9d9a-49f039add691 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.123 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] VM Resumed (Lifecycle Event)
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.125 2 DEBUG nova.compute.manager [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.127 2 INFO nova.virt.libvirt.driver [-] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Instance running successfully.
Oct 02 12:01:55 compute-0 virtqemud[191807]: argument unsupported: QEMU guest agent is not configured
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.129 2 DEBUG nova.virt.libvirt.guest [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Failed to set time: agent not configured sync_guest_time /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:200
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.130 2 DEBUG nova.virt.libvirt.driver [None req-7b85e7b1-cbe0-401b-96b7-2f36c2189cff 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] finish_migration finished successfully. finish_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:11793
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.150 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.160 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: active, current task_state: resize_finish, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.218 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] During sync_power_state the instance has a pending task (resize_finish). Skip.
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.219 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406515.1235557, e09de65a-0b2d-4aa5-9d9a-49f039add691 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.219 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] VM Started (Lifecycle Event)
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.245 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.250 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Synchronizing instance power state after lifecycle event "Started"; current vm_state: active, current task_state: resize_finish, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.305 2 DEBUG nova.compute.manager [req-32dfd36c-377a-404a-90e6-e384f93d631f req-d2978347-b2fd-425a-bfda-1cde5c73c265 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Received event network-vif-plugged-5562a861-2a3e-4411-8aaa-be6dde7a658a external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.306 2 DEBUG oslo_concurrency.lockutils [req-32dfd36c-377a-404a-90e6-e384f93d631f req-d2978347-b2fd-425a-bfda-1cde5c73c265 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "a20c354d-a1af-4fad-958f-59623ebe4437-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.306 2 DEBUG oslo_concurrency.lockutils [req-32dfd36c-377a-404a-90e6-e384f93d631f req-d2978347-b2fd-425a-bfda-1cde5c73c265 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a20c354d-a1af-4fad-958f-59623ebe4437-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.306 2 DEBUG oslo_concurrency.lockutils [req-32dfd36c-377a-404a-90e6-e384f93d631f req-d2978347-b2fd-425a-bfda-1cde5c73c265 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a20c354d-a1af-4fad-958f-59623ebe4437-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.307 2 DEBUG nova.compute.manager [req-32dfd36c-377a-404a-90e6-e384f93d631f req-d2978347-b2fd-425a-bfda-1cde5c73c265 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] No waiting events found dispatching network-vif-plugged-5562a861-2a3e-4411-8aaa-be6dde7a658a pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.307 2 WARNING nova.compute.manager [req-32dfd36c-377a-404a-90e6-e384f93d631f req-d2978347-b2fd-425a-bfda-1cde5c73c265 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Received unexpected event network-vif-plugged-5562a861-2a3e-4411-8aaa-be6dde7a658a for instance with vm_state active and task_state migrating.
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.307 2 DEBUG nova.compute.manager [req-32dfd36c-377a-404a-90e6-e384f93d631f req-d2978347-b2fd-425a-bfda-1cde5c73c265 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Received event network-vif-plugged-5562a861-2a3e-4411-8aaa-be6dde7a658a external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.308 2 DEBUG oslo_concurrency.lockutils [req-32dfd36c-377a-404a-90e6-e384f93d631f req-d2978347-b2fd-425a-bfda-1cde5c73c265 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "a20c354d-a1af-4fad-958f-59623ebe4437-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.308 2 DEBUG oslo_concurrency.lockutils [req-32dfd36c-377a-404a-90e6-e384f93d631f req-d2978347-b2fd-425a-bfda-1cde5c73c265 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a20c354d-a1af-4fad-958f-59623ebe4437-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.309 2 DEBUG oslo_concurrency.lockutils [req-32dfd36c-377a-404a-90e6-e384f93d631f req-d2978347-b2fd-425a-bfda-1cde5c73c265 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a20c354d-a1af-4fad-958f-59623ebe4437-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.309 2 DEBUG nova.compute.manager [req-32dfd36c-377a-404a-90e6-e384f93d631f req-d2978347-b2fd-425a-bfda-1cde5c73c265 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] No waiting events found dispatching network-vif-plugged-5562a861-2a3e-4411-8aaa-be6dde7a658a pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.309 2 WARNING nova.compute.manager [req-32dfd36c-377a-404a-90e6-e384f93d631f req-d2978347-b2fd-425a-bfda-1cde5c73c265 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Received unexpected event network-vif-plugged-5562a861-2a3e-4411-8aaa-be6dde7a658a for instance with vm_state active and task_state migrating.
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.310 2 DEBUG nova.compute.manager [req-32dfd36c-377a-404a-90e6-e384f93d631f req-d2978347-b2fd-425a-bfda-1cde5c73c265 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Received event network-vif-plugged-5562a861-2a3e-4411-8aaa-be6dde7a658a external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.310 2 DEBUG oslo_concurrency.lockutils [req-32dfd36c-377a-404a-90e6-e384f93d631f req-d2978347-b2fd-425a-bfda-1cde5c73c265 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "a20c354d-a1af-4fad-958f-59623ebe4437-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.310 2 DEBUG oslo_concurrency.lockutils [req-32dfd36c-377a-404a-90e6-e384f93d631f req-d2978347-b2fd-425a-bfda-1cde5c73c265 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a20c354d-a1af-4fad-958f-59623ebe4437-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.311 2 DEBUG oslo_concurrency.lockutils [req-32dfd36c-377a-404a-90e6-e384f93d631f req-d2978347-b2fd-425a-bfda-1cde5c73c265 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a20c354d-a1af-4fad-958f-59623ebe4437-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.311 2 DEBUG nova.compute.manager [req-32dfd36c-377a-404a-90e6-e384f93d631f req-d2978347-b2fd-425a-bfda-1cde5c73c265 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] No waiting events found dispatching network-vif-plugged-5562a861-2a3e-4411-8aaa-be6dde7a658a pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.311 2 WARNING nova.compute.manager [req-32dfd36c-377a-404a-90e6-e384f93d631f req-d2978347-b2fd-425a-bfda-1cde5c73c265 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Received unexpected event network-vif-plugged-5562a861-2a3e-4411-8aaa-be6dde7a658a for instance with vm_state active and task_state migrating.
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.312 2 DEBUG nova.compute.manager [req-32dfd36c-377a-404a-90e6-e384f93d631f req-d2978347-b2fd-425a-bfda-1cde5c73c265 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Received event network-vif-plugged-5562a861-2a3e-4411-8aaa-be6dde7a658a external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.312 2 DEBUG oslo_concurrency.lockutils [req-32dfd36c-377a-404a-90e6-e384f93d631f req-d2978347-b2fd-425a-bfda-1cde5c73c265 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "a20c354d-a1af-4fad-958f-59623ebe4437-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.312 2 DEBUG oslo_concurrency.lockutils [req-32dfd36c-377a-404a-90e6-e384f93d631f req-d2978347-b2fd-425a-bfda-1cde5c73c265 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a20c354d-a1af-4fad-958f-59623ebe4437-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.312 2 DEBUG oslo_concurrency.lockutils [req-32dfd36c-377a-404a-90e6-e384f93d631f req-d2978347-b2fd-425a-bfda-1cde5c73c265 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a20c354d-a1af-4fad-958f-59623ebe4437-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.313 2 DEBUG nova.compute.manager [req-32dfd36c-377a-404a-90e6-e384f93d631f req-d2978347-b2fd-425a-bfda-1cde5c73c265 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] No waiting events found dispatching network-vif-plugged-5562a861-2a3e-4411-8aaa-be6dde7a658a pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:01:55 compute-0 nova_compute[192079]: 2025-10-02 12:01:55.313 2 WARNING nova.compute.manager [req-32dfd36c-377a-404a-90e6-e384f93d631f req-d2978347-b2fd-425a-bfda-1cde5c73c265 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Received unexpected event network-vif-plugged-5562a861-2a3e-4411-8aaa-be6dde7a658a for instance with vm_state active and task_state migrating.
Oct 02 12:01:55 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:01:55.731 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '5'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:01:56 compute-0 podman[221174]: 2025-10-02 12:01:56.167588383 +0000 UTC m=+0.080489062 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=ceilometer_agent_compute, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 
9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 12:01:58 compute-0 nova_compute[192079]: 2025-10-02 12:01:58.932 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:01:59 compute-0 nova_compute[192079]: 2025-10-02 12:01:59.083 2 DEBUG oslo_concurrency.processutils [None req-ea47791a-e438-4055-96a9-66915366c2b7 9e8191126948466ab09575476c8b2279 8eaecae3d3c644a2908721b5e92569ee - - default default] Running cmd (subprocess): env LANG=C uptime execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:01:59 compute-0 nova_compute[192079]: 2025-10-02 12:01:59.127 2 DEBUG oslo_concurrency.processutils [None req-ea47791a-e438-4055-96a9-66915366c2b7 9e8191126948466ab09575476c8b2279 8eaecae3d3c644a2908721b5e92569ee - - default default] CMD "env LANG=C uptime" returned: 0 in 0.044s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:01:59 compute-0 nova_compute[192079]: 2025-10-02 12:01:59.249 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.119 2 DEBUG oslo_concurrency.lockutils [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Acquiring lock "a20c354d-a1af-4fad-958f-59623ebe4437-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.120 2 DEBUG oslo_concurrency.lockutils [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Lock "a20c354d-a1af-4fad-958f-59623ebe4437-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.121 2 DEBUG oslo_concurrency.lockutils [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Lock "a20c354d-a1af-4fad-958f-59623ebe4437-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.142 2 DEBUG oslo_concurrency.lockutils [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.143 2 DEBUG oslo_concurrency.lockutils [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.143 2 DEBUG oslo_concurrency.lockutils [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.143 2 DEBUG nova.compute.resource_tracker [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:02:00 compute-0 podman[221195]: 2025-10-02 12:02:00.188818714 +0000 UTC m=+0.089184880 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, vcs-type=git, io.buildah.version=1.33.7, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, distribution-scope=public, maintainer=Red Hat, Inc., name=ubi9-minimal, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, com.redhat.component=ubi9-minimal-container, container_name=openstack_network_exporter, config_id=edpm, vendor=Red Hat, Inc., managed_by=edpm_ansible, architecture=x86_64, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., release=1755695350, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., url=https://catalog.redhat.com/en/search?searchType=containers, io.openshift.tags=minimal rhel9, io.openshift.expose-services=, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, version=9.6, build-date=2025-08-20T13:12:41, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly.)
Oct 02 12:02:00 compute-0 podman[221196]: 2025-10-02 12:02:00.203120575 +0000 UTC m=+0.109746072 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, config_id=multipathd, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, container_name=multipathd, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS)
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.260 2 DEBUG oslo_concurrency.processutils [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/e09de65a-0b2d-4aa5-9d9a-49f039add691/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.330 2 DEBUG oslo_concurrency.processutils [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/e09de65a-0b2d-4aa5-9d9a-49f039add691/disk --force-share --output=json" returned: 0 in 0.070s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.331 2 DEBUG oslo_concurrency.processutils [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/e09de65a-0b2d-4aa5-9d9a-49f039add691/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.386 2 DEBUG oslo_concurrency.processutils [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/e09de65a-0b2d-4aa5-9d9a-49f039add691/disk --force-share --output=json" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.399 2 DEBUG oslo_concurrency.processutils [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.454 2 DEBUG oslo_concurrency.processutils [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.456 2 DEBUG oslo_concurrency.processutils [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.516 2 DEBUG oslo_concurrency.processutils [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk --force-share --output=json" returned: 0 in 0.060s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.518 2 DEBUG oslo_concurrency.processutils [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk.eph0 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.569 2 DEBUG oslo_concurrency.processutils [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk.eph0 --force-share --output=json" returned: 0 in 0.052s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.570 2 DEBUG oslo_concurrency.processutils [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk.eph0 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.635 2 DEBUG oslo_concurrency.processutils [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk.eph0 --force-share --output=json" returned: 0 in 0.065s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.826 2 WARNING nova.virt.libvirt.driver [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.829 2 DEBUG nova.compute.resource_tracker [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5408MB free_disk=73.40510940551758GB free_vcpus=6 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", 
"product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.830 2 DEBUG oslo_concurrency.lockutils [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.830 2 DEBUG oslo_concurrency.lockutils [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.906 2 DEBUG nova.compute.resource_tracker [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Migration for instance a20c354d-a1af-4fad-958f-59623ebe4437 refers to another host's instance! _pair_instances_to_migrations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:903
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.936 2 DEBUG nova.compute.resource_tracker [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Skipping migration as instance is neither resizing nor live-migrating. _update_usage_from_migrations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1491
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.967 2 DEBUG nova.compute.resource_tracker [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Instance eba3fb05-4dd5-4f34-9cb5-c932a86f4c53 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.968 2 DEBUG nova.compute.resource_tracker [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Instance e09de65a-0b2d-4aa5-9d9a-49f039add691 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.968 2 DEBUG nova.compute.resource_tracker [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Migration 35179a92-1205-4643-acbc-8771c847a215 is active on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1640
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.968 2 DEBUG nova.compute.resource_tracker [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Total usable vcpus: 8, total allocated vcpus: 2 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:02:00 compute-0 nova_compute[192079]: 2025-10-02 12:02:00.969 2 DEBUG nova.compute.resource_tracker [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=832MB phys_disk=79GB used_disk=3GB total_vcpus=8 used_vcpus=2 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:02:01 compute-0 nova_compute[192079]: 2025-10-02 12:02:01.046 2 DEBUG nova.compute.provider_tree [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:02:01 compute-0 nova_compute[192079]: 2025-10-02 12:02:01.063 2 DEBUG nova.scheduler.client.report [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:02:01 compute-0 nova_compute[192079]: 2025-10-02 12:02:01.084 2 DEBUG nova.compute.resource_tracker [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:02:01 compute-0 nova_compute[192079]: 2025-10-02 12:02:01.086 2 DEBUG oslo_concurrency.lockutils [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.255s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:01 compute-0 nova_compute[192079]: 2025-10-02 12:02:01.107 2 INFO nova.compute.manager [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Migrating instance to compute-1.ctlplane.example.com finished successfully.
Oct 02 12:02:01 compute-0 nova_compute[192079]: 2025-10-02 12:02:01.205 2 INFO nova.scheduler.client.report [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Deleted allocation for migration 35179a92-1205-4643-acbc-8771c847a215
Oct 02 12:02:01 compute-0 nova_compute[192079]: 2025-10-02 12:02:01.207 2 DEBUG nova.virt.libvirt.driver [None req-e82f1a40-da7d-4ec7-877d-f33699e5f88a 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Live migration monitoring is all done _live_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10662
Oct 02 12:02:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:02.202 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:02.202 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:02.203 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:03 compute-0 systemd[1]: Stopping User Manager for UID 42436...
Oct 02 12:02:03 compute-0 systemd[220903]: Activating special unit Exit the Session...
Oct 02 12:02:03 compute-0 systemd[220903]: Stopped target Main User Target.
Oct 02 12:02:03 compute-0 systemd[220903]: Stopped target Basic System.
Oct 02 12:02:03 compute-0 systemd[220903]: Stopped target Paths.
Oct 02 12:02:03 compute-0 systemd[220903]: Stopped target Sockets.
Oct 02 12:02:03 compute-0 systemd[220903]: Stopped target Timers.
Oct 02 12:02:03 compute-0 systemd[220903]: Stopped Mark boot as successful after the user session has run 2 minutes.
Oct 02 12:02:03 compute-0 systemd[220903]: Stopped Daily Cleanup of User's Temporary Directories.
Oct 02 12:02:03 compute-0 systemd[220903]: Closed D-Bus User Message Bus Socket.
Oct 02 12:02:03 compute-0 systemd[220903]: Stopped Create User's Volatile Files and Directories.
Oct 02 12:02:03 compute-0 systemd[220903]: Removed slice User Application Slice.
Oct 02 12:02:03 compute-0 systemd[220903]: Reached target Shutdown.
Oct 02 12:02:03 compute-0 systemd[220903]: Finished Exit the Session.
Oct 02 12:02:03 compute-0 systemd[220903]: Reached target Exit the Session.
Oct 02 12:02:03 compute-0 systemd[1]: user@42436.service: Deactivated successfully.
Oct 02 12:02:03 compute-0 systemd[1]: Stopped User Manager for UID 42436.
Oct 02 12:02:03 compute-0 systemd[1]: Stopping User Runtime Directory /run/user/42436...
Oct 02 12:02:03 compute-0 systemd[1]: run-user-42436.mount: Deactivated successfully.
Oct 02 12:02:03 compute-0 systemd[1]: user-runtime-dir@42436.service: Deactivated successfully.
Oct 02 12:02:03 compute-0 systemd[1]: Stopped User Runtime Directory /run/user/42436.
Oct 02 12:02:03 compute-0 systemd[1]: Removed slice User Slice of UID 42436.
Oct 02 12:02:03 compute-0 nova_compute[192079]: 2025-10-02 12:02:03.983 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:04 compute-0 nova_compute[192079]: 2025-10-02 12:02:04.250 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:05 compute-0 nova_compute[192079]: 2025-10-02 12:02:05.660 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:02:06 compute-0 nova_compute[192079]: 2025-10-02 12:02:06.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:02:07 compute-0 nova_compute[192079]: 2025-10-02 12:02:07.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:02:08 compute-0 nova_compute[192079]: 2025-10-02 12:02:08.093 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759406513.0921693, a20c354d-a1af-4fad-958f-59623ebe4437 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:02:08 compute-0 nova_compute[192079]: 2025-10-02 12:02:08.093 2 INFO nova.compute.manager [-] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] VM Stopped (Lifecycle Event)
Oct 02 12:02:08 compute-0 nova_compute[192079]: 2025-10-02 12:02:08.122 2 DEBUG nova.compute.manager [None req-30e44962-5afa-4122-9ab6-c62f23757754 - - - - - -] [instance: a20c354d-a1af-4fad-958f-59623ebe4437] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:02:08 compute-0 podman[221262]: 2025-10-02 12:02:08.153802546 +0000 UTC m=+0.056856565 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:02:08 compute-0 podman[221263]: 2025-10-02 12:02:08.170826103 +0000 UTC m=+0.072137604 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, config_id=iscsid, container_name=iscsid, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, tcib_managed=true)
Oct 02 12:02:08 compute-0 nova_compute[192079]: 2025-10-02 12:02:08.678 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:02:08 compute-0 nova_compute[192079]: 2025-10-02 12:02:08.679 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:02:08 compute-0 nova_compute[192079]: 2025-10-02 12:02:08.712 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:08 compute-0 nova_compute[192079]: 2025-10-02 12:02:08.713 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:08 compute-0 nova_compute[192079]: 2025-10-02 12:02:08.713 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:08 compute-0 nova_compute[192079]: 2025-10-02 12:02:08.713 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:02:08 compute-0 nova_compute[192079]: 2025-10-02 12:02:08.784 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/e09de65a-0b2d-4aa5-9d9a-49f039add691/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:08 compute-0 nova_compute[192079]: 2025-10-02 12:02:08.851 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/e09de65a-0b2d-4aa5-9d9a-49f039add691/disk --force-share --output=json" returned: 0 in 0.067s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:08 compute-0 nova_compute[192079]: 2025-10-02 12:02:08.852 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/e09de65a-0b2d-4aa5-9d9a-49f039add691/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:08 compute-0 nova_compute[192079]: 2025-10-02 12:02:08.905 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/e09de65a-0b2d-4aa5-9d9a-49f039add691/disk --force-share --output=json" returned: 0 in 0.053s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:08 compute-0 nova_compute[192079]: 2025-10-02 12:02:08.911 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:08 compute-0 nova_compute[192079]: 2025-10-02 12:02:08.962 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk --force-share --output=json" returned: 0 in 0.052s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:08 compute-0 nova_compute[192079]: 2025-10-02 12:02:08.963 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.011 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk --force-share --output=json" returned: 0 in 0.048s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.012 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk.eph0 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.055 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.063 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk.eph0 --force-share --output=json" returned: 0 in 0.050s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.063 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk.eph0 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.116 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53/disk.eph0 --force-share --output=json" returned: 0 in 0.052s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.169 2 DEBUG oslo_concurrency.lockutils [None req-e75608b4-caf0-4d02-b1dd-868947f46da7 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Acquiring lock "eba3fb05-4dd5-4f34-9cb5-c932a86f4c53" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.170 2 DEBUG oslo_concurrency.lockutils [None req-e75608b4-caf0-4d02-b1dd-868947f46da7 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "eba3fb05-4dd5-4f34-9cb5-c932a86f4c53" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.170 2 DEBUG oslo_concurrency.lockutils [None req-e75608b4-caf0-4d02-b1dd-868947f46da7 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Acquiring lock "eba3fb05-4dd5-4f34-9cb5-c932a86f4c53-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.171 2 DEBUG oslo_concurrency.lockutils [None req-e75608b4-caf0-4d02-b1dd-868947f46da7 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "eba3fb05-4dd5-4f34-9cb5-c932a86f4c53-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.171 2 DEBUG oslo_concurrency.lockutils [None req-e75608b4-caf0-4d02-b1dd-868947f46da7 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "eba3fb05-4dd5-4f34-9cb5-c932a86f4c53-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.183 2 INFO nova.compute.manager [None req-e75608b4-caf0-4d02-b1dd-868947f46da7 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Terminating instance
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.194 2 DEBUG nova.compute.manager [None req-e75608b4-caf0-4d02-b1dd-868947f46da7 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.252 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:09 compute-0 kernel: tapdc3331f3-a4 (unregistering): left promiscuous mode
Oct 02 12:02:09 compute-0 NetworkManager[51160]: <info>  [1759406529.3372] device (tapdc3331f3-a4): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.347 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.349 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:09 compute-0 ovn_controller[94336]: 2025-10-02T12:02:09Z|00063|binding|INFO|Releasing lport dc3331f3-a49a-4c18-98e4-476f3e2c97d4 from this chassis (sb_readonly=0)
Oct 02 12:02:09 compute-0 ovn_controller[94336]: 2025-10-02T12:02:09Z|00064|binding|INFO|Setting lport dc3331f3-a49a-4c18-98e4-476f3e2c97d4 down in Southbound
Oct 02 12:02:09 compute-0 ovn_controller[94336]: 2025-10-02T12:02:09Z|00065|binding|INFO|Removing iface tapdc3331f3-a4 ovn-installed in OVS
Oct 02 12:02:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:09.359 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:2e:71:61 10.100.0.14'], port_security=['fa:16:3e:2e:71:61 10.100.0.14'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.14/28', 'neutron:device_id': 'eba3fb05-4dd5-4f34-9cb5-c932a86f4c53', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-0432e6a2-e111-484d-b6cf-d32d9fc846c9', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '302a9c83c3eb43818ce6284e9ddb73be', 'neutron:revision_number': '4', 'neutron:security_group_ids': 'afed868a-564b-4ceb-947a-806e11012ac0', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com', 'neutron:port_fip': '192.168.122.218'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=b550d36f-725b-4b76-9c4e-aa36183370a9, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=dc3331f3-a49a-4c18-98e4-476f3e2c97d4) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:02:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:09.361 103294 INFO neutron.agent.ovn.metadata.agent [-] Port dc3331f3-a49a-4c18-98e4-476f3e2c97d4 in datapath 0432e6a2-e111-484d-b6cf-d32d9fc846c9 unbound from our chassis
Oct 02 12:02:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:09.362 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 0432e6a2-e111-484d-b6cf-d32d9fc846c9, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.363 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:09.363 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d908428a-b111-4622-8d26-f5541885c53a]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:09.364 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9 namespace which is not needed anymore
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.383 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.385 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5389MB free_disk=73.40501403808594GB free_vcpus=6 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.385 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.385 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:09 compute-0 systemd[1]: machine-qemu\x2d4\x2dinstance\x2d00000008.scope: Deactivated successfully.
Oct 02 12:02:09 compute-0 systemd[1]: machine-qemu\x2d4\x2dinstance\x2d00000008.scope: Consumed 14.228s CPU time.
Oct 02 12:02:09 compute-0 systemd-machined[152150]: Machine qemu-4-instance-00000008 terminated.
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.486 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance eba3fb05-4dd5-4f34-9cb5-c932a86f4c53 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.486 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance e09de65a-0b2d-4aa5-9d9a-49f039add691 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.487 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 2 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.487 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=832MB phys_disk=79GB used_disk=3GB total_vcpus=8 used_vcpus=2 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:02:09 compute-0 neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9[220752]: [NOTICE]   (220756) : haproxy version is 2.8.14-c23fe91
Oct 02 12:02:09 compute-0 neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9[220752]: [NOTICE]   (220756) : path to executable is /usr/sbin/haproxy
Oct 02 12:02:09 compute-0 neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9[220752]: [WARNING]  (220756) : Exiting Master process...
Oct 02 12:02:09 compute-0 neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9[220752]: [ALERT]    (220756) : Current worker (220758) exited with code 143 (Terminated)
Oct 02 12:02:09 compute-0 neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9[220752]: [WARNING]  (220756) : All workers exited. Exiting... (0)
Oct 02 12:02:09 compute-0 systemd[1]: libpod-3573cdfb43ade666670c40924a132cceb65a84f458ea2fd1c797283fff1e8728.scope: Deactivated successfully.
Oct 02 12:02:09 compute-0 podman[221346]: 2025-10-02 12:02:09.522333916 +0000 UTC m=+0.084837441 container died 3573cdfb43ade666670c40924a132cceb65a84f458ea2fd1c797283fff1e8728 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.638 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.659 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.672 2 INFO nova.virt.libvirt.driver [-] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Instance destroyed successfully.
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.673 2 DEBUG nova.objects.instance [None req-e75608b4-caf0-4d02-b1dd-868947f46da7 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lazy-loading 'resources' on Instance uuid eba3fb05-4dd5-4f34-9cb5-c932a86f4c53 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.688 2 DEBUG nova.virt.libvirt.vif [None req-e75608b4-caf0-4d02-b1dd-868947f46da7 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:01:11Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServersWithSpecificFlavorTestJSON-server-2025065881',display_name='tempest-ServersWithSpecificFlavorTestJSON-server-2025065881',ec2_ids=<?>,ephemeral_gb=1,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(6),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverswithspecificflavortestjson-server-2025065881',id=8,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=6,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBLQAR6rn15gxsCt5BVT9ZeXnbqUta2pJ91YMBkT9rHUc9ZBtTK728XqHiZfyDrBlMAbpgHvu/gvYEjRf3OvnLlEsO2AY8MfRajDqsbCXPjRzSoO5eacsxtVMw0D5LoybNA==',key_name='tempest-keypair-283845564',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:01:33Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='302a9c83c3eb43818ce6284e9ddb73be',ramdisk_id='',reservation_id='r-p5h5ztvr',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-ServersWithSpecificFlavorTestJSON-1100192498',owner_user_name='tempest-ServersWithSpecificFlavorTestJSON-1100192498-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:01:33Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='67132a26bb4c454aa5ed0e4b8fee032c',uuid=eba3fb05-4dd5-4f34-9cb5-c932a86f4c53,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "dc3331f3-a49a-4c18-98e4-476f3e2c97d4", "address": "fa:16:3e:2e:71:61", "network": {"id": "0432e6a2-e111-484d-b6cf-d32d9fc846c9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1078640656-network", "subnets": [{"cidr": 
"10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.218", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "302a9c83c3eb43818ce6284e9ddb73be", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapdc3331f3-a4", "ovs_interfaceid": "dc3331f3-a49a-4c18-98e4-476f3e2c97d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.689 2 DEBUG nova.network.os_vif_util [None req-e75608b4-caf0-4d02-b1dd-868947f46da7 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Converting VIF {"id": "dc3331f3-a49a-4c18-98e4-476f3e2c97d4", "address": "fa:16:3e:2e:71:61", "network": {"id": "0432e6a2-e111-484d-b6cf-d32d9fc846c9", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1078640656-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.218", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "302a9c83c3eb43818ce6284e9ddb73be", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapdc3331f3-a4", "ovs_interfaceid": "dc3331f3-a49a-4c18-98e4-476f3e2c97d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.690 2 DEBUG nova.network.os_vif_util [None req-e75608b4-caf0-4d02-b1dd-868947f46da7 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:2e:71:61,bridge_name='br-int',has_traffic_filtering=True,id=dc3331f3-a49a-4c18-98e4-476f3e2c97d4,network=Network(0432e6a2-e111-484d-b6cf-d32d9fc846c9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapdc3331f3-a4') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.690 2 DEBUG os_vif [None req-e75608b4-caf0-4d02-b1dd-868947f46da7 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:2e:71:61,bridge_name='br-int',has_traffic_filtering=True,id=dc3331f3-a49a-4c18-98e4-476f3e2c97d4,network=Network(0432e6a2-e111-484d-b6cf-d32d9fc846c9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapdc3331f3-a4') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:02:09 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-3573cdfb43ade666670c40924a132cceb65a84f458ea2fd1c797283fff1e8728-userdata-shm.mount: Deactivated successfully.
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.693 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.693 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapdc3331f3-a4, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.695 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.695 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.310s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:09 compute-0 systemd[1]: var-lib-containers-storage-overlay-ee3ad8522c9b7f65b1ca66e4c4b52a64dc085976c0e26d3d70af0fe5f1e87e85-merged.mount: Deactivated successfully.
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.696 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.697 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.699 2 INFO os_vif [None req-e75608b4-caf0-4d02-b1dd-868947f46da7 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:2e:71:61,bridge_name='br-int',has_traffic_filtering=True,id=dc3331f3-a49a-4c18-98e4-476f3e2c97d4,network=Network(0432e6a2-e111-484d-b6cf-d32d9fc846c9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapdc3331f3-a4')
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.700 2 INFO nova.virt.libvirt.driver [None req-e75608b4-caf0-4d02-b1dd-868947f46da7 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Deleting instance files /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53_del
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.700 2 INFO nova.virt.libvirt.driver [None req-e75608b4-caf0-4d02-b1dd-868947f46da7 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Deletion of /var/lib/nova/instances/eba3fb05-4dd5-4f34-9cb5-c932a86f4c53_del complete
Oct 02 12:02:09 compute-0 podman[221346]: 2025-10-02 12:02:09.714187593 +0000 UTC m=+0.276691108 container cleanup 3573cdfb43ade666670c40924a132cceb65a84f458ea2fd1c797283fff1e8728 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3)
Oct 02 12:02:09 compute-0 systemd[1]: libpod-conmon-3573cdfb43ade666670c40924a132cceb65a84f458ea2fd1c797283fff1e8728.scope: Deactivated successfully.
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.732 2 DEBUG nova.compute.manager [req-824505fc-951d-471c-a935-444500c03f61 req-42f420b9-ea0b-4db5-9ba4-ffb05e9c7a63 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Received event network-vif-unplugged-dc3331f3-a49a-4c18-98e4-476f3e2c97d4 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.733 2 DEBUG oslo_concurrency.lockutils [req-824505fc-951d-471c-a935-444500c03f61 req-42f420b9-ea0b-4db5-9ba4-ffb05e9c7a63 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "eba3fb05-4dd5-4f34-9cb5-c932a86f4c53-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.733 2 DEBUG oslo_concurrency.lockutils [req-824505fc-951d-471c-a935-444500c03f61 req-42f420b9-ea0b-4db5-9ba4-ffb05e9c7a63 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "eba3fb05-4dd5-4f34-9cb5-c932a86f4c53-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.733 2 DEBUG oslo_concurrency.lockutils [req-824505fc-951d-471c-a935-444500c03f61 req-42f420b9-ea0b-4db5-9ba4-ffb05e9c7a63 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "eba3fb05-4dd5-4f34-9cb5-c932a86f4c53-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.733 2 DEBUG nova.compute.manager [req-824505fc-951d-471c-a935-444500c03f61 req-42f420b9-ea0b-4db5-9ba4-ffb05e9c7a63 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] No waiting events found dispatching network-vif-unplugged-dc3331f3-a49a-4c18-98e4-476f3e2c97d4 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.734 2 DEBUG nova.compute.manager [req-824505fc-951d-471c-a935-444500c03f61 req-42f420b9-ea0b-4db5-9ba4-ffb05e9c7a63 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Received event network-vif-unplugged-dc3331f3-a49a-4c18-98e4-476f3e2c97d4 for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.817 2 INFO nova.compute.manager [None req-e75608b4-caf0-4d02-b1dd-868947f46da7 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Took 0.62 seconds to destroy the instance on the hypervisor.
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.817 2 DEBUG oslo.service.loopingcall [None req-e75608b4-caf0-4d02-b1dd-868947f46da7 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.817 2 DEBUG nova.compute.manager [-] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.818 2 DEBUG nova.network.neutron [-] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:02:09 compute-0 podman[221397]: 2025-10-02 12:02:09.902188716 +0000 UTC m=+0.168957083 container remove 3573cdfb43ade666670c40924a132cceb65a84f458ea2fd1c797283fff1e8728 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9, org.label-schema.vendor=CentOS, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001)
Oct 02 12:02:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:09.907 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ff305e29-6a0a-42e3-b865-50d86a5ebcaf]: (4, ('Thu Oct  2 12:02:09 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9 (3573cdfb43ade666670c40924a132cceb65a84f458ea2fd1c797283fff1e8728)\n3573cdfb43ade666670c40924a132cceb65a84f458ea2fd1c797283fff1e8728\nThu Oct  2 12:02:09 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9 (3573cdfb43ade666670c40924a132cceb65a84f458ea2fd1c797283fff1e8728)\n3573cdfb43ade666670c40924a132cceb65a84f458ea2fd1c797283fff1e8728\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:09.908 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[403e662f-0f65-48ed-a203-3fc670bb1506]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:09.909 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap0432e6a2-e0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.911 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:09 compute-0 kernel: tap0432e6a2-e0: left promiscuous mode
Oct 02 12:02:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:09.915 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0f4003f9-cea4-40d0-b1e9-7f16e740fd29]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:09 compute-0 nova_compute[192079]: 2025-10-02 12:02:09.925 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:09.954 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[fc4b7af5-c8c8-434f-8fd1-595735d3c17d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:09.956 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[cd1c0fc7-d7bf-4216-8994-1c55d581dcb9]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:09.969 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[96f7b714-a533-431f-a434-4da9533fbb11]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 448927, 'reachable_time': 28454, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 221413, 'error': None, 'target': 'ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:09.971 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-0432e6a2-e111-484d-b6cf-d32d9fc846c9 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:02:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:09.971 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[157355c3-2926-41f4-9421-5445388745b7]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:09 compute-0 systemd[1]: run-netns-ovnmeta\x2d0432e6a2\x2de111\x2d484d\x2db6cf\x2dd32d9fc846c9.mount: Deactivated successfully.
Oct 02 12:02:10 compute-0 nova_compute[192079]: 2025-10-02 12:02:10.677 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:02:10 compute-0 nova_compute[192079]: 2025-10-02 12:02:10.678 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:02:10 compute-0 nova_compute[192079]: 2025-10-02 12:02:10.679 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:02:10 compute-0 nova_compute[192079]: 2025-10-02 12:02:10.679 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:02:10 compute-0 nova_compute[192079]: 2025-10-02 12:02:10.696 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Skipping network cache update for instance because it is being deleted. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9875
Oct 02 12:02:10 compute-0 nova_compute[192079]: 2025-10-02 12:02:10.887 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "refresh_cache-e09de65a-0b2d-4aa5-9d9a-49f039add691" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:02:10 compute-0 nova_compute[192079]: 2025-10-02 12:02:10.888 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquired lock "refresh_cache-e09de65a-0b2d-4aa5-9d9a-49f039add691" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:02:10 compute-0 nova_compute[192079]: 2025-10-02 12:02:10.888 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Forcefully refreshing network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2004
Oct 02 12:02:10 compute-0 nova_compute[192079]: 2025-10-02 12:02:10.889 2 DEBUG nova.objects.instance [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lazy-loading 'info_cache' on Instance uuid e09de65a-0b2d-4aa5-9d9a-49f039add691 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:02:11 compute-0 nova_compute[192079]: 2025-10-02 12:02:11.162 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:02:11 compute-0 nova_compute[192079]: 2025-10-02 12:02:11.306 2 DEBUG nova.network.neutron [-] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:02:11 compute-0 nova_compute[192079]: 2025-10-02 12:02:11.326 2 INFO nova.compute.manager [-] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Took 1.51 seconds to deallocate network for instance.
Oct 02 12:02:11 compute-0 nova_compute[192079]: 2025-10-02 12:02:11.423 2 DEBUG oslo_concurrency.lockutils [None req-e75608b4-caf0-4d02-b1dd-868947f46da7 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:11 compute-0 nova_compute[192079]: 2025-10-02 12:02:11.424 2 DEBUG oslo_concurrency.lockutils [None req-e75608b4-caf0-4d02-b1dd-868947f46da7 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:11 compute-0 nova_compute[192079]: 2025-10-02 12:02:11.426 2 DEBUG nova.compute.manager [req-7a1f4b2b-0370-4c0b-8ece-976a8d39c35f req-735bc707-54c5-40b4-abd8-9b8360598bb6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Received event network-vif-deleted-dc3331f3-a49a-4c18-98e4-476f3e2c97d4 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:02:11 compute-0 nova_compute[192079]: 2025-10-02 12:02:11.517 2 DEBUG nova.compute.provider_tree [None req-e75608b4-caf0-4d02-b1dd-868947f46da7 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:02:11 compute-0 nova_compute[192079]: 2025-10-02 12:02:11.531 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:02:11 compute-0 nova_compute[192079]: 2025-10-02 12:02:11.548 2 DEBUG nova.scheduler.client.report [None req-e75608b4-caf0-4d02-b1dd-868947f46da7 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:02:11 compute-0 nova_compute[192079]: 2025-10-02 12:02:11.554 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Releasing lock "refresh_cache-e09de65a-0b2d-4aa5-9d9a-49f039add691" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:02:11 compute-0 nova_compute[192079]: 2025-10-02 12:02:11.554 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Updated the network info_cache for instance _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9929
Oct 02 12:02:11 compute-0 nova_compute[192079]: 2025-10-02 12:02:11.554 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:02:11 compute-0 nova_compute[192079]: 2025-10-02 12:02:11.554 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:02:11 compute-0 nova_compute[192079]: 2025-10-02 12:02:11.555 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:02:11 compute-0 nova_compute[192079]: 2025-10-02 12:02:11.594 2 DEBUG oslo_concurrency.lockutils [None req-e75608b4-caf0-4d02-b1dd-868947f46da7 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.170s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:11 compute-0 nova_compute[192079]: 2025-10-02 12:02:11.652 2 INFO nova.scheduler.client.report [None req-e75608b4-caf0-4d02-b1dd-868947f46da7 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Deleted allocations for instance eba3fb05-4dd5-4f34-9cb5-c932a86f4c53
Oct 02 12:02:11 compute-0 nova_compute[192079]: 2025-10-02 12:02:11.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:02:11 compute-0 nova_compute[192079]: 2025-10-02 12:02:11.836 2 DEBUG oslo_concurrency.lockutils [None req-e75608b4-caf0-4d02-b1dd-868947f46da7 67132a26bb4c454aa5ed0e4b8fee032c 302a9c83c3eb43818ce6284e9ddb73be - - default default] Lock "eba3fb05-4dd5-4f34-9cb5-c932a86f4c53" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 2.666s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:12 compute-0 nova_compute[192079]: 2025-10-02 12:02:12.105 2 DEBUG nova.compute.manager [req-25e001cd-aab9-4436-a3ff-0d96a87e2f91 req-b584d719-258a-48b6-87d0-9a73f660ee37 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Received event network-vif-plugged-dc3331f3-a49a-4c18-98e4-476f3e2c97d4 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:02:12 compute-0 nova_compute[192079]: 2025-10-02 12:02:12.106 2 DEBUG oslo_concurrency.lockutils [req-25e001cd-aab9-4436-a3ff-0d96a87e2f91 req-b584d719-258a-48b6-87d0-9a73f660ee37 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "eba3fb05-4dd5-4f34-9cb5-c932a86f4c53-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:12 compute-0 nova_compute[192079]: 2025-10-02 12:02:12.107 2 DEBUG oslo_concurrency.lockutils [req-25e001cd-aab9-4436-a3ff-0d96a87e2f91 req-b584d719-258a-48b6-87d0-9a73f660ee37 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "eba3fb05-4dd5-4f34-9cb5-c932a86f4c53-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:12 compute-0 nova_compute[192079]: 2025-10-02 12:02:12.107 2 DEBUG oslo_concurrency.lockutils [req-25e001cd-aab9-4436-a3ff-0d96a87e2f91 req-b584d719-258a-48b6-87d0-9a73f660ee37 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "eba3fb05-4dd5-4f34-9cb5-c932a86f4c53-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:12 compute-0 nova_compute[192079]: 2025-10-02 12:02:12.108 2 DEBUG nova.compute.manager [req-25e001cd-aab9-4436-a3ff-0d96a87e2f91 req-b584d719-258a-48b6-87d0-9a73f660ee37 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] No waiting events found dispatching network-vif-plugged-dc3331f3-a49a-4c18-98e4-476f3e2c97d4 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:02:12 compute-0 nova_compute[192079]: 2025-10-02 12:02:12.108 2 WARNING nova.compute.manager [req-25e001cd-aab9-4436-a3ff-0d96a87e2f91 req-b584d719-258a-48b6-87d0-9a73f660ee37 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Received unexpected event network-vif-plugged-dc3331f3-a49a-4c18-98e4-476f3e2c97d4 for instance with vm_state deleted and task_state None.
Oct 02 12:02:14 compute-0 nova_compute[192079]: 2025-10-02 12:02:14.255 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:14 compute-0 nova_compute[192079]: 2025-10-02 12:02:14.695 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:16 compute-0 podman[221414]: 2025-10-02 12:02:16.220793961 +0000 UTC m=+0.119341434 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, container_name=ovn_controller, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_id=ovn_controller, org.label-schema.schema-version=1.0, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001)
Oct 02 12:02:19 compute-0 nova_compute[192079]: 2025-10-02 12:02:19.256 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:19 compute-0 nova_compute[192079]: 2025-10-02 12:02:19.697 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:20 compute-0 podman[221441]: 2025-10-02 12:02:20.163379341 +0000 UTC m=+0.071851195 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, 
org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, config_id=ovn_metadata_agent, tcib_managed=true)
Oct 02 12:02:20 compute-0 podman[221442]: 2025-10-02 12:02:20.164489142 +0000 UTC m=+0.067378493 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:02:22 compute-0 nova_compute[192079]: 2025-10-02 12:02:22.350 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:22 compute-0 nova_compute[192079]: 2025-10-02 12:02:22.572 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:24 compute-0 nova_compute[192079]: 2025-10-02 12:02:24.288 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:24 compute-0 nova_compute[192079]: 2025-10-02 12:02:24.671 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759406529.670249, eba3fb05-4dd5-4f34-9cb5-c932a86f4c53 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:02:24 compute-0 nova_compute[192079]: 2025-10-02 12:02:24.672 2 INFO nova.compute.manager [-] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] VM Stopped (Lifecycle Event)
Oct 02 12:02:24 compute-0 nova_compute[192079]: 2025-10-02 12:02:24.698 2 DEBUG nova.compute.manager [None req-f3ef99ba-8b49-45e3-bea2-823cb94a086f - - - - - -] [instance: eba3fb05-4dd5-4f34-9cb5-c932a86f4c53] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:02:24 compute-0 nova_compute[192079]: 2025-10-02 12:02:24.699 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:27 compute-0 podman[221485]: 2025-10-02 12:02:27.142752327 +0000 UTC m=+0.057601617 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.license=GPLv2, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, 
org.label-schema.vendor=CentOS, config_id=edpm, container_name=ceilometer_agent_compute)
Oct 02 12:02:27 compute-0 nova_compute[192079]: 2025-10-02 12:02:27.309 2 DEBUG nova.virt.libvirt.driver [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Creating tmpfile /var/lib/nova/instances/tmp0t9xiq55 to notify to other compute nodes that they should mount the same storage. _create_shared_storage_test_file /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10041
Oct 02 12:02:27 compute-0 nova_compute[192079]: 2025-10-02 12:02:27.310 2 DEBUG nova.compute.manager [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] destination check data is LibvirtLiveMigrateData(bdms=<?>,block_migration=<?>,disk_available_mb=71680,disk_over_commit=<?>,dst_numa_info=<?>,dst_supports_numa_live_migration=<?>,dst_wants_file_backed_memory=False,file_backed_memory_discard=<?>,filename='tmp0t9xiq55',graphics_listen_addr_spice=127.0.0.1,graphics_listen_addr_vnc=::,image_type='qcow2',instance_relative_path=<?>,is_shared_block_storage=<?>,is_shared_instance_path=<?>,is_volume_backed=<?>,migration=<?>,old_vol_attachment_ids=<?>,serial_listen_addr=None,serial_listen_ports=<?>,src_supports_native_luks=<?>,src_supports_numa_live_migration=<?>,supported_perf_events=<?>,target_connect_addr=<?>,vifs=[VIFMigrateData],wait_for_vif_plugged=<?>) check_can_live_migrate_destination /usr/lib/python3.9/site-packages/nova/compute/manager.py:8476
Oct 02 12:02:28 compute-0 nova_compute[192079]: 2025-10-02 12:02:28.743 2 DEBUG nova.compute.manager [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] pre_live_migration data is LibvirtLiveMigrateData(bdms=<?>,block_migration=True,disk_available_mb=71680,disk_over_commit=<?>,dst_numa_info=<?>,dst_supports_numa_live_migration=<?>,dst_wants_file_backed_memory=False,file_backed_memory_discard=<?>,filename='tmp0t9xiq55',graphics_listen_addr_spice=127.0.0.1,graphics_listen_addr_vnc=::,image_type='qcow2',instance_relative_path='f1267fe1-552c-4312-b9b0-c02eae82a77a',is_shared_block_storage=False,is_shared_instance_path=False,is_volume_backed=False,migration=<?>,old_vol_attachment_ids=<?>,serial_listen_addr=None,serial_listen_ports=<?>,src_supports_native_luks=<?>,src_supports_numa_live_migration=<?>,supported_perf_events=<?>,target_connect_addr=<?>,vifs=[VIFMigrateData],wait_for_vif_plugged=<?>) pre_live_migration /usr/lib/python3.9/site-packages/nova/compute/manager.py:8604
Oct 02 12:02:28 compute-0 nova_compute[192079]: 2025-10-02 12:02:28.768 2 DEBUG oslo_concurrency.lockutils [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Acquiring lock "refresh_cache-f1267fe1-552c-4312-b9b0-c02eae82a77a" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:02:28 compute-0 nova_compute[192079]: 2025-10-02 12:02:28.769 2 DEBUG oslo_concurrency.lockutils [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Acquired lock "refresh_cache-f1267fe1-552c-4312-b9b0-c02eae82a77a" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:02:28 compute-0 nova_compute[192079]: 2025-10-02 12:02:28.769 2 DEBUG nova.network.neutron [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:02:29 compute-0 nova_compute[192079]: 2025-10-02 12:02:29.290 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:29 compute-0 nova_compute[192079]: 2025-10-02 12:02:29.701 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:31 compute-0 podman[221507]: 2025-10-02 12:02:31.16919384 +0000 UTC m=+0.078964961 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, tcib_managed=true, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:02:31 compute-0 podman[221506]: 2025-10-02 12:02:31.202702286 +0000 UTC m=+0.105232738 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, version=9.6, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, managed_by=edpm_ansible, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., distribution-scope=public, container_name=openstack_network_exporter, name=ubi9-minimal, io.buildah.version=1.33.7, io.openshift.tags=minimal rhel9, architecture=x86_64, com.redhat.component=ubi9-minimal-container, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.expose-services=, vcs-type=git, config_id=edpm, url=https://catalog.redhat.com/en/search?searchType=containers, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, build-date=2025-08-20T13:12:41, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, maintainer=Red Hat, Inc., release=1755695350, vendor=Red Hat, Inc., summary=Provides the latest release of the minimal Red Hat Universal Base Image 9.)
Oct 02 12:02:31 compute-0 nova_compute[192079]: 2025-10-02 12:02:31.211 2 DEBUG nova.network.neutron [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Updating instance_info_cache with network_info: [{"id": "75561bb8-bfb9-4100-9c79-271fd50011de", "address": "fa:16:3e:19:d8:66", "network": {"id": "020b4768-a07a-4769-8636-455566c87083", "bridge": "br-int", "label": "tempest-LiveAutoBlockMigrationV225Test-804372870-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "5cc73d75e0864e838eefa90cb33b7e01", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap75561bb8-bf", "ovs_interfaceid": "75561bb8-bfb9-4100-9c79-271fd50011de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:02:31 compute-0 nova_compute[192079]: 2025-10-02 12:02:31.313 2 DEBUG oslo_concurrency.lockutils [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Releasing lock "refresh_cache-f1267fe1-552c-4312-b9b0-c02eae82a77a" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:02:31 compute-0 nova_compute[192079]: 2025-10-02 12:02:31.375 2 DEBUG nova.virt.libvirt.driver [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] migrate_data in pre_live_migration: LibvirtLiveMigrateData(bdms=<?>,block_migration=True,disk_available_mb=71680,disk_over_commit=<?>,dst_numa_info=<?>,dst_supports_numa_live_migration=<?>,dst_wants_file_backed_memory=False,file_backed_memory_discard=<?>,filename='tmp0t9xiq55',graphics_listen_addr_spice=127.0.0.1,graphics_listen_addr_vnc=::,image_type='qcow2',instance_relative_path='f1267fe1-552c-4312-b9b0-c02eae82a77a',is_shared_block_storage=False,is_shared_instance_path=False,is_volume_backed=False,migration=<?>,old_vol_attachment_ids={},serial_listen_addr=None,serial_listen_ports=<?>,src_supports_native_luks=<?>,src_supports_numa_live_migration=<?>,supported_perf_events=<?>,target_connect_addr=<?>,vifs=[VIFMigrateData],wait_for_vif_plugged=<?>) pre_live_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10827
Oct 02 12:02:31 compute-0 nova_compute[192079]: 2025-10-02 12:02:31.375 2 DEBUG nova.virt.libvirt.driver [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Creating instance directory: /var/lib/nova/instances/f1267fe1-552c-4312-b9b0-c02eae82a77a pre_live_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10840
Oct 02 12:02:31 compute-0 nova_compute[192079]: 2025-10-02 12:02:31.376 2 DEBUG nova.virt.libvirt.driver [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Creating disk.info with the contents: {'/var/lib/nova/instances/f1267fe1-552c-4312-b9b0-c02eae82a77a/disk': 'qcow2', '/var/lib/nova/instances/f1267fe1-552c-4312-b9b0-c02eae82a77a/disk.config': 'raw'} pre_live_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10854
Oct 02 12:02:31 compute-0 nova_compute[192079]: 2025-10-02 12:02:31.376 2 DEBUG nova.virt.libvirt.driver [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Checking to make sure images and backing files are present before live migration. pre_live_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10864
Oct 02 12:02:31 compute-0 nova_compute[192079]: 2025-10-02 12:02:31.377 2 DEBUG nova.objects.instance [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Lazy-loading 'trusted_certs' on Instance uuid f1267fe1-552c-4312-b9b0-c02eae82a77a obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:02:31 compute-0 nova_compute[192079]: 2025-10-02 12:02:31.405 2 DEBUG oslo_concurrency.processutils [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:31 compute-0 nova_compute[192079]: 2025-10-02 12:02:31.459 2 DEBUG oslo_concurrency.processutils [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:31 compute-0 nova_compute[192079]: 2025-10-02 12:02:31.460 2 DEBUG oslo_concurrency.lockutils [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:31 compute-0 nova_compute[192079]: 2025-10-02 12:02:31.461 2 DEBUG oslo_concurrency.lockutils [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:31 compute-0 nova_compute[192079]: 2025-10-02 12:02:31.472 2 DEBUG oslo_concurrency.processutils [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:31 compute-0 nova_compute[192079]: 2025-10-02 12:02:31.531 2 DEBUG oslo_concurrency.processutils [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.059s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:31 compute-0 nova_compute[192079]: 2025-10-02 12:02:31.532 2 DEBUG oslo_concurrency.processutils [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/f1267fe1-552c-4312-b9b0-c02eae82a77a/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:31 compute-0 nova_compute[192079]: 2025-10-02 12:02:31.569 2 DEBUG oslo_concurrency.processutils [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/f1267fe1-552c-4312-b9b0-c02eae82a77a/disk 1073741824" returned: 0 in 0.038s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:31 compute-0 nova_compute[192079]: 2025-10-02 12:02:31.570 2 DEBUG oslo_concurrency.lockutils [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.109s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:31 compute-0 nova_compute[192079]: 2025-10-02 12:02:31.571 2 DEBUG oslo_concurrency.processutils [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:31 compute-0 nova_compute[192079]: 2025-10-02 12:02:31.622 2 DEBUG oslo_concurrency.processutils [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.051s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:31 compute-0 nova_compute[192079]: 2025-10-02 12:02:31.623 2 DEBUG nova.virt.disk.api [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Checking if we can resize image /var/lib/nova/instances/f1267fe1-552c-4312-b9b0-c02eae82a77a/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:02:31 compute-0 nova_compute[192079]: 2025-10-02 12:02:31.623 2 DEBUG oslo_concurrency.processutils [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/f1267fe1-552c-4312-b9b0-c02eae82a77a/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:31 compute-0 nova_compute[192079]: 2025-10-02 12:02:31.677 2 DEBUG oslo_concurrency.processutils [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/f1267fe1-552c-4312-b9b0-c02eae82a77a/disk --force-share --output=json" returned: 0 in 0.053s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:31 compute-0 nova_compute[192079]: 2025-10-02 12:02:31.678 2 DEBUG nova.virt.disk.api [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Cannot resize image /var/lib/nova/instances/f1267fe1-552c-4312-b9b0-c02eae82a77a/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:02:31 compute-0 nova_compute[192079]: 2025-10-02 12:02:31.678 2 DEBUG nova.objects.instance [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Lazy-loading 'migration_context' on Instance uuid f1267fe1-552c-4312-b9b0-c02eae82a77a obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:02:31 compute-0 nova_compute[192079]: 2025-10-02 12:02:31.692 2 DEBUG oslo_concurrency.processutils [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f raw /var/lib/nova/instances/f1267fe1-552c-4312-b9b0-c02eae82a77a/disk.config 485376 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:31 compute-0 nova_compute[192079]: 2025-10-02 12:02:31.715 2 DEBUG oslo_concurrency.processutils [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f raw /var/lib/nova/instances/f1267fe1-552c-4312-b9b0-c02eae82a77a/disk.config 485376" returned: 0 in 0.023s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:31 compute-0 nova_compute[192079]: 2025-10-02 12:02:31.717 2 DEBUG nova.virt.libvirt.volume.remotefs [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Copying file compute-2.ctlplane.example.com:/var/lib/nova/instances/f1267fe1-552c-4312-b9b0-c02eae82a77a/disk.config to /var/lib/nova/instances/f1267fe1-552c-4312-b9b0-c02eae82a77a copy_file /usr/lib/python3.9/site-packages/nova/virt/libvirt/volume/remotefs.py:103
Oct 02 12:02:31 compute-0 nova_compute[192079]: 2025-10-02 12:02:31.717 2 DEBUG oslo_concurrency.processutils [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Running cmd (subprocess): scp -C -r compute-2.ctlplane.example.com:/var/lib/nova/instances/f1267fe1-552c-4312-b9b0-c02eae82a77a/disk.config /var/lib/nova/instances/f1267fe1-552c-4312-b9b0-c02eae82a77a execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:32 compute-0 nova_compute[192079]: 2025-10-02 12:02:32.158 2 DEBUG oslo_concurrency.processutils [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] CMD "scp -C -r compute-2.ctlplane.example.com:/var/lib/nova/instances/f1267fe1-552c-4312-b9b0-c02eae82a77a/disk.config /var/lib/nova/instances/f1267fe1-552c-4312-b9b0-c02eae82a77a" returned: 0 in 0.440s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:32 compute-0 nova_compute[192079]: 2025-10-02 12:02:32.158 2 DEBUG nova.virt.libvirt.driver [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Plugging VIFs using destination host port bindings before live migration. _pre_live_migration_plug_vifs /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10794
Oct 02 12:02:32 compute-0 nova_compute[192079]: 2025-10-02 12:02:32.160 2 DEBUG nova.virt.libvirt.vif [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:02:09Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description=None,display_name='tempest-LiveAutoBlockMigrationV225Test-server-1420306859',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-2.ctlplane.example.com',hostname='tempest-liveautoblockmigrationv225test-server-1420306859',id=11,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:02:21Z,launched_on='compute-2.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-2.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='5cc73d75e0864e838eefa90cb33b7e01',ramdisk_id='',reservation_id='r-f4n74pvj',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image
_min_ram='0',owner_project_name='tempest-LiveAutoBlockMigrationV225Test-984573444',owner_user_name='tempest-LiveAutoBlockMigrationV225Test-984573444-project-member'},tags=<?>,task_state='migrating',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:02:21Z,user_data=None,user_id='59e8135d73ee43e088ba5ee7d9bd84b1',uuid=f1267fe1-552c-4312-b9b0-c02eae82a77a,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "75561bb8-bfb9-4100-9c79-271fd50011de", "address": "fa:16:3e:19:d8:66", "network": {"id": "020b4768-a07a-4769-8636-455566c87083", "bridge": "br-int", "label": "tempest-LiveAutoBlockMigrationV225Test-804372870-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "5cc73d75e0864e838eefa90cb33b7e01", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system"}, "devname": "tap75561bb8-bf", "ovs_interfaceid": "75561bb8-bfb9-4100-9c79-271fd50011de", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {"os_vif_delegation": true}, "preserve_on_delete": true, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:02:32 compute-0 nova_compute[192079]: 2025-10-02 12:02:32.160 2 DEBUG nova.network.os_vif_util [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Converting VIF {"id": "75561bb8-bfb9-4100-9c79-271fd50011de", "address": "fa:16:3e:19:d8:66", "network": {"id": "020b4768-a07a-4769-8636-455566c87083", "bridge": "br-int", "label": "tempest-LiveAutoBlockMigrationV225Test-804372870-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "5cc73d75e0864e838eefa90cb33b7e01", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system"}, "devname": "tap75561bb8-bf", "ovs_interfaceid": "75561bb8-bfb9-4100-9c79-271fd50011de", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {"os_vif_delegation": true}, "preserve_on_delete": true, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:02:32 compute-0 nova_compute[192079]: 2025-10-02 12:02:32.161 2 DEBUG nova.network.os_vif_util [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:19:d8:66,bridge_name='br-int',has_traffic_filtering=True,id=75561bb8-bfb9-4100-9c79-271fd50011de,network=Network(020b4768-a07a-4769-8636-455566c87083),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=True,vif_name='tap75561bb8-bf') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:02:32 compute-0 nova_compute[192079]: 2025-10-02 12:02:32.161 2 DEBUG os_vif [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:19:d8:66,bridge_name='br-int',has_traffic_filtering=True,id=75561bb8-bfb9-4100-9c79-271fd50011de,network=Network(020b4768-a07a-4769-8636-455566c87083),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=True,vif_name='tap75561bb8-bf') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:02:32 compute-0 nova_compute[192079]: 2025-10-02 12:02:32.162 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:32 compute-0 nova_compute[192079]: 2025-10-02 12:02:32.162 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:02:32 compute-0 nova_compute[192079]: 2025-10-02 12:02:32.163 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:02:32 compute-0 nova_compute[192079]: 2025-10-02 12:02:32.165 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:32 compute-0 nova_compute[192079]: 2025-10-02 12:02:32.165 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap75561bb8-bf, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:02:32 compute-0 nova_compute[192079]: 2025-10-02 12:02:32.165 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap75561bb8-bf, col_values=(('external_ids', {'iface-id': '75561bb8-bfb9-4100-9c79-271fd50011de', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:19:d8:66', 'vm-uuid': 'f1267fe1-552c-4312-b9b0-c02eae82a77a'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:02:32 compute-0 nova_compute[192079]: 2025-10-02 12:02:32.167 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:32 compute-0 NetworkManager[51160]: <info>  [1759406552.1678] manager: (tap75561bb8-bf): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/43)
Oct 02 12:02:32 compute-0 nova_compute[192079]: 2025-10-02 12:02:32.170 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:02:32 compute-0 nova_compute[192079]: 2025-10-02 12:02:32.172 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:32 compute-0 nova_compute[192079]: 2025-10-02 12:02:32.173 2 INFO os_vif [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:19:d8:66,bridge_name='br-int',has_traffic_filtering=True,id=75561bb8-bfb9-4100-9c79-271fd50011de,network=Network(020b4768-a07a-4769-8636-455566c87083),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=True,vif_name='tap75561bb8-bf')
Oct 02 12:02:32 compute-0 nova_compute[192079]: 2025-10-02 12:02:32.173 2 DEBUG nova.virt.libvirt.driver [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] No dst_numa_info in migrate_data, no cores to power up in pre_live_migration. pre_live_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10954
Oct 02 12:02:32 compute-0 nova_compute[192079]: 2025-10-02 12:02:32.173 2 DEBUG nova.compute.manager [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] driver pre_live_migration data is LibvirtLiveMigrateData(bdms=[],block_migration=True,disk_available_mb=71680,disk_over_commit=<?>,dst_numa_info=<?>,dst_supports_numa_live_migration=<?>,dst_wants_file_backed_memory=False,file_backed_memory_discard=<?>,filename='tmp0t9xiq55',graphics_listen_addr_spice=127.0.0.1,graphics_listen_addr_vnc=::,image_type='qcow2',instance_relative_path='f1267fe1-552c-4312-b9b0-c02eae82a77a',is_shared_block_storage=False,is_shared_instance_path=False,is_volume_backed=False,migration=<?>,old_vol_attachment_ids={},serial_listen_addr=None,serial_listen_ports=[],src_supports_native_luks=<?>,src_supports_numa_live_migration=<?>,supported_perf_events=[],target_connect_addr=None,vifs=[VIFMigrateData],wait_for_vif_plugged=<?>) pre_live_migration /usr/lib/python3.9/site-packages/nova/compute/manager.py:8668
Oct 02 12:02:34 compute-0 nova_compute[192079]: 2025-10-02 12:02:34.291 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:34 compute-0 nova_compute[192079]: 2025-10-02 12:02:34.581 2 DEBUG nova.network.neutron [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Port 75561bb8-bfb9-4100-9c79-271fd50011de updated with migration profile {'migrating_to': 'compute-0.ctlplane.example.com'} successfully _setup_migration_port_profile /usr/lib/python3.9/site-packages/nova/network/neutron.py:354
Oct 02 12:02:34 compute-0 nova_compute[192079]: 2025-10-02 12:02:34.671 2 DEBUG nova.compute.manager [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] pre_live_migration result data is LibvirtLiveMigrateData(bdms=[],block_migration=True,disk_available_mb=71680,disk_over_commit=<?>,dst_numa_info=<?>,dst_supports_numa_live_migration=<?>,dst_wants_file_backed_memory=False,file_backed_memory_discard=<?>,filename='tmp0t9xiq55',graphics_listen_addr_spice=127.0.0.1,graphics_listen_addr_vnc=::,image_type='qcow2',instance_relative_path='f1267fe1-552c-4312-b9b0-c02eae82a77a',is_shared_block_storage=False,is_shared_instance_path=False,is_volume_backed=False,migration=<?>,old_vol_attachment_ids={},serial_listen_addr=None,serial_listen_ports=[],src_supports_native_luks=<?>,src_supports_numa_live_migration=<?>,supported_perf_events=[],target_connect_addr=None,vifs=[VIFMigrateData],wait_for_vif_plugged=True) pre_live_migration /usr/lib/python3.9/site-packages/nova/compute/manager.py:8723
Oct 02 12:02:35 compute-0 kernel: tap75561bb8-bf: entered promiscuous mode
Oct 02 12:02:35 compute-0 NetworkManager[51160]: <info>  [1759406555.0516] manager: (tap75561bb8-bf): new Tun device (/org/freedesktop/NetworkManager/Devices/44)
Oct 02 12:02:35 compute-0 ovn_controller[94336]: 2025-10-02T12:02:35Z|00066|binding|INFO|Claiming lport 75561bb8-bfb9-4100-9c79-271fd50011de for this additional chassis.
Oct 02 12:02:35 compute-0 ovn_controller[94336]: 2025-10-02T12:02:35Z|00067|binding|INFO|75561bb8-bfb9-4100-9c79-271fd50011de: Claiming fa:16:3e:19:d8:66 10.100.0.6
Oct 02 12:02:35 compute-0 ovn_controller[94336]: 2025-10-02T12:02:35Z|00068|binding|INFO|Claiming lport 3197a9b3-066e-4dd5-acdc-899f59bb4e28 for this additional chassis.
Oct 02 12:02:35 compute-0 ovn_controller[94336]: 2025-10-02T12:02:35Z|00069|binding|INFO|3197a9b3-066e-4dd5-acdc-899f59bb4e28: Claiming fa:16:3e:e8:2d:9c 19.80.0.100
Oct 02 12:02:35 compute-0 nova_compute[192079]: 2025-10-02 12:02:35.052 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:35 compute-0 nova_compute[192079]: 2025-10-02 12:02:35.056 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:35 compute-0 nova_compute[192079]: 2025-10-02 12:02:35.058 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:35 compute-0 systemd-udevd[221586]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:02:35 compute-0 systemd-machined[152150]: New machine qemu-6-instance-0000000b.
Oct 02 12:02:35 compute-0 NetworkManager[51160]: <info>  [1759406555.0936] device (tap75561bb8-bf): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:02:35 compute-0 NetworkManager[51160]: <info>  [1759406555.0946] device (tap75561bb8-bf): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:02:35 compute-0 systemd[1]: Started Virtual Machine qemu-6-instance-0000000b.
Oct 02 12:02:35 compute-0 nova_compute[192079]: 2025-10-02 12:02:35.139 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:35 compute-0 ovn_controller[94336]: 2025-10-02T12:02:35Z|00070|binding|INFO|Setting lport 75561bb8-bfb9-4100-9c79-271fd50011de ovn-installed in OVS
Oct 02 12:02:35 compute-0 nova_compute[192079]: 2025-10-02 12:02:35.145 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:35 compute-0 rsyslogd[1013]: imjournal from <np0005466011:nova_compute>: begin to drop messages due to rate-limiting
Oct 02 12:02:36 compute-0 nova_compute[192079]: 2025-10-02 12:02:36.325 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406556.3246713, f1267fe1-552c-4312-b9b0-c02eae82a77a => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:02:36 compute-0 nova_compute[192079]: 2025-10-02 12:02:36.325 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] VM Started (Lifecycle Event)
Oct 02 12:02:36 compute-0 nova_compute[192079]: 2025-10-02 12:02:36.348 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:02:37 compute-0 nova_compute[192079]: 2025-10-02 12:02:37.166 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:37 compute-0 nova_compute[192079]: 2025-10-02 12:02:37.256 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406557.256207, f1267fe1-552c-4312-b9b0-c02eae82a77a => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:02:37 compute-0 nova_compute[192079]: 2025-10-02 12:02:37.257 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] VM Resumed (Lifecycle Event)
Oct 02 12:02:37 compute-0 nova_compute[192079]: 2025-10-02 12:02:37.282 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:02:37 compute-0 nova_compute[192079]: 2025-10-02 12:02:37.285 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: active, current task_state: migrating, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:02:37 compute-0 nova_compute[192079]: 2025-10-02 12:02:37.321 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] During the sync_power process the instance has moved from host compute-2.ctlplane.example.com to host compute-0.ctlplane.example.com
Oct 02 12:02:37 compute-0 nova_compute[192079]: 2025-10-02 12:02:37.685 2 DEBUG oslo_concurrency.lockutils [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Acquiring lock "564d2c1b-397f-4f8c-9bf3-8251528aecd3" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:37 compute-0 nova_compute[192079]: 2025-10-02 12:02:37.686 2 DEBUG oslo_concurrency.lockutils [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lock "564d2c1b-397f-4f8c-9bf3-8251528aecd3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:37 compute-0 nova_compute[192079]: 2025-10-02 12:02:37.724 2 DEBUG nova.compute.manager [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:02:37 compute-0 nova_compute[192079]: 2025-10-02 12:02:37.889 2 DEBUG oslo_concurrency.lockutils [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:37 compute-0 nova_compute[192079]: 2025-10-02 12:02:37.890 2 DEBUG oslo_concurrency.lockutils [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:37 compute-0 nova_compute[192079]: 2025-10-02 12:02:37.895 2 DEBUG nova.virt.hardware [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:02:37 compute-0 nova_compute[192079]: 2025-10-02 12:02:37.895 2 INFO nova.compute.claims [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.062 2 DEBUG nova.compute.provider_tree [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.075 2 DEBUG nova.scheduler.client.report [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.092 2 DEBUG oslo_concurrency.lockutils [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.202s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.093 2 DEBUG nova.compute.manager [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.145 2 DEBUG nova.compute.manager [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.145 2 DEBUG nova.network.neutron [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.175 2 INFO nova.virt.libvirt.driver [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.225 2 DEBUG nova.compute.manager [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.358 2 DEBUG nova.compute.manager [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.359 2 DEBUG nova.virt.libvirt.driver [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.360 2 INFO nova.virt.libvirt.driver [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Creating image(s)
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.360 2 DEBUG oslo_concurrency.lockutils [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Acquiring lock "/var/lib/nova/instances/564d2c1b-397f-4f8c-9bf3-8251528aecd3/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.360 2 DEBUG oslo_concurrency.lockutils [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lock "/var/lib/nova/instances/564d2c1b-397f-4f8c-9bf3-8251528aecd3/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.361 2 DEBUG oslo_concurrency.lockutils [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lock "/var/lib/nova/instances/564d2c1b-397f-4f8c-9bf3-8251528aecd3/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.373 2 DEBUG oslo_concurrency.processutils [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.429 2 DEBUG oslo_concurrency.processutils [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.430 2 DEBUG oslo_concurrency.lockutils [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.430 2 DEBUG oslo_concurrency.lockutils [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.441 2 DEBUG oslo_concurrency.processutils [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.496 2 DEBUG oslo_concurrency.processutils [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.497 2 DEBUG oslo_concurrency.processutils [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/564d2c1b-397f-4f8c-9bf3-8251528aecd3/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.541 2 DEBUG oslo_concurrency.processutils [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/564d2c1b-397f-4f8c-9bf3-8251528aecd3/disk 1073741824" returned: 0 in 0.044s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.542 2 DEBUG oslo_concurrency.lockutils [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.112s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.542 2 DEBUG oslo_concurrency.processutils [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.594 2 DEBUG oslo_concurrency.processutils [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.051s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.595 2 DEBUG nova.virt.disk.api [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Checking if we can resize image /var/lib/nova/instances/564d2c1b-397f-4f8c-9bf3-8251528aecd3/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.595 2 DEBUG oslo_concurrency.processutils [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/564d2c1b-397f-4f8c-9bf3-8251528aecd3/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.649 2 DEBUG oslo_concurrency.processutils [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/564d2c1b-397f-4f8c-9bf3-8251528aecd3/disk --force-share --output=json" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.651 2 DEBUG nova.virt.disk.api [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Cannot resize image /var/lib/nova/instances/564d2c1b-397f-4f8c-9bf3-8251528aecd3/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.651 2 DEBUG nova.objects.instance [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lazy-loading 'migration_context' on Instance uuid 564d2c1b-397f-4f8c-9bf3-8251528aecd3 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.671 2 DEBUG nova.virt.libvirt.driver [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.671 2 DEBUG nova.virt.libvirt.driver [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Ensure instance console log exists: /var/lib/nova/instances/564d2c1b-397f-4f8c-9bf3-8251528aecd3/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.672 2 DEBUG oslo_concurrency.lockutils [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.672 2 DEBUG oslo_concurrency.lockutils [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:38 compute-0 nova_compute[192079]: 2025-10-02 12:02:38.672 2 DEBUG oslo_concurrency.lockutils [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:39 compute-0 podman[221633]: 2025-10-02 12:02:39.14572353 +0000 UTC m=+0.063368655 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_managed=true, container_name=iscsid, managed_by=edpm_ansible, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=iscsid, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']})
Oct 02 12:02:39 compute-0 podman[221632]: 2025-10-02 12:02:39.146062499 +0000 UTC m=+0.063711254 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter)
Oct 02 12:02:39 compute-0 nova_compute[192079]: 2025-10-02 12:02:39.294 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:39 compute-0 nova_compute[192079]: 2025-10-02 12:02:39.694 2 DEBUG nova.network.neutron [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] No network configured allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1188
Oct 02 12:02:39 compute-0 nova_compute[192079]: 2025-10-02 12:02:39.694 2 DEBUG nova.compute.manager [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Instance network_info: |[]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:02:39 compute-0 nova_compute[192079]: 2025-10-02 12:02:39.695 2 DEBUG nova.virt.libvirt.driver [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Start _get_guest_xml network_info=[] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:02:39 compute-0 nova_compute[192079]: 2025-10-02 12:02:39.700 2 WARNING nova.virt.libvirt.driver [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:02:39 compute-0 nova_compute[192079]: 2025-10-02 12:02:39.707 2 DEBUG nova.virt.libvirt.host [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:02:39 compute-0 nova_compute[192079]: 2025-10-02 12:02:39.708 2 DEBUG nova.virt.libvirt.host [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:02:39 compute-0 nova_compute[192079]: 2025-10-02 12:02:39.711 2 DEBUG nova.virt.libvirt.host [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:02:39 compute-0 nova_compute[192079]: 2025-10-02 12:02:39.712 2 DEBUG nova.virt.libvirt.host [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:02:39 compute-0 nova_compute[192079]: 2025-10-02 12:02:39.713 2 DEBUG nova.virt.libvirt.driver [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:02:39 compute-0 nova_compute[192079]: 2025-10-02 12:02:39.713 2 DEBUG nova.virt.hardware [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:02:39 compute-0 nova_compute[192079]: 2025-10-02 12:02:39.714 2 DEBUG nova.virt.hardware [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:02:39 compute-0 nova_compute[192079]: 2025-10-02 12:02:39.714 2 DEBUG nova.virt.hardware [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:02:39 compute-0 nova_compute[192079]: 2025-10-02 12:02:39.714 2 DEBUG nova.virt.hardware [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:02:39 compute-0 nova_compute[192079]: 2025-10-02 12:02:39.715 2 DEBUG nova.virt.hardware [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:02:39 compute-0 nova_compute[192079]: 2025-10-02 12:02:39.715 2 DEBUG nova.virt.hardware [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:02:39 compute-0 nova_compute[192079]: 2025-10-02 12:02:39.715 2 DEBUG nova.virt.hardware [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:02:39 compute-0 nova_compute[192079]: 2025-10-02 12:02:39.715 2 DEBUG nova.virt.hardware [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:02:39 compute-0 nova_compute[192079]: 2025-10-02 12:02:39.716 2 DEBUG nova.virt.hardware [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:02:39 compute-0 nova_compute[192079]: 2025-10-02 12:02:39.716 2 DEBUG nova.virt.hardware [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:02:39 compute-0 nova_compute[192079]: 2025-10-02 12:02:39.716 2 DEBUG nova.virt.hardware [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:02:39 compute-0 nova_compute[192079]: 2025-10-02 12:02:39.720 2 DEBUG nova.objects.instance [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lazy-loading 'pci_devices' on Instance uuid 564d2c1b-397f-4f8c-9bf3-8251528aecd3 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:02:39 compute-0 nova_compute[192079]: 2025-10-02 12:02:39.734 2 DEBUG nova.virt.libvirt.driver [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:02:39 compute-0 nova_compute[192079]:   <uuid>564d2c1b-397f-4f8c-9bf3-8251528aecd3</uuid>
Oct 02 12:02:39 compute-0 nova_compute[192079]:   <name>instance-0000000c</name>
Oct 02 12:02:39 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:02:39 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:02:39 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:02:39 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:       <nova:name>tempest-DeleteServersAdminTestJSON-server-153166058</nova:name>
Oct 02 12:02:39 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:02:39</nova:creationTime>
Oct 02 12:02:39 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:02:39 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:02:39 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:02:39 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:02:39 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:02:39 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:02:39 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:02:39 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:02:39 compute-0 nova_compute[192079]:         <nova:user uuid="bc9dc801fac849e18b73470021e7d314">tempest-DeleteServersAdminTestJSON-344517168-project-member</nova:user>
Oct 02 12:02:39 compute-0 nova_compute[192079]:         <nova:project uuid="d195e92cfe7049bf9f470765ff4435a9">tempest-DeleteServersAdminTestJSON-344517168</nova:project>
Oct 02 12:02:39 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:02:39 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:       <nova:ports/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:02:39 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:02:39 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <system>
Oct 02 12:02:39 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:02:39 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:02:39 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:02:39 compute-0 nova_compute[192079]:       <entry name="serial">564d2c1b-397f-4f8c-9bf3-8251528aecd3</entry>
Oct 02 12:02:39 compute-0 nova_compute[192079]:       <entry name="uuid">564d2c1b-397f-4f8c-9bf3-8251528aecd3</entry>
Oct 02 12:02:39 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     </system>
Oct 02 12:02:39 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:02:39 compute-0 nova_compute[192079]:   <os>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:   </os>
Oct 02 12:02:39 compute-0 nova_compute[192079]:   <features>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:   </features>
Oct 02 12:02:39 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:02:39 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:02:39 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:02:39 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/564d2c1b-397f-4f8c-9bf3-8251528aecd3/disk"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:02:39 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/564d2c1b-397f-4f8c-9bf3-8251528aecd3/disk.config"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:02:39 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/564d2c1b-397f-4f8c-9bf3-8251528aecd3/console.log" append="off"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <video>
Oct 02 12:02:39 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     </video>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:02:39 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:02:39 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:02:39 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:02:39 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:02:39 compute-0 nova_compute[192079]: </domain>
Oct 02 12:02:39 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:02:39 compute-0 nova_compute[192079]: 2025-10-02 12:02:39.787 2 DEBUG nova.virt.libvirt.driver [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:02:39 compute-0 nova_compute[192079]: 2025-10-02 12:02:39.787 2 DEBUG nova.virt.libvirt.driver [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:02:39 compute-0 nova_compute[192079]: 2025-10-02 12:02:39.788 2 INFO nova.virt.libvirt.driver [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Using config drive
Oct 02 12:02:39 compute-0 nova_compute[192079]: 2025-10-02 12:02:39.954 2 INFO nova.virt.libvirt.driver [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Creating config drive at /var/lib/nova/instances/564d2c1b-397f-4f8c-9bf3-8251528aecd3/disk.config
Oct 02 12:02:39 compute-0 nova_compute[192079]: 2025-10-02 12:02:39.959 2 DEBUG oslo_concurrency.processutils [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/564d2c1b-397f-4f8c-9bf3-8251528aecd3/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp80ctbheo execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:40 compute-0 nova_compute[192079]: 2025-10-02 12:02:40.085 2 DEBUG oslo_concurrency.processutils [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/564d2c1b-397f-4f8c-9bf3-8251528aecd3/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp80ctbheo" returned: 0 in 0.126s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:40 compute-0 systemd-machined[152150]: New machine qemu-7-instance-0000000c.
Oct 02 12:02:40 compute-0 systemd[1]: Started Virtual Machine qemu-7-instance-0000000c.
Oct 02 12:02:40 compute-0 ovn_controller[94336]: 2025-10-02T12:02:40Z|00071|binding|INFO|Claiming lport 75561bb8-bfb9-4100-9c79-271fd50011de for this chassis.
Oct 02 12:02:40 compute-0 ovn_controller[94336]: 2025-10-02T12:02:40Z|00072|binding|INFO|75561bb8-bfb9-4100-9c79-271fd50011de: Claiming fa:16:3e:19:d8:66 10.100.0.6
Oct 02 12:02:40 compute-0 ovn_controller[94336]: 2025-10-02T12:02:40Z|00073|binding|INFO|Claiming lport 3197a9b3-066e-4dd5-acdc-899f59bb4e28 for this chassis.
Oct 02 12:02:40 compute-0 ovn_controller[94336]: 2025-10-02T12:02:40Z|00074|binding|INFO|3197a9b3-066e-4dd5-acdc-899f59bb4e28: Claiming fa:16:3e:e8:2d:9c 19.80.0.100
Oct 02 12:02:40 compute-0 ovn_controller[94336]: 2025-10-02T12:02:40Z|00075|binding|INFO|Setting lport 75561bb8-bfb9-4100-9c79-271fd50011de up in Southbound
Oct 02 12:02:40 compute-0 ovn_controller[94336]: 2025-10-02T12:02:40Z|00076|binding|INFO|Setting lport 3197a9b3-066e-4dd5-acdc-899f59bb4e28 up in Southbound
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:40.219 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:e8:2d:9c 19.80.0.100'], port_security=['fa:16:3e:e8:2d:9c 19.80.0.100'], type=, nat_addresses=[], virtual_parent=[], up=[True], options={'requested-chassis': ''}, parent_port=['75561bb8-bfb9-4100-9c79-271fd50011de'], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'name': 'tempest-subport-207340523', 'neutron:cidrs': '19.80.0.100/24', 'neutron:device_id': '', 'neutron:device_owner': 'trunk:subport', 'neutron:mtu': '', 'neutron:network_name': 'neutron-78c3d2d3-8bfe-47b8-9282-3e9091b37043', 'neutron:port_capabilities': '', 'neutron:port_name': 'tempest-subport-207340523', 'neutron:project_id': '5cc73d75e0864e838eefa90cb33b7e01', 'neutron:revision_number': '4', 'neutron:security_group_ids': 'f3fadef5-4bfc-406c-93c4-14d4abd0583e', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[42], additional_encap=[], encap=[], mirror_rules=[], datapath=1160f689-1347-48a0-ba14-b69afc977804, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=2, gateway_chassis=[], requested_chassis=[], logical_port=3197a9b3-066e-4dd5-acdc-899f59bb4e28) old=Port_Binding(up=[False], additional_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:40.221 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:19:d8:66 10.100.0.6'], port_security=['fa:16:3e:19:d8:66 10.100.0.6'], type=, nat_addresses=[], virtual_parent=[], up=[True], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'name': 'tempest-parent-2028677656', 'neutron:cidrs': '10.100.0.6/28', 'neutron:device_id': 'f1267fe1-552c-4312-b9b0-c02eae82a77a', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-020b4768-a07a-4769-8636-455566c87083', 'neutron:port_capabilities': '', 'neutron:port_name': 'tempest-parent-2028677656', 'neutron:project_id': '5cc73d75e0864e838eefa90cb33b7e01', 'neutron:revision_number': '11', 'neutron:security_group_ids': 'f3fadef5-4bfc-406c-93c4-14d4abd0583e', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=11c0be75-bb4b-4e01-8cfa-b9aa4fcaf0e9, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=75561bb8-bfb9-4100-9c79-271fd50011de) old=Port_Binding(up=[False], additional_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:40.222 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 3197a9b3-066e-4dd5-acdc-899f59bb4e28 in datapath 78c3d2d3-8bfe-47b8-9282-3e9091b37043 bound to our chassis
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:40.223 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 78c3d2d3-8bfe-47b8-9282-3e9091b37043
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:40.236 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c262acda-e6eb-4013-b7bf-58bc8358b076]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:40.237 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap78c3d2d3-81 in ovnmeta-78c3d2d3-8bfe-47b8-9282-3e9091b37043 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:40.241 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap78c3d2d3-80 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:40.241 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[cf41746a-13b5-4515-a68c-7308f0535f42]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:40.243 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[fac81206-f12e-41c5-a280-55e1105bdcd3]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:40.263 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[87ba274a-fc75-421f-ad05-fa90294838d0]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:40.294 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2757fe41-7131-45ef-bab1-56999f4ed622]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:40.328 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[5ab8be9e-d4c5-4b74-b954-0db5a893021e]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:40 compute-0 NetworkManager[51160]: <info>  [1759406560.3348] manager: (tap78c3d2d3-80): new Veth device (/org/freedesktop/NetworkManager/Devices/45)
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:40.335 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a33bf0c7-bf9f-4cc3-836c-f493bd0d85fc]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:40 compute-0 systemd-udevd[221704]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:40.365 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[090a001c-c121-4da5-85e8-3a8607ad292a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:40.368 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[36bfcf88-2e2c-4500-ab8c-0756d6f4c9bf]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:40 compute-0 NetworkManager[51160]: <info>  [1759406560.3920] device (tap78c3d2d3-80): carrier: link connected
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:40.395 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[1d4f3185-b9a8-450c-80b3-7b04522ace8a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:40.410 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5fff3220-9a8e-40c8-a418-c6272d79c6f8]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap78c3d2d3-81'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:ce:84:e1'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 26], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 455802, 'reachable_time': 42742, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 221723, 'error': None, 'target': 'ovnmeta-78c3d2d3-8bfe-47b8-9282-3e9091b37043', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:40.423 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[487ccfb8-1c12-401f-97cb-fd373c106922]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fece:84e1'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 455802, 'tstamp': 455802}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 221724, 'error': None, 'target': 'ovnmeta-78c3d2d3-8bfe-47b8-9282-3e9091b37043', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:40.439 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ef1db158-5f24-49bc-add3-7f928dcaf601]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap78c3d2d3-81'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:ce:84:e1'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 26], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 455802, 'reachable_time': 42742, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 221725, 'error': None, 'target': 'ovnmeta-78c3d2d3-8bfe-47b8-9282-3e9091b37043', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:40.468 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b6443650-a48d-48ab-aa1a-2e6fe2dc41cd]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:40 compute-0 nova_compute[192079]: 2025-10-02 12:02:40.480 2 INFO nova.compute.manager [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Post operation of migration started
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:40.521 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7098bb90-f15b-4da1-8b9d-f2ef658898b6]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:40.523 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap78c3d2d3-80, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:40.523 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:40.524 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap78c3d2d3-80, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:02:40 compute-0 nova_compute[192079]: 2025-10-02 12:02:40.526 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:40 compute-0 kernel: tap78c3d2d3-80: entered promiscuous mode
Oct 02 12:02:40 compute-0 NetworkManager[51160]: <info>  [1759406560.5273] manager: (tap78c3d2d3-80): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/46)
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:40.535 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap78c3d2d3-80, col_values=(('external_ids', {'iface-id': 'ced2993d-8371-4d25-a439-bcab0bc7265c'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:02:40 compute-0 ovn_controller[94336]: 2025-10-02T12:02:40Z|00077|binding|INFO|Releasing lport ced2993d-8371-4d25-a439-bcab0bc7265c from this chassis (sb_readonly=0)
Oct 02 12:02:40 compute-0 nova_compute[192079]: 2025-10-02 12:02:40.536 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:40.538 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/78c3d2d3-8bfe-47b8-9282-3e9091b37043.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/78c3d2d3-8bfe-47b8-9282-3e9091b37043.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:40.539 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b04c84a8-5c21-4b1f-9d6f-3c0d56707732]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:40.540 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-78c3d2d3-8bfe-47b8-9282-3e9091b37043
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/78c3d2d3-8bfe-47b8-9282-3e9091b37043.pid.haproxy
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 78c3d2d3-8bfe-47b8-9282-3e9091b37043
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:02:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:40.541 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-78c3d2d3-8bfe-47b8-9282-3e9091b37043', 'env', 'PROCESS_TAG=haproxy-78c3d2d3-8bfe-47b8-9282-3e9091b37043', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/78c3d2d3-8bfe-47b8-9282-3e9091b37043.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:02:40 compute-0 nova_compute[192079]: 2025-10-02 12:02:40.548 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:40 compute-0 podman[221763]: 2025-10-02 12:02:40.930432661 +0000 UTC m=+0.053849794 container create f2bf576e540b47529440ab4caf418ac8043eb66fa79532a6b365177c2506ca53 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-78c3d2d3-8bfe-47b8-9282-3e9091b37043, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0)
Oct 02 12:02:40 compute-0 systemd[1]: Started libpod-conmon-f2bf576e540b47529440ab4caf418ac8043eb66fa79532a6b365177c2506ca53.scope.
Oct 02 12:02:40 compute-0 podman[221763]: 2025-10-02 12:02:40.9033227 +0000 UTC m=+0.026739863 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:02:41 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:02:41 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/9a44f79c747eb2cbfbc7cad74e58720c56ee60df34d877ce3b64803e50fd4ba0/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:02:41 compute-0 podman[221763]: 2025-10-02 12:02:41.04006355 +0000 UTC m=+0.163480683 container init f2bf576e540b47529440ab4caf418ac8043eb66fa79532a6b365177c2506ca53 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-78c3d2d3-8bfe-47b8-9282-3e9091b37043, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001)
Oct 02 12:02:41 compute-0 podman[221763]: 2025-10-02 12:02:41.045038336 +0000 UTC m=+0.168455469 container start f2bf576e540b47529440ab4caf418ac8043eb66fa79532a6b365177c2506ca53 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-78c3d2d3-8bfe-47b8-9282-3e9091b37043, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:02:41 compute-0 neutron-haproxy-ovnmeta-78c3d2d3-8bfe-47b8-9282-3e9091b37043[221780]: [NOTICE]   (221784) : New worker (221786) forked
Oct 02 12:02:41 compute-0 neutron-haproxy-ovnmeta-78c3d2d3-8bfe-47b8-9282-3e9091b37043[221780]: [NOTICE]   (221784) : Loading success.
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:41.116 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 75561bb8-bfb9-4100-9c79-271fd50011de in datapath 020b4768-a07a-4769-8636-455566c87083 unbound from our chassis
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:41.118 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 020b4768-a07a-4769-8636-455566c87083
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:41.128 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[edbcae60-5c70-4da0-9ebc-c2d6ba173d8e]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:41.129 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap020b4768-a1 in ovnmeta-020b4768-a07a-4769-8636-455566c87083 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:41.131 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap020b4768-a0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:41.131 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d721f92b-9c6a-4350-8b82-f99f1e4eb74d]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:41.132 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3e81eaeb-2161-4258-9c17-b569305e80d2]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.137 2 DEBUG oslo_concurrency.lockutils [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Acquiring lock "refresh_cache-f1267fe1-552c-4312-b9b0-c02eae82a77a" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.137 2 DEBUG oslo_concurrency.lockutils [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Acquired lock "refresh_cache-f1267fe1-552c-4312-b9b0-c02eae82a77a" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.138 2 DEBUG nova.network.neutron [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:41.144 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[74fce214-352b-4422-bb62-e604af299b44]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:41.167 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[824f0a8d-7603-4ba2-84c1-13f8d401b47c]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:41.194 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[e0779efc-a6c7-4748-af83-86feb188fe0d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:41.199 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4781a9a0-7d3e-49f3-9cac-46216bb4b1db]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:41 compute-0 NetworkManager[51160]: <info>  [1759406561.2007] manager: (tap020b4768-a0): new Veth device (/org/freedesktop/NetworkManager/Devices/47)
Oct 02 12:02:41 compute-0 systemd-udevd[221713]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:41.231 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[8452bb4a-c8f9-4238-b295-4e225f44efbc]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:41.235 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[6def3bef-d59c-4ad0-a649-d2013c29f350]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:41 compute-0 NetworkManager[51160]: <info>  [1759406561.2644] device (tap020b4768-a0): carrier: link connected
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:41.269 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[c4fe6cb7-729b-4c06-882c-f1303c784987]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:41.285 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8655b5bf-f546-4ee7-b7e0-3b48482245a6]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap020b4768-a1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:62:d2:ce'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 27], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 455889, 'reachable_time': 31962, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 221806, 'error': None, 'target': 'ovnmeta-020b4768-a07a-4769-8636-455566c87083', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:41.301 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b2f043da-d360-45fc-bc13-6ec104f59c44]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe62:d2ce'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 455889, 'tstamp': 455889}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 221807, 'error': None, 'target': 'ovnmeta-020b4768-a07a-4769-8636-455566c87083', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:41.316 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a93db33b-77bb-451e-8ad0-2f214590cdc5]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap020b4768-a1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:62:d2:ce'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 27], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 455889, 'reachable_time': 31962, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 221808, 'error': None, 'target': 'ovnmeta-020b4768-a07a-4769-8636-455566c87083', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:41.347 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[56948375-5383-45bc-898e-3f3efc47d11b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.376 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406561.3758662, 564d2c1b-397f-4f8c-9bf3-8251528aecd3 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.378 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] VM Resumed (Lifecycle Event)
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.381 2 DEBUG nova.compute.manager [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.382 2 DEBUG nova.virt.libvirt.driver [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.389 2 INFO nova.virt.libvirt.driver [-] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Instance spawned successfully.
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.390 2 DEBUG nova.virt.libvirt.driver [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.408 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:41.413 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ea399227-79db-4259-8cac-45653960dae2]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:41.414 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap020b4768-a0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:41.414 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:41.415 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap020b4768-a0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:02:41 compute-0 NetworkManager[51160]: <info>  [1759406561.4178] manager: (tap020b4768-a0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/48)
Oct 02 12:02:41 compute-0 kernel: tap020b4768-a0: entered promiscuous mode
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.418 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.423 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:41.424 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap020b4768-a0, col_values=(('external_ids', {'iface-id': '7ad14bc1-f6e9-4852-aef9-ac72c7291cba'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:02:41 compute-0 ovn_controller[94336]: 2025-10-02T12:02:41Z|00078|binding|INFO|Releasing lport 7ad14bc1-f6e9-4852-aef9-ac72c7291cba from this chassis (sb_readonly=0)
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.427 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:41.429 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/020b4768-a07a-4769-8636-455566c87083.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/020b4768-a07a-4769-8636-455566c87083.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:41.430 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3fefcf1b-5c5b-4138-ab78-5661626efd3d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:41.431 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-020b4768-a07a-4769-8636-455566c87083
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/020b4768-a07a-4769-8636-455566c87083.pid.haproxy
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 020b4768-a07a-4769-8636-455566c87083
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:02:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:41.431 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-020b4768-a07a-4769-8636-455566c87083', 'env', 'PROCESS_TAG=haproxy-020b4768-a07a-4769-8636-455566c87083', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/020b4768-a07a-4769-8636-455566c87083.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.431 2 DEBUG nova.virt.libvirt.driver [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.432 2 DEBUG nova.virt.libvirt.driver [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.433 2 DEBUG nova.virt.libvirt.driver [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.433 2 DEBUG nova.virt.libvirt.driver [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.434 2 DEBUG nova.virt.libvirt.driver [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.435 2 DEBUG nova.virt.libvirt.driver [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.450 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.465 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.466 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406561.3763173, 564d2c1b-397f-4f8c-9bf3-8251528aecd3 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.466 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] VM Started (Lifecycle Event)
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.498 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.501 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.539 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.562 2 INFO nova.compute.manager [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Took 3.20 seconds to spawn the instance on the hypervisor.
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.562 2 DEBUG nova.compute.manager [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.655 2 INFO nova.compute.manager [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Took 3.84 seconds to build instance.
Oct 02 12:02:41 compute-0 nova_compute[192079]: 2025-10-02 12:02:41.676 2 DEBUG oslo_concurrency.lockutils [None req-483c0f3b-491a-4e42-aa2b-8e22771f80b6 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lock "564d2c1b-397f-4f8c-9bf3-8251528aecd3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 3.990s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:41 compute-0 podman[221840]: 2025-10-02 12:02:41.833161251 +0000 UTC m=+0.065648177 container create 75ec33d6f9c3f98d32e379473a5c921553fa97d19026609b82cf165cc3ad0f27 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083, tcib_managed=true, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2)
Oct 02 12:02:41 compute-0 systemd[1]: Started libpod-conmon-75ec33d6f9c3f98d32e379473a5c921553fa97d19026609b82cf165cc3ad0f27.scope.
Oct 02 12:02:41 compute-0 podman[221840]: 2025-10-02 12:02:41.8002371 +0000 UTC m=+0.032724106 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:02:41 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:02:41 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/faf27e3c4a8deb1a7224a5828583149b36bd391b0a7f252f3accfc8a86aa5dbf/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:02:41 compute-0 podman[221840]: 2025-10-02 12:02:41.923607694 +0000 UTC m=+0.156094670 container init 75ec33d6f9c3f98d32e379473a5c921553fa97d19026609b82cf165cc3ad0f27 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, io.buildah.version=1.41.3)
Oct 02 12:02:41 compute-0 podman[221840]: 2025-10-02 12:02:41.930562195 +0000 UTC m=+0.163049151 container start 75ec33d6f9c3f98d32e379473a5c921553fa97d19026609b82cf165cc3ad0f27 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:02:41 compute-0 neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083[221856]: [NOTICE]   (221860) : New worker (221862) forked
Oct 02 12:02:41 compute-0 neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083[221856]: [NOTICE]   (221860) : Loading success.
Oct 02 12:02:42 compute-0 nova_compute[192079]: 2025-10-02 12:02:42.169 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:43 compute-0 nova_compute[192079]: 2025-10-02 12:02:43.675 2 DEBUG oslo_concurrency.lockutils [None req-7dade690-c1bb-4097-8e37-fbf8fb8a7246 be3000d9ab9d4b57b270d9604e0b6c6d 67f42fbc87f441f08ae8df339361d823 - - default default] Acquiring lock "564d2c1b-397f-4f8c-9bf3-8251528aecd3" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:43 compute-0 nova_compute[192079]: 2025-10-02 12:02:43.675 2 DEBUG oslo_concurrency.lockutils [None req-7dade690-c1bb-4097-8e37-fbf8fb8a7246 be3000d9ab9d4b57b270d9604e0b6c6d 67f42fbc87f441f08ae8df339361d823 - - default default] Lock "564d2c1b-397f-4f8c-9bf3-8251528aecd3" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:43 compute-0 nova_compute[192079]: 2025-10-02 12:02:43.676 2 DEBUG oslo_concurrency.lockutils [None req-7dade690-c1bb-4097-8e37-fbf8fb8a7246 be3000d9ab9d4b57b270d9604e0b6c6d 67f42fbc87f441f08ae8df339361d823 - - default default] Acquiring lock "564d2c1b-397f-4f8c-9bf3-8251528aecd3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:43 compute-0 nova_compute[192079]: 2025-10-02 12:02:43.676 2 DEBUG oslo_concurrency.lockutils [None req-7dade690-c1bb-4097-8e37-fbf8fb8a7246 be3000d9ab9d4b57b270d9604e0b6c6d 67f42fbc87f441f08ae8df339361d823 - - default default] Lock "564d2c1b-397f-4f8c-9bf3-8251528aecd3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:43 compute-0 nova_compute[192079]: 2025-10-02 12:02:43.677 2 DEBUG oslo_concurrency.lockutils [None req-7dade690-c1bb-4097-8e37-fbf8fb8a7246 be3000d9ab9d4b57b270d9604e0b6c6d 67f42fbc87f441f08ae8df339361d823 - - default default] Lock "564d2c1b-397f-4f8c-9bf3-8251528aecd3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:43 compute-0 nova_compute[192079]: 2025-10-02 12:02:43.696 2 INFO nova.compute.manager [None req-7dade690-c1bb-4097-8e37-fbf8fb8a7246 be3000d9ab9d4b57b270d9604e0b6c6d 67f42fbc87f441f08ae8df339361d823 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Terminating instance
Oct 02 12:02:43 compute-0 nova_compute[192079]: 2025-10-02 12:02:43.709 2 DEBUG oslo_concurrency.lockutils [None req-7dade690-c1bb-4097-8e37-fbf8fb8a7246 be3000d9ab9d4b57b270d9604e0b6c6d 67f42fbc87f441f08ae8df339361d823 - - default default] Acquiring lock "refresh_cache-564d2c1b-397f-4f8c-9bf3-8251528aecd3" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:02:43 compute-0 nova_compute[192079]: 2025-10-02 12:02:43.710 2 DEBUG oslo_concurrency.lockutils [None req-7dade690-c1bb-4097-8e37-fbf8fb8a7246 be3000d9ab9d4b57b270d9604e0b6c6d 67f42fbc87f441f08ae8df339361d823 - - default default] Acquired lock "refresh_cache-564d2c1b-397f-4f8c-9bf3-8251528aecd3" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:02:43 compute-0 nova_compute[192079]: 2025-10-02 12:02:43.710 2 DEBUG nova.network.neutron [None req-7dade690-c1bb-4097-8e37-fbf8fb8a7246 be3000d9ab9d4b57b270d9604e0b6c6d 67f42fbc87f441f08ae8df339361d823 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:02:43 compute-0 nova_compute[192079]: 2025-10-02 12:02:43.976 2 DEBUG nova.network.neutron [None req-7dade690-c1bb-4097-8e37-fbf8fb8a7246 be3000d9ab9d4b57b270d9604e0b6c6d 67f42fbc87f441f08ae8df339361d823 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:02:44 compute-0 nova_compute[192079]: 2025-10-02 12:02:44.296 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:44 compute-0 nova_compute[192079]: 2025-10-02 12:02:44.988 2 DEBUG nova.network.neutron [None req-7dade690-c1bb-4097-8e37-fbf8fb8a7246 be3000d9ab9d4b57b270d9604e0b6c6d 67f42fbc87f441f08ae8df339361d823 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:02:45 compute-0 nova_compute[192079]: 2025-10-02 12:02:45.025 2 DEBUG oslo_concurrency.lockutils [None req-7dade690-c1bb-4097-8e37-fbf8fb8a7246 be3000d9ab9d4b57b270d9604e0b6c6d 67f42fbc87f441f08ae8df339361d823 - - default default] Releasing lock "refresh_cache-564d2c1b-397f-4f8c-9bf3-8251528aecd3" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:02:45 compute-0 nova_compute[192079]: 2025-10-02 12:02:45.025 2 DEBUG nova.compute.manager [None req-7dade690-c1bb-4097-8e37-fbf8fb8a7246 be3000d9ab9d4b57b270d9604e0b6c6d 67f42fbc87f441f08ae8df339361d823 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:02:45 compute-0 systemd[1]: machine-qemu\x2d7\x2dinstance\x2d0000000c.scope: Deactivated successfully.
Oct 02 12:02:45 compute-0 systemd[1]: machine-qemu\x2d7\x2dinstance\x2d0000000c.scope: Consumed 4.800s CPU time.
Oct 02 12:02:45 compute-0 systemd-machined[152150]: Machine qemu-7-instance-0000000c terminated.
Oct 02 12:02:45 compute-0 nova_compute[192079]: 2025-10-02 12:02:45.264 2 INFO nova.virt.libvirt.driver [-] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Instance destroyed successfully.
Oct 02 12:02:45 compute-0 nova_compute[192079]: 2025-10-02 12:02:45.264 2 DEBUG nova.objects.instance [None req-7dade690-c1bb-4097-8e37-fbf8fb8a7246 be3000d9ab9d4b57b270d9604e0b6c6d 67f42fbc87f441f08ae8df339361d823 - - default default] Lazy-loading 'resources' on Instance uuid 564d2c1b-397f-4f8c-9bf3-8251528aecd3 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:02:45 compute-0 nova_compute[192079]: 2025-10-02 12:02:45.297 2 INFO nova.virt.libvirt.driver [None req-7dade690-c1bb-4097-8e37-fbf8fb8a7246 be3000d9ab9d4b57b270d9604e0b6c6d 67f42fbc87f441f08ae8df339361d823 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Deleting instance files /var/lib/nova/instances/564d2c1b-397f-4f8c-9bf3-8251528aecd3_del
Oct 02 12:02:45 compute-0 nova_compute[192079]: 2025-10-02 12:02:45.298 2 INFO nova.virt.libvirt.driver [None req-7dade690-c1bb-4097-8e37-fbf8fb8a7246 be3000d9ab9d4b57b270d9604e0b6c6d 67f42fbc87f441f08ae8df339361d823 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Deletion of /var/lib/nova/instances/564d2c1b-397f-4f8c-9bf3-8251528aecd3_del complete
Oct 02 12:02:45 compute-0 nova_compute[192079]: 2025-10-02 12:02:45.315 2 DEBUG nova.network.neutron [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Updating instance_info_cache with network_info: [{"id": "75561bb8-bfb9-4100-9c79-271fd50011de", "address": "fa:16:3e:19:d8:66", "network": {"id": "020b4768-a07a-4769-8636-455566c87083", "bridge": "br-int", "label": "tempest-LiveAutoBlockMigrationV225Test-804372870-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "5cc73d75e0864e838eefa90cb33b7e01", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap75561bb8-bf", "ovs_interfaceid": "75561bb8-bfb9-4100-9c79-271fd50011de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {"os_vif_delegation": true}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:02:45 compute-0 nova_compute[192079]: 2025-10-02 12:02:45.430 2 DEBUG oslo_concurrency.lockutils [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Releasing lock "refresh_cache-f1267fe1-552c-4312-b9b0-c02eae82a77a" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:02:45 compute-0 nova_compute[192079]: 2025-10-02 12:02:45.618 2 DEBUG oslo_concurrency.lockutils [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.allocate_pci_devices_for_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:45 compute-0 nova_compute[192079]: 2025-10-02 12:02:45.619 2 DEBUG oslo_concurrency.lockutils [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.allocate_pci_devices_for_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:45 compute-0 nova_compute[192079]: 2025-10-02 12:02:45.619 2 DEBUG oslo_concurrency.lockutils [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.allocate_pci_devices_for_instance" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:45 compute-0 nova_compute[192079]: 2025-10-02 12:02:45.624 2 INFO nova.virt.libvirt.driver [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Sending announce-self command to QEMU monitor. Attempt 1 of 3
Oct 02 12:02:45 compute-0 virtqemud[191807]: Domain id=6 name='instance-0000000b' uuid=f1267fe1-552c-4312-b9b0-c02eae82a77a is tainted: custom-monitor
Oct 02 12:02:45 compute-0 nova_compute[192079]: 2025-10-02 12:02:45.648 2 INFO nova.compute.manager [None req-7dade690-c1bb-4097-8e37-fbf8fb8a7246 be3000d9ab9d4b57b270d9604e0b6c6d 67f42fbc87f441f08ae8df339361d823 - - default default] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Took 0.62 seconds to destroy the instance on the hypervisor.
Oct 02 12:02:45 compute-0 nova_compute[192079]: 2025-10-02 12:02:45.649 2 DEBUG oslo.service.loopingcall [None req-7dade690-c1bb-4097-8e37-fbf8fb8a7246 be3000d9ab9d4b57b270d9604e0b6c6d 67f42fbc87f441f08ae8df339361d823 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:02:45 compute-0 nova_compute[192079]: 2025-10-02 12:02:45.649 2 DEBUG nova.compute.manager [-] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:02:45 compute-0 nova_compute[192079]: 2025-10-02 12:02:45.649 2 DEBUG nova.network.neutron [-] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:02:45 compute-0 nova_compute[192079]: 2025-10-02 12:02:45.853 2 DEBUG nova.network.neutron [-] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:02:45 compute-0 nova_compute[192079]: 2025-10-02 12:02:45.884 2 DEBUG nova.network.neutron [-] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:02:45 compute-0 nova_compute[192079]: 2025-10-02 12:02:45.908 2 INFO nova.compute.manager [-] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Took 0.26 seconds to deallocate network for instance.
Oct 02 12:02:45 compute-0 nova_compute[192079]: 2025-10-02 12:02:45.987 2 DEBUG oslo_concurrency.lockutils [None req-7dade690-c1bb-4097-8e37-fbf8fb8a7246 be3000d9ab9d4b57b270d9604e0b6c6d 67f42fbc87f441f08ae8df339361d823 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:45 compute-0 nova_compute[192079]: 2025-10-02 12:02:45.987 2 DEBUG oslo_concurrency.lockutils [None req-7dade690-c1bb-4097-8e37-fbf8fb8a7246 be3000d9ab9d4b57b270d9604e0b6c6d 67f42fbc87f441f08ae8df339361d823 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:46 compute-0 nova_compute[192079]: 2025-10-02 12:02:46.063 2 DEBUG nova.compute.provider_tree [None req-7dade690-c1bb-4097-8e37-fbf8fb8a7246 be3000d9ab9d4b57b270d9604e0b6c6d 67f42fbc87f441f08ae8df339361d823 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:02:46 compute-0 nova_compute[192079]: 2025-10-02 12:02:46.078 2 DEBUG nova.scheduler.client.report [None req-7dade690-c1bb-4097-8e37-fbf8fb8a7246 be3000d9ab9d4b57b270d9604e0b6c6d 67f42fbc87f441f08ae8df339361d823 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:02:46 compute-0 nova_compute[192079]: 2025-10-02 12:02:46.099 2 DEBUG oslo_concurrency.lockutils [None req-7dade690-c1bb-4097-8e37-fbf8fb8a7246 be3000d9ab9d4b57b270d9604e0b6c6d 67f42fbc87f441f08ae8df339361d823 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.112s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:46 compute-0 nova_compute[192079]: 2025-10-02 12:02:46.124 2 INFO nova.scheduler.client.report [None req-7dade690-c1bb-4097-8e37-fbf8fb8a7246 be3000d9ab9d4b57b270d9604e0b6c6d 67f42fbc87f441f08ae8df339361d823 - - default default] Deleted allocations for instance 564d2c1b-397f-4f8c-9bf3-8251528aecd3
Oct 02 12:02:46 compute-0 nova_compute[192079]: 2025-10-02 12:02:46.190 2 DEBUG oslo_concurrency.lockutils [None req-7dade690-c1bb-4097-8e37-fbf8fb8a7246 be3000d9ab9d4b57b270d9604e0b6c6d 67f42fbc87f441f08ae8df339361d823 - - default default] Lock "564d2c1b-397f-4f8c-9bf3-8251528aecd3" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 2.515s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:46 compute-0 nova_compute[192079]: 2025-10-02 12:02:46.631 2 INFO nova.virt.libvirt.driver [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Sending announce-self command to QEMU monitor. Attempt 2 of 3
Oct 02 12:02:47 compute-0 nova_compute[192079]: 2025-10-02 12:02:47.172 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:47 compute-0 podman[221880]: 2025-10-02 12:02:47.183773482 +0000 UTC m=+0.094426374 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_id=ovn_controller, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, container_name=ovn_controller, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:02:47 compute-0 nova_compute[192079]: 2025-10-02 12:02:47.638 2 INFO nova.virt.libvirt.driver [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Sending announce-self command to QEMU monitor. Attempt 3 of 3
Oct 02 12:02:47 compute-0 nova_compute[192079]: 2025-10-02 12:02:47.643 2 DEBUG nova.compute.manager [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:02:47 compute-0 nova_compute[192079]: 2025-10-02 12:02:47.667 2 DEBUG nova.objects.instance [None req-21ca6e31-d6e2-4253-bdcb-0564f80409a6 4a8407cab3084bfc9d72832f5e66d8c5 5ee775f4f54b4dfda5adf759c97ba3ec - - default default] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Trying to apply a migration context that does not seem to be set for this instance apply_migration_context /usr/lib/python3.9/site-packages/nova/objects/instance.py:1032
Oct 02 12:02:49 compute-0 nova_compute[192079]: 2025-10-02 12:02:49.214 2 DEBUG nova.compute.manager [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Stashing vm_state: active _prep_resize /usr/lib/python3.9/site-packages/nova/compute/manager.py:5560
Oct 02 12:02:49 compute-0 nova_compute[192079]: 2025-10-02 12:02:49.299 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:49 compute-0 nova_compute[192079]: 2025-10-02 12:02:49.323 2 DEBUG oslo_concurrency.lockutils [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:49 compute-0 nova_compute[192079]: 2025-10-02 12:02:49.323 2 DEBUG oslo_concurrency.lockutils [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:49 compute-0 nova_compute[192079]: 2025-10-02 12:02:49.357 2 DEBUG nova.objects.instance [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Lazy-loading 'pci_requests' on Instance uuid 73cd9aef-a159-4d0e-9fc4-435f191db0b9 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:02:49 compute-0 nova_compute[192079]: 2025-10-02 12:02:49.374 2 DEBUG nova.virt.hardware [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:02:49 compute-0 nova_compute[192079]: 2025-10-02 12:02:49.374 2 INFO nova.compute.claims [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:02:49 compute-0 nova_compute[192079]: 2025-10-02 12:02:49.375 2 DEBUG nova.objects.instance [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Lazy-loading 'resources' on Instance uuid 73cd9aef-a159-4d0e-9fc4-435f191db0b9 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:02:49 compute-0 nova_compute[192079]: 2025-10-02 12:02:49.400 2 DEBUG nova.objects.instance [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Lazy-loading 'numa_topology' on Instance uuid 73cd9aef-a159-4d0e-9fc4-435f191db0b9 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:02:49 compute-0 nova_compute[192079]: 2025-10-02 12:02:49.422 2 DEBUG nova.objects.instance [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Lazy-loading 'pci_devices' on Instance uuid 73cd9aef-a159-4d0e-9fc4-435f191db0b9 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:02:49 compute-0 nova_compute[192079]: 2025-10-02 12:02:49.474 2 INFO nova.compute.resource_tracker [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Updating resource usage from migration f6996b59-9ec1-4f50-847f-7511c618a4bb
Oct 02 12:02:49 compute-0 nova_compute[192079]: 2025-10-02 12:02:49.474 2 DEBUG nova.compute.resource_tracker [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Starting to track incoming migration f6996b59-9ec1-4f50-847f-7511c618a4bb with flavor 9ac83da7-f31e-4467-8569-d28002f6aeed _update_usage_from_migration /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1431
Oct 02 12:02:49 compute-0 nova_compute[192079]: 2025-10-02 12:02:49.619 2 DEBUG nova.compute.provider_tree [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:02:49 compute-0 nova_compute[192079]: 2025-10-02 12:02:49.660 2 DEBUG nova.scheduler.client.report [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:02:49 compute-0 nova_compute[192079]: 2025-10-02 12:02:49.704 2 DEBUG oslo_concurrency.lockutils [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 0.381s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:49 compute-0 nova_compute[192079]: 2025-10-02 12:02:49.705 2 INFO nova.compute.manager [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Migrating
Oct 02 12:02:49 compute-0 nova_compute[192079]: 2025-10-02 12:02:49.776 2 DEBUG oslo_concurrency.lockutils [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Acquiring lock "96203b28-73b1-462a-87e9-4b0ca1d1f93b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:49 compute-0 nova_compute[192079]: 2025-10-02 12:02:49.777 2 DEBUG oslo_concurrency.lockutils [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lock "96203b28-73b1-462a-87e9-4b0ca1d1f93b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:49 compute-0 nova_compute[192079]: 2025-10-02 12:02:49.876 2 DEBUG nova.compute.manager [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:02:49 compute-0 nova_compute[192079]: 2025-10-02 12:02:49.975 2 DEBUG oslo_concurrency.lockutils [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:49 compute-0 nova_compute[192079]: 2025-10-02 12:02:49.976 2 DEBUG oslo_concurrency.lockutils [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:49 compute-0 nova_compute[192079]: 2025-10-02 12:02:49.980 2 DEBUG nova.virt.hardware [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:02:49 compute-0 nova_compute[192079]: 2025-10-02 12:02:49.980 2 INFO nova.compute.claims [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.184 2 DEBUG nova.compute.provider_tree [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.198 2 DEBUG nova.scheduler.client.report [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.220 2 DEBUG oslo_concurrency.lockutils [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.244s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.221 2 DEBUG nova.compute.manager [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.316 2 DEBUG nova.compute.manager [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.317 2 DEBUG nova.network.neutron [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.358 2 INFO nova.virt.libvirt.driver [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.387 2 DEBUG nova.compute.manager [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.570 2 DEBUG nova.compute.manager [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.573 2 DEBUG nova.virt.libvirt.driver [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.574 2 INFO nova.virt.libvirt.driver [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Creating image(s)
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.576 2 DEBUG oslo_concurrency.lockutils [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Acquiring lock "/var/lib/nova/instances/96203b28-73b1-462a-87e9-4b0ca1d1f93b/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.576 2 DEBUG oslo_concurrency.lockutils [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lock "/var/lib/nova/instances/96203b28-73b1-462a-87e9-4b0ca1d1f93b/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.577 2 DEBUG oslo_concurrency.lockutils [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lock "/var/lib/nova/instances/96203b28-73b1-462a-87e9-4b0ca1d1f93b/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.597 2 DEBUG oslo_concurrency.processutils [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:50 compute-0 sshd-session[221907]: Accepted publickey for nova from 192.168.122.101 port 52302 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:02:50 compute-0 systemd[1]: Created slice User Slice of UID 42436.
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.617 2 DEBUG nova.network.neutron [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] No network configured allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1188
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.618 2 DEBUG nova.compute.manager [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Instance network_info: |[]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:02:50 compute-0 systemd[1]: Starting User Runtime Directory /run/user/42436...
Oct 02 12:02:50 compute-0 systemd-logind[827]: New session 35 of user nova.
Oct 02 12:02:50 compute-0 systemd[1]: Finished User Runtime Directory /run/user/42436.
Oct 02 12:02:50 compute-0 systemd[1]: Starting User Manager for UID 42436...
Oct 02 12:02:50 compute-0 systemd[221937]: pam_unix(systemd-user:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.667 2 DEBUG oslo_concurrency.processutils [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.070s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.668 2 DEBUG oslo_concurrency.lockutils [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.668 2 DEBUG oslo_concurrency.lockutils [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:50 compute-0 podman[221909]: 2025-10-02 12:02:50.671893252 +0000 UTC m=+0.062478260 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, tcib_managed=true, org.label-schema.license=GPLv2)
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.679 2 DEBUG oslo_concurrency.processutils [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.698 2 DEBUG oslo_concurrency.lockutils [None req-4c7b1039-3f74-458d-9bb8-cf1d9d692fb6 59e8135d73ee43e088ba5ee7d9bd84b1 5cc73d75e0864e838eefa90cb33b7e01 - - default default] Acquiring lock "f1267fe1-552c-4312-b9b0-c02eae82a77a" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.698 2 DEBUG oslo_concurrency.lockutils [None req-4c7b1039-3f74-458d-9bb8-cf1d9d692fb6 59e8135d73ee43e088ba5ee7d9bd84b1 5cc73d75e0864e838eefa90cb33b7e01 - - default default] Lock "f1267fe1-552c-4312-b9b0-c02eae82a77a" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.699 2 DEBUG oslo_concurrency.lockutils [None req-4c7b1039-3f74-458d-9bb8-cf1d9d692fb6 59e8135d73ee43e088ba5ee7d9bd84b1 5cc73d75e0864e838eefa90cb33b7e01 - - default default] Acquiring lock "f1267fe1-552c-4312-b9b0-c02eae82a77a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.699 2 DEBUG oslo_concurrency.lockutils [None req-4c7b1039-3f74-458d-9bb8-cf1d9d692fb6 59e8135d73ee43e088ba5ee7d9bd84b1 5cc73d75e0864e838eefa90cb33b7e01 - - default default] Lock "f1267fe1-552c-4312-b9b0-c02eae82a77a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.699 2 DEBUG oslo_concurrency.lockutils [None req-4c7b1039-3f74-458d-9bb8-cf1d9d692fb6 59e8135d73ee43e088ba5ee7d9bd84b1 5cc73d75e0864e838eefa90cb33b7e01 - - default default] Lock "f1267fe1-552c-4312-b9b0-c02eae82a77a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:50 compute-0 podman[221911]: 2025-10-02 12:02:50.699793955 +0000 UTC m=+0.086399804 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>)
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.717 2 INFO nova.compute.manager [None req-4c7b1039-3f74-458d-9bb8-cf1d9d692fb6 59e8135d73ee43e088ba5ee7d9bd84b1 5cc73d75e0864e838eefa90cb33b7e01 - - default default] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Terminating instance
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.732 2 DEBUG nova.compute.manager [None req-4c7b1039-3f74-458d-9bb8-cf1d9d692fb6 59e8135d73ee43e088ba5ee7d9bd84b1 5cc73d75e0864e838eefa90cb33b7e01 - - default default] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.740 2 DEBUG oslo_concurrency.processutils [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.061s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.740 2 DEBUG oslo_concurrency.processutils [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/96203b28-73b1-462a-87e9-4b0ca1d1f93b/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:50 compute-0 kernel: tap75561bb8-bf (unregistering): left promiscuous mode
Oct 02 12:02:50 compute-0 NetworkManager[51160]: <info>  [1759406570.7742] device (tap75561bb8-bf): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.778 2 DEBUG oslo_concurrency.processutils [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/96203b28-73b1-462a-87e9-4b0ca1d1f93b/disk 1073741824" returned: 0 in 0.038s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.779 2 DEBUG oslo_concurrency.lockutils [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.111s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.780 2 DEBUG oslo_concurrency.processutils [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:50 compute-0 ovn_controller[94336]: 2025-10-02T12:02:50Z|00079|binding|INFO|Releasing lport 75561bb8-bfb9-4100-9c79-271fd50011de from this chassis (sb_readonly=0)
Oct 02 12:02:50 compute-0 ovn_controller[94336]: 2025-10-02T12:02:50Z|00080|binding|INFO|Setting lport 75561bb8-bfb9-4100-9c79-271fd50011de down in Southbound
Oct 02 12:02:50 compute-0 ovn_controller[94336]: 2025-10-02T12:02:50Z|00081|binding|INFO|Releasing lport 3197a9b3-066e-4dd5-acdc-899f59bb4e28 from this chassis (sb_readonly=0)
Oct 02 12:02:50 compute-0 ovn_controller[94336]: 2025-10-02T12:02:50Z|00082|binding|INFO|Setting lport 3197a9b3-066e-4dd5-acdc-899f59bb4e28 down in Southbound
Oct 02 12:02:50 compute-0 ovn_controller[94336]: 2025-10-02T12:02:50Z|00083|binding|INFO|Removing iface tap75561bb8-bf ovn-installed in OVS
Oct 02 12:02:50 compute-0 systemd[221937]: Queued start job for default target Main User Target.
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.797 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:50 compute-0 ovn_controller[94336]: 2025-10-02T12:02:50Z|00084|binding|INFO|Releasing lport 7ad14bc1-f6e9-4852-aef9-ac72c7291cba from this chassis (sb_readonly=0)
Oct 02 12:02:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:50.802 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:e8:2d:9c 19.80.0.100'], port_security=['fa:16:3e:e8:2d:9c 19.80.0.100'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': ''}, parent_port=['75561bb8-bfb9-4100-9c79-271fd50011de'], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'name': 'tempest-subport-207340523', 'neutron:cidrs': '19.80.0.100/24', 'neutron:device_id': '', 'neutron:device_owner': 'trunk:subport', 'neutron:mtu': '', 'neutron:network_name': 'neutron-78c3d2d3-8bfe-47b8-9282-3e9091b37043', 'neutron:port_capabilities': '', 'neutron:port_name': 'tempest-subport-207340523', 'neutron:project_id': '5cc73d75e0864e838eefa90cb33b7e01', 'neutron:revision_number': '5', 'neutron:security_group_ids': 'f3fadef5-4bfc-406c-93c4-14d4abd0583e', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[42], additional_encap=[], encap=[], mirror_rules=[], datapath=1160f689-1347-48a0-ba14-b69afc977804, chassis=[], tunnel_key=2, gateway_chassis=[], requested_chassis=[], logical_port=3197a9b3-066e-4dd5-acdc-899f59bb4e28) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:02:50 compute-0 ovn_controller[94336]: 2025-10-02T12:02:50Z|00085|binding|INFO|Releasing lport ced2993d-8371-4d25-a439-bcab0bc7265c from this chassis (sb_readonly=0)
Oct 02 12:02:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:50.803 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:19:d8:66 10.100.0.6'], port_security=['fa:16:3e:19:d8:66 10.100.0.6'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'name': 'tempest-parent-2028677656', 'neutron:cidrs': '10.100.0.6/28', 'neutron:device_id': 'f1267fe1-552c-4312-b9b0-c02eae82a77a', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-020b4768-a07a-4769-8636-455566c87083', 'neutron:port_capabilities': '', 'neutron:port_name': 'tempest-parent-2028677656', 'neutron:project_id': '5cc73d75e0864e838eefa90cb33b7e01', 'neutron:revision_number': '13', 'neutron:security_group_ids': 'f3fadef5-4bfc-406c-93c4-14d4abd0583e', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=11c0be75-bb4b-4e01-8cfa-b9aa4fcaf0e9, chassis=[], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=75561bb8-bfb9-4100-9c79-271fd50011de) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:02:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:50.804 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 3197a9b3-066e-4dd5-acdc-899f59bb4e28 in datapath 78c3d2d3-8bfe-47b8-9282-3e9091b37043 unbound from our chassis
Oct 02 12:02:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:50.805 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 78c3d2d3-8bfe-47b8-9282-3e9091b37043, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:02:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:50.806 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c41a145c-5417-41c4-a9ef-262362755f45]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:50.807 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-78c3d2d3-8bfe-47b8-9282-3e9091b37043 namespace which is not needed anymore
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.808 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:50 compute-0 systemd[221937]: Created slice User Application Slice.
Oct 02 12:02:50 compute-0 systemd[221937]: Started Mark boot as successful after the user session has run 2 minutes.
Oct 02 12:02:50 compute-0 systemd[221937]: Started Daily Cleanup of User's Temporary Directories.
Oct 02 12:02:50 compute-0 systemd[221937]: Reached target Paths.
Oct 02 12:02:50 compute-0 systemd[221937]: Reached target Timers.
Oct 02 12:02:50 compute-0 systemd[221937]: Starting D-Bus User Message Bus Socket...
Oct 02 12:02:50 compute-0 systemd[221937]: Starting Create User's Volatile Files and Directories...
Oct 02 12:02:50 compute-0 systemd[221937]: Listening on D-Bus User Message Bus Socket.
Oct 02 12:02:50 compute-0 systemd[221937]: Reached target Sockets.
Oct 02 12:02:50 compute-0 systemd[221937]: Finished Create User's Volatile Files and Directories.
Oct 02 12:02:50 compute-0 systemd[221937]: Reached target Basic System.
Oct 02 12:02:50 compute-0 systemd[221937]: Reached target Main User Target.
Oct 02 12:02:50 compute-0 systemd[221937]: Startup finished in 157ms.
Oct 02 12:02:50 compute-0 systemd[1]: Started User Manager for UID 42436.
Oct 02 12:02:50 compute-0 systemd[1]: Started Session 35 of User nova.
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.839 2 DEBUG oslo_concurrency.processutils [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.059s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.840 2 DEBUG nova.virt.disk.api [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Checking if we can resize image /var/lib/nova/instances/96203b28-73b1-462a-87e9-4b0ca1d1f93b/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.841 2 DEBUG oslo_concurrency.processutils [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/96203b28-73b1-462a-87e9-4b0ca1d1f93b/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:50 compute-0 sshd-session[221907]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:02:50 compute-0 systemd[1]: machine-qemu\x2d6\x2dinstance\x2d0000000b.scope: Deactivated successfully.
Oct 02 12:02:50 compute-0 systemd[1]: machine-qemu\x2d6\x2dinstance\x2d0000000b.scope: Consumed 2.215s CPU time.
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.869 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:50 compute-0 systemd-machined[152150]: Machine qemu-6-instance-0000000b terminated.
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.898 2 DEBUG oslo_concurrency.processutils [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/96203b28-73b1-462a-87e9-4b0ca1d1f93b/disk --force-share --output=json" returned: 0 in 0.057s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.898 2 DEBUG nova.virt.disk.api [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Cannot resize image /var/lib/nova/instances/96203b28-73b1-462a-87e9-4b0ca1d1f93b/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.899 2 DEBUG nova.objects.instance [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lazy-loading 'migration_context' on Instance uuid 96203b28-73b1-462a-87e9-4b0ca1d1f93b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:02:50 compute-0 sshd-session[221988]: Received disconnect from 192.168.122.101 port 52302:11: disconnected by user
Oct 02 12:02:50 compute-0 sshd-session[221988]: Disconnected from user nova 192.168.122.101 port 52302
Oct 02 12:02:50 compute-0 sshd-session[221907]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:02:50 compute-0 systemd[1]: session-35.scope: Deactivated successfully.
Oct 02 12:02:50 compute-0 systemd-logind[827]: Session 35 logged out. Waiting for processes to exit.
Oct 02 12:02:50 compute-0 systemd-logind[827]: Removed session 35.
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.918 2 DEBUG nova.virt.libvirt.driver [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.918 2 DEBUG nova.virt.libvirt.driver [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Ensure instance console log exists: /var/lib/nova/instances/96203b28-73b1-462a-87e9-4b0ca1d1f93b/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.919 2 DEBUG oslo_concurrency.lockutils [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.919 2 DEBUG oslo_concurrency.lockutils [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.919 2 DEBUG oslo_concurrency.lockutils [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.921 2 DEBUG nova.virt.libvirt.driver [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Start _get_guest_xml network_info=[] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.925 2 WARNING nova.virt.libvirt.driver [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.930 2 DEBUG nova.virt.libvirt.host [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.931 2 DEBUG nova.virt.libvirt.host [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.933 2 DEBUG nova.virt.libvirt.host [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.934 2 DEBUG nova.virt.libvirt.host [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.935 2 DEBUG nova.virt.libvirt.driver [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:02:50 compute-0 neutron-haproxy-ovnmeta-78c3d2d3-8bfe-47b8-9282-3e9091b37043[221780]: [NOTICE]   (221784) : haproxy version is 2.8.14-c23fe91
Oct 02 12:02:50 compute-0 neutron-haproxy-ovnmeta-78c3d2d3-8bfe-47b8-9282-3e9091b37043[221780]: [NOTICE]   (221784) : path to executable is /usr/sbin/haproxy
Oct 02 12:02:50 compute-0 neutron-haproxy-ovnmeta-78c3d2d3-8bfe-47b8-9282-3e9091b37043[221780]: [WARNING]  (221784) : Exiting Master process...
Oct 02 12:02:50 compute-0 neutron-haproxy-ovnmeta-78c3d2d3-8bfe-47b8-9282-3e9091b37043[221780]: [ALERT]    (221784) : Current worker (221786) exited with code 143 (Terminated)
Oct 02 12:02:50 compute-0 neutron-haproxy-ovnmeta-78c3d2d3-8bfe-47b8-9282-3e9091b37043[221780]: [WARNING]  (221784) : All workers exited. Exiting... (0)
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.935 2 DEBUG nova.virt.hardware [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.936 2 DEBUG nova.virt.hardware [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.936 2 DEBUG nova.virt.hardware [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.936 2 DEBUG nova.virt.hardware [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.936 2 DEBUG nova.virt.hardware [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.937 2 DEBUG nova.virt.hardware [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.937 2 DEBUG nova.virt.hardware [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:02:50 compute-0 systemd[1]: libpod-f2bf576e540b47529440ab4caf418ac8043eb66fa79532a6b365177c2506ca53.scope: Deactivated successfully.
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.937 2 DEBUG nova.virt.hardware [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.937 2 DEBUG nova.virt.hardware [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.937 2 DEBUG nova.virt.hardware [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.938 2 DEBUG nova.virt.hardware [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.941 2 DEBUG nova.objects.instance [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lazy-loading 'pci_devices' on Instance uuid 96203b28-73b1-462a-87e9-4b0ca1d1f93b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:02:50 compute-0 podman[222004]: 2025-10-02 12:02:50.945241658 +0000 UTC m=+0.043448869 container died f2bf576e540b47529440ab4caf418ac8043eb66fa79532a6b365177c2506ca53 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-78c3d2d3-8bfe-47b8-9282-3e9091b37043, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:02:50 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-f2bf576e540b47529440ab4caf418ac8043eb66fa79532a6b365177c2506ca53-userdata-shm.mount: Deactivated successfully.
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.975 2 DEBUG nova.virt.libvirt.driver [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:02:50 compute-0 nova_compute[192079]:   <uuid>96203b28-73b1-462a-87e9-4b0ca1d1f93b</uuid>
Oct 02 12:02:50 compute-0 nova_compute[192079]:   <name>instance-0000000e</name>
Oct 02 12:02:50 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:02:50 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:02:50 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:02:50 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:       <nova:name>tempest-DeleteServersAdminTestJSON-server-810497072</nova:name>
Oct 02 12:02:50 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:02:50</nova:creationTime>
Oct 02 12:02:50 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:02:50 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:02:50 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:02:50 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:02:50 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:02:50 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:02:50 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:02:50 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:02:50 compute-0 nova_compute[192079]:         <nova:user uuid="bc9dc801fac849e18b73470021e7d314">tempest-DeleteServersAdminTestJSON-344517168-project-member</nova:user>
Oct 02 12:02:50 compute-0 nova_compute[192079]:         <nova:project uuid="d195e92cfe7049bf9f470765ff4435a9">tempest-DeleteServersAdminTestJSON-344517168</nova:project>
Oct 02 12:02:50 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:02:50 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:       <nova:ports/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:02:50 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:02:50 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <system>
Oct 02 12:02:50 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:02:50 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:02:50 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:02:50 compute-0 nova_compute[192079]:       <entry name="serial">96203b28-73b1-462a-87e9-4b0ca1d1f93b</entry>
Oct 02 12:02:50 compute-0 nova_compute[192079]:       <entry name="uuid">96203b28-73b1-462a-87e9-4b0ca1d1f93b</entry>
Oct 02 12:02:50 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     </system>
Oct 02 12:02:50 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:02:50 compute-0 nova_compute[192079]:   <os>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:   </os>
Oct 02 12:02:50 compute-0 nova_compute[192079]:   <features>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:   </features>
Oct 02 12:02:50 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:02:50 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:02:50 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:02:50 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/96203b28-73b1-462a-87e9-4b0ca1d1f93b/disk"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:02:50 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/96203b28-73b1-462a-87e9-4b0ca1d1f93b/disk.config"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:02:50 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/96203b28-73b1-462a-87e9-4b0ca1d1f93b/console.log" append="off"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <video>
Oct 02 12:02:50 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     </video>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:02:50 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:02:50 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:02:50 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:02:50 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:02:50 compute-0 nova_compute[192079]: </domain>
Oct 02 12:02:50 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.976 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:50 compute-0 systemd[1]: var-lib-containers-storage-overlay-9a44f79c747eb2cbfbc7cad74e58720c56ee60df34d877ce3b64803e50fd4ba0-merged.mount: Deactivated successfully.
Oct 02 12:02:50 compute-0 nova_compute[192079]: 2025-10-02 12:02:50.982 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:50 compute-0 podman[222004]: 2025-10-02 12:02:50.98884579 +0000 UTC m=+0.087053001 container cleanup f2bf576e540b47529440ab4caf418ac8043eb66fa79532a6b365177c2506ca53 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-78c3d2d3-8bfe-47b8-9282-3e9091b37043, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.schema-version=1.0)
Oct 02 12:02:50 compute-0 systemd[1]: libpod-conmon-f2bf576e540b47529440ab4caf418ac8043eb66fa79532a6b365177c2506ca53.scope: Deactivated successfully.
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.017 2 INFO nova.virt.libvirt.driver [-] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Instance destroyed successfully.
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.018 2 DEBUG nova.objects.instance [None req-4c7b1039-3f74-458d-9bb8-cf1d9d692fb6 59e8135d73ee43e088ba5ee7d9bd84b1 5cc73d75e0864e838eefa90cb33b7e01 - - default default] Lazy-loading 'resources' on Instance uuid f1267fe1-552c-4312-b9b0-c02eae82a77a obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:02:51 compute-0 sshd-session[222020]: Accepted publickey for nova from 192.168.122.101 port 52312 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:02:51 compute-0 systemd-logind[827]: New session 37 of user nova.
Oct 02 12:02:51 compute-0 podman[222051]: 2025-10-02 12:02:51.047559756 +0000 UTC m=+0.037046084 container remove f2bf576e540b47529440ab4caf418ac8043eb66fa79532a6b365177c2506ca53 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-78c3d2d3-8bfe-47b8-9282-3e9091b37043, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:02:51 compute-0 systemd[1]: Started Session 37 of User nova.
Oct 02 12:02:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:51.053 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8bdd2d30-647e-4b03-a20b-a21f8936df97]: (4, ('Thu Oct  2 12:02:50 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-78c3d2d3-8bfe-47b8-9282-3e9091b37043 (f2bf576e540b47529440ab4caf418ac8043eb66fa79532a6b365177c2506ca53)\nf2bf576e540b47529440ab4caf418ac8043eb66fa79532a6b365177c2506ca53\nThu Oct  2 12:02:50 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-78c3d2d3-8bfe-47b8-9282-3e9091b37043 (f2bf576e540b47529440ab4caf418ac8043eb66fa79532a6b365177c2506ca53)\nf2bf576e540b47529440ab4caf418ac8043eb66fa79532a6b365177c2506ca53\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:51.055 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3b403029-46bc-474c-9278-408da6d7030e]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:51.056 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap78c3d2d3-80, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:02:51 compute-0 sshd-session[222020]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.058 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:51 compute-0 kernel: tap78c3d2d3-80: left promiscuous mode
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.073 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:51.076 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[acfc9213-8ac0-46ff-a93a-9fc69e161e9a]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:51.102 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6d885ae5-f2b8-437e-8ce6-4ef957bc175b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:51.104 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6c69f7df-227c-46cb-829e-94c2814fb93b]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:51 compute-0 sshd-session[222068]: Received disconnect from 192.168.122.101 port 52312:11: disconnected by user
Oct 02 12:02:51 compute-0 sshd-session[222068]: Disconnected from user nova 192.168.122.101 port 52312
Oct 02 12:02:51 compute-0 sshd-session[222020]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:02:51 compute-0 systemd[1]: session-37.scope: Deactivated successfully.
Oct 02 12:02:51 compute-0 systemd-logind[827]: Session 37 logged out. Waiting for processes to exit.
Oct 02 12:02:51 compute-0 systemd-logind[827]: Removed session 37.
Oct 02 12:02:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:51.120 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[66a100bd-da5e-403c-9d6d-477af76bf277]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 455795, 'reachable_time': 16337, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 222075, 'error': None, 'target': 'ovnmeta-78c3d2d3-8bfe-47b8-9282-3e9091b37043', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:51.122 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-78c3d2d3-8bfe-47b8-9282-3e9091b37043 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:02:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:51.123 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[23144f82-84e1-4175-a9de-8bd0eb3e9de8]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:51.123 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 75561bb8-bfb9-4100-9c79-271fd50011de in datapath 020b4768-a07a-4769-8636-455566c87083 unbound from our chassis
Oct 02 12:02:51 compute-0 systemd[1]: run-netns-ovnmeta\x2d78c3d2d3\x2d8bfe\x2d47b8\x2d9282\x2d3e9091b37043.mount: Deactivated successfully.
Oct 02 12:02:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:51.124 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 020b4768-a07a-4769-8636-455566c87083, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.125 2 DEBUG nova.virt.libvirt.vif [None req-4c7b1039-3f74-458d-9bb8-cf1d9d692fb6 59e8135d73ee43e088ba5ee7d9bd84b1 5cc73d75e0864e838eefa90cb33b7e01 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=True,config_drive='True',created_at=2025-10-02T12:02:09Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description=None,display_name='tempest-LiveAutoBlockMigrationV225Test-server-1420306859',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-liveautoblockmigrationv225test-server-1420306859',id=11,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:02:21Z,launched_on='compute-2.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='5cc73d75e0864e838eefa90cb33b7e01',ramdisk_id='',reservation_id='r-f4n74pvj',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',clean_attempts='1',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',imag
e_min_ram='0',owner_project_name='tempest-LiveAutoBlockMigrationV225Test-984573444',owner_user_name='tempest-LiveAutoBlockMigrationV225Test-984573444-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:02:47Z,user_data=None,user_id='59e8135d73ee43e088ba5ee7d9bd84b1',uuid=f1267fe1-552c-4312-b9b0-c02eae82a77a,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "75561bb8-bfb9-4100-9c79-271fd50011de", "address": "fa:16:3e:19:d8:66", "network": {"id": "020b4768-a07a-4769-8636-455566c87083", "bridge": "br-int", "label": "tempest-LiveAutoBlockMigrationV225Test-804372870-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "5cc73d75e0864e838eefa90cb33b7e01", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap75561bb8-bf", "ovs_interfaceid": "75561bb8-bfb9-4100-9c79-271fd50011de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {"os_vif_delegation": true}, "preserve_on_delete": true, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:02:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:51.125 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1640a297-b2fe-45f3-8653-7a9f47b3e1e7]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.126 2 DEBUG nova.network.os_vif_util [None req-4c7b1039-3f74-458d-9bb8-cf1d9d692fb6 59e8135d73ee43e088ba5ee7d9bd84b1 5cc73d75e0864e838eefa90cb33b7e01 - - default default] Converting VIF {"id": "75561bb8-bfb9-4100-9c79-271fd50011de", "address": "fa:16:3e:19:d8:66", "network": {"id": "020b4768-a07a-4769-8636-455566c87083", "bridge": "br-int", "label": "tempest-LiveAutoBlockMigrationV225Test-804372870-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "5cc73d75e0864e838eefa90cb33b7e01", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap75561bb8-bf", "ovs_interfaceid": "75561bb8-bfb9-4100-9c79-271fd50011de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {"os_vif_delegation": true}, "preserve_on_delete": true, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:02:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:51.126 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-020b4768-a07a-4769-8636-455566c87083 namespace which is not needed anymore
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.126 2 DEBUG nova.network.os_vif_util [None req-4c7b1039-3f74-458d-9bb8-cf1d9d692fb6 59e8135d73ee43e088ba5ee7d9bd84b1 5cc73d75e0864e838eefa90cb33b7e01 - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:19:d8:66,bridge_name='br-int',has_traffic_filtering=True,id=75561bb8-bfb9-4100-9c79-271fd50011de,network=Network(020b4768-a07a-4769-8636-455566c87083),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=True,vif_name='tap75561bb8-bf') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.127 2 DEBUG os_vif [None req-4c7b1039-3f74-458d-9bb8-cf1d9d692fb6 59e8135d73ee43e088ba5ee7d9bd84b1 5cc73d75e0864e838eefa90cb33b7e01 - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:19:d8:66,bridge_name='br-int',has_traffic_filtering=True,id=75561bb8-bfb9-4100-9c79-271fd50011de,network=Network(020b4768-a07a-4769-8636-455566c87083),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=True,vif_name='tap75561bb8-bf') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.128 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.128 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap75561bb8-bf, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.130 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.131 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.133 2 INFO os_vif [None req-4c7b1039-3f74-458d-9bb8-cf1d9d692fb6 59e8135d73ee43e088ba5ee7d9bd84b1 5cc73d75e0864e838eefa90cb33b7e01 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:19:d8:66,bridge_name='br-int',has_traffic_filtering=True,id=75561bb8-bfb9-4100-9c79-271fd50011de,network=Network(020b4768-a07a-4769-8636-455566c87083),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=True,vif_name='tap75561bb8-bf')
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.133 2 INFO nova.virt.libvirt.driver [None req-4c7b1039-3f74-458d-9bb8-cf1d9d692fb6 59e8135d73ee43e088ba5ee7d9bd84b1 5cc73d75e0864e838eefa90cb33b7e01 - - default default] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Deleting instance files /var/lib/nova/instances/f1267fe1-552c-4312-b9b0-c02eae82a77a_del
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.134 2 INFO nova.virt.libvirt.driver [None req-4c7b1039-3f74-458d-9bb8-cf1d9d692fb6 59e8135d73ee43e088ba5ee7d9bd84b1 5cc73d75e0864e838eefa90cb33b7e01 - - default default] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Deletion of /var/lib/nova/instances/f1267fe1-552c-4312-b9b0-c02eae82a77a_del complete
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.172 2 DEBUG nova.virt.libvirt.driver [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.172 2 DEBUG nova.virt.libvirt.driver [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.173 2 INFO nova.virt.libvirt.driver [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Using config drive
Oct 02 12:02:51 compute-0 neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083[221856]: [NOTICE]   (221860) : haproxy version is 2.8.14-c23fe91
Oct 02 12:02:51 compute-0 neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083[221856]: [NOTICE]   (221860) : path to executable is /usr/sbin/haproxy
Oct 02 12:02:51 compute-0 neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083[221856]: [WARNING]  (221860) : Exiting Master process...
Oct 02 12:02:51 compute-0 neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083[221856]: [ALERT]    (221860) : Current worker (221862) exited with code 143 (Terminated)
Oct 02 12:02:51 compute-0 neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083[221856]: [WARNING]  (221860) : All workers exited. Exiting... (0)
Oct 02 12:02:51 compute-0 systemd[1]: libpod-75ec33d6f9c3f98d32e379473a5c921553fa97d19026609b82cf165cc3ad0f27.scope: Deactivated successfully.
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.240 2 INFO nova.compute.manager [None req-4c7b1039-3f74-458d-9bb8-cf1d9d692fb6 59e8135d73ee43e088ba5ee7d9bd84b1 5cc73d75e0864e838eefa90cb33b7e01 - - default default] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Took 0.51 seconds to destroy the instance on the hypervisor.
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.241 2 DEBUG oslo.service.loopingcall [None req-4c7b1039-3f74-458d-9bb8-cf1d9d692fb6 59e8135d73ee43e088ba5ee7d9bd84b1 5cc73d75e0864e838eefa90cb33b7e01 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.241 2 DEBUG nova.compute.manager [-] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.241 2 DEBUG nova.network.neutron [-] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:02:51 compute-0 conmon[221856]: conmon 75ec33d6f9c3f98d32e3 <nwarn>: Failed to open cgroups file: /sys/fs/cgroup/machine.slice/libpod-75ec33d6f9c3f98d32e379473a5c921553fa97d19026609b82cf165cc3ad0f27.scope/container/memory.events
Oct 02 12:02:51 compute-0 podman[222093]: 2025-10-02 12:02:51.246659722 +0000 UTC m=+0.042706919 container died 75ec33d6f9c3f98d32e379473a5c921553fa97d19026609b82cf165cc3ad0f27 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_managed=true, io.buildah.version=1.41.3)
Oct 02 12:02:51 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-75ec33d6f9c3f98d32e379473a5c921553fa97d19026609b82cf165cc3ad0f27-userdata-shm.mount: Deactivated successfully.
Oct 02 12:02:51 compute-0 podman[222093]: 2025-10-02 12:02:51.282735478 +0000 UTC m=+0.078782655 container cleanup 75ec33d6f9c3f98d32e379473a5c921553fa97d19026609b82cf165cc3ad0f27 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS)
Oct 02 12:02:51 compute-0 systemd[1]: libpod-conmon-75ec33d6f9c3f98d32e379473a5c921553fa97d19026609b82cf165cc3ad0f27.scope: Deactivated successfully.
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.294 2 DEBUG nova.compute.manager [req-cb087208-82f3-44d1-acea-31198b6efc81 req-920c5c76-ac5a-4ca0-b517-761737fe01db 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Received event network-vif-unplugged-75561bb8-bfb9-4100-9c79-271fd50011de external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.294 2 DEBUG oslo_concurrency.lockutils [req-cb087208-82f3-44d1-acea-31198b6efc81 req-920c5c76-ac5a-4ca0-b517-761737fe01db 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "f1267fe1-552c-4312-b9b0-c02eae82a77a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.294 2 DEBUG oslo_concurrency.lockutils [req-cb087208-82f3-44d1-acea-31198b6efc81 req-920c5c76-ac5a-4ca0-b517-761737fe01db 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "f1267fe1-552c-4312-b9b0-c02eae82a77a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.295 2 DEBUG oslo_concurrency.lockutils [req-cb087208-82f3-44d1-acea-31198b6efc81 req-920c5c76-ac5a-4ca0-b517-761737fe01db 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "f1267fe1-552c-4312-b9b0-c02eae82a77a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.295 2 DEBUG nova.compute.manager [req-cb087208-82f3-44d1-acea-31198b6efc81 req-920c5c76-ac5a-4ca0-b517-761737fe01db 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] No waiting events found dispatching network-vif-unplugged-75561bb8-bfb9-4100-9c79-271fd50011de pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.295 2 DEBUG nova.compute.manager [req-cb087208-82f3-44d1-acea-31198b6efc81 req-920c5c76-ac5a-4ca0-b517-761737fe01db 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Received event network-vif-unplugged-75561bb8-bfb9-4100-9c79-271fd50011de for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:02:51 compute-0 podman[222123]: 2025-10-02 12:02:51.356381562 +0000 UTC m=+0.044832787 container remove 75ec33d6f9c3f98d32e379473a5c921553fa97d19026609b82cf165cc3ad0f27 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.build-date=20251001)
Oct 02 12:02:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:51.364 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8a91b335-bd55-49ac-ba04-7c310659589a]: (4, ('Thu Oct  2 12:02:51 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083 (75ec33d6f9c3f98d32e379473a5c921553fa97d19026609b82cf165cc3ad0f27)\n75ec33d6f9c3f98d32e379473a5c921553fa97d19026609b82cf165cc3ad0f27\nThu Oct  2 12:02:51 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-020b4768-a07a-4769-8636-455566c87083 (75ec33d6f9c3f98d32e379473a5c921553fa97d19026609b82cf165cc3ad0f27)\n75ec33d6f9c3f98d32e379473a5c921553fa97d19026609b82cf165cc3ad0f27\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:51.365 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[985d969c-ad03-4aa9-9226-7e866e061b11]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:51.366 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap020b4768-a0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.368 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:51 compute-0 kernel: tap020b4768-a0: left promiscuous mode
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.392 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.393 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.395 2 INFO nova.virt.libvirt.driver [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Creating config drive at /var/lib/nova/instances/96203b28-73b1-462a-87e9-4b0ca1d1f93b/disk.config
Oct 02 12:02:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:51.395 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9156d4f6-cc89-4b2e-b04c-0561c007549f]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.399 2 DEBUG oslo_concurrency.processutils [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/96203b28-73b1-462a-87e9-4b0ca1d1f93b/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpy75x9puv execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:51.427 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[648f1ed7-33a7-4768-921b-87396dc8e337]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:51.429 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3cbdf7c3-7490-474c-96cb-36be9ed11620]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:51.449 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[817b0486-323b-4c4a-8b10-6d902df34417]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 455881, 'reachable_time': 17832, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 222141, 'error': None, 'target': 'ovnmeta-020b4768-a07a-4769-8636-455566c87083', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:51.451 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-020b4768-a07a-4769-8636-455566c87083 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:02:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:51.451 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[3e4eb4db-b6b3-41ec-a916-9ecf036f84cc]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:02:51 compute-0 nova_compute[192079]: 2025-10-02 12:02:51.527 2 DEBUG oslo_concurrency.processutils [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/96203b28-73b1-462a-87e9-4b0ca1d1f93b/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpy75x9puv" returned: 0 in 0.128s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:51 compute-0 systemd-machined[152150]: New machine qemu-8-instance-0000000e.
Oct 02 12:02:51 compute-0 systemd[1]: Started Virtual Machine qemu-8-instance-0000000e.
Oct 02 12:02:51 compute-0 systemd[1]: var-lib-containers-storage-overlay-faf27e3c4a8deb1a7224a5828583149b36bd391b0a7f252f3accfc8a86aa5dbf-merged.mount: Deactivated successfully.
Oct 02 12:02:51 compute-0 systemd[1]: run-netns-ovnmeta\x2d020b4768\x2da07a\x2d4769\x2d8636\x2d455566c87083.mount: Deactivated successfully.
Oct 02 12:02:52 compute-0 nova_compute[192079]: 2025-10-02 12:02:52.650 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406572.6496325, 96203b28-73b1-462a-87e9-4b0ca1d1f93b => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:02:52 compute-0 nova_compute[192079]: 2025-10-02 12:02:52.651 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] VM Resumed (Lifecycle Event)
Oct 02 12:02:52 compute-0 nova_compute[192079]: 2025-10-02 12:02:52.653 2 DEBUG nova.compute.manager [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:02:52 compute-0 nova_compute[192079]: 2025-10-02 12:02:52.653 2 DEBUG nova.virt.libvirt.driver [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:02:52 compute-0 nova_compute[192079]: 2025-10-02 12:02:52.656 2 INFO nova.virt.libvirt.driver [-] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Instance spawned successfully.
Oct 02 12:02:52 compute-0 nova_compute[192079]: 2025-10-02 12:02:52.656 2 DEBUG nova.virt.libvirt.driver [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:02:52 compute-0 nova_compute[192079]: 2025-10-02 12:02:52.693 2 DEBUG nova.virt.libvirt.driver [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:02:52 compute-0 nova_compute[192079]: 2025-10-02 12:02:52.693 2 DEBUG nova.virt.libvirt.driver [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:02:52 compute-0 nova_compute[192079]: 2025-10-02 12:02:52.694 2 DEBUG nova.virt.libvirt.driver [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:02:52 compute-0 nova_compute[192079]: 2025-10-02 12:02:52.694 2 DEBUG nova.virt.libvirt.driver [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:02:52 compute-0 nova_compute[192079]: 2025-10-02 12:02:52.694 2 DEBUG nova.virt.libvirt.driver [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:02:52 compute-0 nova_compute[192079]: 2025-10-02 12:02:52.695 2 DEBUG nova.virt.libvirt.driver [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:02:52 compute-0 nova_compute[192079]: 2025-10-02 12:02:52.700 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:02:52 compute-0 nova_compute[192079]: 2025-10-02 12:02:52.702 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:02:52 compute-0 nova_compute[192079]: 2025-10-02 12:02:52.734 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:02:52 compute-0 nova_compute[192079]: 2025-10-02 12:02:52.734 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406572.6497633, 96203b28-73b1-462a-87e9-4b0ca1d1f93b => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:02:52 compute-0 nova_compute[192079]: 2025-10-02 12:02:52.734 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] VM Started (Lifecycle Event)
Oct 02 12:02:52 compute-0 nova_compute[192079]: 2025-10-02 12:02:52.759 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:02:52 compute-0 nova_compute[192079]: 2025-10-02 12:02:52.762 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:02:52 compute-0 nova_compute[192079]: 2025-10-02 12:02:52.776 2 INFO nova.compute.manager [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Took 2.20 seconds to spawn the instance on the hypervisor.
Oct 02 12:02:52 compute-0 nova_compute[192079]: 2025-10-02 12:02:52.777 2 DEBUG nova.compute.manager [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:02:52 compute-0 nova_compute[192079]: 2025-10-02 12:02:52.801 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:02:52 compute-0 nova_compute[192079]: 2025-10-02 12:02:52.876 2 INFO nova.compute.manager [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Took 2.95 seconds to build instance.
Oct 02 12:02:52 compute-0 nova_compute[192079]: 2025-10-02 12:02:52.903 2 DEBUG oslo_concurrency.lockutils [None req-a0c86e45-d80f-428a-b813-5299fa141f25 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lock "96203b28-73b1-462a-87e9-4b0ca1d1f93b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 3.126s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:53 compute-0 nova_compute[192079]: 2025-10-02 12:02:53.393 2 DEBUG nova.compute.manager [req-27d38c0a-6e92-4ae3-96fa-59b69492b5dd req-fb9952c8-7442-4aa7-8af1-7620bad44eba 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Received event network-vif-plugged-75561bb8-bfb9-4100-9c79-271fd50011de external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:02:53 compute-0 nova_compute[192079]: 2025-10-02 12:02:53.393 2 DEBUG oslo_concurrency.lockutils [req-27d38c0a-6e92-4ae3-96fa-59b69492b5dd req-fb9952c8-7442-4aa7-8af1-7620bad44eba 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "f1267fe1-552c-4312-b9b0-c02eae82a77a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:53 compute-0 nova_compute[192079]: 2025-10-02 12:02:53.393 2 DEBUG oslo_concurrency.lockutils [req-27d38c0a-6e92-4ae3-96fa-59b69492b5dd req-fb9952c8-7442-4aa7-8af1-7620bad44eba 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "f1267fe1-552c-4312-b9b0-c02eae82a77a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:53 compute-0 nova_compute[192079]: 2025-10-02 12:02:53.394 2 DEBUG oslo_concurrency.lockutils [req-27d38c0a-6e92-4ae3-96fa-59b69492b5dd req-fb9952c8-7442-4aa7-8af1-7620bad44eba 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "f1267fe1-552c-4312-b9b0-c02eae82a77a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:53 compute-0 nova_compute[192079]: 2025-10-02 12:02:53.394 2 DEBUG nova.compute.manager [req-27d38c0a-6e92-4ae3-96fa-59b69492b5dd req-fb9952c8-7442-4aa7-8af1-7620bad44eba 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] No waiting events found dispatching network-vif-plugged-75561bb8-bfb9-4100-9c79-271fd50011de pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:02:53 compute-0 nova_compute[192079]: 2025-10-02 12:02:53.394 2 WARNING nova.compute.manager [req-27d38c0a-6e92-4ae3-96fa-59b69492b5dd req-fb9952c8-7442-4aa7-8af1-7620bad44eba 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Received unexpected event network-vif-plugged-75561bb8-bfb9-4100-9c79-271fd50011de for instance with vm_state active and task_state deleting.
Oct 02 12:02:54 compute-0 nova_compute[192079]: 2025-10-02 12:02:54.300 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:55 compute-0 nova_compute[192079]: 2025-10-02 12:02:55.465 2 DEBUG oslo_concurrency.lockutils [None req-8eea60d3-9b42-46c0-8de3-2686e4fbad62 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Acquiring lock "96203b28-73b1-462a-87e9-4b0ca1d1f93b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:55 compute-0 nova_compute[192079]: 2025-10-02 12:02:55.466 2 DEBUG oslo_concurrency.lockutils [None req-8eea60d3-9b42-46c0-8de3-2686e4fbad62 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lock "96203b28-73b1-462a-87e9-4b0ca1d1f93b" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:55 compute-0 nova_compute[192079]: 2025-10-02 12:02:55.466 2 DEBUG oslo_concurrency.lockutils [None req-8eea60d3-9b42-46c0-8de3-2686e4fbad62 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Acquiring lock "96203b28-73b1-462a-87e9-4b0ca1d1f93b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:55 compute-0 nova_compute[192079]: 2025-10-02 12:02:55.467 2 DEBUG oslo_concurrency.lockutils [None req-8eea60d3-9b42-46c0-8de3-2686e4fbad62 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lock "96203b28-73b1-462a-87e9-4b0ca1d1f93b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:55 compute-0 nova_compute[192079]: 2025-10-02 12:02:55.467 2 DEBUG oslo_concurrency.lockutils [None req-8eea60d3-9b42-46c0-8de3-2686e4fbad62 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lock "96203b28-73b1-462a-87e9-4b0ca1d1f93b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:55 compute-0 nova_compute[192079]: 2025-10-02 12:02:55.478 2 INFO nova.compute.manager [None req-8eea60d3-9b42-46c0-8de3-2686e4fbad62 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Terminating instance
Oct 02 12:02:55 compute-0 nova_compute[192079]: 2025-10-02 12:02:55.492 2 DEBUG oslo_concurrency.lockutils [None req-8eea60d3-9b42-46c0-8de3-2686e4fbad62 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Acquiring lock "refresh_cache-96203b28-73b1-462a-87e9-4b0ca1d1f93b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:02:55 compute-0 nova_compute[192079]: 2025-10-02 12:02:55.492 2 DEBUG oslo_concurrency.lockutils [None req-8eea60d3-9b42-46c0-8de3-2686e4fbad62 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Acquired lock "refresh_cache-96203b28-73b1-462a-87e9-4b0ca1d1f93b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:02:55 compute-0 nova_compute[192079]: 2025-10-02 12:02:55.493 2 DEBUG nova.network.neutron [None req-8eea60d3-9b42-46c0-8de3-2686e4fbad62 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:02:55 compute-0 nova_compute[192079]: 2025-10-02 12:02:55.657 2 DEBUG nova.network.neutron [-] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:02:55 compute-0 nova_compute[192079]: 2025-10-02 12:02:55.675 2 INFO nova.compute.manager [-] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Took 4.43 seconds to deallocate network for instance.
Oct 02 12:02:55 compute-0 nova_compute[192079]: 2025-10-02 12:02:55.704 2 DEBUG nova.network.neutron [None req-8eea60d3-9b42-46c0-8de3-2686e4fbad62 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:02:55 compute-0 nova_compute[192079]: 2025-10-02 12:02:55.763 2 DEBUG oslo_concurrency.lockutils [None req-4c7b1039-3f74-458d-9bb8-cf1d9d692fb6 59e8135d73ee43e088ba5ee7d9bd84b1 5cc73d75e0864e838eefa90cb33b7e01 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:55 compute-0 nova_compute[192079]: 2025-10-02 12:02:55.764 2 DEBUG oslo_concurrency.lockutils [None req-4c7b1039-3f74-458d-9bb8-cf1d9d692fb6 59e8135d73ee43e088ba5ee7d9bd84b1 5cc73d75e0864e838eefa90cb33b7e01 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:55 compute-0 nova_compute[192079]: 2025-10-02 12:02:55.768 2 DEBUG oslo_concurrency.lockutils [None req-4c7b1039-3f74-458d-9bb8-cf1d9d692fb6 59e8135d73ee43e088ba5ee7d9bd84b1 5cc73d75e0864e838eefa90cb33b7e01 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.004s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:55 compute-0 nova_compute[192079]: 2025-10-02 12:02:55.804 2 INFO nova.scheduler.client.report [None req-4c7b1039-3f74-458d-9bb8-cf1d9d692fb6 59e8135d73ee43e088ba5ee7d9bd84b1 5cc73d75e0864e838eefa90cb33b7e01 - - default default] Deleted allocations for instance f1267fe1-552c-4312-b9b0-c02eae82a77a
Oct 02 12:02:55 compute-0 nova_compute[192079]: 2025-10-02 12:02:55.895 2 DEBUG oslo_concurrency.lockutils [None req-4c7b1039-3f74-458d-9bb8-cf1d9d692fb6 59e8135d73ee43e088ba5ee7d9bd84b1 5cc73d75e0864e838eefa90cb33b7e01 - - default default] Lock "f1267fe1-552c-4312-b9b0-c02eae82a77a" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.196s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:55 compute-0 nova_compute[192079]: 2025-10-02 12:02:55.970 2 DEBUG nova.network.neutron [None req-8eea60d3-9b42-46c0-8de3-2686e4fbad62 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:02:55 compute-0 nova_compute[192079]: 2025-10-02 12:02:55.992 2 DEBUG oslo_concurrency.lockutils [None req-8eea60d3-9b42-46c0-8de3-2686e4fbad62 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Releasing lock "refresh_cache-96203b28-73b1-462a-87e9-4b0ca1d1f93b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:02:55 compute-0 nova_compute[192079]: 2025-10-02 12:02:55.993 2 DEBUG nova.compute.manager [None req-8eea60d3-9b42-46c0-8de3-2686e4fbad62 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:02:56 compute-0 systemd[1]: machine-qemu\x2d8\x2dinstance\x2d0000000e.scope: Deactivated successfully.
Oct 02 12:02:56 compute-0 systemd[1]: machine-qemu\x2d8\x2dinstance\x2d0000000e.scope: Consumed 4.347s CPU time.
Oct 02 12:02:56 compute-0 systemd-machined[152150]: Machine qemu-8-instance-0000000e terminated.
Oct 02 12:02:56 compute-0 nova_compute[192079]: 2025-10-02 12:02:56.132 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:56 compute-0 nova_compute[192079]: 2025-10-02 12:02:56.237 2 INFO nova.virt.libvirt.driver [-] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Instance destroyed successfully.
Oct 02 12:02:56 compute-0 nova_compute[192079]: 2025-10-02 12:02:56.238 2 DEBUG nova.objects.instance [None req-8eea60d3-9b42-46c0-8de3-2686e4fbad62 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lazy-loading 'resources' on Instance uuid 96203b28-73b1-462a-87e9-4b0ca1d1f93b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:02:56 compute-0 nova_compute[192079]: 2025-10-02 12:02:56.263 2 INFO nova.virt.libvirt.driver [None req-8eea60d3-9b42-46c0-8de3-2686e4fbad62 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Deleting instance files /var/lib/nova/instances/96203b28-73b1-462a-87e9-4b0ca1d1f93b_del
Oct 02 12:02:56 compute-0 nova_compute[192079]: 2025-10-02 12:02:56.263 2 INFO nova.virt.libvirt.driver [None req-8eea60d3-9b42-46c0-8de3-2686e4fbad62 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Deletion of /var/lib/nova/instances/96203b28-73b1-462a-87e9-4b0ca1d1f93b_del complete
Oct 02 12:02:56 compute-0 nova_compute[192079]: 2025-10-02 12:02:56.386 2 INFO nova.compute.manager [None req-8eea60d3-9b42-46c0-8de3-2686e4fbad62 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Took 0.39 seconds to destroy the instance on the hypervisor.
Oct 02 12:02:56 compute-0 nova_compute[192079]: 2025-10-02 12:02:56.387 2 DEBUG oslo.service.loopingcall [None req-8eea60d3-9b42-46c0-8de3-2686e4fbad62 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:02:56 compute-0 nova_compute[192079]: 2025-10-02 12:02:56.387 2 DEBUG nova.compute.manager [-] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:02:56 compute-0 nova_compute[192079]: 2025-10-02 12:02:56.388 2 DEBUG nova.network.neutron [-] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:02:57 compute-0 nova_compute[192079]: 2025-10-02 12:02:57.237 2 DEBUG nova.network.neutron [-] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:02:57 compute-0 nova_compute[192079]: 2025-10-02 12:02:57.257 2 DEBUG nova.network.neutron [-] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:02:57 compute-0 nova_compute[192079]: 2025-10-02 12:02:57.276 2 INFO nova.compute.manager [-] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Took 0.89 seconds to deallocate network for instance.
Oct 02 12:02:57 compute-0 nova_compute[192079]: 2025-10-02 12:02:57.356 2 DEBUG oslo_concurrency.lockutils [None req-8eea60d3-9b42-46c0-8de3-2686e4fbad62 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:57 compute-0 nova_compute[192079]: 2025-10-02 12:02:57.357 2 DEBUG oslo_concurrency.lockutils [None req-8eea60d3-9b42-46c0-8de3-2686e4fbad62 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:57 compute-0 nova_compute[192079]: 2025-10-02 12:02:57.439 2 DEBUG nova.compute.provider_tree [None req-8eea60d3-9b42-46c0-8de3-2686e4fbad62 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:02:57 compute-0 nova_compute[192079]: 2025-10-02 12:02:57.459 2 DEBUG nova.scheduler.client.report [None req-8eea60d3-9b42-46c0-8de3-2686e4fbad62 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:02:57 compute-0 nova_compute[192079]: 2025-10-02 12:02:57.491 2 DEBUG oslo_concurrency.lockutils [None req-8eea60d3-9b42-46c0-8de3-2686e4fbad62 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.134s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:57 compute-0 nova_compute[192079]: 2025-10-02 12:02:57.522 2 INFO nova.scheduler.client.report [None req-8eea60d3-9b42-46c0-8de3-2686e4fbad62 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Deleted allocations for instance 96203b28-73b1-462a-87e9-4b0ca1d1f93b
Oct 02 12:02:57 compute-0 nova_compute[192079]: 2025-10-02 12:02:57.592 2 DEBUG oslo_concurrency.lockutils [None req-8eea60d3-9b42-46c0-8de3-2686e4fbad62 bc9dc801fac849e18b73470021e7d314 d195e92cfe7049bf9f470765ff4435a9 - - default default] Lock "96203b28-73b1-462a-87e9-4b0ca1d1f93b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 2.126s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:57 compute-0 nova_compute[192079]: 2025-10-02 12:02:57.951 2 DEBUG oslo_concurrency.lockutils [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Acquiring lock "8fd0525b-b74e-4fea-8a19-f03f445fbc07" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:57 compute-0 nova_compute[192079]: 2025-10-02 12:02:57.951 2 DEBUG oslo_concurrency.lockutils [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Lock "8fd0525b-b74e-4fea-8a19-f03f445fbc07" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:57 compute-0 nova_compute[192079]: 2025-10-02 12:02:57.968 2 DEBUG nova.compute.manager [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.051 2 DEBUG oslo_concurrency.lockutils [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.051 2 DEBUG oslo_concurrency.lockutils [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.055 2 DEBUG nova.virt.hardware [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.056 2 INFO nova.compute.claims [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:02:58 compute-0 podman[222174]: 2025-10-02 12:02:58.164310371 +0000 UTC m=+0.067557699 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, container_name=ceilometer_agent_compute, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, 
org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible)
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.342 2 DEBUG nova.compute.provider_tree [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.355 2 DEBUG nova.scheduler.client.report [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.378 2 DEBUG oslo_concurrency.lockutils [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.327s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.379 2 DEBUG nova.compute.manager [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.439 2 DEBUG nova.compute.manager [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.440 2 DEBUG nova.network.neutron [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.468 2 INFO nova.virt.libvirt.driver [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.489 2 DEBUG nova.compute.manager [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.612 2 DEBUG nova.compute.manager [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.614 2 DEBUG nova.virt.libvirt.driver [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.615 2 INFO nova.virt.libvirt.driver [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Creating image(s)
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.616 2 DEBUG oslo_concurrency.lockutils [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Acquiring lock "/var/lib/nova/instances/8fd0525b-b74e-4fea-8a19-f03f445fbc07/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.617 2 DEBUG oslo_concurrency.lockutils [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Lock "/var/lib/nova/instances/8fd0525b-b74e-4fea-8a19-f03f445fbc07/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.618 2 DEBUG oslo_concurrency.lockutils [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Lock "/var/lib/nova/instances/8fd0525b-b74e-4fea-8a19-f03f445fbc07/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.646 2 DEBUG oslo_concurrency.processutils [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.740 2 DEBUG oslo_concurrency.processutils [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.094s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.742 2 DEBUG oslo_concurrency.lockutils [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.743 2 DEBUG oslo_concurrency.lockutils [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.768 2 DEBUG oslo_concurrency.processutils [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.795 2 DEBUG nova.network.neutron [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] No network configured allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1188
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.796 2 DEBUG nova.compute.manager [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Instance network_info: |[]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.851 2 DEBUG oslo_concurrency.processutils [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.083s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.852 2 DEBUG oslo_concurrency.processutils [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/8fd0525b-b74e-4fea-8a19-f03f445fbc07/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.890 2 DEBUG oslo_concurrency.processutils [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/8fd0525b-b74e-4fea-8a19-f03f445fbc07/disk 1073741824" returned: 0 in 0.038s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.891 2 DEBUG oslo_concurrency.lockutils [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.148s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:58 compute-0 nova_compute[192079]: 2025-10-02 12:02:58.892 2 DEBUG oslo_concurrency.processutils [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:58.971 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=6, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=5) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:02:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:58.972 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 1 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.017 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.025 2 DEBUG oslo_concurrency.processutils [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.133s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.026 2 DEBUG nova.virt.disk.api [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Checking if we can resize image /var/lib/nova/instances/8fd0525b-b74e-4fea-8a19-f03f445fbc07/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.027 2 DEBUG oslo_concurrency.processutils [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/8fd0525b-b74e-4fea-8a19-f03f445fbc07/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.092 2 DEBUG oslo_concurrency.processutils [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/8fd0525b-b74e-4fea-8a19-f03f445fbc07/disk --force-share --output=json" returned: 0 in 0.066s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.093 2 DEBUG nova.virt.disk.api [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Cannot resize image /var/lib/nova/instances/8fd0525b-b74e-4fea-8a19-f03f445fbc07/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.094 2 DEBUG nova.objects.instance [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Lazy-loading 'migration_context' on Instance uuid 8fd0525b-b74e-4fea-8a19-f03f445fbc07 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.107 2 DEBUG nova.virt.libvirt.driver [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.107 2 DEBUG nova.virt.libvirt.driver [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Ensure instance console log exists: /var/lib/nova/instances/8fd0525b-b74e-4fea-8a19-f03f445fbc07/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.108 2 DEBUG oslo_concurrency.lockutils [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.108 2 DEBUG oslo_concurrency.lockutils [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.109 2 DEBUG oslo_concurrency.lockutils [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.110 2 DEBUG nova.virt.libvirt.driver [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Start _get_guest_xml network_info=[] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.116 2 WARNING nova.virt.libvirt.driver [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.121 2 DEBUG nova.virt.libvirt.host [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.122 2 DEBUG nova.virt.libvirt.host [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.124 2 DEBUG nova.virt.libvirt.host [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.125 2 DEBUG nova.virt.libvirt.host [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.126 2 DEBUG nova.virt.libvirt.driver [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.126 2 DEBUG nova.virt.hardware [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.126 2 DEBUG nova.virt.hardware [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.127 2 DEBUG nova.virt.hardware [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.127 2 DEBUG nova.virt.hardware [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.127 2 DEBUG nova.virt.hardware [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.127 2 DEBUG nova.virt.hardware [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.127 2 DEBUG nova.virt.hardware [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.127 2 DEBUG nova.virt.hardware [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.127 2 DEBUG nova.virt.hardware [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.128 2 DEBUG nova.virt.hardware [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.128 2 DEBUG nova.virt.hardware [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.131 2 DEBUG nova.objects.instance [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Lazy-loading 'pci_devices' on Instance uuid 8fd0525b-b74e-4fea-8a19-f03f445fbc07 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.147 2 DEBUG nova.virt.libvirt.driver [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:02:59 compute-0 nova_compute[192079]:   <uuid>8fd0525b-b74e-4fea-8a19-f03f445fbc07</uuid>
Oct 02 12:02:59 compute-0 nova_compute[192079]:   <name>instance-00000010</name>
Oct 02 12:02:59 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:02:59 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:02:59 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:02:59 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:       <nova:name>tempest-ServersAdminNegativeTestJSON-server-1143653698</nova:name>
Oct 02 12:02:59 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:02:59</nova:creationTime>
Oct 02 12:02:59 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:02:59 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:02:59 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:02:59 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:02:59 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:02:59 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:02:59 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:02:59 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:02:59 compute-0 nova_compute[192079]:         <nova:user uuid="d413386800eb45c8959596be3a47c369">tempest-ServersAdminNegativeTestJSON-528126379-project-member</nova:user>
Oct 02 12:02:59 compute-0 nova_compute[192079]:         <nova:project uuid="8b0d43e818674dfd81b38d17af224b0d">tempest-ServersAdminNegativeTestJSON-528126379</nova:project>
Oct 02 12:02:59 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:02:59 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:       <nova:ports/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:02:59 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:02:59 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <system>
Oct 02 12:02:59 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:02:59 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:02:59 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:02:59 compute-0 nova_compute[192079]:       <entry name="serial">8fd0525b-b74e-4fea-8a19-f03f445fbc07</entry>
Oct 02 12:02:59 compute-0 nova_compute[192079]:       <entry name="uuid">8fd0525b-b74e-4fea-8a19-f03f445fbc07</entry>
Oct 02 12:02:59 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     </system>
Oct 02 12:02:59 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:02:59 compute-0 nova_compute[192079]:   <os>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:   </os>
Oct 02 12:02:59 compute-0 nova_compute[192079]:   <features>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:   </features>
Oct 02 12:02:59 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:02:59 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:02:59 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:02:59 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/8fd0525b-b74e-4fea-8a19-f03f445fbc07/disk"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:02:59 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/8fd0525b-b74e-4fea-8a19-f03f445fbc07/disk.config"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:02:59 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/8fd0525b-b74e-4fea-8a19-f03f445fbc07/console.log" append="off"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <video>
Oct 02 12:02:59 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     </video>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:02:59 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:02:59 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:02:59 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:02:59 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:02:59 compute-0 nova_compute[192079]: </domain>
Oct 02 12:02:59 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.194 2 DEBUG nova.virt.libvirt.driver [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.194 2 DEBUG nova.virt.libvirt.driver [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.195 2 INFO nova.virt.libvirt.driver [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Using config drive
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.301 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.704 2 INFO nova.virt.libvirt.driver [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Creating config drive at /var/lib/nova/instances/8fd0525b-b74e-4fea-8a19-f03f445fbc07/disk.config
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.709 2 DEBUG oslo_concurrency.processutils [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/8fd0525b-b74e-4fea-8a19-f03f445fbc07/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp6uga7t5k execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:02:59 compute-0 nova_compute[192079]: 2025-10-02 12:02:59.835 2 DEBUG oslo_concurrency.processutils [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/8fd0525b-b74e-4fea-8a19-f03f445fbc07/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp6uga7t5k" returned: 0 in 0.125s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:02:59 compute-0 systemd-machined[152150]: New machine qemu-9-instance-00000010.
Oct 02 12:02:59 compute-0 systemd[1]: Started Virtual Machine qemu-9-instance-00000010.
Oct 02 12:02:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:02:59.974 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '6'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:03:00 compute-0 nova_compute[192079]: 2025-10-02 12:03:00.263 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759406565.2622623, 564d2c1b-397f-4f8c-9bf3-8251528aecd3 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:03:00 compute-0 nova_compute[192079]: 2025-10-02 12:03:00.263 2 INFO nova.compute.manager [-] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] VM Stopped (Lifecycle Event)
Oct 02 12:03:00 compute-0 nova_compute[192079]: 2025-10-02 12:03:00.281 2 DEBUG nova.compute.manager [None req-871b849e-edd2-452d-80a1-f97e7a7fa7af - - - - - -] [instance: 564d2c1b-397f-4f8c-9bf3-8251528aecd3] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:03:00 compute-0 nova_compute[192079]: 2025-10-02 12:03:00.510 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406580.510521, 8fd0525b-b74e-4fea-8a19-f03f445fbc07 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:03:00 compute-0 nova_compute[192079]: 2025-10-02 12:03:00.511 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] VM Resumed (Lifecycle Event)
Oct 02 12:03:00 compute-0 nova_compute[192079]: 2025-10-02 12:03:00.513 2 DEBUG nova.compute.manager [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:03:00 compute-0 nova_compute[192079]: 2025-10-02 12:03:00.514 2 DEBUG nova.virt.libvirt.driver [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:03:00 compute-0 nova_compute[192079]: 2025-10-02 12:03:00.518 2 INFO nova.virt.libvirt.driver [-] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Instance spawned successfully.
Oct 02 12:03:00 compute-0 nova_compute[192079]: 2025-10-02 12:03:00.518 2 DEBUG nova.virt.libvirt.driver [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:03:00 compute-0 nova_compute[192079]: 2025-10-02 12:03:00.536 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:03:00 compute-0 nova_compute[192079]: 2025-10-02 12:03:00.542 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:03:00 compute-0 nova_compute[192079]: 2025-10-02 12:03:00.578 2 DEBUG nova.virt.libvirt.driver [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:03:00 compute-0 nova_compute[192079]: 2025-10-02 12:03:00.579 2 DEBUG nova.virt.libvirt.driver [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:03:00 compute-0 nova_compute[192079]: 2025-10-02 12:03:00.579 2 DEBUG nova.virt.libvirt.driver [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:03:00 compute-0 nova_compute[192079]: 2025-10-02 12:03:00.580 2 DEBUG nova.virt.libvirt.driver [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:03:00 compute-0 nova_compute[192079]: 2025-10-02 12:03:00.580 2 DEBUG nova.virt.libvirt.driver [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:03:00 compute-0 nova_compute[192079]: 2025-10-02 12:03:00.581 2 DEBUG nova.virt.libvirt.driver [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:03:00 compute-0 nova_compute[192079]: 2025-10-02 12:03:00.583 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:03:00 compute-0 nova_compute[192079]: 2025-10-02 12:03:00.584 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406580.5141807, 8fd0525b-b74e-4fea-8a19-f03f445fbc07 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:03:00 compute-0 nova_compute[192079]: 2025-10-02 12:03:00.584 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] VM Started (Lifecycle Event)
Oct 02 12:03:00 compute-0 nova_compute[192079]: 2025-10-02 12:03:00.622 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:03:00 compute-0 nova_compute[192079]: 2025-10-02 12:03:00.625 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:03:00 compute-0 nova_compute[192079]: 2025-10-02 12:03:00.650 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:03:00 compute-0 nova_compute[192079]: 2025-10-02 12:03:00.666 2 INFO nova.compute.manager [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Took 2.05 seconds to spawn the instance on the hypervisor.
Oct 02 12:03:00 compute-0 nova_compute[192079]: 2025-10-02 12:03:00.666 2 DEBUG nova.compute.manager [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:03:00 compute-0 nova_compute[192079]: 2025-10-02 12:03:00.744 2 INFO nova.compute.manager [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Took 2.72 seconds to build instance.
Oct 02 12:03:00 compute-0 nova_compute[192079]: 2025-10-02 12:03:00.763 2 DEBUG oslo_concurrency.lockutils [None req-6f31a8dd-0d21-45b0-9a3a-b69ac5414de2 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Lock "8fd0525b-b74e-4fea-8a19-f03f445fbc07" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 2.811s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:01 compute-0 nova_compute[192079]: 2025-10-02 12:03:01.178 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:01 compute-0 systemd[221937]: Activating special unit Exit the Session...
Oct 02 12:03:01 compute-0 systemd[221937]: Stopped target Main User Target.
Oct 02 12:03:01 compute-0 systemd[221937]: Stopped target Basic System.
Oct 02 12:03:01 compute-0 systemd[221937]: Stopped target Paths.
Oct 02 12:03:01 compute-0 systemd[221937]: Stopped target Sockets.
Oct 02 12:03:01 compute-0 systemd[221937]: Stopped target Timers.
Oct 02 12:03:01 compute-0 systemd[221937]: Stopped Mark boot as successful after the user session has run 2 minutes.
Oct 02 12:03:01 compute-0 systemd[221937]: Stopped Daily Cleanup of User's Temporary Directories.
Oct 02 12:03:01 compute-0 systemd[221937]: Closed D-Bus User Message Bus Socket.
Oct 02 12:03:01 compute-0 systemd[221937]: Stopped Create User's Volatile Files and Directories.
Oct 02 12:03:01 compute-0 systemd[221937]: Removed slice User Application Slice.
Oct 02 12:03:01 compute-0 systemd[221937]: Reached target Shutdown.
Oct 02 12:03:01 compute-0 systemd[221937]: Finished Exit the Session.
Oct 02 12:03:01 compute-0 systemd[221937]: Reached target Exit the Session.
Oct 02 12:03:01 compute-0 systemd[1]: Stopping User Manager for UID 42436...
Oct 02 12:03:01 compute-0 systemd[1]: user@42436.service: Deactivated successfully.
Oct 02 12:03:01 compute-0 systemd[1]: Stopped User Manager for UID 42436.
Oct 02 12:03:01 compute-0 systemd[1]: Stopping User Runtime Directory /run/user/42436...
Oct 02 12:03:01 compute-0 systemd[1]: run-user-42436.mount: Deactivated successfully.
Oct 02 12:03:01 compute-0 systemd[1]: user-runtime-dir@42436.service: Deactivated successfully.
Oct 02 12:03:01 compute-0 systemd[1]: Stopped User Runtime Directory /run/user/42436.
Oct 02 12:03:01 compute-0 systemd[1]: Removed slice User Slice of UID 42436.
Oct 02 12:03:01 compute-0 podman[222238]: 2025-10-02 12:03:01.440206047 +0000 UTC m=+0.079229657 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Red Hat, Inc., managed_by=edpm_ansible, release=1755695350, architecture=x86_64, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.buildah.version=1.33.7, distribution-scope=public, vcs-type=git, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., config_id=edpm, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, name=ubi9-minimal, build-date=2025-08-20T13:12:41, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, io.openshift.expose-services=, com.redhat.component=ubi9-minimal-container, url=https://catalog.redhat.com/en/search?searchType=containers, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, vendor=Red Hat, Inc., container_name=openstack_network_exporter, version=9.6, io.openshift.tags=minimal rhel9)
Oct 02 12:03:01 compute-0 podman[222239]: 2025-10-02 12:03:01.440520616 +0000 UTC m=+0.084095871 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, container_name=multipathd, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, managed_by=edpm_ansible)
Oct 02 12:03:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:02.203 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:02.204 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:02.204 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:02 compute-0 nova_compute[192079]: 2025-10-02 12:03:02.562 2 DEBUG nova.objects.instance [None req-4daf72bc-7692-4920-82ea-cf485e35fde2 243678949a5a47e981856403ce011d91 5675cd0f2dd24aa0980cfcbacbb96f92 - - default default] Lazy-loading 'pci_devices' on Instance uuid 8fd0525b-b74e-4fea-8a19-f03f445fbc07 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:03:02 compute-0 nova_compute[192079]: 2025-10-02 12:03:02.583 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406582.5827086, 8fd0525b-b74e-4fea-8a19-f03f445fbc07 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:03:02 compute-0 nova_compute[192079]: 2025-10-02 12:03:02.583 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] VM Paused (Lifecycle Event)
Oct 02 12:03:02 compute-0 nova_compute[192079]: 2025-10-02 12:03:02.604 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:03:02 compute-0 nova_compute[192079]: 2025-10-02 12:03:02.607 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: active, current task_state: suspending, current DB power_state: 1, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:03:02 compute-0 nova_compute[192079]: 2025-10-02 12:03:02.624 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] During sync_power_state the instance has a pending task (suspending). Skip.
Oct 02 12:03:03 compute-0 systemd[1]: machine-qemu\x2d9\x2dinstance\x2d00000010.scope: Deactivated successfully.
Oct 02 12:03:03 compute-0 systemd[1]: machine-qemu\x2d9\x2dinstance\x2d00000010.scope: Consumed 2.743s CPU time.
Oct 02 12:03:03 compute-0 systemd-machined[152150]: Machine qemu-9-instance-00000010 terminated.
Oct 02 12:03:03 compute-0 nova_compute[192079]: 2025-10-02 12:03:03.272 2 DEBUG nova.compute.manager [None req-4daf72bc-7692-4920-82ea-cf485e35fde2 243678949a5a47e981856403ce011d91 5675cd0f2dd24aa0980cfcbacbb96f92 - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:03:04 compute-0 nova_compute[192079]: 2025-10-02 12:03:04.303 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:04 compute-0 sshd-session[222291]: Accepted publickey for nova from 192.168.122.101 port 49822 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:03:04 compute-0 systemd[1]: Created slice User Slice of UID 42436.
Oct 02 12:03:04 compute-0 systemd[1]: Starting User Runtime Directory /run/user/42436...
Oct 02 12:03:04 compute-0 systemd-logind[827]: New session 38 of user nova.
Oct 02 12:03:04 compute-0 systemd[1]: Finished User Runtime Directory /run/user/42436.
Oct 02 12:03:04 compute-0 systemd[1]: Starting User Manager for UID 42436...
Oct 02 12:03:04 compute-0 systemd[222295]: pam_unix(systemd-user:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:03:04 compute-0 systemd[222295]: Queued start job for default target Main User Target.
Oct 02 12:03:04 compute-0 systemd[222295]: Created slice User Application Slice.
Oct 02 12:03:04 compute-0 systemd[222295]: Started Mark boot as successful after the user session has run 2 minutes.
Oct 02 12:03:04 compute-0 systemd[222295]: Started Daily Cleanup of User's Temporary Directories.
Oct 02 12:03:04 compute-0 systemd[222295]: Reached target Paths.
Oct 02 12:03:04 compute-0 systemd[222295]: Reached target Timers.
Oct 02 12:03:04 compute-0 systemd[222295]: Starting D-Bus User Message Bus Socket...
Oct 02 12:03:04 compute-0 systemd[222295]: Starting Create User's Volatile Files and Directories...
Oct 02 12:03:04 compute-0 systemd[222295]: Listening on D-Bus User Message Bus Socket.
Oct 02 12:03:04 compute-0 systemd[222295]: Reached target Sockets.
Oct 02 12:03:04 compute-0 systemd[222295]: Finished Create User's Volatile Files and Directories.
Oct 02 12:03:04 compute-0 systemd[222295]: Reached target Basic System.
Oct 02 12:03:04 compute-0 systemd[222295]: Reached target Main User Target.
Oct 02 12:03:04 compute-0 systemd[222295]: Startup finished in 159ms.
Oct 02 12:03:04 compute-0 systemd[1]: Started User Manager for UID 42436.
Oct 02 12:03:04 compute-0 systemd[1]: Started Session 38 of User nova.
Oct 02 12:03:04 compute-0 sshd-session[222291]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:03:05 compute-0 sshd-session[222310]: Received disconnect from 192.168.122.101 port 49822:11: disconnected by user
Oct 02 12:03:05 compute-0 sshd-session[222310]: Disconnected from user nova 192.168.122.101 port 49822
Oct 02 12:03:05 compute-0 sshd-session[222291]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:03:05 compute-0 systemd[1]: session-38.scope: Deactivated successfully.
Oct 02 12:03:05 compute-0 systemd-logind[827]: Session 38 logged out. Waiting for processes to exit.
Oct 02 12:03:05 compute-0 systemd-logind[827]: Removed session 38.
Oct 02 12:03:05 compute-0 sshd-session[222312]: Accepted publickey for nova from 192.168.122.101 port 49838 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:03:05 compute-0 systemd-logind[827]: New session 40 of user nova.
Oct 02 12:03:05 compute-0 systemd[1]: Started Session 40 of User nova.
Oct 02 12:03:05 compute-0 sshd-session[222312]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:03:05 compute-0 sshd-session[222315]: Received disconnect from 192.168.122.101 port 49838:11: disconnected by user
Oct 02 12:03:05 compute-0 sshd-session[222315]: Disconnected from user nova 192.168.122.101 port 49838
Oct 02 12:03:05 compute-0 sshd-session[222312]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:03:05 compute-0 systemd[1]: session-40.scope: Deactivated successfully.
Oct 02 12:03:05 compute-0 systemd-logind[827]: Session 40 logged out. Waiting for processes to exit.
Oct 02 12:03:05 compute-0 systemd-logind[827]: Removed session 40.
Oct 02 12:03:05 compute-0 sshd-session[222317]: Accepted publickey for nova from 192.168.122.101 port 49848 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:03:05 compute-0 systemd-logind[827]: New session 41 of user nova.
Oct 02 12:03:05 compute-0 systemd[1]: Started Session 41 of User nova.
Oct 02 12:03:05 compute-0 sshd-session[222317]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:03:05 compute-0 sshd-session[222320]: Received disconnect from 192.168.122.101 port 49848:11: disconnected by user
Oct 02 12:03:05 compute-0 sshd-session[222320]: Disconnected from user nova 192.168.122.101 port 49848
Oct 02 12:03:05 compute-0 sshd-session[222317]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:03:05 compute-0 systemd[1]: session-41.scope: Deactivated successfully.
Oct 02 12:03:05 compute-0 systemd-logind[827]: Session 41 logged out. Waiting for processes to exit.
Oct 02 12:03:05 compute-0 systemd-logind[827]: Removed session 41.
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.016 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759406571.0159028, f1267fe1-552c-4312-b9b0-c02eae82a77a => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.018 2 INFO nova.compute.manager [-] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] VM Stopped (Lifecycle Event)
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.038 2 DEBUG nova.compute.manager [None req-c02b7d36-e087-4a03-9e2b-80820e8200eb - - - - - -] [instance: f1267fe1-552c-4312-b9b0-c02eae82a77a] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.057 2 DEBUG oslo_concurrency.lockutils [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Acquiring lock "refresh_cache-73cd9aef-a159-4d0e-9fc4-435f191db0b9" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.058 2 DEBUG oslo_concurrency.lockutils [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Acquired lock "refresh_cache-73cd9aef-a159-4d0e-9fc4-435f191db0b9" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.058 2 DEBUG nova.network.neutron [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.206 2 DEBUG nova.network.neutron [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.216 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.423 2 DEBUG nova.network.neutron [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.439 2 DEBUG oslo_concurrency.lockutils [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Releasing lock "refresh_cache-73cd9aef-a159-4d0e-9fc4-435f191db0b9" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.567 2 DEBUG nova.virt.libvirt.driver [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Starting finish_migration finish_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:11698
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.570 2 DEBUG nova.virt.libvirt.driver [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Instance directory exists: not creating _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4719
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.571 2 INFO nova.virt.libvirt.driver [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Creating image(s)
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.573 2 DEBUG nova.objects.instance [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Lazy-loading 'trusted_certs' on Instance uuid 73cd9aef-a159-4d0e-9fc4-435f191db0b9 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.597 2 DEBUG oslo_concurrency.processutils [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.686 2 DEBUG oslo_concurrency.lockutils [None req-2b61f078-a8e0-4df1-85bf-3c47af4fbcf9 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Acquiring lock "8fd0525b-b74e-4fea-8a19-f03f445fbc07" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.686 2 DEBUG oslo_concurrency.lockutils [None req-2b61f078-a8e0-4df1-85bf-3c47af4fbcf9 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Lock "8fd0525b-b74e-4fea-8a19-f03f445fbc07" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.687 2 DEBUG oslo_concurrency.lockutils [None req-2b61f078-a8e0-4df1-85bf-3c47af4fbcf9 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Acquiring lock "8fd0525b-b74e-4fea-8a19-f03f445fbc07-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.688 2 DEBUG oslo_concurrency.lockutils [None req-2b61f078-a8e0-4df1-85bf-3c47af4fbcf9 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Lock "8fd0525b-b74e-4fea-8a19-f03f445fbc07-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.688 2 DEBUG oslo_concurrency.lockutils [None req-2b61f078-a8e0-4df1-85bf-3c47af4fbcf9 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Lock "8fd0525b-b74e-4fea-8a19-f03f445fbc07-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.693 2 DEBUG oslo_concurrency.processutils [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.096s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.694 2 DEBUG nova.virt.disk.api [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Checking if we can resize image /var/lib/nova/instances/73cd9aef-a159-4d0e-9fc4-435f191db0b9/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.694 2 DEBUG oslo_concurrency.processutils [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/73cd9aef-a159-4d0e-9fc4-435f191db0b9/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.724 2 INFO nova.compute.manager [None req-2b61f078-a8e0-4df1-85bf-3c47af4fbcf9 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Terminating instance
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.742 2 DEBUG oslo_concurrency.lockutils [None req-2b61f078-a8e0-4df1-85bf-3c47af4fbcf9 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Acquiring lock "refresh_cache-8fd0525b-b74e-4fea-8a19-f03f445fbc07" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.743 2 DEBUG oslo_concurrency.lockutils [None req-2b61f078-a8e0-4df1-85bf-3c47af4fbcf9 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Acquired lock "refresh_cache-8fd0525b-b74e-4fea-8a19-f03f445fbc07" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.743 2 DEBUG nova.network.neutron [None req-2b61f078-a8e0-4df1-85bf-3c47af4fbcf9 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.750 2 DEBUG oslo_concurrency.processutils [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/73cd9aef-a159-4d0e-9fc4-435f191db0b9/disk --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.751 2 DEBUG nova.virt.disk.api [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Cannot resize image /var/lib/nova/instances/73cd9aef-a159-4d0e-9fc4-435f191db0b9/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.767 2 DEBUG nova.virt.libvirt.driver [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Did not create local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4859
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.768 2 DEBUG nova.virt.libvirt.driver [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Ensure instance console log exists: /var/lib/nova/instances/73cd9aef-a159-4d0e-9fc4-435f191db0b9/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.768 2 DEBUG oslo_concurrency.lockutils [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.769 2 DEBUG oslo_concurrency.lockutils [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.770 2 DEBUG oslo_concurrency.lockutils [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.772 2 DEBUG nova.virt.libvirt.driver [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Start _get_guest_xml network_info=[] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.778 2 WARNING nova.virt.libvirt.driver [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.784 2 DEBUG nova.virt.libvirt.host [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.785 2 DEBUG nova.virt.libvirt.host [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.788 2 DEBUG nova.virt.libvirt.host [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.788 2 DEBUG nova.virt.libvirt.host [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.789 2 DEBUG nova.virt.libvirt.driver [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.790 2 DEBUG nova.virt.hardware [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.790 2 DEBUG nova.virt.hardware [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.791 2 DEBUG nova.virt.hardware [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.791 2 DEBUG nova.virt.hardware [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.791 2 DEBUG nova.virt.hardware [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.791 2 DEBUG nova.virt.hardware [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.792 2 DEBUG nova.virt.hardware [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.792 2 DEBUG nova.virt.hardware [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.792 2 DEBUG nova.virt.hardware [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.793 2 DEBUG nova.virt.hardware [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.793 2 DEBUG nova.virt.hardware [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.793 2 DEBUG nova.objects.instance [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Lazy-loading 'vcpu_model' on Instance uuid 73cd9aef-a159-4d0e-9fc4-435f191db0b9 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.813 2 DEBUG oslo_concurrency.processutils [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/73cd9aef-a159-4d0e-9fc4-435f191db0b9/disk.config --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.906 2 DEBUG oslo_concurrency.processutils [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/73cd9aef-a159-4d0e-9fc4-435f191db0b9/disk.config --force-share --output=json" returned: 0 in 0.093s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.907 2 DEBUG oslo_concurrency.lockutils [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Acquiring lock "/var/lib/nova/instances/73cd9aef-a159-4d0e-9fc4-435f191db0b9/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.907 2 DEBUG oslo_concurrency.lockutils [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Lock "/var/lib/nova/instances/73cd9aef-a159-4d0e-9fc4-435f191db0b9/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.908 2 DEBUG oslo_concurrency.lockutils [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] Lock "/var/lib/nova/instances/73cd9aef-a159-4d0e-9fc4-435f191db0b9/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.910 2 DEBUG nova.virt.libvirt.driver [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:03:06 compute-0 nova_compute[192079]:   <uuid>73cd9aef-a159-4d0e-9fc4-435f191db0b9</uuid>
Oct 02 12:03:06 compute-0 nova_compute[192079]:   <name>instance-0000000d</name>
Oct 02 12:03:06 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:03:06 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:03:06 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:03:06 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:       <nova:name>tempest-MigrationsAdminTest-server-1487341678</nova:name>
Oct 02 12:03:06 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:03:06</nova:creationTime>
Oct 02 12:03:06 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:03:06 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:03:06 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:03:06 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:03:06 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:03:06 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:03:06 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:03:06 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:03:06 compute-0 nova_compute[192079]:         <nova:user uuid="8da35688aa864e189f10b334a21bc6c4">tempest-MigrationsAdminTest-1651504538-project-member</nova:user>
Oct 02 12:03:06 compute-0 nova_compute[192079]:         <nova:project uuid="4dcc6c51db2640cbb04083b3336de813">tempest-MigrationsAdminTest-1651504538</nova:project>
Oct 02 12:03:06 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:03:06 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:       <nova:ports/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:03:06 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:03:06 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <system>
Oct 02 12:03:06 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:03:06 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:03:06 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:03:06 compute-0 nova_compute[192079]:       <entry name="serial">73cd9aef-a159-4d0e-9fc4-435f191db0b9</entry>
Oct 02 12:03:06 compute-0 nova_compute[192079]:       <entry name="uuid">73cd9aef-a159-4d0e-9fc4-435f191db0b9</entry>
Oct 02 12:03:06 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     </system>
Oct 02 12:03:06 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:03:06 compute-0 nova_compute[192079]:   <os>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:   </os>
Oct 02 12:03:06 compute-0 nova_compute[192079]:   <features>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:   </features>
Oct 02 12:03:06 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:03:06 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:03:06 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:03:06 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/73cd9aef-a159-4d0e-9fc4-435f191db0b9/disk"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:03:06 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/73cd9aef-a159-4d0e-9fc4-435f191db0b9/disk.config"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:03:06 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/73cd9aef-a159-4d0e-9fc4-435f191db0b9/console.log" append="off"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <video>
Oct 02 12:03:06 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     </video>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:03:06 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:03:06 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:03:06 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:03:06 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:03:06 compute-0 nova_compute[192079]: </domain>
Oct 02 12:03:06 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.921 2 DEBUG nova.network.neutron [None req-2b61f078-a8e0-4df1-85bf-3c47af4fbcf9 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.982 2 DEBUG nova.virt.libvirt.driver [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.982 2 DEBUG nova.virt.libvirt.driver [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:03:06 compute-0 nova_compute[192079]: 2025-10-02 12:03:06.983 2 INFO nova.virt.libvirt.driver [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Using config drive
Oct 02 12:03:07 compute-0 systemd-machined[152150]: New machine qemu-10-instance-0000000d.
Oct 02 12:03:07 compute-0 systemd[1]: Started Virtual Machine qemu-10-instance-0000000d.
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.262 2 DEBUG nova.network.neutron [None req-2b61f078-a8e0-4df1-85bf-3c47af4fbcf9 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.278 2 DEBUG oslo_concurrency.lockutils [None req-2b61f078-a8e0-4df1-85bf-3c47af4fbcf9 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Releasing lock "refresh_cache-8fd0525b-b74e-4fea-8a19-f03f445fbc07" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.279 2 DEBUG nova.compute.manager [None req-2b61f078-a8e0-4df1-85bf-3c47af4fbcf9 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.288 2 INFO nova.virt.libvirt.driver [-] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Instance destroyed successfully.
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.289 2 DEBUG nova.objects.instance [None req-2b61f078-a8e0-4df1-85bf-3c47af4fbcf9 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Lazy-loading 'resources' on Instance uuid 8fd0525b-b74e-4fea-8a19-f03f445fbc07 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.300 2 INFO nova.virt.libvirt.driver [None req-2b61f078-a8e0-4df1-85bf-3c47af4fbcf9 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Deleting instance files /var/lib/nova/instances/8fd0525b-b74e-4fea-8a19-f03f445fbc07_del
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.301 2 INFO nova.virt.libvirt.driver [None req-2b61f078-a8e0-4df1-85bf-3c47af4fbcf9 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Deletion of /var/lib/nova/instances/8fd0525b-b74e-4fea-8a19-f03f445fbc07_del complete
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.400 2 INFO nova.compute.manager [None req-2b61f078-a8e0-4df1-85bf-3c47af4fbcf9 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Took 0.12 seconds to destroy the instance on the hypervisor.
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.401 2 DEBUG oslo.service.loopingcall [None req-2b61f078-a8e0-4df1-85bf-3c47af4fbcf9 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.401 2 DEBUG nova.compute.manager [-] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.402 2 DEBUG nova.network.neutron [-] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.527 2 DEBUG nova.network.neutron [-] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.543 2 DEBUG nova.network.neutron [-] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.560 2 INFO nova.compute.manager [-] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Took 0.16 seconds to deallocate network for instance.
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.632 2 DEBUG oslo_concurrency.lockutils [None req-2b61f078-a8e0-4df1-85bf-3c47af4fbcf9 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.633 2 DEBUG oslo_concurrency.lockutils [None req-2b61f078-a8e0-4df1-85bf-3c47af4fbcf9 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.708 2 DEBUG nova.compute.provider_tree [None req-2b61f078-a8e0-4df1-85bf-3c47af4fbcf9 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.726 2 DEBUG nova.scheduler.client.report [None req-2b61f078-a8e0-4df1-85bf-3c47af4fbcf9 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.745 2 DEBUG oslo_concurrency.lockutils [None req-2b61f078-a8e0-4df1-85bf-3c47af4fbcf9 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.112s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.772 2 INFO nova.scheduler.client.report [None req-2b61f078-a8e0-4df1-85bf-3c47af4fbcf9 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Deleted allocations for instance 8fd0525b-b74e-4fea-8a19-f03f445fbc07
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.832 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406587.832323, 73cd9aef-a159-4d0e-9fc4-435f191db0b9 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.833 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] VM Resumed (Lifecycle Event)
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.834 2 DEBUG nova.compute.manager [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.841 2 INFO nova.virt.libvirt.driver [-] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Instance running successfully.
Oct 02 12:03:07 compute-0 virtqemud[191807]: argument unsupported: QEMU guest agent is not configured
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.847 2 DEBUG nova.virt.libvirt.guest [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Failed to set time: agent not configured sync_guest_time /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:200
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.848 2 DEBUG nova.virt.libvirt.driver [None req-5199b18d-0a27-4885-9422-69d2bb74ff61 a9d1adde5fcb4d3bab833619b44f7a7c 71d9b13feff24ebd81a067d702973a51 - - default default] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] finish_migration finished successfully. finish_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:11793
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.850 2 DEBUG oslo_concurrency.lockutils [None req-2b61f078-a8e0-4df1-85bf-3c47af4fbcf9 d413386800eb45c8959596be3a47c369 8b0d43e818674dfd81b38d17af224b0d - - default default] Lock "8fd0525b-b74e-4fea-8a19-f03f445fbc07" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.163s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.851 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.854 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: active, current task_state: resize_finish, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.887 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] During sync_power_state the instance has a pending task (resize_finish). Skip.
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.888 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406587.8324149, 73cd9aef-a159-4d0e-9fc4-435f191db0b9 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.888 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] VM Started (Lifecycle Event)
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.910 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:03:07 compute-0 nova_compute[192079]: 2025-10-02 12:03:07.914 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Synchronizing instance power state after lifecycle event "Started"; current vm_state: active, current task_state: resize_finish, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:03:08 compute-0 nova_compute[192079]: 2025-10-02 12:03:08.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:03:08 compute-0 nova_compute[192079]: 2025-10-02 12:03:08.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:03:09 compute-0 nova_compute[192079]: 2025-10-02 12:03:09.247 2 DEBUG oslo_concurrency.lockutils [None req-1bec2035-43e5-47a2-8ad9-4d711e23d226 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Acquiring lock "refresh_cache-73cd9aef-a159-4d0e-9fc4-435f191db0b9" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:03:09 compute-0 nova_compute[192079]: 2025-10-02 12:03:09.248 2 DEBUG oslo_concurrency.lockutils [None req-1bec2035-43e5-47a2-8ad9-4d711e23d226 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Acquired lock "refresh_cache-73cd9aef-a159-4d0e-9fc4-435f191db0b9" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:03:09 compute-0 nova_compute[192079]: 2025-10-02 12:03:09.248 2 DEBUG nova.network.neutron [None req-1bec2035-43e5-47a2-8ad9-4d711e23d226 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:03:09 compute-0 nova_compute[192079]: 2025-10-02 12:03:09.304 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:09 compute-0 nova_compute[192079]: 2025-10-02 12:03:09.399 2 DEBUG nova.network.neutron [None req-1bec2035-43e5-47a2-8ad9-4d711e23d226 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:03:09 compute-0 nova_compute[192079]: 2025-10-02 12:03:09.759 2 DEBUG nova.network.neutron [None req-1bec2035-43e5-47a2-8ad9-4d711e23d226 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:03:09 compute-0 nova_compute[192079]: 2025-10-02 12:03:09.772 2 DEBUG oslo_concurrency.lockutils [None req-1bec2035-43e5-47a2-8ad9-4d711e23d226 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Releasing lock "refresh_cache-73cd9aef-a159-4d0e-9fc4-435f191db0b9" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:03:09 compute-0 nova_compute[192079]: 2025-10-02 12:03:09.784 2 DEBUG nova.virt.libvirt.driver [None req-1bec2035-43e5-47a2-8ad9-4d711e23d226 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Creating tmpfile /var/lib/nova/instances/73cd9aef-a159-4d0e-9fc4-435f191db0b9/tmpqqx17057 to verify with other compute node that the instance is on the same shared storage. check_instance_shared_storage_local /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:9618
Oct 02 12:03:09 compute-0 systemd[1]: machine-qemu\x2d10\x2dinstance\x2d0000000d.scope: Deactivated successfully.
Oct 02 12:03:09 compute-0 systemd[1]: machine-qemu\x2d10\x2dinstance\x2d0000000d.scope: Consumed 2.698s CPU time.
Oct 02 12:03:09 compute-0 systemd-machined[152150]: Machine qemu-10-instance-0000000d terminated.
Oct 02 12:03:09 compute-0 podman[222369]: 2025-10-02 12:03:09.90677959 +0000 UTC m=+0.050361539 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>)
Oct 02 12:03:09 compute-0 podman[222370]: 2025-10-02 12:03:09.912146147 +0000 UTC m=+0.054433050 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, container_name=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, org.label-schema.license=GPLv2, tcib_managed=true, config_id=iscsid, io.buildah.version=1.41.3)
Oct 02 12:03:10 compute-0 nova_compute[192079]: 2025-10-02 12:03:10.041 2 INFO nova.virt.libvirt.driver [-] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Instance destroyed successfully.
Oct 02 12:03:10 compute-0 nova_compute[192079]: 2025-10-02 12:03:10.042 2 DEBUG nova.objects.instance [None req-1bec2035-43e5-47a2-8ad9-4d711e23d226 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Lazy-loading 'resources' on Instance uuid 73cd9aef-a159-4d0e-9fc4-435f191db0b9 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:03:10 compute-0 nova_compute[192079]: 2025-10-02 12:03:10.054 2 INFO nova.virt.libvirt.driver [None req-1bec2035-43e5-47a2-8ad9-4d711e23d226 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Deleting instance files /var/lib/nova/instances/73cd9aef-a159-4d0e-9fc4-435f191db0b9_del
Oct 02 12:03:10 compute-0 nova_compute[192079]: 2025-10-02 12:03:10.060 2 INFO nova.virt.libvirt.driver [None req-1bec2035-43e5-47a2-8ad9-4d711e23d226 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Deletion of /var/lib/nova/instances/73cd9aef-a159-4d0e-9fc4-435f191db0b9_del complete
Oct 02 12:03:10 compute-0 nova_compute[192079]: 2025-10-02 12:03:10.151 2 DEBUG oslo_concurrency.lockutils [None req-1bec2035-43e5-47a2-8ad9-4d711e23d226 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:10 compute-0 nova_compute[192079]: 2025-10-02 12:03:10.151 2 DEBUG oslo_concurrency.lockutils [None req-1bec2035-43e5-47a2-8ad9-4d711e23d226 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:10 compute-0 nova_compute[192079]: 2025-10-02 12:03:10.166 2 DEBUG nova.objects.instance [None req-1bec2035-43e5-47a2-8ad9-4d711e23d226 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Lazy-loading 'migration_context' on Instance uuid 73cd9aef-a159-4d0e-9fc4-435f191db0b9 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:03:10 compute-0 nova_compute[192079]: 2025-10-02 12:03:10.239 2 DEBUG nova.compute.provider_tree [None req-1bec2035-43e5-47a2-8ad9-4d711e23d226 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:03:10 compute-0 nova_compute[192079]: 2025-10-02 12:03:10.256 2 DEBUG nova.scheduler.client.report [None req-1bec2035-43e5-47a2-8ad9-4d711e23d226 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:03:10 compute-0 nova_compute[192079]: 2025-10-02 12:03:10.327 2 DEBUG oslo_concurrency.lockutils [None req-1bec2035-43e5-47a2-8ad9-4d711e23d226 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 0.176s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:10 compute-0 nova_compute[192079]: 2025-10-02 12:03:10.660 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:03:10 compute-0 nova_compute[192079]: 2025-10-02 12:03:10.663 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:03:10 compute-0 nova_compute[192079]: 2025-10-02 12:03:10.664 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:03:10 compute-0 nova_compute[192079]: 2025-10-02 12:03:10.664 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:03:11 compute-0 nova_compute[192079]: 2025-10-02 12:03:11.114 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "refresh_cache-e09de65a-0b2d-4aa5-9d9a-49f039add691" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:03:11 compute-0 nova_compute[192079]: 2025-10-02 12:03:11.114 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquired lock "refresh_cache-e09de65a-0b2d-4aa5-9d9a-49f039add691" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:03:11 compute-0 nova_compute[192079]: 2025-10-02 12:03:11.115 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Forcefully refreshing network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2004
Oct 02 12:03:11 compute-0 nova_compute[192079]: 2025-10-02 12:03:11.115 2 DEBUG nova.objects.instance [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lazy-loading 'info_cache' on Instance uuid e09de65a-0b2d-4aa5-9d9a-49f039add691 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:03:11 compute-0 nova_compute[192079]: 2025-10-02 12:03:11.218 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:11 compute-0 nova_compute[192079]: 2025-10-02 12:03:11.236 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759406576.2353828, 96203b28-73b1-462a-87e9-4b0ca1d1f93b => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:03:11 compute-0 nova_compute[192079]: 2025-10-02 12:03:11.236 2 INFO nova.compute.manager [-] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] VM Stopped (Lifecycle Event)
Oct 02 12:03:11 compute-0 nova_compute[192079]: 2025-10-02 12:03:11.279 2 DEBUG nova.compute.manager [None req-019bc73d-0349-44ca-9389-7f0ca077dd7a - - - - - -] [instance: 96203b28-73b1-462a-87e9-4b0ca1d1f93b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:03:11 compute-0 nova_compute[192079]: 2025-10-02 12:03:11.459 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:03:11 compute-0 nova_compute[192079]: 2025-10-02 12:03:11.752 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:03:11 compute-0 nova_compute[192079]: 2025-10-02 12:03:11.778 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Releasing lock "refresh_cache-e09de65a-0b2d-4aa5-9d9a-49f039add691" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:03:11 compute-0 nova_compute[192079]: 2025-10-02 12:03:11.779 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Updated the network info_cache for instance _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9929
Oct 02 12:03:11 compute-0 nova_compute[192079]: 2025-10-02 12:03:11.779 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:03:11 compute-0 nova_compute[192079]: 2025-10-02 12:03:11.779 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:03:11 compute-0 nova_compute[192079]: 2025-10-02 12:03:11.780 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:03:11 compute-0 nova_compute[192079]: 2025-10-02 12:03:11.780 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:03:11 compute-0 nova_compute[192079]: 2025-10-02 12:03:11.780 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:03:11 compute-0 nova_compute[192079]: 2025-10-02 12:03:11.819 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:11 compute-0 nova_compute[192079]: 2025-10-02 12:03:11.820 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:11 compute-0 nova_compute[192079]: 2025-10-02 12:03:11.820 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:11 compute-0 nova_compute[192079]: 2025-10-02 12:03:11.820 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:03:11 compute-0 nova_compute[192079]: 2025-10-02 12:03:11.927 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/e09de65a-0b2d-4aa5-9d9a-49f039add691/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:03:11 compute-0 nova_compute[192079]: 2025-10-02 12:03:11.983 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/e09de65a-0b2d-4aa5-9d9a-49f039add691/disk --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:03:11 compute-0 nova_compute[192079]: 2025-10-02 12:03:11.984 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/e09de65a-0b2d-4aa5-9d9a-49f039add691/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:03:12 compute-0 nova_compute[192079]: 2025-10-02 12:03:12.039 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/e09de65a-0b2d-4aa5-9d9a-49f039add691/disk --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:03:12 compute-0 nova_compute[192079]: 2025-10-02 12:03:12.164 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:03:12 compute-0 nova_compute[192079]: 2025-10-02 12:03:12.166 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5395MB free_disk=73.43130493164062GB free_vcpus=7 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:03:12 compute-0 nova_compute[192079]: 2025-10-02 12:03:12.166 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:12 compute-0 nova_compute[192079]: 2025-10-02 12:03:12.166 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:12 compute-0 nova_compute[192079]: 2025-10-02 12:03:12.275 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance e09de65a-0b2d-4aa5-9d9a-49f039add691 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:03:12 compute-0 nova_compute[192079]: 2025-10-02 12:03:12.275 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 1 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:03:12 compute-0 nova_compute[192079]: 2025-10-02 12:03:12.275 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=704MB phys_disk=79GB used_disk=1GB total_vcpus=8 used_vcpus=1 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:03:12 compute-0 nova_compute[192079]: 2025-10-02 12:03:12.317 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:03:12 compute-0 nova_compute[192079]: 2025-10-02 12:03:12.331 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:03:12 compute-0 nova_compute[192079]: 2025-10-02 12:03:12.371 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:03:12 compute-0 nova_compute[192079]: 2025-10-02 12:03:12.371 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.205s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:14 compute-0 nova_compute[192079]: 2025-10-02 12:03:14.306 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:15 compute-0 systemd[1]: Stopping User Manager for UID 42436...
Oct 02 12:03:15 compute-0 systemd[222295]: Activating special unit Exit the Session...
Oct 02 12:03:15 compute-0 systemd[222295]: Stopped target Main User Target.
Oct 02 12:03:15 compute-0 systemd[222295]: Stopped target Basic System.
Oct 02 12:03:15 compute-0 systemd[222295]: Stopped target Paths.
Oct 02 12:03:15 compute-0 systemd[222295]: Stopped target Sockets.
Oct 02 12:03:15 compute-0 systemd[222295]: Stopped target Timers.
Oct 02 12:03:15 compute-0 systemd[222295]: Stopped Mark boot as successful after the user session has run 2 minutes.
Oct 02 12:03:15 compute-0 systemd[222295]: Stopped Daily Cleanup of User's Temporary Directories.
Oct 02 12:03:15 compute-0 systemd[222295]: Closed D-Bus User Message Bus Socket.
Oct 02 12:03:15 compute-0 systemd[222295]: Stopped Create User's Volatile Files and Directories.
Oct 02 12:03:15 compute-0 systemd[222295]: Removed slice User Application Slice.
Oct 02 12:03:15 compute-0 systemd[222295]: Reached target Shutdown.
Oct 02 12:03:15 compute-0 systemd[222295]: Finished Exit the Session.
Oct 02 12:03:15 compute-0 systemd[222295]: Reached target Exit the Session.
Oct 02 12:03:15 compute-0 systemd[1]: user@42436.service: Deactivated successfully.
Oct 02 12:03:15 compute-0 systemd[1]: Stopped User Manager for UID 42436.
Oct 02 12:03:15 compute-0 systemd[1]: Stopping User Runtime Directory /run/user/42436...
Oct 02 12:03:15 compute-0 systemd[1]: run-user-42436.mount: Deactivated successfully.
Oct 02 12:03:15 compute-0 systemd[1]: user-runtime-dir@42436.service: Deactivated successfully.
Oct 02 12:03:15 compute-0 systemd[1]: Stopped User Runtime Directory /run/user/42436.
Oct 02 12:03:15 compute-0 systemd[1]: Removed slice User Slice of UID 42436.
Oct 02 12:03:16 compute-0 nova_compute[192079]: 2025-10-02 12:03:16.222 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.569 12 DEBUG novaclient.v2.client [-] REQ: curl -g -i -X GET https://nova-internal.openstack.svc:8774/v2.1/flavors?is_public=None -H "Accept: application/json" -H "User-Agent: python-novaclient" -H "X-Auth-Token: {SHA256}71356153e9cf84f21025dfc4736dd696f1dc7f2f65609442b5b4aacce068ebfe" -H "X-OpenStack-Nova-API-Version: 2.1" _http_log_request /usr/lib/python3.9/site-packages/keystoneauth1/session.py:519
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.670 12 DEBUG novaclient.v2.client [-] RESP: [200] Connection: Keep-Alive Content-Length: 644 Content-Type: application/json Date: Thu, 02 Oct 2025 12:03:17 GMT Keep-Alive: timeout=5, max=100 OpenStack-API-Version: compute 2.1 Server: Apache Vary: OpenStack-API-Version,X-OpenStack-Nova-API-Version X-OpenStack-Nova-API-Version: 2.1 x-compute-request-id: req-a20d4cca-ec72-4e0d-9e47-4dbdb72614f9 x-openstack-request-id: req-a20d4cca-ec72-4e0d-9e47-4dbdb72614f9 _http_log_response /usr/lib/python3.9/site-packages/keystoneauth1/session.py:550
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.670 12 DEBUG novaclient.v2.client [-] RESP BODY: {"flavors": [{"id": "9949d9da-6314-4ede-8797-6f2f0a6a64fc", "name": "m1.micro", "links": [{"rel": "self", "href": "https://nova-internal.openstack.svc:8774/v2.1/flavors/9949d9da-6314-4ede-8797-6f2f0a6a64fc"}, {"rel": "bookmark", "href": "https://nova-internal.openstack.svc:8774/flavors/9949d9da-6314-4ede-8797-6f2f0a6a64fc"}]}, {"id": "9ac83da7-f31e-4467-8569-d28002f6aeed", "name": "m1.nano", "links": [{"rel": "self", "href": "https://nova-internal.openstack.svc:8774/v2.1/flavors/9ac83da7-f31e-4467-8569-d28002f6aeed"}, {"rel": "bookmark", "href": "https://nova-internal.openstack.svc:8774/flavors/9ac83da7-f31e-4467-8569-d28002f6aeed"}]}]} _http_log_response /usr/lib/python3.9/site-packages/keystoneauth1/session.py:582
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.670 12 DEBUG novaclient.v2.client [-] GET call to compute for https://nova-internal.openstack.svc:8774/v2.1/flavors?is_public=None used request id req-a20d4cca-ec72-4e0d-9e47-4dbdb72614f9 request /usr/lib/python3.9/site-packages/keystoneauth1/session.py:954
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.672 12 DEBUG novaclient.v2.client [-] REQ: curl -g -i -X GET https://nova-internal.openstack.svc:8774/v2.1/flavors/9949d9da-6314-4ede-8797-6f2f0a6a64fc -H "Accept: application/json" -H "User-Agent: python-novaclient" -H "X-Auth-Token: {SHA256}71356153e9cf84f21025dfc4736dd696f1dc7f2f65609442b5b4aacce068ebfe" -H "X-OpenStack-Nova-API-Version: 2.1" _http_log_request /usr/lib/python3.9/site-packages/keystoneauth1/session.py:519
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.737 12 DEBUG novaclient.v2.client [-] RESP: [200] Connection: Keep-Alive Content-Length: 496 Content-Type: application/json Date: Thu, 02 Oct 2025 12:03:17 GMT Keep-Alive: timeout=5, max=99 OpenStack-API-Version: compute 2.1 Server: Apache Vary: OpenStack-API-Version,X-OpenStack-Nova-API-Version X-OpenStack-Nova-API-Version: 2.1 x-compute-request-id: req-607b0832-c0c3-4bb1-945a-7a5d2832c422 x-openstack-request-id: req-607b0832-c0c3-4bb1-945a-7a5d2832c422 _http_log_response /usr/lib/python3.9/site-packages/keystoneauth1/session.py:550
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.738 12 DEBUG novaclient.v2.client [-] RESP BODY: {"flavor": {"id": "9949d9da-6314-4ede-8797-6f2f0a6a64fc", "name": "m1.micro", "ram": 192, "disk": 1, "swap": "", "OS-FLV-EXT-DATA:ephemeral": 0, "OS-FLV-DISABLED:disabled": false, "vcpus": 1, "os-flavor-access:is_public": true, "rxtx_factor": 1.0, "links": [{"rel": "self", "href": "https://nova-internal.openstack.svc:8774/v2.1/flavors/9949d9da-6314-4ede-8797-6f2f0a6a64fc"}, {"rel": "bookmark", "href": "https://nova-internal.openstack.svc:8774/flavors/9949d9da-6314-4ede-8797-6f2f0a6a64fc"}]}} _http_log_response /usr/lib/python3.9/site-packages/keystoneauth1/session.py:582
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.738 12 DEBUG novaclient.v2.client [-] GET call to compute for https://nova-internal.openstack.svc:8774/v2.1/flavors/9949d9da-6314-4ede-8797-6f2f0a6a64fc used request id req-607b0832-c0c3-4bb1-945a-7a5d2832c422 request /usr/lib/python3.9/site-packages/keystoneauth1/session.py:954
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.739 12 DEBUG ceilometer.compute.discovery [-] instance data: {'id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691', 'name': 'tempest-MigrationsAdminTest-server-1510345576', 'flavor': {'id': '9949d9da-6314-4ede-8797-6f2f0a6a64fc', 'name': 'm1.micro', 'vcpus': 1, 'ram': 192, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'os_type': 'hvm', 'architecture': 'x86_64', 'OS-EXT-SRV-ATTR:instance_name': 'instance-00000009', 'OS-EXT-SRV-ATTR:host': 'compute-0.ctlplane.example.com', 'OS-EXT-STS:vm_state': 'running', 'tenant_id': '4dcc6c51db2640cbb04083b3336de813', 'user_id': '8da35688aa864e189f10b334a21bc6c4', 'hostId': 'fe48d2ffe40763b6cdd7abb7a26325f1d73d360e787ce17949377787', 'status': 'active', 'metadata': {}} discover_libvirt_polling /usr/lib/python3.9/site-packages/ceilometer/compute/discovery.py:228
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.739 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.latency in the context of pollsters
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.764 12 DEBUG ceilometer.compute.pollsters [-] e09de65a-0b2d-4aa5-9d9a-49f039add691/disk.device.write.latency volume: 101511725 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.764 12 DEBUG ceilometer.compute.pollsters [-] e09de65a-0b2d-4aa5-9d9a-49f039add691/disk.device.write.latency volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'a596c51d-5163-468d-a618-b9c696f83dcc', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 101511725, 'user_id': '8da35688aa864e189f10b334a21bc6c4', 'user_name': None, 'project_id': '4dcc6c51db2640cbb04083b3336de813', 'project_name': None, 'resource_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691-vda', 'timestamp': '2025-10-02T12:03:17.739878', 'resource_metadata': {'display_name': 'tempest-MigrationsAdminTest-server-1510345576', 'name': 'instance-00000009', 'instance_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691', 'instance_type': 'm1.micro', 'host': 'fe48d2ffe40763b6cdd7abb7a26325f1d73d360e787ce17949377787', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9949d9da-6314-4ede-8797-6f2f0a6a64fc', 'name': 'm1.micro', 'vcpus': 1, 'ram': 192, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 192, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'c841dd40-9f87-11f0-af18-fa163efc5e78', 'monotonic_time': 4595.427076966, 'message_signature': 'ebf813ae11aa9bad7fa59aa0467b0a62759d93a6eba1daa659286420ab2b4d46'}, {'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 0, 'user_id': '8da35688aa864e189f10b334a21bc6c4', 'user_name': None, 'project_id': '4dcc6c51db2640cbb04083b3336de813', 'project_name': None, 
'resource_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691-sda', 'timestamp': '2025-10-02T12:03:17.739878', 'resource_metadata': {'display_name': 'tempest-MigrationsAdminTest-server-1510345576', 'name': 'instance-00000009', 'instance_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691', 'instance_type': 'm1.micro', 'host': 'fe48d2ffe40763b6cdd7abb7a26325f1d73d360e787ce17949377787', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9949d9da-6314-4ede-8797-6f2f0a6a64fc', 'name': 'm1.micro', 'vcpus': 1, 'ram': 192, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 192, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'c841ecc2-9f87-11f0-af18-fa163efc5e78', 'monotonic_time': 4595.427076966, 'message_signature': '739b85d263ccd66849de204bd0a4dd96cb8c24f2fb98bd27318279c2c07e150f'}]}, 'timestamp': '2025-10-02 12:03:17.765166', '_unique_id': '43f7f1793b32498cb8fa23cf0a843efe'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.769 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.772 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.allocation in the context of pollsters
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.785 12 DEBUG ceilometer.compute.pollsters [-] e09de65a-0b2d-4aa5-9d9a-49f039add691/disk.device.allocation volume: 30085120 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.785 12 DEBUG ceilometer.compute.pollsters [-] e09de65a-0b2d-4aa5-9d9a-49f039add691/disk.device.allocation volume: 487424 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '1c5bfc46-8d60-451a-bc63-7cdee28a9477', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 30085120, 'user_id': '8da35688aa864e189f10b334a21bc6c4', 'user_name': None, 'project_id': '4dcc6c51db2640cbb04083b3336de813', 'project_name': None, 'resource_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691-vda', 'timestamp': '2025-10-02T12:03:17.772721', 'resource_metadata': {'display_name': 'tempest-MigrationsAdminTest-server-1510345576', 'name': 'instance-00000009', 'instance_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691', 'instance_type': 'm1.micro', 'host': 'fe48d2ffe40763b6cdd7abb7a26325f1d73d360e787ce17949377787', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9949d9da-6314-4ede-8797-6f2f0a6a64fc', 'name': 'm1.micro', 'vcpus': 1, 'ram': 192, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 192, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'c8450f88-9f87-11f0-af18-fa163efc5e78', 'monotonic_time': 4595.459906104, 'message_signature': '0d7b13f637598c0c7d592d53030f6704b287fbe3178d840c127c9207f4648e6c'}, {'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 487424, 'user_id': '8da35688aa864e189f10b334a21bc6c4', 'user_name': None, 'project_id': '4dcc6c51db2640cbb04083b3336de813', 'project_name': None, 'resource_id': 
'e09de65a-0b2d-4aa5-9d9a-49f039add691-sda', 'timestamp': '2025-10-02T12:03:17.772721', 'resource_metadata': {'display_name': 'tempest-MigrationsAdminTest-server-1510345576', 'name': 'instance-00000009', 'instance_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691', 'instance_type': 'm1.micro', 'host': 'fe48d2ffe40763b6cdd7abb7a26325f1d73d360e787ce17949377787', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9949d9da-6314-4ede-8797-6f2f0a6a64fc', 'name': 'm1.micro', 'vcpus': 1, 'ram': 192, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 192, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'c8451a64-9f87-11f0-af18-fa163efc5e78', 'monotonic_time': 4595.459906104, 'message_signature': 'cf50bd89ca4c152488f0c5515cb47124f84c81a2a1f04c4df48e2aa87eff47c3'}]}, 'timestamp': '2025-10-02 12:03:17.785974', '_unique_id': 'bac2389340f447c5b1427d2eb10211bc'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.786 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.787 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes in the context of pollsters
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.791 12 INFO ceilometer.polling.manager [-] Polling pollster cpu in the context of pollsters
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.806 12 DEBUG ceilometer.compute.pollsters [-] e09de65a-0b2d-4aa5-9d9a-49f039add691/cpu volume: 11850000000 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'd1d00ab3-ef78-4b01-99cb-c8a0117a82c7', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'cpu', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 11850000000, 'user_id': '8da35688aa864e189f10b334a21bc6c4', 'user_name': None, 'project_id': '4dcc6c51db2640cbb04083b3336de813', 'project_name': None, 'resource_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691', 'timestamp': '2025-10-02T12:03:17.791867', 'resource_metadata': {'display_name': 'tempest-MigrationsAdminTest-server-1510345576', 'name': 'instance-00000009', 'instance_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691', 'instance_type': 'm1.micro', 'host': 'fe48d2ffe40763b6cdd7abb7a26325f1d73d360e787ce17949377787', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9949d9da-6314-4ede-8797-6f2f0a6a64fc', 'name': 'm1.micro', 'vcpus': 1, 'ram': 192, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 192, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'cpu_number': 1}, 'message_id': 'c8483bd6-9f87-11f0-af18-fa163efc5e78', 'monotonic_time': 4595.492785483, 'message_signature': '646511fed13b9ff8a9017c1f4a8da17b001c6330f725e628c3df34b537525ea8'}]}, 'timestamp': '2025-10-02 12:03:17.806560', '_unique_id': '9c39f9bb65174e6d94efcd7965096be6'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.807 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.808 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.latency in the context of pollsters
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.808 12 DEBUG ceilometer.compute.pollsters [-] e09de65a-0b2d-4aa5-9d9a-49f039add691/disk.device.read.latency volume: 1471421572 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.808 12 DEBUG ceilometer.compute.pollsters [-] e09de65a-0b2d-4aa5-9d9a-49f039add691/disk.device.read.latency volume: 95179400 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'e21cc2c6-ca4b-441e-be9e-c3a7aad54e74', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 1471421572, 'user_id': '8da35688aa864e189f10b334a21bc6c4', 'user_name': None, 'project_id': '4dcc6c51db2640cbb04083b3336de813', 'project_name': None, 'resource_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691-vda', 'timestamp': '2025-10-02T12:03:17.808522', 'resource_metadata': {'display_name': 'tempest-MigrationsAdminTest-server-1510345576', 'name': 'instance-00000009', 'instance_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691', 'instance_type': 'm1.micro', 'host': 'fe48d2ffe40763b6cdd7abb7a26325f1d73d360e787ce17949377787', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9949d9da-6314-4ede-8797-6f2f0a6a64fc', 'name': 'm1.micro', 'vcpus': 1, 'ram': 192, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 192, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'c84894c8-9f87-11f0-af18-fa163efc5e78', 'monotonic_time': 4595.427076966, 'message_signature': '1d92b22834cad447008b1bdae144689f0d04fa1bd4ca63c06e0db71f746d5131'}, {'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 95179400, 'user_id': '8da35688aa864e189f10b334a21bc6c4', 'user_name': None, 'project_id': '4dcc6c51db2640cbb04083b3336de813', 'project_name': 
None, 'resource_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691-sda', 'timestamp': '2025-10-02T12:03:17.808522', 'resource_metadata': {'display_name': 'tempest-MigrationsAdminTest-server-1510345576', 'name': 'instance-00000009', 'instance_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691', 'instance_type': 'm1.micro', 'host': 'fe48d2ffe40763b6cdd7abb7a26325f1d73d360e787ce17949377787', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9949d9da-6314-4ede-8797-6f2f0a6a64fc', 'name': 'm1.micro', 'vcpus': 1, 'ram': 192, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 192, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'c8489f2c-9f87-11f0-af18-fa163efc5e78', 'monotonic_time': 4595.427076966, 'message_signature': '55d6e109ba2ee64daca67bfe4bfb070c7539dcdd74e93294de3b23782fe16f27'}]}, 'timestamp': '2025-10-02 12:03:17.809047', '_unique_id': '8b94cf165d1d4bda8adc1268faf42fdc'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.809 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.810 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets.drop in the context of pollsters
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.810 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes.delta in the context of pollsters
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.810 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.requests in the context of pollsters
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.810 12 DEBUG ceilometer.compute.pollsters [-] e09de65a-0b2d-4aa5-9d9a-49f039add691/disk.device.read.requests volume: 1206 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.810 12 DEBUG ceilometer.compute.pollsters [-] e09de65a-0b2d-4aa5-9d9a-49f039add691/disk.device.read.requests volume: 108 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'f7f2917c-e2fa-40b1-aff7-799288688131', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 1206, 'user_id': '8da35688aa864e189f10b334a21bc6c4', 'user_name': None, 'project_id': '4dcc6c51db2640cbb04083b3336de813', 'project_name': None, 'resource_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691-vda', 'timestamp': '2025-10-02T12:03:17.810432', 'resource_metadata': {'display_name': 'tempest-MigrationsAdminTest-server-1510345576', 'name': 'instance-00000009', 'instance_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691', 'instance_type': 'm1.micro', 'host': 'fe48d2ffe40763b6cdd7abb7a26325f1d73d360e787ce17949377787', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9949d9da-6314-4ede-8797-6f2f0a6a64fc', 'name': 'm1.micro', 'vcpus': 1, 'ram': 192, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 192, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'c848def6-9f87-11f0-af18-fa163efc5e78', 'monotonic_time': 4595.427076966, 'message_signature': '35ba4eb4d5bf876bb197eff1b79e36c7fdf703d0f1e8f9f83298c483d1bcacd5'}, {'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 108, 'user_id': '8da35688aa864e189f10b334a21bc6c4', 'user_name': None, 'project_id': '4dcc6c51db2640cbb04083b3336de813', 'project_name': 
None, 'resource_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691-sda', 'timestamp': '2025-10-02T12:03:17.810432', 'resource_metadata': {'display_name': 'tempest-MigrationsAdminTest-server-1510345576', 'name': 'instance-00000009', 'instance_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691', 'instance_type': 'm1.micro', 'host': 'fe48d2ffe40763b6cdd7abb7a26325f1d73d360e787ce17949377787', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9949d9da-6314-4ede-8797-6f2f0a6a64fc', 'name': 'm1.micro', 'vcpus': 1, 'ram': 192, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 192, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'c848e748-9f87-11f0-af18-fa163efc5e78', 'monotonic_time': 4595.427076966, 'message_signature': '3e94627c7a1d16caa581133995e6c00e0df01177db3f6c1c66f4ab53ad877c61'}]}, 'timestamp': '2025-10-02 12:03:17.810869', '_unique_id': '5ecdbf804eb54ec89d3b4210e3cb205e'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.811 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.812 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.requests in the context of pollsters
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.812 12 DEBUG ceilometer.compute.pollsters [-] e09de65a-0b2d-4aa5-9d9a-49f039add691/disk.device.write.requests volume: 41 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.812 12 DEBUG ceilometer.compute.pollsters [-] e09de65a-0b2d-4aa5-9d9a-49f039add691/disk.device.write.requests volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '8fd5047b-ebc0-40f0-80b7-ba8cb7d40d3e', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 41, 'user_id': '8da35688aa864e189f10b334a21bc6c4', 'user_name': None, 'project_id': '4dcc6c51db2640cbb04083b3336de813', 'project_name': None, 'resource_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691-vda', 'timestamp': '2025-10-02T12:03:17.812126', 'resource_metadata': {'display_name': 'tempest-MigrationsAdminTest-server-1510345576', 'name': 'instance-00000009', 'instance_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691', 'instance_type': 'm1.micro', 'host': 'fe48d2ffe40763b6cdd7abb7a26325f1d73d360e787ce17949377787', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9949d9da-6314-4ede-8797-6f2f0a6a64fc', 'name': 'm1.micro', 'vcpus': 1, 'ram': 192, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 192, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'c84921f4-9f87-11f0-af18-fa163efc5e78', 'monotonic_time': 4595.427076966, 'message_signature': '7b4cc68e14a16b6f38347cd86050ca9112b6ea08bacc194b23688f0eaab2cf7f'}, {'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 0, 'user_id': '8da35688aa864e189f10b334a21bc6c4', 'user_name': None, 'project_id': '4dcc6c51db2640cbb04083b3336de813', 'project_name': 
None, 'resource_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691-sda', 'timestamp': '2025-10-02T12:03:17.812126', 'resource_metadata': {'display_name': 'tempest-MigrationsAdminTest-server-1510345576', 'name': 'instance-00000009', 'instance_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691', 'instance_type': 'm1.micro', 'host': 'fe48d2ffe40763b6cdd7abb7a26325f1d73d360e787ce17949377787', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9949d9da-6314-4ede-8797-6f2f0a6a64fc', 'name': 'm1.micro', 'vcpus': 1, 'ram': 192, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 192, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'c8492af0-9f87-11f0-af18-fa163efc5e78', 'monotonic_time': 4595.427076966, 'message_signature': 'd839eb1acb2b2b2b96efb1d50611b98f83478d441069dd225a6ad54fe3beb656'}]}, 'timestamp': '2025-10-02 12:03:17.812622', '_unique_id': '84fdeaf8f78341e09d7521b1d5bdb95e'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.iops in the context of pollsters
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for PerDeviceDiskIOPSPollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.813 12 ERROR ceilometer.polling.manager [-] Prevent pollster disk.device.iops from polling [<NovaLikeServer: tempest-MigrationsAdminTest-server-1510345576>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-MigrationsAdminTest-server-1510345576>]
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.814 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes in the context of pollsters
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.814 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.bytes in the context of pollsters
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.814 12 DEBUG ceilometer.compute.pollsters [-] e09de65a-0b2d-4aa5-9d9a-49f039add691/disk.device.write.bytes volume: 335872 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.814 12 DEBUG ceilometer.compute.pollsters [-] e09de65a-0b2d-4aa5-9d9a-49f039add691/disk.device.write.bytes volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '37b087b7-1979-4d11-8a7c-fa0a823c2902', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 335872, 'user_id': '8da35688aa864e189f10b334a21bc6c4', 'user_name': None, 'project_id': '4dcc6c51db2640cbb04083b3336de813', 'project_name': None, 'resource_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691-vda', 'timestamp': '2025-10-02T12:03:17.814483', 'resource_metadata': {'display_name': 'tempest-MigrationsAdminTest-server-1510345576', 'name': 'instance-00000009', 'instance_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691', 'instance_type': 'm1.micro', 'host': 'fe48d2ffe40763b6cdd7abb7a26325f1d73d360e787ce17949377787', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9949d9da-6314-4ede-8797-6f2f0a6a64fc', 'name': 'm1.micro', 'vcpus': 1, 'ram': 192, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 192, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'c8497da2-9f87-11f0-af18-fa163efc5e78', 'monotonic_time': 4595.427076966, 'message_signature': '2824b4c39e41f6a5f9de8de6e469cd238bdbcac12a5771bfb6ce8abcad5fc1e7'}, {'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '8da35688aa864e189f10b334a21bc6c4', 'user_name': None, 'project_id': '4dcc6c51db2640cbb04083b3336de813', 'project_name': None, 
'resource_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691-sda', 'timestamp': '2025-10-02T12:03:17.814483', 'resource_metadata': {'display_name': 'tempest-MigrationsAdminTest-server-1510345576', 'name': 'instance-00000009', 'instance_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691', 'instance_type': 'm1.micro', 'host': 'fe48d2ffe40763b6cdd7abb7a26325f1d73d360e787ce17949377787', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9949d9da-6314-4ede-8797-6f2f0a6a64fc', 'name': 'm1.micro', 'vcpus': 1, 'ram': 192, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 192, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'c84986b2-9f87-11f0-af18-fa163efc5e78', 'monotonic_time': 4595.427076966, 'message_signature': '4a72cf7350d887ccd7ee579d719c92217074de349f721be4045fe5c1557e74d0'}]}, 'timestamp': '2025-10-02 12:03:17.814975', '_unique_id': '9d5ef278bdde4b5e9e5a923361aef9f2'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.815 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.816 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.bytes in the context of pollsters
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.816 12 DEBUG ceilometer.compute.pollsters [-] e09de65a-0b2d-4aa5-9d9a-49f039add691/disk.device.read.bytes volume: 32020480 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.816 12 DEBUG ceilometer.compute.pollsters [-] e09de65a-0b2d-4aa5-9d9a-49f039add691/disk.device.read.bytes volume: 274750 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '87e4b16a-9b10-4edc-b5b3-11523410c288', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 32020480, 'user_id': '8da35688aa864e189f10b334a21bc6c4', 'user_name': None, 'project_id': '4dcc6c51db2640cbb04083b3336de813', 'project_name': None, 'resource_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691-vda', 'timestamp': '2025-10-02T12:03:17.816246', 'resource_metadata': {'display_name': 'tempest-MigrationsAdminTest-server-1510345576', 'name': 'instance-00000009', 'instance_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691', 'instance_type': 'm1.micro', 'host': 'fe48d2ffe40763b6cdd7abb7a26325f1d73d360e787ce17949377787', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9949d9da-6314-4ede-8797-6f2f0a6a64fc', 'name': 'm1.micro', 'vcpus': 1, 'ram': 192, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 192, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'c849c1ae-9f87-11f0-af18-fa163efc5e78', 'monotonic_time': 4595.427076966, 'message_signature': '9c33640305b1e14280828696a1363773a81325c0c34fd8638f6ba5b85825996b'}, {'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 274750, 'user_id': '8da35688aa864e189f10b334a21bc6c4', 'user_name': None, 'project_id': '4dcc6c51db2640cbb04083b3336de813', 'project_name': None, 
'resource_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691-sda', 'timestamp': '2025-10-02T12:03:17.816246', 'resource_metadata': {'display_name': 'tempest-MigrationsAdminTest-server-1510345576', 'name': 'instance-00000009', 'instance_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691', 'instance_type': 'm1.micro', 'host': 'fe48d2ffe40763b6cdd7abb7a26325f1d73d360e787ce17949377787', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9949d9da-6314-4ede-8797-6f2f0a6a64fc', 'name': 'm1.micro', 'vcpus': 1, 'ram': 192, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 192, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'c849c960-9f87-11f0-af18-fa163efc5e78', 'monotonic_time': 4595.427076966, 'message_signature': '31205a6a4245eb590dd13855b8b0b8740b08dfa686326e206b3f37eb151102c8'}]}, 'timestamp': '2025-10-02 12:03:17.816655', '_unique_id': 'c067198a0b274ea18e26ae70ed979536'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.capacity in the context of pollsters
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.817 12 DEBUG ceilometer.compute.pollsters [-] e09de65a-0b2d-4aa5-9d9a-49f039add691/disk.device.capacity volume: 1073741824 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 DEBUG ceilometer.compute.pollsters [-] e09de65a-0b2d-4aa5-9d9a-49f039add691/disk.device.capacity volume: 485376 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'bd08221b-bd71-4fdf-8d80-b2bd439c1827', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 1073741824, 'user_id': '8da35688aa864e189f10b334a21bc6c4', 'user_name': None, 'project_id': '4dcc6c51db2640cbb04083b3336de813', 'project_name': None, 'resource_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691-vda', 'timestamp': '2025-10-02T12:03:17.817805', 'resource_metadata': {'display_name': 'tempest-MigrationsAdminTest-server-1510345576', 'name': 'instance-00000009', 'instance_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691', 'instance_type': 'm1.micro', 'host': 'fe48d2ffe40763b6cdd7abb7a26325f1d73d360e787ce17949377787', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9949d9da-6314-4ede-8797-6f2f0a6a64fc', 'name': 'm1.micro', 'vcpus': 1, 'ram': 192, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 192, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'c849fea8-9f87-11f0-af18-fa163efc5e78', 'monotonic_time': 4595.459906104, 'message_signature': '53fafb378cccc953ccc94b345e07925f8cba514793f61f52c3407b560f2f49c5'}, {'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 485376, 'user_id': '8da35688aa864e189f10b334a21bc6c4', 'user_name': None, 'project_id': '4dcc6c51db2640cbb04083b3336de813', 'project_name': None, 'resource_id': 
'e09de65a-0b2d-4aa5-9d9a-49f039add691-sda', 'timestamp': '2025-10-02T12:03:17.817805', 'resource_metadata': {'display_name': 'tempest-MigrationsAdminTest-server-1510345576', 'name': 'instance-00000009', 'instance_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691', 'instance_type': 'm1.micro', 'host': 'fe48d2ffe40763b6cdd7abb7a26325f1d73d360e787ce17949377787', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9949d9da-6314-4ede-8797-6f2f0a6a64fc', 'name': 'm1.micro', 'vcpus': 1, 'ram': 192, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 192, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'c84a0740-9f87-11f0-af18-fa163efc5e78', 'monotonic_time': 4595.459906104, 'message_signature': 'e13e4328bca31ac63c6ee765bc7c0f476b56bd5057062852a3e09afd5fc1486b'}]}, 'timestamp': '2025-10-02 12:03:17.818239', '_unique_id': '1f92d9cf0c40478ea72ffc6060083520'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.818 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.819 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes.delta in the context of pollsters
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.819 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets.drop in the context of pollsters
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.819 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets in the context of pollsters
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.819 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes.rate in the context of pollsters
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.819 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for IncomingBytesRatePollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.819 12 ERROR ceilometer.polling.manager [-] Prevent pollster network.incoming.bytes.rate from polling [<NovaLikeServer: tempest-MigrationsAdminTest-server-1510345576>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-MigrationsAdminTest-server-1510345576>]
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.819 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes.rate in the context of pollsters
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.819 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for OutgoingBytesRatePollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.819 12 ERROR ceilometer.polling.manager [-] Prevent pollster network.outgoing.bytes.rate from polling [<NovaLikeServer: tempest-MigrationsAdminTest-server-1510345576>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-MigrationsAdminTest-server-1510345576>]
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.820 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.latency in the context of pollsters
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.820 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for PerDeviceDiskLatencyPollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.820 12 ERROR ceilometer.polling.manager [-] Prevent pollster disk.device.latency from polling [<NovaLikeServer: tempest-MigrationsAdminTest-server-1510345576>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-MigrationsAdminTest-server-1510345576>]
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.820 12 INFO ceilometer.polling.manager [-] Polling pollster memory.usage in the context of pollsters
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.820 12 DEBUG ceilometer.compute.pollsters [-] e09de65a-0b2d-4aa5-9d9a-49f039add691/memory.usage volume: 43.046875 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'c02b21c8-585b-43d1-9064-a5da4b641eda', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'memory.usage', 'counter_type': 'gauge', 'counter_unit': 'MB', 'counter_volume': 43.046875, 'user_id': '8da35688aa864e189f10b334a21bc6c4', 'user_name': None, 'project_id': '4dcc6c51db2640cbb04083b3336de813', 'project_name': None, 'resource_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691', 'timestamp': '2025-10-02T12:03:17.820528', 'resource_metadata': {'display_name': 'tempest-MigrationsAdminTest-server-1510345576', 'name': 'instance-00000009', 'instance_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691', 'instance_type': 'm1.micro', 'host': 'fe48d2ffe40763b6cdd7abb7a26325f1d73d360e787ce17949377787', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9949d9da-6314-4ede-8797-6f2f0a6a64fc', 'name': 'm1.micro', 'vcpus': 1, 'ram': 192, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 192, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1}, 'message_id': 'c84a69a6-9f87-11f0-af18-fa163efc5e78', 'monotonic_time': 4595.492785483, 'message_signature': '1571c26b32249dcbe48cb60e18553d409844eea7fc44d82e7bfd422049962b9c'}]}, 'timestamp': '2025-10-02 12:03:17.820768', '_unique_id': 'a33274063a73422fb28f7581782fc3ce'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets.error in the context of pollsters
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.821 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.usage in the context of pollsters
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 DEBUG ceilometer.compute.pollsters [-] e09de65a-0b2d-4aa5-9d9a-49f039add691/disk.device.usage volume: 30081024 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 DEBUG ceilometer.compute.pollsters [-] e09de65a-0b2d-4aa5-9d9a-49f039add691/disk.device.usage volume: 485376 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'ebd98218-a365-4441-b810-0ec7c253b594', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 30081024, 'user_id': '8da35688aa864e189f10b334a21bc6c4', 'user_name': None, 'project_id': '4dcc6c51db2640cbb04083b3336de813', 'project_name': None, 'resource_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691-vda', 'timestamp': '2025-10-02T12:03:17.822052', 'resource_metadata': {'display_name': 'tempest-MigrationsAdminTest-server-1510345576', 'name': 'instance-00000009', 'instance_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691', 'instance_type': 'm1.micro', 'host': 'fe48d2ffe40763b6cdd7abb7a26325f1d73d360e787ce17949377787', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9949d9da-6314-4ede-8797-6f2f0a6a64fc', 'name': 'm1.micro', 'vcpus': 1, 'ram': 192, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 192, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'c84aa47a-9f87-11f0-af18-fa163efc5e78', 'monotonic_time': 4595.459906104, 'message_signature': 'ca0e66c41fd3fb08f7d28dd3fcc742b2aa69b05ac5c0c3dddeb87d450c2abada'}, {'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 485376, 'user_id': '8da35688aa864e189f10b334a21bc6c4', 'user_name': None, 'project_id': '4dcc6c51db2640cbb04083b3336de813', 'project_name': None, 'resource_id': 
'e09de65a-0b2d-4aa5-9d9a-49f039add691-sda', 'timestamp': '2025-10-02T12:03:17.822052', 'resource_metadata': {'display_name': 'tempest-MigrationsAdminTest-server-1510345576', 'name': 'instance-00000009', 'instance_id': 'e09de65a-0b2d-4aa5-9d9a-49f039add691', 'instance_type': 'm1.micro', 'host': 'fe48d2ffe40763b6cdd7abb7a26325f1d73d360e787ce17949377787', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9949d9da-6314-4ede-8797-6f2f0a6a64fc', 'name': 'm1.micro', 'vcpus': 1, 'ram': 192, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 192, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'c84aac72-9f87-11f0-af18-fa163efc5e78', 'monotonic_time': 4595.459906104, 'message_signature': 'edb479d1ad590a1303ceb823d2f91844781ee624a1b072464650fe16ce016c54'}]}, 'timestamp': '2025-10-02 12:03:17.822483', '_unique_id': 'f9266e18b38148509a0754b8557ec005'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.822 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.823 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets in the context of pollsters
Oct 02 12:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:03:17.823 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets.error in the context of pollsters
Oct 02 12:03:18 compute-0 podman[222426]: 2025-10-02 12:03:18.164066717 +0000 UTC m=+0.077806569 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_id=ovn_controller, container_name=ovn_controller, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:03:18 compute-0 nova_compute[192079]: 2025-10-02 12:03:18.273 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759406583.271821, 8fd0525b-b74e-4fea-8a19-f03f445fbc07 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:03:18 compute-0 nova_compute[192079]: 2025-10-02 12:03:18.274 2 INFO nova.compute.manager [-] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] VM Stopped (Lifecycle Event)
Oct 02 12:03:18 compute-0 nova_compute[192079]: 2025-10-02 12:03:18.296 2 DEBUG nova.compute.manager [None req-8d77227c-0a7c-491e-8c16-eb0c814b68ad - - - - - -] [instance: 8fd0525b-b74e-4fea-8a19-f03f445fbc07] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:03:19 compute-0 nova_compute[192079]: 2025-10-02 12:03:19.310 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:20 compute-0 nova_compute[192079]: 2025-10-02 12:03:20.754 2 DEBUG oslo_concurrency.lockutils [None req-416acaf2-e49f-4fcd-9384-f94a7d1ae1f3 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Acquiring lock "e09de65a-0b2d-4aa5-9d9a-49f039add691" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:20 compute-0 nova_compute[192079]: 2025-10-02 12:03:20.755 2 DEBUG oslo_concurrency.lockutils [None req-416acaf2-e49f-4fcd-9384-f94a7d1ae1f3 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Lock "e09de65a-0b2d-4aa5-9d9a-49f039add691" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:20 compute-0 nova_compute[192079]: 2025-10-02 12:03:20.755 2 DEBUG oslo_concurrency.lockutils [None req-416acaf2-e49f-4fcd-9384-f94a7d1ae1f3 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Acquiring lock "e09de65a-0b2d-4aa5-9d9a-49f039add691-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:20 compute-0 nova_compute[192079]: 2025-10-02 12:03:20.755 2 DEBUG oslo_concurrency.lockutils [None req-416acaf2-e49f-4fcd-9384-f94a7d1ae1f3 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Lock "e09de65a-0b2d-4aa5-9d9a-49f039add691-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:20 compute-0 nova_compute[192079]: 2025-10-02 12:03:20.756 2 DEBUG oslo_concurrency.lockutils [None req-416acaf2-e49f-4fcd-9384-f94a7d1ae1f3 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Lock "e09de65a-0b2d-4aa5-9d9a-49f039add691-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:20 compute-0 nova_compute[192079]: 2025-10-02 12:03:20.768 2 INFO nova.compute.manager [None req-416acaf2-e49f-4fcd-9384-f94a7d1ae1f3 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Terminating instance
Oct 02 12:03:20 compute-0 nova_compute[192079]: 2025-10-02 12:03:20.778 2 DEBUG oslo_concurrency.lockutils [None req-416acaf2-e49f-4fcd-9384-f94a7d1ae1f3 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Acquiring lock "refresh_cache-e09de65a-0b2d-4aa5-9d9a-49f039add691" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:03:20 compute-0 nova_compute[192079]: 2025-10-02 12:03:20.779 2 DEBUG oslo_concurrency.lockutils [None req-416acaf2-e49f-4fcd-9384-f94a7d1ae1f3 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Acquired lock "refresh_cache-e09de65a-0b2d-4aa5-9d9a-49f039add691" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:03:20 compute-0 nova_compute[192079]: 2025-10-02 12:03:20.779 2 DEBUG nova.network.neutron [None req-416acaf2-e49f-4fcd-9384-f94a7d1ae1f3 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:03:20 compute-0 nova_compute[192079]: 2025-10-02 12:03:20.996 2 DEBUG nova.network.neutron [None req-416acaf2-e49f-4fcd-9384-f94a7d1ae1f3 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:03:21 compute-0 podman[222453]: 2025-10-02 12:03:21.145323525 +0000 UTC m=+0.046874923 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:03:21 compute-0 podman[222452]: 2025-10-02 12:03:21.145906031 +0000 UTC m=+0.061352729 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, managed_by=edpm_ansible, config_id=ovn_metadata_agent, org.label-schema.name=CentOS Stream 9 Base 
Image, org.label-schema.vendor=CentOS, container_name=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:03:21 compute-0 nova_compute[192079]: 2025-10-02 12:03:21.280 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:21 compute-0 nova_compute[192079]: 2025-10-02 12:03:21.292 2 DEBUG nova.network.neutron [None req-416acaf2-e49f-4fcd-9384-f94a7d1ae1f3 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:03:21 compute-0 nova_compute[192079]: 2025-10-02 12:03:21.307 2 DEBUG oslo_concurrency.lockutils [None req-416acaf2-e49f-4fcd-9384-f94a7d1ae1f3 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Releasing lock "refresh_cache-e09de65a-0b2d-4aa5-9d9a-49f039add691" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:03:21 compute-0 nova_compute[192079]: 2025-10-02 12:03:21.308 2 DEBUG nova.compute.manager [None req-416acaf2-e49f-4fcd-9384-f94a7d1ae1f3 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:03:21 compute-0 systemd[1]: machine-qemu\x2d5\x2dinstance\x2d00000009.scope: Deactivated successfully.
Oct 02 12:03:21 compute-0 systemd[1]: machine-qemu\x2d5\x2dinstance\x2d00000009.scope: Consumed 15.843s CPU time.
Oct 02 12:03:21 compute-0 systemd-machined[152150]: Machine qemu-5-instance-00000009 terminated.
Oct 02 12:03:21 compute-0 nova_compute[192079]: 2025-10-02 12:03:21.553 2 INFO nova.virt.libvirt.driver [-] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Instance destroyed successfully.
Oct 02 12:03:21 compute-0 nova_compute[192079]: 2025-10-02 12:03:21.553 2 DEBUG nova.objects.instance [None req-416acaf2-e49f-4fcd-9384-f94a7d1ae1f3 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Lazy-loading 'resources' on Instance uuid e09de65a-0b2d-4aa5-9d9a-49f039add691 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:03:21 compute-0 nova_compute[192079]: 2025-10-02 12:03:21.569 2 INFO nova.virt.libvirt.driver [None req-416acaf2-e49f-4fcd-9384-f94a7d1ae1f3 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Deleting instance files /var/lib/nova/instances/e09de65a-0b2d-4aa5-9d9a-49f039add691_del
Oct 02 12:03:21 compute-0 nova_compute[192079]: 2025-10-02 12:03:21.574 2 INFO nova.virt.libvirt.driver [None req-416acaf2-e49f-4fcd-9384-f94a7d1ae1f3 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Deletion of /var/lib/nova/instances/e09de65a-0b2d-4aa5-9d9a-49f039add691_del complete
Oct 02 12:03:21 compute-0 nova_compute[192079]: 2025-10-02 12:03:21.673 2 INFO nova.compute.manager [None req-416acaf2-e49f-4fcd-9384-f94a7d1ae1f3 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Took 0.36 seconds to destroy the instance on the hypervisor.
Oct 02 12:03:21 compute-0 nova_compute[192079]: 2025-10-02 12:03:21.673 2 DEBUG oslo.service.loopingcall [None req-416acaf2-e49f-4fcd-9384-f94a7d1ae1f3 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:03:21 compute-0 nova_compute[192079]: 2025-10-02 12:03:21.674 2 DEBUG nova.compute.manager [-] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:03:21 compute-0 nova_compute[192079]: 2025-10-02 12:03:21.674 2 DEBUG nova.network.neutron [-] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:03:22 compute-0 nova_compute[192079]: 2025-10-02 12:03:22.038 2 DEBUG nova.network.neutron [-] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:03:22 compute-0 nova_compute[192079]: 2025-10-02 12:03:22.054 2 DEBUG nova.network.neutron [-] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:03:22 compute-0 nova_compute[192079]: 2025-10-02 12:03:22.068 2 INFO nova.compute.manager [-] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Took 0.39 seconds to deallocate network for instance.
Oct 02 12:03:22 compute-0 nova_compute[192079]: 2025-10-02 12:03:22.241 2 DEBUG oslo_concurrency.lockutils [None req-416acaf2-e49f-4fcd-9384-f94a7d1ae1f3 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:22 compute-0 nova_compute[192079]: 2025-10-02 12:03:22.242 2 DEBUG oslo_concurrency.lockutils [None req-416acaf2-e49f-4fcd-9384-f94a7d1ae1f3 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:22 compute-0 nova_compute[192079]: 2025-10-02 12:03:22.312 2 DEBUG nova.compute.provider_tree [None req-416acaf2-e49f-4fcd-9384-f94a7d1ae1f3 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:03:22 compute-0 nova_compute[192079]: 2025-10-02 12:03:22.352 2 DEBUG nova.scheduler.client.report [None req-416acaf2-e49f-4fcd-9384-f94a7d1ae1f3 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:03:22 compute-0 nova_compute[192079]: 2025-10-02 12:03:22.373 2 DEBUG oslo_concurrency.lockutils [None req-416acaf2-e49f-4fcd-9384-f94a7d1ae1f3 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.132s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:22 compute-0 nova_compute[192079]: 2025-10-02 12:03:22.403 2 INFO nova.scheduler.client.report [None req-416acaf2-e49f-4fcd-9384-f94a7d1ae1f3 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Deleted allocations for instance e09de65a-0b2d-4aa5-9d9a-49f039add691
Oct 02 12:03:22 compute-0 nova_compute[192079]: 2025-10-02 12:03:22.484 2 DEBUG oslo_concurrency.lockutils [None req-416acaf2-e49f-4fcd-9384-f94a7d1ae1f3 8da35688aa864e189f10b334a21bc6c4 4dcc6c51db2640cbb04083b3336de813 - - default default] Lock "e09de65a-0b2d-4aa5-9d9a-49f039add691" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.729s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:24 compute-0 nova_compute[192079]: 2025-10-02 12:03:24.312 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:25 compute-0 nova_compute[192079]: 2025-10-02 12:03:25.044 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759406590.0396514, 73cd9aef-a159-4d0e-9fc4-435f191db0b9 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:03:25 compute-0 nova_compute[192079]: 2025-10-02 12:03:25.044 2 INFO nova.compute.manager [-] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] VM Stopped (Lifecycle Event)
Oct 02 12:03:25 compute-0 nova_compute[192079]: 2025-10-02 12:03:25.091 2 DEBUG nova.compute.manager [None req-88f03042-54b9-4caa-af6f-0ef18b08ccc0 - - - - - -] [instance: 73cd9aef-a159-4d0e-9fc4-435f191db0b9] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:03:26 compute-0 nova_compute[192079]: 2025-10-02 12:03:26.331 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:26 compute-0 nova_compute[192079]: 2025-10-02 12:03:26.681 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Acquiring lock "ce39a1f9-1883-4f3e-81e8-6da425b2d2bb" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:26 compute-0 nova_compute[192079]: 2025-10-02 12:03:26.682 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "ce39a1f9-1883-4f3e-81e8-6da425b2d2bb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:26 compute-0 nova_compute[192079]: 2025-10-02 12:03:26.702 2 DEBUG nova.compute.manager [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:03:26 compute-0 nova_compute[192079]: 2025-10-02 12:03:26.741 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Acquiring lock "98fe50ad-409f-4b57-a579-3b83bb089bd3" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:26 compute-0 nova_compute[192079]: 2025-10-02 12:03:26.741 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "98fe50ad-409f-4b57-a579-3b83bb089bd3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:26 compute-0 nova_compute[192079]: 2025-10-02 12:03:26.759 2 DEBUG nova.compute.manager [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:03:26 compute-0 nova_compute[192079]: 2025-10-02 12:03:26.802 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:26 compute-0 nova_compute[192079]: 2025-10-02 12:03:26.802 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:26 compute-0 nova_compute[192079]: 2025-10-02 12:03:26.808 2 DEBUG nova.virt.hardware [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:03:26 compute-0 nova_compute[192079]: 2025-10-02 12:03:26.808 2 INFO nova.compute.claims [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:03:26 compute-0 nova_compute[192079]: 2025-10-02 12:03:26.843 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:26 compute-0 nova_compute[192079]: 2025-10-02 12:03:26.953 2 DEBUG nova.compute.provider_tree [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:03:26 compute-0 nova_compute[192079]: 2025-10-02 12:03:26.970 2 DEBUG nova.scheduler.client.report [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:03:26 compute-0 nova_compute[192079]: 2025-10-02 12:03:26.997 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.195s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:26 compute-0 nova_compute[192079]: 2025-10-02 12:03:26.999 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.156s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.008 2 DEBUG nova.virt.hardware [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.008 2 INFO nova.compute.claims [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.029 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Acquiring lock "26911a49-22ae-43a9-a932-b0b2fa735980" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.030 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "26911a49-22ae-43a9-a932-b0b2fa735980" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: waited 0.002s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.054 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "26911a49-22ae-43a9-a932-b0b2fa735980" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: held 0.024s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.055 2 DEBUG nova.compute.manager [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.118 2 DEBUG nova.compute.manager [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.119 2 DEBUG nova.network.neutron [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.173 2 INFO nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.199 2 DEBUG nova.compute.provider_tree [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.218 2 DEBUG nova.scheduler.client.report [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.225 2 DEBUG nova.compute.manager [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.261 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.262s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.315 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Acquiring lock "26911a49-22ae-43a9-a932-b0b2fa735980" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.316 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "26911a49-22ae-43a9-a932-b0b2fa735980" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.348 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "26911a49-22ae-43a9-a932-b0b2fa735980" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.<locals>._do_validation" :: held 0.032s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.349 2 DEBUG nova.compute.manager [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.417 2 DEBUG nova.compute.manager [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.419 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.420 2 INFO nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Creating image(s)
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.421 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Acquiring lock "/var/lib/nova/instances/ce39a1f9-1883-4f3e-81e8-6da425b2d2bb/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.422 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "/var/lib/nova/instances/ce39a1f9-1883-4f3e-81e8-6da425b2d2bb/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.423 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "/var/lib/nova/instances/ce39a1f9-1883-4f3e-81e8-6da425b2d2bb/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.451 2 DEBUG nova.compute.manager [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.452 2 DEBUG nova.network.neutron [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.455 2 DEBUG oslo_concurrency.processutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.482 2 INFO nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.500 2 DEBUG nova.compute.manager [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.528 2 DEBUG oslo_concurrency.processutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.073s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.529 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.530 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.555 2 DEBUG oslo_concurrency.processutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.611 2 DEBUG oslo_concurrency.processutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.612 2 DEBUG oslo_concurrency.processutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/ce39a1f9-1883-4f3e-81e8-6da425b2d2bb/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.692 2 DEBUG nova.compute.manager [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.694 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.694 2 INFO nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Creating image(s)
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.695 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Acquiring lock "/var/lib/nova/instances/98fe50ad-409f-4b57-a579-3b83bb089bd3/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.696 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "/var/lib/nova/instances/98fe50ad-409f-4b57-a579-3b83bb089bd3/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.696 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "/var/lib/nova/instances/98fe50ad-409f-4b57-a579-3b83bb089bd3/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.711 2 DEBUG oslo_concurrency.processutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.728 2 DEBUG oslo_concurrency.processutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/ce39a1f9-1883-4f3e-81e8-6da425b2d2bb/disk 1073741824" returned: 0 in 0.116s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.731 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.201s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.732 2 DEBUG oslo_concurrency.processutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.765 2 DEBUG oslo_concurrency.processutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.766 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.767 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.782 2 DEBUG oslo_concurrency.processutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.799 2 DEBUG oslo_concurrency.processutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.067s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.800 2 DEBUG nova.virt.disk.api [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Checking if we can resize image /var/lib/nova/instances/ce39a1f9-1883-4f3e-81e8-6da425b2d2bb/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.801 2 DEBUG oslo_concurrency.processutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ce39a1f9-1883-4f3e-81e8-6da425b2d2bb/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.839 2 DEBUG oslo_concurrency.processutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.057s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.840 2 DEBUG oslo_concurrency.processutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/98fe50ad-409f-4b57-a579-3b83bb089bd3/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.857 2 DEBUG oslo_concurrency.processutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ce39a1f9-1883-4f3e-81e8-6da425b2d2bb/disk --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.858 2 DEBUG nova.virt.disk.api [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Cannot resize image /var/lib/nova/instances/ce39a1f9-1883-4f3e-81e8-6da425b2d2bb/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.859 2 DEBUG nova.objects.instance [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lazy-loading 'migration_context' on Instance uuid ce39a1f9-1883-4f3e-81e8-6da425b2d2bb obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.873 2 DEBUG oslo_concurrency.processutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/98fe50ad-409f-4b57-a579-3b83bb089bd3/disk 1073741824" returned: 0 in 0.034s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.874 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.107s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.874 2 DEBUG oslo_concurrency.processutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.899 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.900 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Ensure instance console log exists: /var/lib/nova/instances/ce39a1f9-1883-4f3e-81e8-6da425b2d2bb/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.900 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.901 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.901 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.929 2 DEBUG oslo_concurrency.processutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.929 2 DEBUG nova.virt.disk.api [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Checking if we can resize image /var/lib/nova/instances/98fe50ad-409f-4b57-a579-3b83bb089bd3/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.930 2 DEBUG oslo_concurrency.processutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/98fe50ad-409f-4b57-a579-3b83bb089bd3/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.993 2 DEBUG oslo_concurrency.processutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/98fe50ad-409f-4b57-a579-3b83bb089bd3/disk --force-share --output=json" returned: 0 in 0.063s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.994 2 DEBUG nova.virt.disk.api [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Cannot resize image /var/lib/nova/instances/98fe50ad-409f-4b57-a579-3b83bb089bd3/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:03:27 compute-0 nova_compute[192079]: 2025-10-02 12:03:27.994 2 DEBUG nova.objects.instance [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lazy-loading 'migration_context' on Instance uuid 98fe50ad-409f-4b57-a579-3b83bb089bd3 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.007 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.008 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Ensure instance console log exists: /var/lib/nova/instances/98fe50ad-409f-4b57-a579-3b83bb089bd3/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.008 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.009 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.009 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.102 2 DEBUG nova.network.neutron [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] No network configured allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1188
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.102 2 DEBUG nova.compute.manager [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Instance network_info: |[]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.105 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Start _get_guest_xml network_info=[] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.111 2 WARNING nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.116 2 DEBUG nova.network.neutron [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] No network configured allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1188
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.116 2 DEBUG nova.compute.manager [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Instance network_info: |[]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.118 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Start _get_guest_xml network_info=[] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.119 2 DEBUG nova.virt.libvirt.host [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.120 2 DEBUG nova.virt.libvirt.host [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.125 2 DEBUG nova.virt.libvirt.host [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.126 2 DEBUG nova.virt.libvirt.host [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.129 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.129 2 DEBUG nova.virt.hardware [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.130 2 DEBUG nova.virt.hardware [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.130 2 DEBUG nova.virt.hardware [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.131 2 DEBUG nova.virt.hardware [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.131 2 DEBUG nova.virt.hardware [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.132 2 DEBUG nova.virt.hardware [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.132 2 DEBUG nova.virt.hardware [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.133 2 DEBUG nova.virt.hardware [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.133 2 DEBUG nova.virt.hardware [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.133 2 DEBUG nova.virt.hardware [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.134 2 DEBUG nova.virt.hardware [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.141 2 DEBUG nova.objects.instance [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lazy-loading 'pci_devices' on Instance uuid ce39a1f9-1883-4f3e-81e8-6da425b2d2bb obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.144 2 WARNING nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.148 2 DEBUG nova.virt.libvirt.host [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.149 2 DEBUG nova.virt.libvirt.host [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.152 2 DEBUG nova.virt.libvirt.host [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.152 2 DEBUG nova.virt.libvirt.host [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.154 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.154 2 DEBUG nova.virt.hardware [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.155 2 DEBUG nova.virt.hardware [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.155 2 DEBUG nova.virt.hardware [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.156 2 DEBUG nova.virt.hardware [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.156 2 DEBUG nova.virt.hardware [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.157 2 DEBUG nova.virt.hardware [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.157 2 DEBUG nova.virt.hardware [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.157 2 DEBUG nova.virt.hardware [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.158 2 DEBUG nova.virt.hardware [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.158 2 DEBUG nova.virt.hardware [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.159 2 DEBUG nova.virt.hardware [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.164 2 DEBUG nova.objects.instance [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lazy-loading 'pci_devices' on Instance uuid 98fe50ad-409f-4b57-a579-3b83bb089bd3 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.169 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:03:28 compute-0 nova_compute[192079]:   <uuid>ce39a1f9-1883-4f3e-81e8-6da425b2d2bb</uuid>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   <name>instance-00000012</name>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <nova:name>tempest-ServersOnMultiNodesTest-server-190769314-1</nova:name>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:03:28</nova:creationTime>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:03:28 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:03:28 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:03:28 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:03:28 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:03:28 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:03:28 compute-0 nova_compute[192079]:         <nova:user uuid="d27eb44762f548fc96a3f2edcdb5537c">tempest-ServersOnMultiNodesTest-1227449327-project-member</nova:user>
Oct 02 12:03:28 compute-0 nova_compute[192079]:         <nova:project uuid="df2cf2fcc379455c90e6044b60e603c0">tempest-ServersOnMultiNodesTest-1227449327</nova:project>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <nova:ports/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <system>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <entry name="serial">ce39a1f9-1883-4f3e-81e8-6da425b2d2bb</entry>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <entry name="uuid">ce39a1f9-1883-4f3e-81e8-6da425b2d2bb</entry>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     </system>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   <os>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   </os>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   <features>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   </features>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/ce39a1f9-1883-4f3e-81e8-6da425b2d2bb/disk"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/ce39a1f9-1883-4f3e-81e8-6da425b2d2bb/disk.config"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/ce39a1f9-1883-4f3e-81e8-6da425b2d2bb/console.log" append="off"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <video>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     </video>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:03:28 compute-0 nova_compute[192079]: </domain>
Oct 02 12:03:28 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.178 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:03:28 compute-0 nova_compute[192079]:   <uuid>98fe50ad-409f-4b57-a579-3b83bb089bd3</uuid>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   <name>instance-00000013</name>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <nova:name>tempest-ServersOnMultiNodesTest-server-190769314-2</nova:name>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:03:28</nova:creationTime>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:03:28 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:03:28 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:03:28 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:03:28 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:03:28 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:03:28 compute-0 nova_compute[192079]:         <nova:user uuid="d27eb44762f548fc96a3f2edcdb5537c">tempest-ServersOnMultiNodesTest-1227449327-project-member</nova:user>
Oct 02 12:03:28 compute-0 nova_compute[192079]:         <nova:project uuid="df2cf2fcc379455c90e6044b60e603c0">tempest-ServersOnMultiNodesTest-1227449327</nova:project>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <nova:ports/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <system>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <entry name="serial">98fe50ad-409f-4b57-a579-3b83bb089bd3</entry>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <entry name="uuid">98fe50ad-409f-4b57-a579-3b83bb089bd3</entry>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     </system>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   <os>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   </os>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   <features>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   </features>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/98fe50ad-409f-4b57-a579-3b83bb089bd3/disk"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/98fe50ad-409f-4b57-a579-3b83bb089bd3/disk.config"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/98fe50ad-409f-4b57-a579-3b83bb089bd3/console.log" append="off"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <video>
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     </video>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:03:28 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:03:28 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:03:28 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:03:28 compute-0 nova_compute[192079]: </domain>
Oct 02 12:03:28 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.247 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.248 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.248 2 INFO nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Using config drive
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.252 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.252 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.253 2 INFO nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Using config drive
Oct 02 12:03:28 compute-0 podman[222534]: 2025-10-02 12:03:28.281426698 +0000 UTC m=+0.072078951 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, managed_by=edpm_ansible, config_id=edpm, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, 
org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001)
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.427 2 INFO nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Creating config drive at /var/lib/nova/instances/ce39a1f9-1883-4f3e-81e8-6da425b2d2bb/disk.config
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.432 2 DEBUG oslo_concurrency.processutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/ce39a1f9-1883-4f3e-81e8-6da425b2d2bb/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpc3zg4wrp execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.452 2 INFO nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Creating config drive at /var/lib/nova/instances/98fe50ad-409f-4b57-a579-3b83bb089bd3/disk.config
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.456 2 DEBUG oslo_concurrency.processutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/98fe50ad-409f-4b57-a579-3b83bb089bd3/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpcw6lhs2o execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.560 2 DEBUG oslo_concurrency.processutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/ce39a1f9-1883-4f3e-81e8-6da425b2d2bb/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpc3zg4wrp" returned: 0 in 0.129s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:03:28 compute-0 nova_compute[192079]: 2025-10-02 12:03:28.584 2 DEBUG oslo_concurrency.processutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/98fe50ad-409f-4b57-a579-3b83bb089bd3/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpcw6lhs2o" returned: 0 in 0.128s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:03:28 compute-0 systemd-machined[152150]: New machine qemu-11-instance-00000012.
Oct 02 12:03:28 compute-0 systemd[1]: Started Virtual Machine qemu-11-instance-00000012.
Oct 02 12:03:28 compute-0 systemd-machined[152150]: New machine qemu-12-instance-00000013.
Oct 02 12:03:28 compute-0 systemd[1]: Started Virtual Machine qemu-12-instance-00000013.
Oct 02 12:03:29 compute-0 nova_compute[192079]: 2025-10-02 12:03:29.313 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:29 compute-0 nova_compute[192079]: 2025-10-02 12:03:29.528 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406609.527276, ce39a1f9-1883-4f3e-81e8-6da425b2d2bb => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:03:29 compute-0 nova_compute[192079]: 2025-10-02 12:03:29.528 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] VM Resumed (Lifecycle Event)
Oct 02 12:03:29 compute-0 nova_compute[192079]: 2025-10-02 12:03:29.533 2 DEBUG nova.compute.manager [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:03:29 compute-0 nova_compute[192079]: 2025-10-02 12:03:29.534 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:03:29 compute-0 nova_compute[192079]: 2025-10-02 12:03:29.540 2 INFO nova.virt.libvirt.driver [-] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Instance spawned successfully.
Oct 02 12:03:29 compute-0 nova_compute[192079]: 2025-10-02 12:03:29.540 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:03:29 compute-0 nova_compute[192079]: 2025-10-02 12:03:29.551 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:03:29 compute-0 nova_compute[192079]: 2025-10-02 12:03:29.561 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:03:29 compute-0 nova_compute[192079]: 2025-10-02 12:03:29.568 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:03:29 compute-0 nova_compute[192079]: 2025-10-02 12:03:29.569 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:03:29 compute-0 nova_compute[192079]: 2025-10-02 12:03:29.570 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:03:29 compute-0 nova_compute[192079]: 2025-10-02 12:03:29.571 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:03:29 compute-0 nova_compute[192079]: 2025-10-02 12:03:29.571 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:03:29 compute-0 nova_compute[192079]: 2025-10-02 12:03:29.572 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:03:29 compute-0 nova_compute[192079]: 2025-10-02 12:03:29.598 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:03:29 compute-0 nova_compute[192079]: 2025-10-02 12:03:29.598 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406609.532655, ce39a1f9-1883-4f3e-81e8-6da425b2d2bb => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:03:29 compute-0 nova_compute[192079]: 2025-10-02 12:03:29.599 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] VM Started (Lifecycle Event)
Oct 02 12:03:29 compute-0 nova_compute[192079]: 2025-10-02 12:03:29.664 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:03:29 compute-0 nova_compute[192079]: 2025-10-02 12:03:29.668 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:03:29 compute-0 nova_compute[192079]: 2025-10-02 12:03:29.711 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:03:29 compute-0 nova_compute[192079]: 2025-10-02 12:03:29.727 2 INFO nova.compute.manager [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Took 2.31 seconds to spawn the instance on the hypervisor.
Oct 02 12:03:29 compute-0 nova_compute[192079]: 2025-10-02 12:03:29.727 2 DEBUG nova.compute.manager [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:03:29 compute-0 nova_compute[192079]: 2025-10-02 12:03:29.802 2 INFO nova.compute.manager [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Took 3.03 seconds to build instance.
Oct 02 12:03:29 compute-0 nova_compute[192079]: 2025-10-02 12:03:29.818 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "ce39a1f9-1883-4f3e-81e8-6da425b2d2bb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 3.136s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:30 compute-0 nova_compute[192079]: 2025-10-02 12:03:30.139 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406610.1386344, 98fe50ad-409f-4b57-a579-3b83bb089bd3 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:03:30 compute-0 nova_compute[192079]: 2025-10-02 12:03:30.139 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] VM Resumed (Lifecycle Event)
Oct 02 12:03:30 compute-0 nova_compute[192079]: 2025-10-02 12:03:30.143 2 DEBUG nova.compute.manager [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:03:30 compute-0 nova_compute[192079]: 2025-10-02 12:03:30.144 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:03:30 compute-0 nova_compute[192079]: 2025-10-02 12:03:30.147 2 INFO nova.virt.libvirt.driver [-] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Instance spawned successfully.
Oct 02 12:03:30 compute-0 nova_compute[192079]: 2025-10-02 12:03:30.148 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:03:30 compute-0 nova_compute[192079]: 2025-10-02 12:03:30.187 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:03:30 compute-0 nova_compute[192079]: 2025-10-02 12:03:30.193 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:03:30 compute-0 nova_compute[192079]: 2025-10-02 12:03:30.196 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:03:30 compute-0 nova_compute[192079]: 2025-10-02 12:03:30.196 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:03:30 compute-0 nova_compute[192079]: 2025-10-02 12:03:30.196 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:03:30 compute-0 nova_compute[192079]: 2025-10-02 12:03:30.197 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:03:30 compute-0 nova_compute[192079]: 2025-10-02 12:03:30.197 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:03:30 compute-0 nova_compute[192079]: 2025-10-02 12:03:30.198 2 DEBUG nova.virt.libvirt.driver [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:03:30 compute-0 nova_compute[192079]: 2025-10-02 12:03:30.250 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:03:30 compute-0 nova_compute[192079]: 2025-10-02 12:03:30.251 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406610.143016, 98fe50ad-409f-4b57-a579-3b83bb089bd3 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:03:30 compute-0 nova_compute[192079]: 2025-10-02 12:03:30.251 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] VM Started (Lifecycle Event)
Oct 02 12:03:30 compute-0 nova_compute[192079]: 2025-10-02 12:03:30.280 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:03:30 compute-0 nova_compute[192079]: 2025-10-02 12:03:30.284 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:03:30 compute-0 nova_compute[192079]: 2025-10-02 12:03:30.326 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:03:30 compute-0 nova_compute[192079]: 2025-10-02 12:03:30.337 2 INFO nova.compute.manager [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Took 2.64 seconds to spawn the instance on the hypervisor.
Oct 02 12:03:30 compute-0 nova_compute[192079]: 2025-10-02 12:03:30.338 2 DEBUG nova.compute.manager [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:03:30 compute-0 nova_compute[192079]: 2025-10-02 12:03:30.445 2 INFO nova.compute.manager [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Took 3.63 seconds to build instance.
Oct 02 12:03:30 compute-0 nova_compute[192079]: 2025-10-02 12:03:30.482 2 DEBUG oslo_concurrency.lockutils [None req-d26047f6-9412-4447-ad3d-aa6ad6ef9944 d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "98fe50ad-409f-4b57-a579-3b83bb089bd3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 3.740s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:31 compute-0 nova_compute[192079]: 2025-10-02 12:03:31.334 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:32 compute-0 podman[222611]: 2025-10-02 12:03:32.156737699 +0000 UTC m=+0.067938450 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=multipathd)
Oct 02 12:03:32 compute-0 podman[222610]: 2025-10-02 12:03:32.185844095 +0000 UTC m=+0.097053486 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.openshift.tags=minimal rhel9, io.openshift.expose-services=, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.buildah.version=1.33.7, name=ubi9-minimal, maintainer=Red Hat, Inc., vendor=Red Hat, Inc., com.redhat.component=ubi9-minimal-container, config_id=edpm, version=9.6, managed_by=edpm_ansible, release=1755695350, vcs-type=git, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., container_name=openstack_network_exporter, distribution-scope=public, build-date=2025-08-20T13:12:41, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, url=https://catalog.redhat.com/en/search?searchType=containers, architecture=x86_64, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', 
'/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal)
Oct 02 12:03:34 compute-0 nova_compute[192079]: 2025-10-02 12:03:34.371 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:36 compute-0 nova_compute[192079]: 2025-10-02 12:03:36.338 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:36 compute-0 nova_compute[192079]: 2025-10-02 12:03:36.552 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759406601.550914, e09de65a-0b2d-4aa5-9d9a-49f039add691 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:03:36 compute-0 nova_compute[192079]: 2025-10-02 12:03:36.553 2 INFO nova.compute.manager [-] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] VM Stopped (Lifecycle Event)
Oct 02 12:03:36 compute-0 nova_compute[192079]: 2025-10-02 12:03:36.579 2 DEBUG nova.compute.manager [None req-06e30b15-845a-4386-9761-21fecfdc913b - - - - - -] [instance: e09de65a-0b2d-4aa5-9d9a-49f039add691] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:03:39 compute-0 nova_compute[192079]: 2025-10-02 12:03:39.397 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:39 compute-0 rsyslogd[1013]: imjournal: 2581 messages lost due to rate-limiting (20000 allowed within 600 seconds)
Oct 02 12:03:40 compute-0 podman[222647]: 2025-10-02 12:03:40.148758483 +0000 UTC m=+0.057608787 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']})
Oct 02 12:03:40 compute-0 podman[222648]: 2025-10-02 12:03:40.150957012 +0000 UTC m=+0.056429714 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, tcib_managed=true, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, config_id=iscsid, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, container_name=iscsid, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']})
Oct 02 12:03:41 compute-0 nova_compute[192079]: 2025-10-02 12:03:41.194 2 DEBUG nova.virt.libvirt.driver [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Creating tmpfile /var/lib/nova/instances/tmpeyk8xlf5 to notify to other compute nodes that they should mount the same storage. _create_shared_storage_test_file /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10041
Oct 02 12:03:41 compute-0 nova_compute[192079]: 2025-10-02 12:03:41.195 2 DEBUG nova.compute.manager [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] destination check data is LibvirtLiveMigrateData(bdms=<?>,block_migration=True,disk_available_mb=73728,disk_over_commit=False,dst_numa_info=<?>,dst_supports_numa_live_migration=<?>,dst_wants_file_backed_memory=False,file_backed_memory_discard=<?>,filename='tmpeyk8xlf5',graphics_listen_addr_spice=127.0.0.1,graphics_listen_addr_vnc=::,image_type='qcow2',instance_relative_path=<?>,is_shared_block_storage=<?>,is_shared_instance_path=<?>,is_volume_backed=<?>,migration=<?>,old_vol_attachment_ids=<?>,serial_listen_addr=None,serial_listen_ports=<?>,src_supports_native_luks=<?>,src_supports_numa_live_migration=<?>,supported_perf_events=<?>,target_connect_addr=<?>,vifs=[VIFMigrateData],wait_for_vif_plugged=<?>) check_can_live_migrate_destination /usr/lib/python3.9/site-packages/nova/compute/manager.py:8476
Oct 02 12:03:41 compute-0 nova_compute[192079]: 2025-10-02 12:03:41.343 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:42 compute-0 nova_compute[192079]: 2025-10-02 12:03:42.437 2 DEBUG nova.compute.manager [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] pre_live_migration data is LibvirtLiveMigrateData(bdms=<?>,block_migration=True,disk_available_mb=73728,disk_over_commit=False,dst_numa_info=<?>,dst_supports_numa_live_migration=<?>,dst_wants_file_backed_memory=False,file_backed_memory_discard=<?>,filename='tmpeyk8xlf5',graphics_listen_addr_spice=127.0.0.1,graphics_listen_addr_vnc=::,image_type='qcow2',instance_relative_path='356bc6d6-1101-467e-a020-65876724c955',is_shared_block_storage=False,is_shared_instance_path=False,is_volume_backed=False,migration=<?>,old_vol_attachment_ids=<?>,serial_listen_addr=None,serial_listen_ports=<?>,src_supports_native_luks=<?>,src_supports_numa_live_migration=<?>,supported_perf_events=<?>,target_connect_addr=<?>,vifs=[VIFMigrateData],wait_for_vif_plugged=<?>) pre_live_migration /usr/lib/python3.9/site-packages/nova/compute/manager.py:8604
Oct 02 12:03:42 compute-0 nova_compute[192079]: 2025-10-02 12:03:42.487 2 DEBUG oslo_concurrency.lockutils [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Acquiring lock "refresh_cache-356bc6d6-1101-467e-a020-65876724c955" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:03:42 compute-0 nova_compute[192079]: 2025-10-02 12:03:42.487 2 DEBUG oslo_concurrency.lockutils [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Acquired lock "refresh_cache-356bc6d6-1101-467e-a020-65876724c955" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:03:42 compute-0 nova_compute[192079]: 2025-10-02 12:03:42.487 2 DEBUG nova.network.neutron [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:03:44 compute-0 nova_compute[192079]: 2025-10-02 12:03:44.179 2 DEBUG nova.network.neutron [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Updating instance_info_cache with network_info: [{"id": "29214def-2450-4edd-acc6-84e165aa1e2c", "address": "fa:16:3e:1d:3d:20", "network": {"id": "664b6526-6df1-4024-9bab-37218e6c18bd", "bridge": "br-int", "label": "tempest-LiveMigrationTest-2017832683-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f7cb78d24d1a4511a59ced45ccc4a1c7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap29214def-24", "ovs_interfaceid": "29214def-2450-4edd-acc6-84e165aa1e2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:03:44 compute-0 nova_compute[192079]: 2025-10-02 12:03:44.205 2 DEBUG oslo_concurrency.lockutils [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Releasing lock "refresh_cache-356bc6d6-1101-467e-a020-65876724c955" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:03:44 compute-0 nova_compute[192079]: 2025-10-02 12:03:44.222 2 DEBUG nova.virt.libvirt.driver [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] migrate_data in pre_live_migration: LibvirtLiveMigrateData(bdms=<?>,block_migration=True,disk_available_mb=73728,disk_over_commit=False,dst_numa_info=<?>,dst_supports_numa_live_migration=<?>,dst_wants_file_backed_memory=False,file_backed_memory_discard=<?>,filename='tmpeyk8xlf5',graphics_listen_addr_spice=127.0.0.1,graphics_listen_addr_vnc=::,image_type='qcow2',instance_relative_path='356bc6d6-1101-467e-a020-65876724c955',is_shared_block_storage=False,is_shared_instance_path=False,is_volume_backed=False,migration=<?>,old_vol_attachment_ids={},serial_listen_addr=None,serial_listen_ports=<?>,src_supports_native_luks=<?>,src_supports_numa_live_migration=<?>,supported_perf_events=<?>,target_connect_addr=<?>,vifs=[VIFMigrateData],wait_for_vif_plugged=<?>) pre_live_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10827
Oct 02 12:03:44 compute-0 nova_compute[192079]: 2025-10-02 12:03:44.223 2 DEBUG nova.virt.libvirt.driver [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Creating instance directory: /var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955 pre_live_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10840
Oct 02 12:03:44 compute-0 nova_compute[192079]: 2025-10-02 12:03:44.223 2 DEBUG nova.virt.libvirt.driver [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Creating disk.info with the contents: {'/var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955/disk': 'qcow2', '/var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955/disk.config': 'raw'} pre_live_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10854
Oct 02 12:03:44 compute-0 nova_compute[192079]: 2025-10-02 12:03:44.224 2 DEBUG nova.virt.libvirt.driver [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Checking to make sure images and backing files are present before live migration. pre_live_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10864
Oct 02 12:03:44 compute-0 nova_compute[192079]: 2025-10-02 12:03:44.225 2 DEBUG nova.objects.instance [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Lazy-loading 'trusted_certs' on Instance uuid 356bc6d6-1101-467e-a020-65876724c955 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:03:44 compute-0 nova_compute[192079]: 2025-10-02 12:03:44.275 2 DEBUG oslo_concurrency.processutils [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:03:44 compute-0 nova_compute[192079]: 2025-10-02 12:03:44.346 2 DEBUG oslo_concurrency.processutils [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.071s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:03:44 compute-0 nova_compute[192079]: 2025-10-02 12:03:44.349 2 DEBUG oslo_concurrency.lockutils [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:44 compute-0 nova_compute[192079]: 2025-10-02 12:03:44.350 2 DEBUG oslo_concurrency.lockutils [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:44 compute-0 nova_compute[192079]: 2025-10-02 12:03:44.383 2 DEBUG oslo_concurrency.processutils [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:03:44 compute-0 nova_compute[192079]: 2025-10-02 12:03:44.412 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:44 compute-0 nova_compute[192079]: 2025-10-02 12:03:44.479 2 DEBUG oslo_concurrency.processutils [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.096s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:03:44 compute-0 nova_compute[192079]: 2025-10-02 12:03:44.480 2 DEBUG oslo_concurrency.processutils [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:03:44 compute-0 nova_compute[192079]: 2025-10-02 12:03:44.703 2 DEBUG oslo_concurrency.processutils [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955/disk 1073741824" returned: 0 in 0.223s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:03:44 compute-0 nova_compute[192079]: 2025-10-02 12:03:44.705 2 DEBUG oslo_concurrency.lockutils [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.355s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:44 compute-0 nova_compute[192079]: 2025-10-02 12:03:44.706 2 DEBUG oslo_concurrency.processutils [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:03:44 compute-0 nova_compute[192079]: 2025-10-02 12:03:44.763 2 DEBUG oslo_concurrency.processutils [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.057s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:03:44 compute-0 nova_compute[192079]: 2025-10-02 12:03:44.765 2 DEBUG nova.virt.disk.api [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Checking if we can resize image /var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:03:44 compute-0 nova_compute[192079]: 2025-10-02 12:03:44.766 2 DEBUG oslo_concurrency.processutils [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:03:44 compute-0 nova_compute[192079]: 2025-10-02 12:03:44.824 2 DEBUG oslo_concurrency.processutils [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955/disk --force-share --output=json" returned: 0 in 0.058s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:03:44 compute-0 nova_compute[192079]: 2025-10-02 12:03:44.825 2 DEBUG nova.virt.disk.api [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Cannot resize image /var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:03:44 compute-0 nova_compute[192079]: 2025-10-02 12:03:44.825 2 DEBUG nova.objects.instance [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Lazy-loading 'migration_context' on Instance uuid 356bc6d6-1101-467e-a020-65876724c955 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:03:44 compute-0 nova_compute[192079]: 2025-10-02 12:03:44.841 2 DEBUG oslo_concurrency.processutils [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f raw /var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955/disk.config 485376 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:03:44 compute-0 nova_compute[192079]: 2025-10-02 12:03:44.861 2 DEBUG oslo_concurrency.processutils [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f raw /var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955/disk.config 485376" returned: 0 in 0.020s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:03:44 compute-0 nova_compute[192079]: 2025-10-02 12:03:44.862 2 DEBUG nova.virt.libvirt.volume.remotefs [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Copying file compute-2.ctlplane.example.com:/var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955/disk.config to /var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955 copy_file /usr/lib/python3.9/site-packages/nova/virt/libvirt/volume/remotefs.py:103
Oct 02 12:03:44 compute-0 nova_compute[192079]: 2025-10-02 12:03:44.863 2 DEBUG oslo_concurrency.processutils [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Running cmd (subprocess): scp -C -r compute-2.ctlplane.example.com:/var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955/disk.config /var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:03:45 compute-0 nova_compute[192079]: 2025-10-02 12:03:45.367 2 DEBUG oslo_concurrency.processutils [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] CMD "scp -C -r compute-2.ctlplane.example.com:/var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955/disk.config /var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955" returned: 0 in 0.504s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:03:45 compute-0 nova_compute[192079]: 2025-10-02 12:03:45.368 2 DEBUG nova.virt.libvirt.driver [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Plugging VIFs using destination host port bindings before live migration. _pre_live_migration_plug_vifs /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10794
Oct 02 12:03:45 compute-0 nova_compute[192079]: 2025-10-02 12:03:45.370 2 DEBUG nova.virt.libvirt.vif [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:03:29Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-LiveMigrationTest-server-507794369',display_name='tempest-LiveMigrationTest-server-507794369',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-2.ctlplane.example.com',hostname='tempest-livemigrationtest-server-507794369',id=20,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:03:36Z,launched_on='compute-2.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-2.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='f7cb78d24d1a4511a59ced45ccc4a1c7',ramdisk_id='',reservation_id='r-hsf0qpxd',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_di
sk='1',image_min_ram='0',owner_project_name='tempest-LiveMigrationTest-1666170212',owner_user_name='tempest-LiveMigrationTest-1666170212-project-member'},tags=<?>,task_state='migrating',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:03:36Z,user_data=None,user_id='5f75195e56504673bd403ce69cbc28ca',uuid=356bc6d6-1101-467e-a020-65876724c955,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "29214def-2450-4edd-acc6-84e165aa1e2c", "address": "fa:16:3e:1d:3d:20", "network": {"id": "664b6526-6df1-4024-9bab-37218e6c18bd", "bridge": "br-int", "label": "tempest-LiveMigrationTest-2017832683-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f7cb78d24d1a4511a59ced45ccc4a1c7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system"}, "devname": "tap29214def-24", "ovs_interfaceid": "29214def-2450-4edd-acc6-84e165aa1e2c", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {"os_vif_delegation": true}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:03:45 compute-0 nova_compute[192079]: 2025-10-02 12:03:45.370 2 DEBUG nova.network.os_vif_util [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Converting VIF {"id": "29214def-2450-4edd-acc6-84e165aa1e2c", "address": "fa:16:3e:1d:3d:20", "network": {"id": "664b6526-6df1-4024-9bab-37218e6c18bd", "bridge": "br-int", "label": "tempest-LiveMigrationTest-2017832683-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f7cb78d24d1a4511a59ced45ccc4a1c7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system"}, "devname": "tap29214def-24", "ovs_interfaceid": "29214def-2450-4edd-acc6-84e165aa1e2c", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {"os_vif_delegation": true}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:03:45 compute-0 nova_compute[192079]: 2025-10-02 12:03:45.371 2 DEBUG nova.network.os_vif_util [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:1d:3d:20,bridge_name='br-int',has_traffic_filtering=True,id=29214def-2450-4edd-acc6-84e165aa1e2c,network=Network(664b6526-6df1-4024-9bab-37218e6c18bd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap29214def-24') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:03:45 compute-0 nova_compute[192079]: 2025-10-02 12:03:45.371 2 DEBUG os_vif [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:1d:3d:20,bridge_name='br-int',has_traffic_filtering=True,id=29214def-2450-4edd-acc6-84e165aa1e2c,network=Network(664b6526-6df1-4024-9bab-37218e6c18bd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap29214def-24') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:03:45 compute-0 nova_compute[192079]: 2025-10-02 12:03:45.372 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:45 compute-0 nova_compute[192079]: 2025-10-02 12:03:45.372 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:03:45 compute-0 nova_compute[192079]: 2025-10-02 12:03:45.372 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:03:45 compute-0 nova_compute[192079]: 2025-10-02 12:03:45.374 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:45 compute-0 nova_compute[192079]: 2025-10-02 12:03:45.375 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap29214def-24, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:03:45 compute-0 nova_compute[192079]: 2025-10-02 12:03:45.375 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap29214def-24, col_values=(('external_ids', {'iface-id': '29214def-2450-4edd-acc6-84e165aa1e2c', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:1d:3d:20', 'vm-uuid': '356bc6d6-1101-467e-a020-65876724c955'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:03:45 compute-0 NetworkManager[51160]: <info>  [1759406625.3785] manager: (tap29214def-24): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/49)
Oct 02 12:03:45 compute-0 nova_compute[192079]: 2025-10-02 12:03:45.377 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:45 compute-0 nova_compute[192079]: 2025-10-02 12:03:45.382 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:03:45 compute-0 nova_compute[192079]: 2025-10-02 12:03:45.384 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:45 compute-0 nova_compute[192079]: 2025-10-02 12:03:45.385 2 INFO os_vif [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:1d:3d:20,bridge_name='br-int',has_traffic_filtering=True,id=29214def-2450-4edd-acc6-84e165aa1e2c,network=Network(664b6526-6df1-4024-9bab-37218e6c18bd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap29214def-24')
Oct 02 12:03:45 compute-0 nova_compute[192079]: 2025-10-02 12:03:45.386 2 DEBUG nova.virt.libvirt.driver [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] No dst_numa_info in migrate_data, no cores to power up in pre_live_migration. pre_live_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10954
Oct 02 12:03:45 compute-0 nova_compute[192079]: 2025-10-02 12:03:45.387 2 DEBUG nova.compute.manager [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] driver pre_live_migration data is LibvirtLiveMigrateData(bdms=[],block_migration=True,disk_available_mb=73728,disk_over_commit=False,dst_numa_info=<?>,dst_supports_numa_live_migration=<?>,dst_wants_file_backed_memory=False,file_backed_memory_discard=<?>,filename='tmpeyk8xlf5',graphics_listen_addr_spice=127.0.0.1,graphics_listen_addr_vnc=::,image_type='qcow2',instance_relative_path='356bc6d6-1101-467e-a020-65876724c955',is_shared_block_storage=False,is_shared_instance_path=False,is_volume_backed=False,migration=<?>,old_vol_attachment_ids={},serial_listen_addr=None,serial_listen_ports=[],src_supports_native_luks=<?>,src_supports_numa_live_migration=<?>,supported_perf_events=[],target_connect_addr=None,vifs=[VIFMigrateData],wait_for_vif_plugged=<?>) pre_live_migration /usr/lib/python3.9/site-packages/nova/compute/manager.py:8668
Oct 02 12:03:46 compute-0 nova_compute[192079]: 2025-10-02 12:03:46.252 2 DEBUG oslo_concurrency.lockutils [None req-2be22ab1-ff70-4bb6-9320-a5ff5111c5ef d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Acquiring lock "ce39a1f9-1883-4f3e-81e8-6da425b2d2bb" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:46 compute-0 nova_compute[192079]: 2025-10-02 12:03:46.253 2 DEBUG oslo_concurrency.lockutils [None req-2be22ab1-ff70-4bb6-9320-a5ff5111c5ef d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "ce39a1f9-1883-4f3e-81e8-6da425b2d2bb" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:46 compute-0 nova_compute[192079]: 2025-10-02 12:03:46.253 2 DEBUG oslo_concurrency.lockutils [None req-2be22ab1-ff70-4bb6-9320-a5ff5111c5ef d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Acquiring lock "ce39a1f9-1883-4f3e-81e8-6da425b2d2bb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:46 compute-0 nova_compute[192079]: 2025-10-02 12:03:46.253 2 DEBUG oslo_concurrency.lockutils [None req-2be22ab1-ff70-4bb6-9320-a5ff5111c5ef d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "ce39a1f9-1883-4f3e-81e8-6da425b2d2bb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:46 compute-0 nova_compute[192079]: 2025-10-02 12:03:46.253 2 DEBUG oslo_concurrency.lockutils [None req-2be22ab1-ff70-4bb6-9320-a5ff5111c5ef d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "ce39a1f9-1883-4f3e-81e8-6da425b2d2bb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:46 compute-0 nova_compute[192079]: 2025-10-02 12:03:46.279 2 INFO nova.compute.manager [None req-2be22ab1-ff70-4bb6-9320-a5ff5111c5ef d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Terminating instance
Oct 02 12:03:46 compute-0 nova_compute[192079]: 2025-10-02 12:03:46.308 2 DEBUG oslo_concurrency.lockutils [None req-2be22ab1-ff70-4bb6-9320-a5ff5111c5ef d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Acquiring lock "refresh_cache-ce39a1f9-1883-4f3e-81e8-6da425b2d2bb" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:03:46 compute-0 nova_compute[192079]: 2025-10-02 12:03:46.309 2 DEBUG oslo_concurrency.lockutils [None req-2be22ab1-ff70-4bb6-9320-a5ff5111c5ef d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Acquired lock "refresh_cache-ce39a1f9-1883-4f3e-81e8-6da425b2d2bb" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:03:46 compute-0 nova_compute[192079]: 2025-10-02 12:03:46.309 2 DEBUG nova.network.neutron [None req-2be22ab1-ff70-4bb6-9320-a5ff5111c5ef d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:03:46 compute-0 ovn_controller[94336]: 2025-10-02T12:03:46Z|00086|memory_trim|INFO|Detected inactivity (last active 30002 ms ago): trimming memory
Oct 02 12:03:46 compute-0 nova_compute[192079]: 2025-10-02 12:03:46.518 2 DEBUG nova.network.neutron [None req-2be22ab1-ff70-4bb6-9320-a5ff5111c5ef d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:03:46 compute-0 nova_compute[192079]: 2025-10-02 12:03:46.524 2 DEBUG oslo_concurrency.lockutils [None req-fd9a90be-2ecb-46df-9b21-cd35610201da d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Acquiring lock "98fe50ad-409f-4b57-a579-3b83bb089bd3" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:46 compute-0 nova_compute[192079]: 2025-10-02 12:03:46.525 2 DEBUG oslo_concurrency.lockutils [None req-fd9a90be-2ecb-46df-9b21-cd35610201da d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "98fe50ad-409f-4b57-a579-3b83bb089bd3" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:46 compute-0 nova_compute[192079]: 2025-10-02 12:03:46.525 2 DEBUG oslo_concurrency.lockutils [None req-fd9a90be-2ecb-46df-9b21-cd35610201da d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Acquiring lock "98fe50ad-409f-4b57-a579-3b83bb089bd3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:46 compute-0 nova_compute[192079]: 2025-10-02 12:03:46.526 2 DEBUG oslo_concurrency.lockutils [None req-fd9a90be-2ecb-46df-9b21-cd35610201da d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "98fe50ad-409f-4b57-a579-3b83bb089bd3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:46 compute-0 nova_compute[192079]: 2025-10-02 12:03:46.526 2 DEBUG oslo_concurrency.lockutils [None req-fd9a90be-2ecb-46df-9b21-cd35610201da d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "98fe50ad-409f-4b57-a579-3b83bb089bd3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:46 compute-0 nova_compute[192079]: 2025-10-02 12:03:46.571 2 INFO nova.compute.manager [None req-fd9a90be-2ecb-46df-9b21-cd35610201da d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Terminating instance
Oct 02 12:03:46 compute-0 nova_compute[192079]: 2025-10-02 12:03:46.647 2 DEBUG oslo_concurrency.lockutils [None req-fd9a90be-2ecb-46df-9b21-cd35610201da d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Acquiring lock "refresh_cache-98fe50ad-409f-4b57-a579-3b83bb089bd3" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:03:46 compute-0 nova_compute[192079]: 2025-10-02 12:03:46.647 2 DEBUG oslo_concurrency.lockutils [None req-fd9a90be-2ecb-46df-9b21-cd35610201da d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Acquired lock "refresh_cache-98fe50ad-409f-4b57-a579-3b83bb089bd3" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:03:46 compute-0 nova_compute[192079]: 2025-10-02 12:03:46.648 2 DEBUG nova.network.neutron [None req-fd9a90be-2ecb-46df-9b21-cd35610201da d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:03:47 compute-0 nova_compute[192079]: 2025-10-02 12:03:47.238 2 DEBUG nova.network.neutron [None req-fd9a90be-2ecb-46df-9b21-cd35610201da d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:03:47 compute-0 nova_compute[192079]: 2025-10-02 12:03:47.243 2 DEBUG nova.network.neutron [None req-2be22ab1-ff70-4bb6-9320-a5ff5111c5ef d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:03:47 compute-0 nova_compute[192079]: 2025-10-02 12:03:47.266 2 DEBUG oslo_concurrency.lockutils [None req-2be22ab1-ff70-4bb6-9320-a5ff5111c5ef d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Releasing lock "refresh_cache-ce39a1f9-1883-4f3e-81e8-6da425b2d2bb" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:03:47 compute-0 nova_compute[192079]: 2025-10-02 12:03:47.267 2 DEBUG nova.compute.manager [None req-2be22ab1-ff70-4bb6-9320-a5ff5111c5ef d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:03:47 compute-0 systemd[1]: machine-qemu\x2d11\x2dinstance\x2d00000012.scope: Deactivated successfully.
Oct 02 12:03:47 compute-0 systemd[1]: machine-qemu\x2d11\x2dinstance\x2d00000012.scope: Consumed 13.591s CPU time.
Oct 02 12:03:47 compute-0 systemd-machined[152150]: Machine qemu-11-instance-00000012 terminated.
Oct 02 12:03:47 compute-0 nova_compute[192079]: 2025-10-02 12:03:47.521 2 INFO nova.virt.libvirt.driver [-] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Instance destroyed successfully.
Oct 02 12:03:47 compute-0 nova_compute[192079]: 2025-10-02 12:03:47.522 2 DEBUG nova.objects.instance [None req-2be22ab1-ff70-4bb6-9320-a5ff5111c5ef d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lazy-loading 'resources' on Instance uuid ce39a1f9-1883-4f3e-81e8-6da425b2d2bb obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:03:47 compute-0 nova_compute[192079]: 2025-10-02 12:03:47.542 2 INFO nova.virt.libvirt.driver [None req-2be22ab1-ff70-4bb6-9320-a5ff5111c5ef d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Deleting instance files /var/lib/nova/instances/ce39a1f9-1883-4f3e-81e8-6da425b2d2bb_del
Oct 02 12:03:47 compute-0 nova_compute[192079]: 2025-10-02 12:03:47.543 2 INFO nova.virt.libvirt.driver [None req-2be22ab1-ff70-4bb6-9320-a5ff5111c5ef d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Deletion of /var/lib/nova/instances/ce39a1f9-1883-4f3e-81e8-6da425b2d2bb_del complete
Oct 02 12:03:47 compute-0 nova_compute[192079]: 2025-10-02 12:03:47.570 2 DEBUG nova.network.neutron [None req-fd9a90be-2ecb-46df-9b21-cd35610201da d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:03:47 compute-0 nova_compute[192079]: 2025-10-02 12:03:47.611 2 DEBUG oslo_concurrency.lockutils [None req-fd9a90be-2ecb-46df-9b21-cd35610201da d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Releasing lock "refresh_cache-98fe50ad-409f-4b57-a579-3b83bb089bd3" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:03:47 compute-0 nova_compute[192079]: 2025-10-02 12:03:47.612 2 DEBUG nova.compute.manager [None req-fd9a90be-2ecb-46df-9b21-cd35610201da d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:03:47 compute-0 nova_compute[192079]: 2025-10-02 12:03:47.661 2 INFO nova.compute.manager [None req-2be22ab1-ff70-4bb6-9320-a5ff5111c5ef d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Took 0.39 seconds to destroy the instance on the hypervisor.
Oct 02 12:03:47 compute-0 nova_compute[192079]: 2025-10-02 12:03:47.662 2 DEBUG oslo.service.loopingcall [None req-2be22ab1-ff70-4bb6-9320-a5ff5111c5ef d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:03:47 compute-0 nova_compute[192079]: 2025-10-02 12:03:47.662 2 DEBUG nova.compute.manager [-] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:03:47 compute-0 nova_compute[192079]: 2025-10-02 12:03:47.663 2 DEBUG nova.network.neutron [-] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:03:47 compute-0 systemd[1]: machine-qemu\x2d12\x2dinstance\x2d00000013.scope: Deactivated successfully.
Oct 02 12:03:47 compute-0 systemd[1]: machine-qemu\x2d12\x2dinstance\x2d00000013.scope: Consumed 13.680s CPU time.
Oct 02 12:03:47 compute-0 systemd-machined[152150]: Machine qemu-12-instance-00000013 terminated.
Oct 02 12:03:47 compute-0 nova_compute[192079]: 2025-10-02 12:03:47.672 2 DEBUG nova.network.neutron [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Port 29214def-2450-4edd-acc6-84e165aa1e2c updated with migration profile {'migrating_to': 'compute-0.ctlplane.example.com'} successfully _setup_migration_port_profile /usr/lib/python3.9/site-packages/nova/network/neutron.py:354
Oct 02 12:03:47 compute-0 nova_compute[192079]: 2025-10-02 12:03:47.692 2 DEBUG nova.compute.manager [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] pre_live_migration result data is LibvirtLiveMigrateData(bdms=[],block_migration=True,disk_available_mb=73728,disk_over_commit=False,dst_numa_info=<?>,dst_supports_numa_live_migration=<?>,dst_wants_file_backed_memory=False,file_backed_memory_discard=<?>,filename='tmpeyk8xlf5',graphics_listen_addr_spice=127.0.0.1,graphics_listen_addr_vnc=::,image_type='qcow2',instance_relative_path='356bc6d6-1101-467e-a020-65876724c955',is_shared_block_storage=False,is_shared_instance_path=False,is_volume_backed=False,migration=<?>,old_vol_attachment_ids={},serial_listen_addr=None,serial_listen_ports=[],src_supports_native_luks=<?>,src_supports_numa_live_migration=<?>,supported_perf_events=[],target_connect_addr=None,vifs=[VIFMigrateData],wait_for_vif_plugged=True) pre_live_migration /usr/lib/python3.9/site-packages/nova/compute/manager.py:8723
Oct 02 12:03:47 compute-0 nova_compute[192079]: 2025-10-02 12:03:47.873 2 INFO nova.virt.libvirt.driver [-] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Instance destroyed successfully.
Oct 02 12:03:47 compute-0 nova_compute[192079]: 2025-10-02 12:03:47.873 2 DEBUG nova.objects.instance [None req-fd9a90be-2ecb-46df-9b21-cd35610201da d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lazy-loading 'resources' on Instance uuid 98fe50ad-409f-4b57-a579-3b83bb089bd3 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:03:47 compute-0 nova_compute[192079]: 2025-10-02 12:03:47.897 2 INFO nova.virt.libvirt.driver [None req-fd9a90be-2ecb-46df-9b21-cd35610201da d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Deleting instance files /var/lib/nova/instances/98fe50ad-409f-4b57-a579-3b83bb089bd3_del
Oct 02 12:03:47 compute-0 nova_compute[192079]: 2025-10-02 12:03:47.897 2 INFO nova.virt.libvirt.driver [None req-fd9a90be-2ecb-46df-9b21-cd35610201da d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Deletion of /var/lib/nova/instances/98fe50ad-409f-4b57-a579-3b83bb089bd3_del complete
Oct 02 12:03:48 compute-0 kernel: tap29214def-24: entered promiscuous mode
Oct 02 12:03:48 compute-0 NetworkManager[51160]: <info>  [1759406628.0094] manager: (tap29214def-24): new Tun device (/org/freedesktop/NetworkManager/Devices/50)
Oct 02 12:03:48 compute-0 systemd-udevd[222750]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:03:48 compute-0 ovn_controller[94336]: 2025-10-02T12:03:48Z|00087|binding|INFO|Claiming lport 29214def-2450-4edd-acc6-84e165aa1e2c for this additional chassis.
Oct 02 12:03:48 compute-0 ovn_controller[94336]: 2025-10-02T12:03:48Z|00088|binding|INFO|29214def-2450-4edd-acc6-84e165aa1e2c: Claiming fa:16:3e:1d:3d:20 10.100.0.14
Oct 02 12:03:48 compute-0 nova_compute[192079]: 2025-10-02 12:03:48.013 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:48 compute-0 nova_compute[192079]: 2025-10-02 12:03:48.019 2 INFO nova.compute.manager [None req-fd9a90be-2ecb-46df-9b21-cd35610201da d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Took 0.41 seconds to destroy the instance on the hypervisor.
Oct 02 12:03:48 compute-0 nova_compute[192079]: 2025-10-02 12:03:48.020 2 DEBUG oslo.service.loopingcall [None req-fd9a90be-2ecb-46df-9b21-cd35610201da d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:03:48 compute-0 nova_compute[192079]: 2025-10-02 12:03:48.020 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:48 compute-0 nova_compute[192079]: 2025-10-02 12:03:48.021 2 DEBUG nova.compute.manager [-] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:03:48 compute-0 nova_compute[192079]: 2025-10-02 12:03:48.021 2 DEBUG nova.network.neutron [-] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:03:48 compute-0 NetworkManager[51160]: <info>  [1759406628.0339] device (tap29214def-24): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:03:48 compute-0 NetworkManager[51160]: <info>  [1759406628.0347] device (tap29214def-24): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:03:48 compute-0 systemd-machined[152150]: New machine qemu-13-instance-00000014.
Oct 02 12:03:48 compute-0 nova_compute[192079]: 2025-10-02 12:03:48.105 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:48 compute-0 systemd[1]: Started Virtual Machine qemu-13-instance-00000014.
Oct 02 12:03:48 compute-0 nova_compute[192079]: 2025-10-02 12:03:48.111 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:48 compute-0 nova_compute[192079]: 2025-10-02 12:03:48.113 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:48 compute-0 ovn_controller[94336]: 2025-10-02T12:03:48Z|00089|binding|INFO|Setting lport 29214def-2450-4edd-acc6-84e165aa1e2c ovn-installed in OVS
Oct 02 12:03:48 compute-0 nova_compute[192079]: 2025-10-02 12:03:48.238 2 DEBUG nova.network.neutron [-] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:03:48 compute-0 nova_compute[192079]: 2025-10-02 12:03:48.245 2 DEBUG nova.network.neutron [-] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:03:48 compute-0 nova_compute[192079]: 2025-10-02 12:03:48.262 2 DEBUG nova.network.neutron [-] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:03:48 compute-0 nova_compute[192079]: 2025-10-02 12:03:48.263 2 DEBUG nova.network.neutron [-] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:03:48 compute-0 nova_compute[192079]: 2025-10-02 12:03:48.285 2 INFO nova.compute.manager [-] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Took 0.26 seconds to deallocate network for instance.
Oct 02 12:03:48 compute-0 nova_compute[192079]: 2025-10-02 12:03:48.286 2 INFO nova.compute.manager [-] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Took 0.62 seconds to deallocate network for instance.
Oct 02 12:03:48 compute-0 nova_compute[192079]: 2025-10-02 12:03:48.480 2 DEBUG oslo_concurrency.lockutils [None req-fd9a90be-2ecb-46df-9b21-cd35610201da d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:48 compute-0 nova_compute[192079]: 2025-10-02 12:03:48.480 2 DEBUG oslo_concurrency.lockutils [None req-fd9a90be-2ecb-46df-9b21-cd35610201da d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:48 compute-0 nova_compute[192079]: 2025-10-02 12:03:48.503 2 DEBUG oslo_concurrency.lockutils [None req-2be22ab1-ff70-4bb6-9320-a5ff5111c5ef d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:03:48 compute-0 nova_compute[192079]: 2025-10-02 12:03:48.610 2 DEBUG nova.compute.provider_tree [None req-fd9a90be-2ecb-46df-9b21-cd35610201da d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:03:48 compute-0 nova_compute[192079]: 2025-10-02 12:03:48.632 2 DEBUG nova.scheduler.client.report [None req-fd9a90be-2ecb-46df-9b21-cd35610201da d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:03:48 compute-0 nova_compute[192079]: 2025-10-02 12:03:48.658 2 DEBUG oslo_concurrency.lockutils [None req-fd9a90be-2ecb-46df-9b21-cd35610201da d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.177s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:48 compute-0 nova_compute[192079]: 2025-10-02 12:03:48.661 2 DEBUG oslo_concurrency.lockutils [None req-2be22ab1-ff70-4bb6-9320-a5ff5111c5ef d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.159s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:03:48 compute-0 nova_compute[192079]: 2025-10-02 12:03:48.685 2 INFO nova.scheduler.client.report [None req-fd9a90be-2ecb-46df-9b21-cd35610201da d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Deleted allocations for instance 98fe50ad-409f-4b57-a579-3b83bb089bd3
Oct 02 12:03:48 compute-0 nova_compute[192079]: 2025-10-02 12:03:48.750 2 DEBUG nova.compute.provider_tree [None req-2be22ab1-ff70-4bb6-9320-a5ff5111c5ef d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:03:48 compute-0 nova_compute[192079]: 2025-10-02 12:03:48.785 2 DEBUG nova.scheduler.client.report [None req-2be22ab1-ff70-4bb6-9320-a5ff5111c5ef d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:03:48 compute-0 nova_compute[192079]: 2025-10-02 12:03:48.851 2 DEBUG oslo_concurrency.lockutils [None req-fd9a90be-2ecb-46df-9b21-cd35610201da d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "98fe50ad-409f-4b57-a579-3b83bb089bd3" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 2.326s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:48 compute-0 nova_compute[192079]: 2025-10-02 12:03:48.863 2 DEBUG oslo_concurrency.lockutils [None req-2be22ab1-ff70-4bb6-9320-a5ff5111c5ef d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.202s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:48 compute-0 nova_compute[192079]: 2025-10-02 12:03:48.888 2 INFO nova.scheduler.client.report [None req-2be22ab1-ff70-4bb6-9320-a5ff5111c5ef d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Deleted allocations for instance ce39a1f9-1883-4f3e-81e8-6da425b2d2bb
Oct 02 12:03:48 compute-0 nova_compute[192079]: 2025-10-02 12:03:48.976 2 DEBUG oslo_concurrency.lockutils [None req-2be22ab1-ff70-4bb6-9320-a5ff5111c5ef d27eb44762f548fc96a3f2edcdb5537c df2cf2fcc379455c90e6044b60e603c0 - - default default] Lock "ce39a1f9-1883-4f3e-81e8-6da425b2d2bb" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 2.723s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:03:49 compute-0 podman[222791]: 2025-10-02 12:03:49.188875611 +0000 UTC m=+0.097758115 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_controller, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.license=GPLv2)
Oct 02 12:03:49 compute-0 nova_compute[192079]: 2025-10-02 12:03:49.401 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:50 compute-0 nova_compute[192079]: 2025-10-02 12:03:50.225 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406630.2247033, 356bc6d6-1101-467e-a020-65876724c955 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:03:50 compute-0 nova_compute[192079]: 2025-10-02 12:03:50.225 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 356bc6d6-1101-467e-a020-65876724c955] VM Started (Lifecycle Event)
Oct 02 12:03:50 compute-0 nova_compute[192079]: 2025-10-02 12:03:50.252 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 356bc6d6-1101-467e-a020-65876724c955] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:03:50 compute-0 nova_compute[192079]: 2025-10-02 12:03:50.378 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:52 compute-0 podman[222840]: 2025-10-02 12:03:52.17238658 +0000 UTC m=+0.072140134 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, config_id=ovn_metadata_agent, org.label-schema.schema-version=1.0, tcib_managed=true, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', 
'/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, org.label-schema.build-date=20251001)
Oct 02 12:03:52 compute-0 podman[222841]: 2025-10-02 12:03:52.19579015 +0000 UTC m=+0.091455053 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:03:54 compute-0 nova_compute[192079]: 2025-10-02 12:03:54.466 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:55 compute-0 nova_compute[192079]: 2025-10-02 12:03:55.380 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:56 compute-0 nova_compute[192079]: 2025-10-02 12:03:56.327 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406636.327388, 356bc6d6-1101-467e-a020-65876724c955 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:03:56 compute-0 nova_compute[192079]: 2025-10-02 12:03:56.328 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 356bc6d6-1101-467e-a020-65876724c955] VM Resumed (Lifecycle Event)
Oct 02 12:03:56 compute-0 nova_compute[192079]: 2025-10-02 12:03:56.353 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 356bc6d6-1101-467e-a020-65876724c955] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:03:56 compute-0 nova_compute[192079]: 2025-10-02 12:03:56.360 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 356bc6d6-1101-467e-a020-65876724c955] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: active, current task_state: migrating, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:03:56 compute-0 nova_compute[192079]: 2025-10-02 12:03:56.380 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 356bc6d6-1101-467e-a020-65876724c955] During the sync_power process the instance has moved from host compute-2.ctlplane.example.com to host compute-0.ctlplane.example.com
Oct 02 12:03:58 compute-0 ovn_controller[94336]: 2025-10-02T12:03:58Z|00090|binding|INFO|Claiming lport 29214def-2450-4edd-acc6-84e165aa1e2c for this chassis.
Oct 02 12:03:58 compute-0 ovn_controller[94336]: 2025-10-02T12:03:58Z|00091|binding|INFO|29214def-2450-4edd-acc6-84e165aa1e2c: Claiming fa:16:3e:1d:3d:20 10.100.0.14
Oct 02 12:03:58 compute-0 ovn_controller[94336]: 2025-10-02T12:03:58Z|00092|binding|INFO|Setting lport 29214def-2450-4edd-acc6-84e165aa1e2c up in Southbound
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:58.279 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:1d:3d:20 10.100.0.14'], port_security=['fa:16:3e:1d:3d:20 10.100.0.14'], type=, nat_addresses=[], virtual_parent=[], up=[True], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.14/28', 'neutron:device_id': '356bc6d6-1101-467e-a020-65876724c955', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-664b6526-6df1-4024-9bab-37218e6c18bd', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'f7cb78d24d1a4511a59ced45ccc4a1c7', 'neutron:revision_number': '11', 'neutron:security_group_ids': 'a459d514-aab4-4030-9850-e066abdeaccc', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=eddfb51e-1095-4b3d-a2dc-f2557cf13b11, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=29214def-2450-4edd-acc6-84e165aa1e2c) old=Port_Binding(up=[False], additional_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:58.281 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 29214def-2450-4edd-acc6-84e165aa1e2c in datapath 664b6526-6df1-4024-9bab-37218e6c18bd bound to our chassis
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:58.283 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 664b6526-6df1-4024-9bab-37218e6c18bd
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:58.302 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[113b135e-b7e6-49a9-924a-c53bf5ec046e]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:58.303 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap664b6526-61 in ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:58.306 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap664b6526-60 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:58.307 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[55d1c00d-3d83-445d-aabf-84e145c0f99d]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:58.308 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5866a42b-7e76-4d92-8a9a-19fa7e00c5d0]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:58.328 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[676fce73-0564-4168-9468-2f4275f8c2c6]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:58.351 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4950a2d8-ca9c-4feb-b1bd-74b402f35b40]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:58.398 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[ba999660-c3d9-4d6a-a363-9fd94be5a5c0]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:58.406 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6a7acc82-faf5-49f2-bdbc-0e57ffadb5ab]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:03:58 compute-0 NetworkManager[51160]: <info>  [1759406638.4100] manager: (tap664b6526-60): new Veth device (/org/freedesktop/NetworkManager/Devices/51)
Oct 02 12:03:58 compute-0 nova_compute[192079]: 2025-10-02 12:03:58.448 2 INFO nova.compute.manager [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Post operation of migration started
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:58.448 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[fdea9b70-f84a-47e0-bf19-e1a1a943452f]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:03:58 compute-0 podman[222883]: 2025-10-02 12:03:58.453064277 +0000 UTC m=+0.093224361 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=edpm, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_managed=true, container_name=ceilometer_agent_compute, managed_by=edpm_ansible, 
org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0)
Oct 02 12:03:58 compute-0 systemd-udevd[222906]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:58.451 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[851180a2-43a3-4847-a0d8-d46ea10f7535]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:03:58 compute-0 NetworkManager[51160]: <info>  [1759406638.4903] device (tap664b6526-60): carrier: link connected
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:58.495 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[c19572ce-6525-4573-ac43-a14e7f963544]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:58.509 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[bee9eb3c-e095-4971-832f-401bb9866687]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap664b6526-61'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:5c:8c:2f'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 30], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 463611, 'reachable_time': 18607, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 222925, 'error': None, 'target': 'ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:58.522 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5f5610a1-ec11-4bb9-855e-b59b28c447cc]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe5c:8c2f'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 463611, 'tstamp': 463611}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 222926, 'error': None, 'target': 'ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:58.536 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2c0c95c7-1c03-4c9b-a8fd-7c425aba8d99]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap664b6526-61'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:5c:8c:2f'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 30], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 463611, 'reachable_time': 18607, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 222927, 'error': None, 'target': 'ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:58.561 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6a76693c-c62a-4c87-b93b-9288ff0b6e1e]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:58.618 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d8829a7b-e130-4d9f-bff5-3e75d07b400e]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:58.619 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap664b6526-60, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:58.619 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:58.620 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap664b6526-60, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:03:58 compute-0 nova_compute[192079]: 2025-10-02 12:03:58.621 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:58 compute-0 NetworkManager[51160]: <info>  [1759406638.6219] manager: (tap664b6526-60): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/52)
Oct 02 12:03:58 compute-0 kernel: tap664b6526-60: entered promiscuous mode
Oct 02 12:03:58 compute-0 nova_compute[192079]: 2025-10-02 12:03:58.623 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:58.624 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap664b6526-60, col_values=(('external_ids', {'iface-id': '2f7dc774-b718-4d9e-9655-fbc5ffa141e8'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:03:58 compute-0 nova_compute[192079]: 2025-10-02 12:03:58.625 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:58 compute-0 ovn_controller[94336]: 2025-10-02T12:03:58Z|00093|binding|INFO|Releasing lport 2f7dc774-b718-4d9e-9655-fbc5ffa141e8 from this chassis (sb_readonly=0)
Oct 02 12:03:58 compute-0 nova_compute[192079]: 2025-10-02 12:03:58.642 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:58.642 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/664b6526-6df1-4024-9bab-37218e6c18bd.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/664b6526-6df1-4024-9bab-37218e6c18bd.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:58.643 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[79a8def1-45f0-4545-83f7-785ac6bc7917]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:58.643 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-664b6526-6df1-4024-9bab-37218e6c18bd
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/664b6526-6df1-4024-9bab-37218e6c18bd.pid.haproxy
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 664b6526-6df1-4024-9bab-37218e6c18bd
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:03:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:03:58.644 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd', 'env', 'PROCESS_TAG=haproxy-664b6526-6df1-4024-9bab-37218e6c18bd', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/664b6526-6df1-4024-9bab-37218e6c18bd.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:03:59 compute-0 nova_compute[192079]: 2025-10-02 12:03:59.001 2 DEBUG oslo_concurrency.lockutils [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Acquiring lock "refresh_cache-356bc6d6-1101-467e-a020-65876724c955" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:03:59 compute-0 nova_compute[192079]: 2025-10-02 12:03:59.002 2 DEBUG oslo_concurrency.lockutils [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Acquired lock "refresh_cache-356bc6d6-1101-467e-a020-65876724c955" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:03:59 compute-0 nova_compute[192079]: 2025-10-02 12:03:59.002 2 DEBUG nova.network.neutron [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:03:59 compute-0 podman[222959]: 2025-10-02 12:03:58.948831546 +0000 UTC m=+0.025286962 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:03:59 compute-0 podman[222959]: 2025-10-02 12:03:59.367910568 +0000 UTC m=+0.444365904 container create 3f86b816abdb6e4a862a95b77272825206958d251195675a2da77cfd6c12b467 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS)
Oct 02 12:03:59 compute-0 nova_compute[192079]: 2025-10-02 12:03:59.509 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:03:59 compute-0 systemd[1]: Started libpod-conmon-3f86b816abdb6e4a862a95b77272825206958d251195675a2da77cfd6c12b467.scope.
Oct 02 12:03:59 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:03:59 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/dfb8f09afdf99f22b8a4d15541efff6083a2a1624a8a8db0d0b670b7f1f89ff9/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:03:59 compute-0 podman[222959]: 2025-10-02 12:03:59.732149049 +0000 UTC m=+0.808604395 container init 3f86b816abdb6e4a862a95b77272825206958d251195675a2da77cfd6c12b467 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:03:59 compute-0 podman[222959]: 2025-10-02 12:03:59.742162864 +0000 UTC m=+0.818618190 container start 3f86b816abdb6e4a862a95b77272825206958d251195675a2da77cfd6c12b467 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, tcib_managed=true, org.label-schema.schema-version=1.0)
Oct 02 12:03:59 compute-0 neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd[222974]: [NOTICE]   (222978) : New worker (222980) forked
Oct 02 12:03:59 compute-0 neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd[222974]: [NOTICE]   (222978) : Loading success.
Oct 02 12:04:00 compute-0 nova_compute[192079]: 2025-10-02 12:04:00.383 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:01 compute-0 nova_compute[192079]: 2025-10-02 12:04:01.030 2 DEBUG nova.network.neutron [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Updating instance_info_cache with network_info: [{"id": "29214def-2450-4edd-acc6-84e165aa1e2c", "address": "fa:16:3e:1d:3d:20", "network": {"id": "664b6526-6df1-4024-9bab-37218e6c18bd", "bridge": "br-int", "label": "tempest-LiveMigrationTest-2017832683-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f7cb78d24d1a4511a59ced45ccc4a1c7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap29214def-24", "ovs_interfaceid": "29214def-2450-4edd-acc6-84e165aa1e2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {"os_vif_delegation": true}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:04:01 compute-0 nova_compute[192079]: 2025-10-02 12:04:01.056 2 DEBUG oslo_concurrency.lockutils [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Releasing lock "refresh_cache-356bc6d6-1101-467e-a020-65876724c955" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:04:01 compute-0 nova_compute[192079]: 2025-10-02 12:04:01.090 2 DEBUG oslo_concurrency.lockutils [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.allocate_pci_devices_for_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:01 compute-0 nova_compute[192079]: 2025-10-02 12:04:01.090 2 DEBUG oslo_concurrency.lockutils [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.allocate_pci_devices_for_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:01 compute-0 nova_compute[192079]: 2025-10-02 12:04:01.091 2 DEBUG oslo_concurrency.lockutils [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.allocate_pci_devices_for_instance" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:01 compute-0 nova_compute[192079]: 2025-10-02 12:04:01.096 2 INFO nova.virt.libvirt.driver [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Sending announce-self command to QEMU monitor. Attempt 1 of 3
Oct 02 12:04:01 compute-0 virtqemud[191807]: Domain id=13 name='instance-00000014' uuid=356bc6d6-1101-467e-a020-65876724c955 is tainted: custom-monitor
Oct 02 12:04:02 compute-0 nova_compute[192079]: 2025-10-02 12:04:02.108 2 INFO nova.virt.libvirt.driver [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Sending announce-self command to QEMU monitor. Attempt 2 of 3
Oct 02 12:04:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:04:02.204 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:04:02.205 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:04:02.206 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:02 compute-0 nova_compute[192079]: 2025-10-02 12:04:02.520 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759406627.5187123, ce39a1f9-1883-4f3e-81e8-6da425b2d2bb => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:04:02 compute-0 nova_compute[192079]: 2025-10-02 12:04:02.520 2 INFO nova.compute.manager [-] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] VM Stopped (Lifecycle Event)
Oct 02 12:04:02 compute-0 nova_compute[192079]: 2025-10-02 12:04:02.539 2 DEBUG nova.compute.manager [None req-d9ef3a31-95ed-4a1b-890f-f37939b6ac02 - - - - - -] [instance: ce39a1f9-1883-4f3e-81e8-6da425b2d2bb] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:04:02 compute-0 nova_compute[192079]: 2025-10-02 12:04:02.574 2 DEBUG oslo_concurrency.lockutils [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Acquiring lock "2f0ec710-6070-4bb8-ac27-21d96a184569" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:02 compute-0 nova_compute[192079]: 2025-10-02 12:04:02.574 2 DEBUG oslo_concurrency.lockutils [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Lock "2f0ec710-6070-4bb8-ac27-21d96a184569" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:02 compute-0 nova_compute[192079]: 2025-10-02 12:04:02.590 2 DEBUG nova.compute.manager [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:04:02 compute-0 nova_compute[192079]: 2025-10-02 12:04:02.692 2 DEBUG oslo_concurrency.lockutils [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:02 compute-0 nova_compute[192079]: 2025-10-02 12:04:02.693 2 DEBUG oslo_concurrency.lockutils [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:02 compute-0 nova_compute[192079]: 2025-10-02 12:04:02.699 2 DEBUG nova.virt.hardware [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:04:02 compute-0 nova_compute[192079]: 2025-10-02 12:04:02.700 2 INFO nova.compute.claims [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:04:02 compute-0 nova_compute[192079]: 2025-10-02 12:04:02.819 2 DEBUG nova.compute.provider_tree [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:04:02 compute-0 nova_compute[192079]: 2025-10-02 12:04:02.832 2 DEBUG nova.scheduler.client.report [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:04:02 compute-0 nova_compute[192079]: 2025-10-02 12:04:02.858 2 DEBUG oslo_concurrency.lockutils [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.165s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:02 compute-0 nova_compute[192079]: 2025-10-02 12:04:02.858 2 DEBUG nova.compute.manager [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:04:02 compute-0 nova_compute[192079]: 2025-10-02 12:04:02.870 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759406627.8695092, 98fe50ad-409f-4b57-a579-3b83bb089bd3 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:04:02 compute-0 nova_compute[192079]: 2025-10-02 12:04:02.871 2 INFO nova.compute.manager [-] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] VM Stopped (Lifecycle Event)
Oct 02 12:04:02 compute-0 nova_compute[192079]: 2025-10-02 12:04:02.888 2 DEBUG nova.compute.manager [None req-0abddf3f-df59-4478-bbc9-79e23a5d578f - - - - - -] [instance: 98fe50ad-409f-4b57-a579-3b83bb089bd3] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:04:02 compute-0 nova_compute[192079]: 2025-10-02 12:04:02.918 2 DEBUG nova.compute.manager [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:04:02 compute-0 nova_compute[192079]: 2025-10-02 12:04:02.919 2 DEBUG nova.network.neutron [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:04:02 compute-0 nova_compute[192079]: 2025-10-02 12:04:02.935 2 INFO nova.virt.libvirt.driver [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:04:02 compute-0 nova_compute[192079]: 2025-10-02 12:04:02.954 2 DEBUG nova.compute.manager [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.076 2 DEBUG nova.compute.manager [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.077 2 DEBUG nova.virt.libvirt.driver [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.078 2 INFO nova.virt.libvirt.driver [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Creating image(s)
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.078 2 DEBUG oslo_concurrency.lockutils [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Acquiring lock "/var/lib/nova/instances/2f0ec710-6070-4bb8-ac27-21d96a184569/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.078 2 DEBUG oslo_concurrency.lockutils [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Lock "/var/lib/nova/instances/2f0ec710-6070-4bb8-ac27-21d96a184569/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.079 2 DEBUG oslo_concurrency.lockutils [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Lock "/var/lib/nova/instances/2f0ec710-6070-4bb8-ac27-21d96a184569/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.090 2 DEBUG oslo_concurrency.processutils [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.113 2 INFO nova.virt.libvirt.driver [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Sending announce-self command to QEMU monitor. Attempt 3 of 3
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.118 2 DEBUG nova.compute.manager [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.139 2 DEBUG nova.objects.instance [None req-08fef021-4e4c-49d5-b19b-f48e19ff10a8 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Trying to apply a migration context that does not seem to be set for this instance apply_migration_context /usr/lib/python3.9/site-packages/nova/objects/instance.py:1032
Oct 02 12:04:03 compute-0 podman[222989]: 2025-10-02 12:04:03.144691802 +0000 UTC m=+0.057937215 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, url=https://catalog.redhat.com/en/search?searchType=containers, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, config_id=edpm, io.openshift.tags=minimal rhel9, name=ubi9-minimal, vendor=Red Hat, Inc., container_name=openstack_network_exporter, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vcs-type=git, com.redhat.component=ubi9-minimal-container, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., release=1755695350, architecture=x86_64, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.buildah.version=1.33.7, version=9.6, distribution-scope=public, io.openshift.expose-services=, maintainer=Red Hat, Inc., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, build-date=2025-08-20T13:12:41, managed_by=edpm_ansible)
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.165 2 DEBUG oslo_concurrency.processutils [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.075s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.166 2 DEBUG oslo_concurrency.lockutils [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.166 2 DEBUG oslo_concurrency.lockutils [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:03 compute-0 podman[222990]: 2025-10-02 12:04:03.173538052 +0000 UTC m=+0.085823649 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, config_id=multipathd, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=multipathd, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_managed=true)
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.180 2 DEBUG oslo_concurrency.processutils [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.232 2 DEBUG oslo_concurrency.processutils [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.053s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.233 2 DEBUG oslo_concurrency.processutils [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/2f0ec710-6070-4bb8-ac27-21d96a184569/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:04:03.322 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=7, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=6) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.322 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:04:03.324 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 9 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.358 2 DEBUG nova.network.neutron [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] No network configured allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1188
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.358 2 DEBUG nova.compute.manager [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Instance network_info: |[]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.661 2 DEBUG oslo_concurrency.processutils [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/2f0ec710-6070-4bb8-ac27-21d96a184569/disk 1073741824" returned: 0 in 0.428s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.662 2 DEBUG oslo_concurrency.lockutils [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.496s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.663 2 DEBUG oslo_concurrency.processutils [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.687 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_incomplete_migrations run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.687 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Cleaning up deleted instances with incomplete migration  _cleanup_incomplete_migrations /usr/lib/python3.9/site-packages/nova/compute/manager.py:11183
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.752 2 DEBUG oslo_concurrency.processutils [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.089s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.753 2 DEBUG nova.virt.disk.api [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Checking if we can resize image /var/lib/nova/instances/2f0ec710-6070-4bb8-ac27-21d96a184569/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.753 2 DEBUG oslo_concurrency.processutils [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2f0ec710-6070-4bb8-ac27-21d96a184569/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.820 2 DEBUG oslo_concurrency.processutils [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2f0ec710-6070-4bb8-ac27-21d96a184569/disk --force-share --output=json" returned: 0 in 0.067s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.821 2 DEBUG nova.virt.disk.api [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Cannot resize image /var/lib/nova/instances/2f0ec710-6070-4bb8-ac27-21d96a184569/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.821 2 DEBUG nova.objects.instance [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Lazy-loading 'migration_context' on Instance uuid 2f0ec710-6070-4bb8-ac27-21d96a184569 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.835 2 DEBUG nova.virt.libvirt.driver [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.836 2 DEBUG nova.virt.libvirt.driver [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Ensure instance console log exists: /var/lib/nova/instances/2f0ec710-6070-4bb8-ac27-21d96a184569/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.836 2 DEBUG oslo_concurrency.lockutils [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.837 2 DEBUG oslo_concurrency.lockutils [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.837 2 DEBUG oslo_concurrency.lockutils [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.839 2 DEBUG nova.virt.libvirt.driver [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Start _get_guest_xml network_info=[] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.844 2 WARNING nova.virt.libvirt.driver [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.849 2 DEBUG nova.virt.libvirt.host [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.849 2 DEBUG nova.virt.libvirt.host [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.852 2 DEBUG nova.virt.libvirt.host [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.852 2 DEBUG nova.virt.libvirt.host [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.853 2 DEBUG nova.virt.libvirt.driver [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.854 2 DEBUG nova.virt.hardware [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.854 2 DEBUG nova.virt.hardware [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.854 2 DEBUG nova.virt.hardware [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.855 2 DEBUG nova.virt.hardware [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.855 2 DEBUG nova.virt.hardware [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.855 2 DEBUG nova.virt.hardware [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.855 2 DEBUG nova.virt.hardware [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.855 2 DEBUG nova.virt.hardware [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.856 2 DEBUG nova.virt.hardware [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.856 2 DEBUG nova.virt.hardware [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.856 2 DEBUG nova.virt.hardware [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.859 2 DEBUG nova.objects.instance [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Lazy-loading 'pci_devices' on Instance uuid 2f0ec710-6070-4bb8-ac27-21d96a184569 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.883 2 DEBUG nova.virt.libvirt.driver [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:04:03 compute-0 nova_compute[192079]:   <uuid>2f0ec710-6070-4bb8-ac27-21d96a184569</uuid>
Oct 02 12:04:03 compute-0 nova_compute[192079]:   <name>instance-00000017</name>
Oct 02 12:04:03 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:04:03 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:04:03 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:04:03 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:       <nova:name>tempest-ServerDiagnosticsNegativeTest-server-319237310</nova:name>
Oct 02 12:04:03 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:04:03</nova:creationTime>
Oct 02 12:04:03 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:04:03 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:04:03 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:04:03 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:04:03 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:04:03 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:04:03 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:04:03 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:04:03 compute-0 nova_compute[192079]:         <nova:user uuid="c9f66d2490b24421826bdf15e6fee495">tempest-ServerDiagnosticsNegativeTest-1536351087-project-member</nova:user>
Oct 02 12:04:03 compute-0 nova_compute[192079]:         <nova:project uuid="276d3d51fba7485bb858fbcb2d176461">tempest-ServerDiagnosticsNegativeTest-1536351087</nova:project>
Oct 02 12:04:03 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:04:03 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:       <nova:ports/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:04:03 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:04:03 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <system>
Oct 02 12:04:03 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:04:03 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:04:03 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:04:03 compute-0 nova_compute[192079]:       <entry name="serial">2f0ec710-6070-4bb8-ac27-21d96a184569</entry>
Oct 02 12:04:03 compute-0 nova_compute[192079]:       <entry name="uuid">2f0ec710-6070-4bb8-ac27-21d96a184569</entry>
Oct 02 12:04:03 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     </system>
Oct 02 12:04:03 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:04:03 compute-0 nova_compute[192079]:   <os>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:   </os>
Oct 02 12:04:03 compute-0 nova_compute[192079]:   <features>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:   </features>
Oct 02 12:04:03 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:04:03 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:04:03 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:04:03 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/2f0ec710-6070-4bb8-ac27-21d96a184569/disk"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:04:03 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/2f0ec710-6070-4bb8-ac27-21d96a184569/disk.config"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:04:03 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/2f0ec710-6070-4bb8-ac27-21d96a184569/console.log" append="off"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <video>
Oct 02 12:04:03 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     </video>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:04:03 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:04:03 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:04:03 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:04:03 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:04:03 compute-0 nova_compute[192079]: </domain>
Oct 02 12:04:03 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.983 2 DEBUG nova.virt.libvirt.driver [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.983 2 DEBUG nova.virt.libvirt.driver [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:04:03 compute-0 nova_compute[192079]: 2025-10-02 12:04:03.983 2 INFO nova.virt.libvirt.driver [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Using config drive
Oct 02 12:04:04 compute-0 nova_compute[192079]: 2025-10-02 12:04:04.187 2 INFO nova.virt.libvirt.driver [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Creating config drive at /var/lib/nova/instances/2f0ec710-6070-4bb8-ac27-21d96a184569/disk.config
Oct 02 12:04:04 compute-0 nova_compute[192079]: 2025-10-02 12:04:04.191 2 DEBUG oslo_concurrency.processutils [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/2f0ec710-6070-4bb8-ac27-21d96a184569/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpuvf61bxv execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:04 compute-0 nova_compute[192079]: 2025-10-02 12:04:04.319 2 DEBUG oslo_concurrency.processutils [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/2f0ec710-6070-4bb8-ac27-21d96a184569/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpuvf61bxv" returned: 0 in 0.128s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:04 compute-0 systemd-machined[152150]: New machine qemu-14-instance-00000017.
Oct 02 12:04:04 compute-0 systemd[1]: Started Virtual Machine qemu-14-instance-00000017.
Oct 02 12:04:04 compute-0 nova_compute[192079]: 2025-10-02 12:04:04.511 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:05 compute-0 nova_compute[192079]: 2025-10-02 12:04:05.294 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406645.293088, 2f0ec710-6070-4bb8-ac27-21d96a184569 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:04:05 compute-0 nova_compute[192079]: 2025-10-02 12:04:05.294 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] VM Resumed (Lifecycle Event)
Oct 02 12:04:05 compute-0 nova_compute[192079]: 2025-10-02 12:04:05.296 2 DEBUG nova.compute.manager [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:04:05 compute-0 nova_compute[192079]: 2025-10-02 12:04:05.296 2 DEBUG nova.virt.libvirt.driver [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:04:05 compute-0 nova_compute[192079]: 2025-10-02 12:04:05.303 2 INFO nova.virt.libvirt.driver [-] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Instance spawned successfully.
Oct 02 12:04:05 compute-0 nova_compute[192079]: 2025-10-02 12:04:05.303 2 DEBUG nova.virt.libvirt.driver [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:04:05 compute-0 nova_compute[192079]: 2025-10-02 12:04:05.319 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:04:05 compute-0 nova_compute[192079]: 2025-10-02 12:04:05.327 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:04:05 compute-0 nova_compute[192079]: 2025-10-02 12:04:05.329 2 DEBUG nova.virt.libvirt.driver [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:04:05 compute-0 nova_compute[192079]: 2025-10-02 12:04:05.329 2 DEBUG nova.virt.libvirt.driver [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:04:05 compute-0 nova_compute[192079]: 2025-10-02 12:04:05.329 2 DEBUG nova.virt.libvirt.driver [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:04:05 compute-0 nova_compute[192079]: 2025-10-02 12:04:05.330 2 DEBUG nova.virt.libvirt.driver [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:04:05 compute-0 nova_compute[192079]: 2025-10-02 12:04:05.330 2 DEBUG nova.virt.libvirt.driver [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:04:05 compute-0 nova_compute[192079]: 2025-10-02 12:04:05.330 2 DEBUG nova.virt.libvirt.driver [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:04:05 compute-0 nova_compute[192079]: 2025-10-02 12:04:05.354 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:04:05 compute-0 nova_compute[192079]: 2025-10-02 12:04:05.354 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406645.2936425, 2f0ec710-6070-4bb8-ac27-21d96a184569 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:04:05 compute-0 nova_compute[192079]: 2025-10-02 12:04:05.354 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] VM Started (Lifecycle Event)
Oct 02 12:04:05 compute-0 nova_compute[192079]: 2025-10-02 12:04:05.378 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:04:05 compute-0 nova_compute[192079]: 2025-10-02 12:04:05.385 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:04:05 compute-0 nova_compute[192079]: 2025-10-02 12:04:05.388 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:05 compute-0 nova_compute[192079]: 2025-10-02 12:04:05.404 2 INFO nova.compute.manager [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Took 2.33 seconds to spawn the instance on the hypervisor.
Oct 02 12:04:05 compute-0 nova_compute[192079]: 2025-10-02 12:04:05.405 2 DEBUG nova.compute.manager [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:04:05 compute-0 nova_compute[192079]: 2025-10-02 12:04:05.416 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:04:05 compute-0 nova_compute[192079]: 2025-10-02 12:04:05.516 2 INFO nova.compute.manager [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Took 2.87 seconds to build instance.
Oct 02 12:04:05 compute-0 nova_compute[192079]: 2025-10-02 12:04:05.544 2 DEBUG oslo_concurrency.lockutils [None req-1c80e83d-a8c1-42f7-bccd-d06fc677b3e2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Lock "2f0ec710-6070-4bb8-ac27-21d96a184569" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 2.970s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:06 compute-0 nova_compute[192079]: 2025-10-02 12:04:06.679 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:04:06 compute-0 nova_compute[192079]: 2025-10-02 12:04:06.776 2 DEBUG nova.virt.libvirt.driver [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Check if temp file /var/lib/nova/instances/tmp6xzn3h2n exists to indicate shared storage is being used for migration. Exists? False _check_shared_storage_test_file /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10065
Oct 02 12:04:06 compute-0 nova_compute[192079]: 2025-10-02 12:04:06.780 2 DEBUG oslo_concurrency.processutils [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:06 compute-0 nova_compute[192079]: 2025-10-02 12:04:06.845 2 DEBUG oslo_concurrency.processutils [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955/disk --force-share --output=json" returned: 0 in 0.065s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:06 compute-0 nova_compute[192079]: 2025-10-02 12:04:06.847 2 DEBUG oslo_concurrency.processutils [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:06 compute-0 nova_compute[192079]: 2025-10-02 12:04:06.909 2 DEBUG oslo_concurrency.processutils [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955/disk --force-share --output=json" returned: 0 in 0.062s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:06 compute-0 nova_compute[192079]: 2025-10-02 12:04:06.912 2 DEBUG nova.compute.manager [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] source check data is LibvirtLiveMigrateData(bdms=<?>,block_migration=True,disk_available_mb=74752,disk_over_commit=False,dst_numa_info=<?>,dst_supports_numa_live_migration=<?>,dst_wants_file_backed_memory=False,file_backed_memory_discard=<?>,filename='tmp6xzn3h2n',graphics_listen_addr_spice=127.0.0.1,graphics_listen_addr_vnc=::,image_type='qcow2',instance_relative_path='356bc6d6-1101-467e-a020-65876724c955',is_shared_block_storage=False,is_shared_instance_path=False,is_volume_backed=False,migration=<?>,old_vol_attachment_ids=<?>,serial_listen_addr=None,serial_listen_ports=<?>,src_supports_native_luks=<?>,src_supports_numa_live_migration=<?>,supported_perf_events=<?>,target_connect_addr=<?>,vifs=[VIFMigrateData],wait_for_vif_plugged=<?>) check_can_live_migrate_source /usr/lib/python3.9/site-packages/nova/compute/manager.py:8587
Oct 02 12:04:07 compute-0 nova_compute[192079]: 2025-10-02 12:04:07.554 2 DEBUG oslo_concurrency.lockutils [None req-78f0dcc6-53bc-49e6-b95e-19fb52ccc0a2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Acquiring lock "2f0ec710-6070-4bb8-ac27-21d96a184569" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:07 compute-0 nova_compute[192079]: 2025-10-02 12:04:07.554 2 DEBUG oslo_concurrency.lockutils [None req-78f0dcc6-53bc-49e6-b95e-19fb52ccc0a2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Lock "2f0ec710-6070-4bb8-ac27-21d96a184569" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:07 compute-0 nova_compute[192079]: 2025-10-02 12:04:07.555 2 DEBUG oslo_concurrency.lockutils [None req-78f0dcc6-53bc-49e6-b95e-19fb52ccc0a2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Acquiring lock "2f0ec710-6070-4bb8-ac27-21d96a184569-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:07 compute-0 nova_compute[192079]: 2025-10-02 12:04:07.555 2 DEBUG oslo_concurrency.lockutils [None req-78f0dcc6-53bc-49e6-b95e-19fb52ccc0a2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Lock "2f0ec710-6070-4bb8-ac27-21d96a184569-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:07 compute-0 nova_compute[192079]: 2025-10-02 12:04:07.555 2 DEBUG oslo_concurrency.lockutils [None req-78f0dcc6-53bc-49e6-b95e-19fb52ccc0a2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Lock "2f0ec710-6070-4bb8-ac27-21d96a184569-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:07 compute-0 nova_compute[192079]: 2025-10-02 12:04:07.568 2 INFO nova.compute.manager [None req-78f0dcc6-53bc-49e6-b95e-19fb52ccc0a2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Terminating instance
Oct 02 12:04:07 compute-0 nova_compute[192079]: 2025-10-02 12:04:07.582 2 DEBUG oslo_concurrency.lockutils [None req-78f0dcc6-53bc-49e6-b95e-19fb52ccc0a2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Acquiring lock "refresh_cache-2f0ec710-6070-4bb8-ac27-21d96a184569" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:04:07 compute-0 nova_compute[192079]: 2025-10-02 12:04:07.583 2 DEBUG oslo_concurrency.lockutils [None req-78f0dcc6-53bc-49e6-b95e-19fb52ccc0a2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Acquired lock "refresh_cache-2f0ec710-6070-4bb8-ac27-21d96a184569" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:04:07 compute-0 nova_compute[192079]: 2025-10-02 12:04:07.583 2 DEBUG nova.network.neutron [None req-78f0dcc6-53bc-49e6-b95e-19fb52ccc0a2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:04:07 compute-0 nova_compute[192079]: 2025-10-02 12:04:07.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:04:07 compute-0 nova_compute[192079]: 2025-10-02 12:04:07.816 2 DEBUG nova.network.neutron [None req-78f0dcc6-53bc-49e6-b95e-19fb52ccc0a2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:04:07 compute-0 nova_compute[192079]: 2025-10-02 12:04:07.862 2 DEBUG oslo_concurrency.processutils [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:07 compute-0 nova_compute[192079]: 2025-10-02 12:04:07.920 2 DEBUG oslo_concurrency.processutils [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955/disk --force-share --output=json" returned: 0 in 0.058s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:07 compute-0 nova_compute[192079]: 2025-10-02 12:04:07.921 2 DEBUG oslo_concurrency.processutils [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:07 compute-0 nova_compute[192079]: 2025-10-02 12:04:07.981 2 DEBUG oslo_concurrency.processutils [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955/disk --force-share --output=json" returned: 0 in 0.060s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:08 compute-0 nova_compute[192079]: 2025-10-02 12:04:08.103 2 DEBUG nova.network.neutron [None req-78f0dcc6-53bc-49e6-b95e-19fb52ccc0a2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:04:08 compute-0 nova_compute[192079]: 2025-10-02 12:04:08.117 2 DEBUG oslo_concurrency.lockutils [None req-78f0dcc6-53bc-49e6-b95e-19fb52ccc0a2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Releasing lock "refresh_cache-2f0ec710-6070-4bb8-ac27-21d96a184569" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:04:08 compute-0 nova_compute[192079]: 2025-10-02 12:04:08.118 2 DEBUG nova.compute.manager [None req-78f0dcc6-53bc-49e6-b95e-19fb52ccc0a2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:04:08 compute-0 systemd[1]: machine-qemu\x2d14\x2dinstance\x2d00000017.scope: Deactivated successfully.
Oct 02 12:04:08 compute-0 systemd[1]: machine-qemu\x2d14\x2dinstance\x2d00000017.scope: Consumed 3.622s CPU time.
Oct 02 12:04:08 compute-0 systemd-machined[152150]: Machine qemu-14-instance-00000017 terminated.
Oct 02 12:04:08 compute-0 nova_compute[192079]: 2025-10-02 12:04:08.382 2 INFO nova.virt.libvirt.driver [-] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Instance destroyed successfully.
Oct 02 12:04:08 compute-0 nova_compute[192079]: 2025-10-02 12:04:08.383 2 DEBUG nova.objects.instance [None req-78f0dcc6-53bc-49e6-b95e-19fb52ccc0a2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Lazy-loading 'resources' on Instance uuid 2f0ec710-6070-4bb8-ac27-21d96a184569 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:04:08 compute-0 nova_compute[192079]: 2025-10-02 12:04:08.396 2 INFO nova.virt.libvirt.driver [None req-78f0dcc6-53bc-49e6-b95e-19fb52ccc0a2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Deleting instance files /var/lib/nova/instances/2f0ec710-6070-4bb8-ac27-21d96a184569_del
Oct 02 12:04:08 compute-0 nova_compute[192079]: 2025-10-02 12:04:08.396 2 INFO nova.virt.libvirt.driver [None req-78f0dcc6-53bc-49e6-b95e-19fb52ccc0a2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Deletion of /var/lib/nova/instances/2f0ec710-6070-4bb8-ac27-21d96a184569_del complete
Oct 02 12:04:08 compute-0 nova_compute[192079]: 2025-10-02 12:04:08.454 2 INFO nova.compute.manager [None req-78f0dcc6-53bc-49e6-b95e-19fb52ccc0a2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Took 0.34 seconds to destroy the instance on the hypervisor.
Oct 02 12:04:08 compute-0 nova_compute[192079]: 2025-10-02 12:04:08.454 2 DEBUG oslo.service.loopingcall [None req-78f0dcc6-53bc-49e6-b95e-19fb52ccc0a2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:04:08 compute-0 nova_compute[192079]: 2025-10-02 12:04:08.455 2 DEBUG nova.compute.manager [-] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:04:08 compute-0 nova_compute[192079]: 2025-10-02 12:04:08.455 2 DEBUG nova.network.neutron [-] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:04:08 compute-0 nova_compute[192079]: 2025-10-02 12:04:08.549 2 DEBUG nova.network.neutron [-] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:04:08 compute-0 nova_compute[192079]: 2025-10-02 12:04:08.561 2 DEBUG nova.network.neutron [-] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:04:08 compute-0 nova_compute[192079]: 2025-10-02 12:04:08.573 2 INFO nova.compute.manager [-] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Took 0.12 seconds to deallocate network for instance.
Oct 02 12:04:08 compute-0 nova_compute[192079]: 2025-10-02 12:04:08.684 2 DEBUG oslo_concurrency.lockutils [None req-78f0dcc6-53bc-49e6-b95e-19fb52ccc0a2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:08 compute-0 nova_compute[192079]: 2025-10-02 12:04:08.685 2 DEBUG oslo_concurrency.lockutils [None req-78f0dcc6-53bc-49e6-b95e-19fb52ccc0a2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:08 compute-0 nova_compute[192079]: 2025-10-02 12:04:08.700 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:04:08 compute-0 nova_compute[192079]: 2025-10-02 12:04:08.761 2 DEBUG nova.compute.provider_tree [None req-78f0dcc6-53bc-49e6-b95e-19fb52ccc0a2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:04:08 compute-0 nova_compute[192079]: 2025-10-02 12:04:08.773 2 DEBUG nova.scheduler.client.report [None req-78f0dcc6-53bc-49e6-b95e-19fb52ccc0a2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:04:08 compute-0 nova_compute[192079]: 2025-10-02 12:04:08.793 2 DEBUG oslo_concurrency.lockutils [None req-78f0dcc6-53bc-49e6-b95e-19fb52ccc0a2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.107s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:08 compute-0 nova_compute[192079]: 2025-10-02 12:04:08.813 2 INFO nova.scheduler.client.report [None req-78f0dcc6-53bc-49e6-b95e-19fb52ccc0a2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Deleted allocations for instance 2f0ec710-6070-4bb8-ac27-21d96a184569
Oct 02 12:04:08 compute-0 nova_compute[192079]: 2025-10-02 12:04:08.925 2 DEBUG oslo_concurrency.lockutils [None req-78f0dcc6-53bc-49e6-b95e-19fb52ccc0a2 c9f66d2490b24421826bdf15e6fee495 276d3d51fba7485bb858fbcb2d176461 - - default default] Lock "2f0ec710-6070-4bb8-ac27-21d96a184569" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.371s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:09 compute-0 nova_compute[192079]: 2025-10-02 12:04:09.556 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:09 compute-0 nova_compute[192079]: 2025-10-02 12:04:09.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:04:10 compute-0 nova_compute[192079]: 2025-10-02 12:04:10.390 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:10 compute-0 sshd-session[223091]: Accepted publickey for nova from 192.168.122.102 port 42456 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:04:10 compute-0 systemd[1]: Created slice User Slice of UID 42436.
Oct 02 12:04:10 compute-0 systemd[1]: Starting User Runtime Directory /run/user/42436...
Oct 02 12:04:10 compute-0 systemd-logind[827]: New session 42 of user nova.
Oct 02 12:04:10 compute-0 podman[223093]: 2025-10-02 12:04:10.544735625 +0000 UTC m=+0.076755930 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:04:10 compute-0 systemd[1]: Finished User Runtime Directory /run/user/42436.
Oct 02 12:04:10 compute-0 podman[223094]: 2025-10-02 12:04:10.547907641 +0000 UTC m=+0.077331226 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, config_id=iscsid, org.label-schema.license=GPLv2, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, container_name=iscsid, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:04:10 compute-0 systemd[1]: Starting User Manager for UID 42436...
Oct 02 12:04:10 compute-0 systemd[223136]: pam_unix(systemd-user:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:04:10 compute-0 nova_compute[192079]: 2025-10-02 12:04:10.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:04:10 compute-0 nova_compute[192079]: 2025-10-02 12:04:10.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:04:10 compute-0 nova_compute[192079]: 2025-10-02 12:04:10.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:04:10 compute-0 nova_compute[192079]: 2025-10-02 12:04:10.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:04:10 compute-0 systemd[223136]: Queued start job for default target Main User Target.
Oct 02 12:04:10 compute-0 systemd[223136]: Created slice User Application Slice.
Oct 02 12:04:10 compute-0 systemd[223136]: Started Mark boot as successful after the user session has run 2 minutes.
Oct 02 12:04:10 compute-0 systemd[223136]: Started Daily Cleanup of User's Temporary Directories.
Oct 02 12:04:10 compute-0 systemd[223136]: Reached target Paths.
Oct 02 12:04:10 compute-0 systemd[223136]: Reached target Timers.
Oct 02 12:04:10 compute-0 nova_compute[192079]: 2025-10-02 12:04:10.705 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:10 compute-0 nova_compute[192079]: 2025-10-02 12:04:10.705 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:10 compute-0 nova_compute[192079]: 2025-10-02 12:04:10.705 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:10 compute-0 nova_compute[192079]: 2025-10-02 12:04:10.705 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:04:10 compute-0 systemd[223136]: Starting D-Bus User Message Bus Socket...
Oct 02 12:04:10 compute-0 systemd[223136]: Starting Create User's Volatile Files and Directories...
Oct 02 12:04:10 compute-0 systemd[223136]: Listening on D-Bus User Message Bus Socket.
Oct 02 12:04:10 compute-0 systemd[223136]: Reached target Sockets.
Oct 02 12:04:10 compute-0 systemd[223136]: Finished Create User's Volatile Files and Directories.
Oct 02 12:04:10 compute-0 systemd[223136]: Reached target Basic System.
Oct 02 12:04:10 compute-0 systemd[223136]: Reached target Main User Target.
Oct 02 12:04:10 compute-0 systemd[223136]: Startup finished in 149ms.
Oct 02 12:04:10 compute-0 systemd[1]: Started User Manager for UID 42436.
Oct 02 12:04:10 compute-0 systemd[1]: Started Session 42 of User nova.
Oct 02 12:04:10 compute-0 sshd-session[223091]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:04:10 compute-0 nova_compute[192079]: 2025-10-02 12:04:10.771 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:10 compute-0 sshd-session[223152]: Received disconnect from 192.168.122.102 port 42456:11: disconnected by user
Oct 02 12:04:10 compute-0 sshd-session[223152]: Disconnected from user nova 192.168.122.102 port 42456
Oct 02 12:04:10 compute-0 sshd-session[223091]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:04:10 compute-0 systemd[1]: session-42.scope: Deactivated successfully.
Oct 02 12:04:10 compute-0 systemd-logind[827]: Session 42 logged out. Waiting for processes to exit.
Oct 02 12:04:10 compute-0 systemd-logind[827]: Removed session 42.
Oct 02 12:04:10 compute-0 nova_compute[192079]: 2025-10-02 12:04:10.869 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955/disk --force-share --output=json" returned: 0 in 0.098s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:10 compute-0 nova_compute[192079]: 2025-10-02 12:04:10.870 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:10 compute-0 nova_compute[192079]: 2025-10-02 12:04:10.930 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955/disk --force-share --output=json" returned: 0 in 0.060s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:11 compute-0 nova_compute[192079]: 2025-10-02 12:04:11.069 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:04:11 compute-0 nova_compute[192079]: 2025-10-02 12:04:11.070 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5530MB free_disk=73.43102645874023GB free_vcpus=7 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:04:11 compute-0 nova_compute[192079]: 2025-10-02 12:04:11.071 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:11 compute-0 nova_compute[192079]: 2025-10-02 12:04:11.071 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:11 compute-0 nova_compute[192079]: 2025-10-02 12:04:11.132 2 INFO nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 356bc6d6-1101-467e-a020-65876724c955] Updating resource usage from migration 8b92b020-fef8-4e24-b417-6318a75e3466
Oct 02 12:04:11 compute-0 nova_compute[192079]: 2025-10-02 12:04:11.168 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Migration 8b92b020-fef8-4e24-b417-6318a75e3466 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1640
Oct 02 12:04:11 compute-0 nova_compute[192079]: 2025-10-02 12:04:11.169 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 1 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:04:11 compute-0 nova_compute[192079]: 2025-10-02 12:04:11.169 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=640MB phys_disk=79GB used_disk=1GB total_vcpus=8 used_vcpus=1 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:04:11 compute-0 nova_compute[192079]: 2025-10-02 12:04:11.219 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:04:11 compute-0 nova_compute[192079]: 2025-10-02 12:04:11.238 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:04:11 compute-0 nova_compute[192079]: 2025-10-02 12:04:11.301 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:04:11 compute-0 nova_compute[192079]: 2025-10-02 12:04:11.301 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.230s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:11 compute-0 nova_compute[192079]: 2025-10-02 12:04:11.301 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._run_pending_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:04:11 compute-0 nova_compute[192079]: 2025-10-02 12:04:11.302 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Cleaning up deleted instances _run_pending_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:11145
Oct 02 12:04:11 compute-0 nova_compute[192079]: 2025-10-02 12:04:11.329 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] There are 0 instances to clean _run_pending_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:11154
Oct 02 12:04:11 compute-0 nova_compute[192079]: 2025-10-02 12:04:11.817 2 DEBUG nova.compute.manager [req-7e62d41b-bf04-4941-85ca-28e3880fd7fd req-c977f8f5-128d-4993-9e46-175ae8ac766e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Received event network-vif-unplugged-29214def-2450-4edd-acc6-84e165aa1e2c external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:04:11 compute-0 nova_compute[192079]: 2025-10-02 12:04:11.818 2 DEBUG oslo_concurrency.lockutils [req-7e62d41b-bf04-4941-85ca-28e3880fd7fd req-c977f8f5-128d-4993-9e46-175ae8ac766e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "356bc6d6-1101-467e-a020-65876724c955-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:11 compute-0 nova_compute[192079]: 2025-10-02 12:04:11.818 2 DEBUG oslo_concurrency.lockutils [req-7e62d41b-bf04-4941-85ca-28e3880fd7fd req-c977f8f5-128d-4993-9e46-175ae8ac766e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "356bc6d6-1101-467e-a020-65876724c955-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:11 compute-0 nova_compute[192079]: 2025-10-02 12:04:11.818 2 DEBUG oslo_concurrency.lockutils [req-7e62d41b-bf04-4941-85ca-28e3880fd7fd req-c977f8f5-128d-4993-9e46-175ae8ac766e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "356bc6d6-1101-467e-a020-65876724c955-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:11 compute-0 nova_compute[192079]: 2025-10-02 12:04:11.819 2 DEBUG nova.compute.manager [req-7e62d41b-bf04-4941-85ca-28e3880fd7fd req-c977f8f5-128d-4993-9e46-175ae8ac766e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] No waiting events found dispatching network-vif-unplugged-29214def-2450-4edd-acc6-84e165aa1e2c pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:04:11 compute-0 nova_compute[192079]: 2025-10-02 12:04:11.819 2 DEBUG nova.compute.manager [req-7e62d41b-bf04-4941-85ca-28e3880fd7fd req-c977f8f5-128d-4993-9e46-175ae8ac766e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Received event network-vif-unplugged-29214def-2450-4edd-acc6-84e165aa1e2c for instance with task_state migrating. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:04:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:04:12.326 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '7'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:04:12 compute-0 nova_compute[192079]: 2025-10-02 12:04:12.326 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:04:12 compute-0 nova_compute[192079]: 2025-10-02 12:04:12.326 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:04:12 compute-0 nova_compute[192079]: 2025-10-02 12:04:12.364 2 INFO nova.compute.manager [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Took 4.38 seconds for pre_live_migration on destination host compute-2.ctlplane.example.com.
Oct 02 12:04:12 compute-0 nova_compute[192079]: 2025-10-02 12:04:12.365 2 DEBUG nova.compute.manager [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:04:12 compute-0 nova_compute[192079]: 2025-10-02 12:04:12.390 2 DEBUG nova.compute.manager [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] live_migration data is LibvirtLiveMigrateData(bdms=[],block_migration=True,disk_available_mb=74752,disk_over_commit=False,dst_numa_info=<?>,dst_supports_numa_live_migration=<?>,dst_wants_file_backed_memory=False,file_backed_memory_discard=<?>,filename='tmp6xzn3h2n',graphics_listen_addr_spice=127.0.0.1,graphics_listen_addr_vnc=::,image_type='qcow2',instance_relative_path='356bc6d6-1101-467e-a020-65876724c955',is_shared_block_storage=False,is_shared_instance_path=False,is_volume_backed=False,migration=Migration(8b92b020-fef8-4e24-b417-6318a75e3466),old_vol_attachment_ids={},serial_listen_addr=None,serial_listen_ports=[],src_supports_native_luks=<?>,src_supports_numa_live_migration=<?>,supported_perf_events=[],target_connect_addr=None,vifs=[VIFMigrateData],wait_for_vif_plugged=True) _do_live_migration /usr/lib/python3.9/site-packages/nova/compute/manager.py:8939
Oct 02 12:04:12 compute-0 nova_compute[192079]: 2025-10-02 12:04:12.414 2 DEBUG nova.objects.instance [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Lazy-loading 'migration_context' on Instance uuid 356bc6d6-1101-467e-a020-65876724c955 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:04:12 compute-0 nova_compute[192079]: 2025-10-02 12:04:12.415 2 DEBUG nova.virt.libvirt.driver [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Starting monitoring of live migration _live_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10639
Oct 02 12:04:12 compute-0 nova_compute[192079]: 2025-10-02 12:04:12.416 2 DEBUG nova.virt.libvirt.driver [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Operation thread is still running _live_migration_monitor /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10440
Oct 02 12:04:12 compute-0 nova_compute[192079]: 2025-10-02 12:04:12.416 2 DEBUG nova.virt.libvirt.driver [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Migration not running yet _live_migration_monitor /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10449
Oct 02 12:04:12 compute-0 nova_compute[192079]: 2025-10-02 12:04:12.435 2 DEBUG nova.virt.libvirt.vif [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=True,config_drive='True',created_at=2025-10-02T12:03:29Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-LiveMigrationTest-server-507794369',display_name='tempest-LiveMigrationTest-server-507794369',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-livemigrationtest-server-507794369',id=20,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:03:36Z,launched_on='compute-2.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='f7cb78d24d1a4511a59ced45ccc4a1c7',ramdisk_id='',reservation_id='r-hsf0qpxd',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',clean_attempts='1',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='vi
rtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-LiveMigrationTest-1666170212',owner_user_name='tempest-LiveMigrationTest-1666170212-project-member'},tags=<?>,task_state='migrating',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:04:03Z,user_data=None,user_id='5f75195e56504673bd403ce69cbc28ca',uuid=356bc6d6-1101-467e-a020-65876724c955,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "29214def-2450-4edd-acc6-84e165aa1e2c", "address": "fa:16:3e:1d:3d:20", "network": {"id": "664b6526-6df1-4024-9bab-37218e6c18bd", "bridge": "br-int", "label": "tempest-LiveMigrationTest-2017832683-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f7cb78d24d1a4511a59ced45ccc4a1c7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system"}, "devname": "tap29214def-24", "ovs_interfaceid": "29214def-2450-4edd-acc6-84e165aa1e2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {"os_vif_delegation": true}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:04:12 compute-0 nova_compute[192079]: 2025-10-02 12:04:12.435 2 DEBUG nova.network.os_vif_util [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Converting VIF {"id": "29214def-2450-4edd-acc6-84e165aa1e2c", "address": "fa:16:3e:1d:3d:20", "network": {"id": "664b6526-6df1-4024-9bab-37218e6c18bd", "bridge": "br-int", "label": "tempest-LiveMigrationTest-2017832683-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f7cb78d24d1a4511a59ced45ccc4a1c7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system"}, "devname": "tap29214def-24", "ovs_interfaceid": "29214def-2450-4edd-acc6-84e165aa1e2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {"os_vif_delegation": true}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:04:12 compute-0 nova_compute[192079]: 2025-10-02 12:04:12.436 2 DEBUG nova.network.os_vif_util [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:1d:3d:20,bridge_name='br-int',has_traffic_filtering=True,id=29214def-2450-4edd-acc6-84e165aa1e2c,network=Network(664b6526-6df1-4024-9bab-37218e6c18bd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap29214def-24') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:04:12 compute-0 nova_compute[192079]: 2025-10-02 12:04:12.437 2 DEBUG nova.virt.libvirt.migration [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Updating guest XML with vif config: <interface type="ethernet">
Oct 02 12:04:12 compute-0 nova_compute[192079]:   <mac address="fa:16:3e:1d:3d:20"/>
Oct 02 12:04:12 compute-0 nova_compute[192079]:   <model type="virtio"/>
Oct 02 12:04:12 compute-0 nova_compute[192079]:   <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:04:12 compute-0 nova_compute[192079]:   <mtu size="1442"/>
Oct 02 12:04:12 compute-0 nova_compute[192079]:   <target dev="tap29214def-24"/>
Oct 02 12:04:12 compute-0 nova_compute[192079]: </interface>
Oct 02 12:04:12 compute-0 nova_compute[192079]:  _update_vif_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/migration.py:388
Oct 02 12:04:12 compute-0 nova_compute[192079]: 2025-10-02 12:04:12.437 2 DEBUG nova.virt.libvirt.driver [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] About to invoke the migrate API _live_migration_operation /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10272
Oct 02 12:04:12 compute-0 nova_compute[192079]: 2025-10-02 12:04:12.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:04:12 compute-0 nova_compute[192079]: 2025-10-02 12:04:12.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:04:12 compute-0 nova_compute[192079]: 2025-10-02 12:04:12.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:04:12 compute-0 nova_compute[192079]: 2025-10-02 12:04:12.724 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "refresh_cache-356bc6d6-1101-467e-a020-65876724c955" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:04:12 compute-0 nova_compute[192079]: 2025-10-02 12:04:12.724 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquired lock "refresh_cache-356bc6d6-1101-467e-a020-65876724c955" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:04:12 compute-0 nova_compute[192079]: 2025-10-02 12:04:12.724 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 356bc6d6-1101-467e-a020-65876724c955] Forcefully refreshing network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2004
Oct 02 12:04:12 compute-0 nova_compute[192079]: 2025-10-02 12:04:12.725 2 DEBUG nova.objects.instance [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lazy-loading 'info_cache' on Instance uuid 356bc6d6-1101-467e-a020-65876724c955 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:04:12 compute-0 nova_compute[192079]: 2025-10-02 12:04:12.918 2 DEBUG nova.virt.libvirt.migration [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Current None elapsed 0 steps [(0, 50), (300, 95), (600, 140), (900, 185), (1200, 230), (1500, 275), (1800, 320), (2100, 365), (2400, 410), (2700, 455), (3000, 500)] update_downtime /usr/lib/python3.9/site-packages/nova/virt/libvirt/migration.py:512
Oct 02 12:04:12 compute-0 nova_compute[192079]: 2025-10-02 12:04:12.919 2 INFO nova.virt.libvirt.migration [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Increasing downtime to 50 ms after 0 sec elapsed time
Oct 02 12:04:13 compute-0 nova_compute[192079]: 2025-10-02 12:04:13.062 2 INFO nova.virt.libvirt.driver [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Migration running for 0 secs, memory 100% remaining (bytes processed=0, remaining=0, total=0); disk 100% remaining (bytes processed=0, remaining=0, total=0).
Oct 02 12:04:13 compute-0 nova_compute[192079]: 2025-10-02 12:04:13.566 2 DEBUG nova.virt.libvirt.migration [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Current 50 elapsed 1 steps [(0, 50), (300, 95), (600, 140), (900, 185), (1200, 230), (1500, 275), (1800, 320), (2100, 365), (2400, 410), (2700, 455), (3000, 500)] update_downtime /usr/lib/python3.9/site-packages/nova/virt/libvirt/migration.py:512
Oct 02 12:04:13 compute-0 nova_compute[192079]: 2025-10-02 12:04:13.567 2 DEBUG nova.virt.libvirt.migration [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Downtime does not need to change update_downtime /usr/lib/python3.9/site-packages/nova/virt/libvirt/migration.py:525
Oct 02 12:04:14 compute-0 nova_compute[192079]: 2025-10-02 12:04:14.071 2 DEBUG nova.virt.libvirt.migration [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Current 50 elapsed 1 steps [(0, 50), (300, 95), (600, 140), (900, 185), (1200, 230), (1500, 275), (1800, 320), (2100, 365), (2400, 410), (2700, 455), (3000, 500)] update_downtime /usr/lib/python3.9/site-packages/nova/virt/libvirt/migration.py:512
Oct 02 12:04:14 compute-0 nova_compute[192079]: 2025-10-02 12:04:14.072 2 DEBUG nova.virt.libvirt.migration [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Downtime does not need to change update_downtime /usr/lib/python3.9/site-packages/nova/virt/libvirt/migration.py:525
Oct 02 12:04:14 compute-0 nova_compute[192079]: 2025-10-02 12:04:14.077 2 DEBUG nova.compute.manager [req-6224898f-2830-4a25-9b4a-0b57a3293c22 req-8a022032-07c8-4b32-9606-4639ca7a90a9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Received event network-vif-plugged-29214def-2450-4edd-acc6-84e165aa1e2c external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:04:14 compute-0 nova_compute[192079]: 2025-10-02 12:04:14.078 2 DEBUG oslo_concurrency.lockutils [req-6224898f-2830-4a25-9b4a-0b57a3293c22 req-8a022032-07c8-4b32-9606-4639ca7a90a9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "356bc6d6-1101-467e-a020-65876724c955-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:14 compute-0 nova_compute[192079]: 2025-10-02 12:04:14.078 2 DEBUG oslo_concurrency.lockutils [req-6224898f-2830-4a25-9b4a-0b57a3293c22 req-8a022032-07c8-4b32-9606-4639ca7a90a9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "356bc6d6-1101-467e-a020-65876724c955-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:14 compute-0 nova_compute[192079]: 2025-10-02 12:04:14.079 2 DEBUG oslo_concurrency.lockutils [req-6224898f-2830-4a25-9b4a-0b57a3293c22 req-8a022032-07c8-4b32-9606-4639ca7a90a9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "356bc6d6-1101-467e-a020-65876724c955-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:14 compute-0 nova_compute[192079]: 2025-10-02 12:04:14.079 2 DEBUG nova.compute.manager [req-6224898f-2830-4a25-9b4a-0b57a3293c22 req-8a022032-07c8-4b32-9606-4639ca7a90a9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] No waiting events found dispatching network-vif-plugged-29214def-2450-4edd-acc6-84e165aa1e2c pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:04:14 compute-0 nova_compute[192079]: 2025-10-02 12:04:14.080 2 WARNING nova.compute.manager [req-6224898f-2830-4a25-9b4a-0b57a3293c22 req-8a022032-07c8-4b32-9606-4639ca7a90a9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Received unexpected event network-vif-plugged-29214def-2450-4edd-acc6-84e165aa1e2c for instance with vm_state active and task_state migrating.
Oct 02 12:04:14 compute-0 nova_compute[192079]: 2025-10-02 12:04:14.080 2 DEBUG nova.compute.manager [req-6224898f-2830-4a25-9b4a-0b57a3293c22 req-8a022032-07c8-4b32-9606-4639ca7a90a9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Received event network-changed-29214def-2450-4edd-acc6-84e165aa1e2c external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:04:14 compute-0 nova_compute[192079]: 2025-10-02 12:04:14.081 2 DEBUG nova.compute.manager [req-6224898f-2830-4a25-9b4a-0b57a3293c22 req-8a022032-07c8-4b32-9606-4639ca7a90a9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Refreshing instance network info cache due to event network-changed-29214def-2450-4edd-acc6-84e165aa1e2c. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:04:14 compute-0 nova_compute[192079]: 2025-10-02 12:04:14.081 2 DEBUG oslo_concurrency.lockutils [req-6224898f-2830-4a25-9b4a-0b57a3293c22 req-8a022032-07c8-4b32-9606-4639ca7a90a9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-356bc6d6-1101-467e-a020-65876724c955" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:04:14 compute-0 nova_compute[192079]: 2025-10-02 12:04:14.117 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406654.1172557, 356bc6d6-1101-467e-a020-65876724c955 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:04:14 compute-0 nova_compute[192079]: 2025-10-02 12:04:14.118 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 356bc6d6-1101-467e-a020-65876724c955] VM Paused (Lifecycle Event)
Oct 02 12:04:14 compute-0 nova_compute[192079]: 2025-10-02 12:04:14.143 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 356bc6d6-1101-467e-a020-65876724c955] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:04:14 compute-0 nova_compute[192079]: 2025-10-02 12:04:14.201 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 356bc6d6-1101-467e-a020-65876724c955] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: active, current task_state: migrating, current DB power_state: 1, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:04:14 compute-0 nova_compute[192079]: 2025-10-02 12:04:14.225 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 356bc6d6-1101-467e-a020-65876724c955] During sync_power_state the instance has a pending task (migrating). Skip.
Oct 02 12:04:14 compute-0 kernel: tap29214def-24 (unregistering): left promiscuous mode
Oct 02 12:04:14 compute-0 NetworkManager[51160]: <info>  [1759406654.3279] device (tap29214def-24): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:04:14 compute-0 nova_compute[192079]: 2025-10-02 12:04:14.342 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:14 compute-0 ovn_controller[94336]: 2025-10-02T12:04:14Z|00094|binding|INFO|Releasing lport 29214def-2450-4edd-acc6-84e165aa1e2c from this chassis (sb_readonly=0)
Oct 02 12:04:14 compute-0 ovn_controller[94336]: 2025-10-02T12:04:14Z|00095|binding|INFO|Setting lport 29214def-2450-4edd-acc6-84e165aa1e2c down in Southbound
Oct 02 12:04:14 compute-0 ovn_controller[94336]: 2025-10-02T12:04:14Z|00096|binding|INFO|Removing iface tap29214def-24 ovn-installed in OVS
Oct 02 12:04:14 compute-0 nova_compute[192079]: 2025-10-02 12:04:14.345 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:04:14.370 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:1d:3d:20 10.100.0.14'], port_security=['fa:16:3e:1d:3d:20 10.100.0.14'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com,compute-2.ctlplane.example.com', 'activation-strategy': 'rarp', 'additional-chassis-activated': '1fc220e5-4479-4f53-8f4d-9aefe7dad458'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.14/28', 'neutron:device_id': '356bc6d6-1101-467e-a020-65876724c955', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-664b6526-6df1-4024-9bab-37218e6c18bd', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'f7cb78d24d1a4511a59ced45ccc4a1c7', 'neutron:revision_number': '18', 'neutron:security_group_ids': 'a459d514-aab4-4030-9850-e066abdeaccc', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=eddfb51e-1095-4b3d-a2dc-f2557cf13b11, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=29214def-2450-4edd-acc6-84e165aa1e2c) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:04:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:04:14.372 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 29214def-2450-4edd-acc6-84e165aa1e2c in datapath 664b6526-6df1-4024-9bab-37218e6c18bd unbound from our chassis
Oct 02 12:04:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:04:14.374 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 664b6526-6df1-4024-9bab-37218e6c18bd, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:04:14 compute-0 nova_compute[192079]: 2025-10-02 12:04:14.376 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:04:14.375 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a95d3bbe-3036-4b1a-872c-b3033fe9ddaa]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:04:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:04:14.378 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd namespace which is not needed anymore
Oct 02 12:04:14 compute-0 systemd[1]: machine-qemu\x2d13\x2dinstance\x2d00000014.scope: Deactivated successfully.
Oct 02 12:04:14 compute-0 systemd[1]: machine-qemu\x2d13\x2dinstance\x2d00000014.scope: Consumed 3.798s CPU time.
Oct 02 12:04:14 compute-0 systemd-machined[152150]: Machine qemu-13-instance-00000014 terminated.
Oct 02 12:04:14 compute-0 kernel: tap29214def-24: entered promiscuous mode
Oct 02 12:04:14 compute-0 NetworkManager[51160]: <info>  [1759406654.5284] manager: (tap29214def-24): new Tun device (/org/freedesktop/NetworkManager/Devices/53)
Oct 02 12:04:14 compute-0 kernel: tap29214def-24 (unregistering): left promiscuous mode
Oct 02 12:04:14 compute-0 nova_compute[192079]: 2025-10-02 12:04:14.584 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:14 compute-0 nova_compute[192079]: 2025-10-02 12:04:14.631 2 DEBUG nova.virt.libvirt.guest [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Domain has shutdown/gone away: Requested operation is not valid: domain is not running get_job_info /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:688
Oct 02 12:04:14 compute-0 nova_compute[192079]: 2025-10-02 12:04:14.632 2 INFO nova.virt.libvirt.driver [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Migration operation has completed
Oct 02 12:04:14 compute-0 nova_compute[192079]: 2025-10-02 12:04:14.632 2 INFO nova.compute.manager [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] _post_live_migration() is started..
Oct 02 12:04:14 compute-0 nova_compute[192079]: 2025-10-02 12:04:14.634 2 DEBUG nova.virt.libvirt.driver [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Migrate API has completed _live_migration_operation /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10279
Oct 02 12:04:14 compute-0 nova_compute[192079]: 2025-10-02 12:04:14.635 2 DEBUG nova.virt.libvirt.driver [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Migration operation thread has finished _live_migration_operation /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10327
Oct 02 12:04:14 compute-0 nova_compute[192079]: 2025-10-02 12:04:14.635 2 DEBUG nova.virt.libvirt.driver [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Migration operation thread notification thread_finished /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10630
Oct 02 12:04:14 compute-0 neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd[222974]: [NOTICE]   (222978) : haproxy version is 2.8.14-c23fe91
Oct 02 12:04:14 compute-0 neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd[222974]: [NOTICE]   (222978) : path to executable is /usr/sbin/haproxy
Oct 02 12:04:14 compute-0 neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd[222974]: [WARNING]  (222978) : Exiting Master process...
Oct 02 12:04:14 compute-0 neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd[222974]: [ALERT]    (222978) : Current worker (222980) exited with code 143 (Terminated)
Oct 02 12:04:14 compute-0 neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd[222974]: [WARNING]  (222978) : All workers exited. Exiting... (0)
Oct 02 12:04:14 compute-0 systemd[1]: libpod-3f86b816abdb6e4a862a95b77272825206958d251195675a2da77cfd6c12b467.scope: Deactivated successfully.
Oct 02 12:04:14 compute-0 podman[223196]: 2025-10-02 12:04:14.709401663 +0000 UTC m=+0.197305499 container died 3f86b816abdb6e4a862a95b77272825206958d251195675a2da77cfd6c12b467 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS)
Oct 02 12:04:14 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-3f86b816abdb6e4a862a95b77272825206958d251195675a2da77cfd6c12b467-userdata-shm.mount: Deactivated successfully.
Oct 02 12:04:14 compute-0 systemd[1]: var-lib-containers-storage-overlay-dfb8f09afdf99f22b8a4d15541efff6083a2a1624a8a8db0d0b670b7f1f89ff9-merged.mount: Deactivated successfully.
Oct 02 12:04:15 compute-0 podman[223196]: 2025-10-02 12:04:15.196802855 +0000 UTC m=+0.684706721 container cleanup 3f86b816abdb6e4a862a95b77272825206958d251195675a2da77cfd6c12b467 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:04:15 compute-0 systemd[1]: libpod-conmon-3f86b816abdb6e4a862a95b77272825206958d251195675a2da77cfd6c12b467.scope: Deactivated successfully.
Oct 02 12:04:15 compute-0 nova_compute[192079]: 2025-10-02 12:04:15.393 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:15 compute-0 nova_compute[192079]: 2025-10-02 12:04:15.538 2 DEBUG nova.network.neutron [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Activated binding for port 29214def-2450-4edd-acc6-84e165aa1e2c and host compute-2.ctlplane.example.com migrate_instance_start /usr/lib/python3.9/site-packages/nova/network/neutron.py:3181
Oct 02 12:04:15 compute-0 nova_compute[192079]: 2025-10-02 12:04:15.539 2 DEBUG nova.compute.manager [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Calling driver.post_live_migration_at_source with original source VIFs from migrate_data: [{"id": "29214def-2450-4edd-acc6-84e165aa1e2c", "address": "fa:16:3e:1d:3d:20", "network": {"id": "664b6526-6df1-4024-9bab-37218e6c18bd", "bridge": "br-int", "label": "tempest-LiveMigrationTest-2017832683-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f7cb78d24d1a4511a59ced45ccc4a1c7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap29214def-24", "ovs_interfaceid": "29214def-2450-4edd-acc6-84e165aa1e2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {"os_vif_delegation": true}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] _post_live_migration /usr/lib/python3.9/site-packages/nova/compute/manager.py:9326
Oct 02 12:04:15 compute-0 nova_compute[192079]: 2025-10-02 12:04:15.540 2 DEBUG nova.virt.libvirt.vif [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=True,config_drive='True',created_at=2025-10-02T12:03:29Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-LiveMigrationTest-server-507794369',display_name='tempest-LiveMigrationTest-server-507794369',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-livemigrationtest-server-507794369',id=20,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:03:36Z,launched_on='compute-2.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='f7cb78d24d1a4511a59ced45ccc4a1c7',ramdisk_id='',reservation_id='r-hsf0qpxd',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',clean_attempts='1',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='vi
rtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-LiveMigrationTest-1666170212',owner_user_name='tempest-LiveMigrationTest-1666170212-project-member'},tags=<?>,task_state='migrating',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:04:06Z,user_data=None,user_id='5f75195e56504673bd403ce69cbc28ca',uuid=356bc6d6-1101-467e-a020-65876724c955,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "29214def-2450-4edd-acc6-84e165aa1e2c", "address": "fa:16:3e:1d:3d:20", "network": {"id": "664b6526-6df1-4024-9bab-37218e6c18bd", "bridge": "br-int", "label": "tempest-LiveMigrationTest-2017832683-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f7cb78d24d1a4511a59ced45ccc4a1c7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap29214def-24", "ovs_interfaceid": "29214def-2450-4edd-acc6-84e165aa1e2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {"os_vif_delegation": true}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:04:15 compute-0 nova_compute[192079]: 2025-10-02 12:04:15.541 2 DEBUG nova.network.os_vif_util [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Converting VIF {"id": "29214def-2450-4edd-acc6-84e165aa1e2c", "address": "fa:16:3e:1d:3d:20", "network": {"id": "664b6526-6df1-4024-9bab-37218e6c18bd", "bridge": "br-int", "label": "tempest-LiveMigrationTest-2017832683-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f7cb78d24d1a4511a59ced45ccc4a1c7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap29214def-24", "ovs_interfaceid": "29214def-2450-4edd-acc6-84e165aa1e2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {"os_vif_delegation": true}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:04:15 compute-0 nova_compute[192079]: 2025-10-02 12:04:15.542 2 DEBUG nova.network.os_vif_util [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:1d:3d:20,bridge_name='br-int',has_traffic_filtering=True,id=29214def-2450-4edd-acc6-84e165aa1e2c,network=Network(664b6526-6df1-4024-9bab-37218e6c18bd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap29214def-24') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:04:15 compute-0 nova_compute[192079]: 2025-10-02 12:04:15.543 2 DEBUG os_vif [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:1d:3d:20,bridge_name='br-int',has_traffic_filtering=True,id=29214def-2450-4edd-acc6-84e165aa1e2c,network=Network(664b6526-6df1-4024-9bab-37218e6c18bd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap29214def-24') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:04:15 compute-0 nova_compute[192079]: 2025-10-02 12:04:15.547 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:15 compute-0 nova_compute[192079]: 2025-10-02 12:04:15.548 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap29214def-24, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:04:15 compute-0 nova_compute[192079]: 2025-10-02 12:04:15.550 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:15 compute-0 nova_compute[192079]: 2025-10-02 12:04:15.553 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:04:15 compute-0 nova_compute[192079]: 2025-10-02 12:04:15.556 2 INFO os_vif [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:1d:3d:20,bridge_name='br-int',has_traffic_filtering=True,id=29214def-2450-4edd-acc6-84e165aa1e2c,network=Network(664b6526-6df1-4024-9bab-37218e6c18bd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap29214def-24')
Oct 02 12:04:15 compute-0 nova_compute[192079]: 2025-10-02 12:04:15.557 2 DEBUG oslo_concurrency.lockutils [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.free_pci_device_allocations_for_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:15 compute-0 nova_compute[192079]: 2025-10-02 12:04:15.558 2 DEBUG oslo_concurrency.lockutils [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.free_pci_device_allocations_for_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:15 compute-0 nova_compute[192079]: 2025-10-02 12:04:15.558 2 DEBUG oslo_concurrency.lockutils [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.free_pci_device_allocations_for_instance" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:15 compute-0 nova_compute[192079]: 2025-10-02 12:04:15.559 2 DEBUG nova.compute.manager [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Calling driver.cleanup from _post_live_migration _post_live_migration /usr/lib/python3.9/site-packages/nova/compute/manager.py:9349
Oct 02 12:04:15 compute-0 nova_compute[192079]: 2025-10-02 12:04:15.560 2 INFO nova.virt.libvirt.driver [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Deleting instance files /var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955_del
Oct 02 12:04:15 compute-0 nova_compute[192079]: 2025-10-02 12:04:15.561 2 INFO nova.virt.libvirt.driver [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Deletion of /var/lib/nova/instances/356bc6d6-1101-467e-a020-65876724c955_del complete
Oct 02 12:04:15 compute-0 nova_compute[192079]: 2025-10-02 12:04:15.747 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 356bc6d6-1101-467e-a020-65876724c955] Updating instance_info_cache with network_info: [{"id": "29214def-2450-4edd-acc6-84e165aa1e2c", "address": "fa:16:3e:1d:3d:20", "network": {"id": "664b6526-6df1-4024-9bab-37218e6c18bd", "bridge": "br-int", "label": "tempest-LiveMigrationTest-2017832683-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f7cb78d24d1a4511a59ced45ccc4a1c7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap29214def-24", "ovs_interfaceid": "29214def-2450-4edd-acc6-84e165aa1e2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {"os_vif_delegation": true, "migrating_to": "compute-2.ctlplane.example.com"}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:04:15 compute-0 podman[223241]: 2025-10-02 12:04:15.830685445 +0000 UTC m=+0.594915116 container remove 3f86b816abdb6e4a862a95b77272825206958d251195675a2da77cfd6c12b467 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2)
Oct 02 12:04:15 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:04:15.835 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1512453e-e98d-4a69-92bf-04cecc049ef8]: (4, ('Thu Oct  2 12:04:14 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd (3f86b816abdb6e4a862a95b77272825206958d251195675a2da77cfd6c12b467)\n3f86b816abdb6e4a862a95b77272825206958d251195675a2da77cfd6c12b467\nThu Oct  2 12:04:15 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd (3f86b816abdb6e4a862a95b77272825206958d251195675a2da77cfd6c12b467)\n3f86b816abdb6e4a862a95b77272825206958d251195675a2da77cfd6c12b467\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:04:15 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:04:15.837 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5ad8442a-6fcf-44f6-893e-0f3b5f415381]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:04:15 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:04:15.838 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap664b6526-60, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:04:15 compute-0 kernel: tap664b6526-60: left promiscuous mode
Oct 02 12:04:15 compute-0 nova_compute[192079]: 2025-10-02 12:04:15.854 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:15 compute-0 nova_compute[192079]: 2025-10-02 12:04:15.866 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:15 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:04:15.868 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c6440f67-7149-400f-8aa7-dec81a0f1333]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:04:15 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:04:15.912 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[87a03cc1-46cb-4875-9179-27c9d15f58f4]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:04:15 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:04:15.913 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0216d039-8243-4c19-b984-8e260690a7e2]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:04:15 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:04:15.929 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2ad38e8a-a09c-4251-abf5-437f89da104c]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 463601, 'reachable_time': 34907, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 223257, 'error': None, 'target': 'ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:04:15 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:04:15.932 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:04:15 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:04:15.932 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[d4ff4bf5-9d9d-4935-ac1c-befd8a94fd93]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:04:15 compute-0 systemd[1]: run-netns-ovnmeta\x2d664b6526\x2d6df1\x2d4024\x2d9bab\x2d37218e6c18bd.mount: Deactivated successfully.
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.005 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Releasing lock "refresh_cache-356bc6d6-1101-467e-a020-65876724c955" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.005 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 356bc6d6-1101-467e-a020-65876724c955] Updated the network info_cache for instance _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9929
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.005 2 DEBUG oslo_concurrency.lockutils [req-6224898f-2830-4a25-9b4a-0b57a3293c22 req-8a022032-07c8-4b32-9606-4639ca7a90a9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-356bc6d6-1101-467e-a020-65876724c955" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.006 2 DEBUG nova.network.neutron [req-6224898f-2830-4a25-9b4a-0b57a3293c22 req-8a022032-07c8-4b32-9606-4639ca7a90a9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Refreshing network info cache for port 29214def-2450-4edd-acc6-84e165aa1e2c _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.006 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.217 2 DEBUG nova.compute.manager [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Received event network-vif-unplugged-29214def-2450-4edd-acc6-84e165aa1e2c external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.218 2 DEBUG oslo_concurrency.lockutils [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "356bc6d6-1101-467e-a020-65876724c955-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.218 2 DEBUG oslo_concurrency.lockutils [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "356bc6d6-1101-467e-a020-65876724c955-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.219 2 DEBUG oslo_concurrency.lockutils [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "356bc6d6-1101-467e-a020-65876724c955-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.219 2 DEBUG nova.compute.manager [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] No waiting events found dispatching network-vif-unplugged-29214def-2450-4edd-acc6-84e165aa1e2c pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.220 2 DEBUG nova.compute.manager [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Received event network-vif-unplugged-29214def-2450-4edd-acc6-84e165aa1e2c for instance with task_state migrating. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.220 2 DEBUG nova.compute.manager [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Received event network-vif-plugged-29214def-2450-4edd-acc6-84e165aa1e2c external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.221 2 DEBUG oslo_concurrency.lockutils [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "356bc6d6-1101-467e-a020-65876724c955-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.222 2 DEBUG oslo_concurrency.lockutils [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "356bc6d6-1101-467e-a020-65876724c955-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.222 2 DEBUG oslo_concurrency.lockutils [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "356bc6d6-1101-467e-a020-65876724c955-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.223 2 DEBUG nova.compute.manager [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] No waiting events found dispatching network-vif-plugged-29214def-2450-4edd-acc6-84e165aa1e2c pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.224 2 WARNING nova.compute.manager [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Received unexpected event network-vif-plugged-29214def-2450-4edd-acc6-84e165aa1e2c for instance with vm_state active and task_state migrating.
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.224 2 DEBUG nova.compute.manager [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Received event network-vif-plugged-29214def-2450-4edd-acc6-84e165aa1e2c external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.225 2 DEBUG oslo_concurrency.lockutils [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "356bc6d6-1101-467e-a020-65876724c955-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.225 2 DEBUG oslo_concurrency.lockutils [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "356bc6d6-1101-467e-a020-65876724c955-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.226 2 DEBUG oslo_concurrency.lockutils [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "356bc6d6-1101-467e-a020-65876724c955-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.227 2 DEBUG nova.compute.manager [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] No waiting events found dispatching network-vif-plugged-29214def-2450-4edd-acc6-84e165aa1e2c pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.228 2 WARNING nova.compute.manager [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Received unexpected event network-vif-plugged-29214def-2450-4edd-acc6-84e165aa1e2c for instance with vm_state active and task_state migrating.
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.228 2 DEBUG nova.compute.manager [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Received event network-vif-unplugged-29214def-2450-4edd-acc6-84e165aa1e2c external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.229 2 DEBUG oslo_concurrency.lockutils [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "356bc6d6-1101-467e-a020-65876724c955-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.229 2 DEBUG oslo_concurrency.lockutils [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "356bc6d6-1101-467e-a020-65876724c955-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.230 2 DEBUG oslo_concurrency.lockutils [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "356bc6d6-1101-467e-a020-65876724c955-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.231 2 DEBUG nova.compute.manager [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] No waiting events found dispatching network-vif-unplugged-29214def-2450-4edd-acc6-84e165aa1e2c pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.231 2 DEBUG nova.compute.manager [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Received event network-vif-unplugged-29214def-2450-4edd-acc6-84e165aa1e2c for instance with task_state migrating. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.232 2 DEBUG nova.compute.manager [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Received event network-vif-plugged-29214def-2450-4edd-acc6-84e165aa1e2c external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.232 2 DEBUG oslo_concurrency.lockutils [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "356bc6d6-1101-467e-a020-65876724c955-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.233 2 DEBUG oslo_concurrency.lockutils [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "356bc6d6-1101-467e-a020-65876724c955-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.234 2 DEBUG oslo_concurrency.lockutils [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "356bc6d6-1101-467e-a020-65876724c955-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.234 2 DEBUG nova.compute.manager [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] No waiting events found dispatching network-vif-plugged-29214def-2450-4edd-acc6-84e165aa1e2c pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:04:16 compute-0 nova_compute[192079]: 2025-10-02 12:04:16.235 2 WARNING nova.compute.manager [req-c1340ff3-4f77-4e96-afcb-07acce8e9e08 req-de9be2a3-f5d5-43bb-9a38-010769a758a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Received unexpected event network-vif-plugged-29214def-2450-4edd-acc6-84e165aa1e2c for instance with vm_state active and task_state migrating.
Oct 02 12:04:17 compute-0 nova_compute[192079]: 2025-10-02 12:04:17.473 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_power_states run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:04:17 compute-0 nova_compute[192079]: 2025-10-02 12:04:17.491 2 WARNING nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] While synchronizing instance power states, found 1 instances in the database and 0 instances on the hypervisor.
Oct 02 12:04:17 compute-0 nova_compute[192079]: 2025-10-02 12:04:17.491 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Triggering sync for uuid 356bc6d6-1101-467e-a020-65876724c955 _sync_power_states /usr/lib/python3.9/site-packages/nova/compute/manager.py:10268
Oct 02 12:04:17 compute-0 nova_compute[192079]: 2025-10-02 12:04:17.491 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "356bc6d6-1101-467e-a020-65876724c955" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:17 compute-0 nova_compute[192079]: 2025-10-02 12:04:17.491 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "356bc6d6-1101-467e-a020-65876724c955" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:17 compute-0 nova_compute[192079]: 2025-10-02 12:04:17.492 2 INFO nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 356bc6d6-1101-467e-a020-65876724c955] During sync_power_state the instance has a pending task (migrating). Skip.
Oct 02 12:04:17 compute-0 nova_compute[192079]: 2025-10-02 12:04:17.492 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "356bc6d6-1101-467e-a020-65876724c955" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:18 compute-0 nova_compute[192079]: 2025-10-02 12:04:18.767 2 DEBUG nova.network.neutron [req-6224898f-2830-4a25-9b4a-0b57a3293c22 req-8a022032-07c8-4b32-9606-4639ca7a90a9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Updated VIF entry in instance network info cache for port 29214def-2450-4edd-acc6-84e165aa1e2c. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:04:18 compute-0 nova_compute[192079]: 2025-10-02 12:04:18.767 2 DEBUG nova.network.neutron [req-6224898f-2830-4a25-9b4a-0b57a3293c22 req-8a022032-07c8-4b32-9606-4639ca7a90a9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Updating instance_info_cache with network_info: [{"id": "29214def-2450-4edd-acc6-84e165aa1e2c", "address": "fa:16:3e:1d:3d:20", "network": {"id": "664b6526-6df1-4024-9bab-37218e6c18bd", "bridge": "br-int", "label": "tempest-LiveMigrationTest-2017832683-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f7cb78d24d1a4511a59ced45ccc4a1c7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap29214def-24", "ovs_interfaceid": "29214def-2450-4edd-acc6-84e165aa1e2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {"os_vif_delegation": true}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:04:19 compute-0 nova_compute[192079]: 2025-10-02 12:04:19.539 2 DEBUG nova.compute.manager [req-ee76469b-8e85-4073-9b33-41375239ab0b req-163d0750-5f36-4a3e-8b0f-af5de0bee989 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Received event network-vif-plugged-29214def-2450-4edd-acc6-84e165aa1e2c external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:04:19 compute-0 nova_compute[192079]: 2025-10-02 12:04:19.540 2 DEBUG oslo_concurrency.lockutils [req-ee76469b-8e85-4073-9b33-41375239ab0b req-163d0750-5f36-4a3e-8b0f-af5de0bee989 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "356bc6d6-1101-467e-a020-65876724c955-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:19 compute-0 nova_compute[192079]: 2025-10-02 12:04:19.540 2 DEBUG oslo_concurrency.lockutils [req-ee76469b-8e85-4073-9b33-41375239ab0b req-163d0750-5f36-4a3e-8b0f-af5de0bee989 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "356bc6d6-1101-467e-a020-65876724c955-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:19 compute-0 nova_compute[192079]: 2025-10-02 12:04:19.540 2 DEBUG oslo_concurrency.lockutils [req-ee76469b-8e85-4073-9b33-41375239ab0b req-163d0750-5f36-4a3e-8b0f-af5de0bee989 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "356bc6d6-1101-467e-a020-65876724c955-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:19 compute-0 nova_compute[192079]: 2025-10-02 12:04:19.540 2 DEBUG nova.compute.manager [req-ee76469b-8e85-4073-9b33-41375239ab0b req-163d0750-5f36-4a3e-8b0f-af5de0bee989 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] No waiting events found dispatching network-vif-plugged-29214def-2450-4edd-acc6-84e165aa1e2c pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:04:19 compute-0 nova_compute[192079]: 2025-10-02 12:04:19.541 2 WARNING nova.compute.manager [req-ee76469b-8e85-4073-9b33-41375239ab0b req-163d0750-5f36-4a3e-8b0f-af5de0bee989 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Received unexpected event network-vif-plugged-29214def-2450-4edd-acc6-84e165aa1e2c for instance with vm_state active and task_state migrating.
Oct 02 12:04:19 compute-0 nova_compute[192079]: 2025-10-02 12:04:19.587 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:19 compute-0 nova_compute[192079]: 2025-10-02 12:04:19.597 2 DEBUG oslo_concurrency.lockutils [req-6224898f-2830-4a25-9b4a-0b57a3293c22 req-8a022032-07c8-4b32-9606-4639ca7a90a9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-356bc6d6-1101-467e-a020-65876724c955" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:04:20 compute-0 podman[223258]: 2025-10-02 12:04:20.155917812 +0000 UTC m=+0.073603305 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_id=ovn_controller, container_name=ovn_controller, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible)
Oct 02 12:04:20 compute-0 nova_compute[192079]: 2025-10-02 12:04:20.550 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:21 compute-0 systemd[1]: Stopping User Manager for UID 42436...
Oct 02 12:04:21 compute-0 systemd[223136]: Activating special unit Exit the Session...
Oct 02 12:04:21 compute-0 systemd[223136]: Stopped target Main User Target.
Oct 02 12:04:21 compute-0 systemd[223136]: Stopped target Basic System.
Oct 02 12:04:21 compute-0 systemd[223136]: Stopped target Paths.
Oct 02 12:04:21 compute-0 systemd[223136]: Stopped target Sockets.
Oct 02 12:04:21 compute-0 systemd[223136]: Stopped target Timers.
Oct 02 12:04:21 compute-0 systemd[223136]: Stopped Mark boot as successful after the user session has run 2 minutes.
Oct 02 12:04:21 compute-0 systemd[223136]: Stopped Daily Cleanup of User's Temporary Directories.
Oct 02 12:04:21 compute-0 systemd[223136]: Closed D-Bus User Message Bus Socket.
Oct 02 12:04:21 compute-0 systemd[223136]: Stopped Create User's Volatile Files and Directories.
Oct 02 12:04:21 compute-0 systemd[223136]: Removed slice User Application Slice.
Oct 02 12:04:21 compute-0 systemd[223136]: Reached target Shutdown.
Oct 02 12:04:21 compute-0 systemd[223136]: Finished Exit the Session.
Oct 02 12:04:21 compute-0 systemd[223136]: Reached target Exit the Session.
Oct 02 12:04:21 compute-0 systemd[1]: user@42436.service: Deactivated successfully.
Oct 02 12:04:21 compute-0 systemd[1]: Stopped User Manager for UID 42436.
Oct 02 12:04:21 compute-0 systemd[1]: Stopping User Runtime Directory /run/user/42436...
Oct 02 12:04:21 compute-0 systemd[1]: run-user-42436.mount: Deactivated successfully.
Oct 02 12:04:21 compute-0 systemd[1]: user-runtime-dir@42436.service: Deactivated successfully.
Oct 02 12:04:21 compute-0 systemd[1]: Stopped User Runtime Directory /run/user/42436.
Oct 02 12:04:21 compute-0 systemd[1]: Removed slice User Slice of UID 42436.
Oct 02 12:04:22 compute-0 nova_compute[192079]: 2025-10-02 12:04:22.868 2 DEBUG oslo_concurrency.lockutils [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Acquiring lock "356bc6d6-1101-467e-a020-65876724c955-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:22 compute-0 nova_compute[192079]: 2025-10-02 12:04:22.869 2 DEBUG oslo_concurrency.lockutils [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Lock "356bc6d6-1101-467e-a020-65876724c955-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:22 compute-0 nova_compute[192079]: 2025-10-02 12:04:22.869 2 DEBUG oslo_concurrency.lockutils [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Lock "356bc6d6-1101-467e-a020-65876724c955-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:22 compute-0 nova_compute[192079]: 2025-10-02 12:04:22.919 2 DEBUG oslo_concurrency.lockutils [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:22 compute-0 nova_compute[192079]: 2025-10-02 12:04:22.919 2 DEBUG oslo_concurrency.lockutils [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:22 compute-0 nova_compute[192079]: 2025-10-02 12:04:22.920 2 DEBUG oslo_concurrency.lockutils [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:22 compute-0 nova_compute[192079]: 2025-10-02 12:04:22.920 2 DEBUG nova.compute.resource_tracker [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:04:23 compute-0 podman[223286]: 2025-10-02 12:04:23.006461644 +0000 UTC m=+0.046628386 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, container_name=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:04:23 compute-0 podman[223287]: 2025-10-02 12:04:23.014901625 +0000 UTC m=+0.051102139 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:04:23 compute-0 nova_compute[192079]: 2025-10-02 12:04:23.091 2 WARNING nova.virt.libvirt.driver [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:04:23 compute-0 nova_compute[192079]: 2025-10-02 12:04:23.092 2 DEBUG nova.compute.resource_tracker [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5730MB free_disk=73.45991134643555GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", 
"product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:04:23 compute-0 nova_compute[192079]: 2025-10-02 12:04:23.092 2 DEBUG oslo_concurrency.lockutils [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:23 compute-0 nova_compute[192079]: 2025-10-02 12:04:23.093 2 DEBUG oslo_concurrency.lockutils [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:23 compute-0 nova_compute[192079]: 2025-10-02 12:04:23.199 2 DEBUG nova.compute.resource_tracker [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Migration for instance 356bc6d6-1101-467e-a020-65876724c955 refers to another host's instance! _pair_instances_to_migrations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:903
Oct 02 12:04:23 compute-0 nova_compute[192079]: 2025-10-02 12:04:23.380 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759406648.3778038, 2f0ec710-6070-4bb8-ac27-21d96a184569 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:04:23 compute-0 nova_compute[192079]: 2025-10-02 12:04:23.380 2 INFO nova.compute.manager [-] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] VM Stopped (Lifecycle Event)
Oct 02 12:04:23 compute-0 nova_compute[192079]: 2025-10-02 12:04:23.426 2 DEBUG nova.compute.resource_tracker [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Skipping migration as instance is neither resizing nor live-migrating. _update_usage_from_migrations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1491
Oct 02 12:04:23 compute-0 nova_compute[192079]: 2025-10-02 12:04:23.431 2 DEBUG nova.compute.manager [None req-fbfdd73c-4172-43db-8069-55787964e97a - - - - - -] [instance: 2f0ec710-6070-4bb8-ac27-21d96a184569] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:04:23 compute-0 nova_compute[192079]: 2025-10-02 12:04:23.454 2 DEBUG nova.compute.resource_tracker [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Migration 8b92b020-fef8-4e24-b417-6318a75e3466 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1640
Oct 02 12:04:23 compute-0 nova_compute[192079]: 2025-10-02 12:04:23.454 2 DEBUG nova.compute.resource_tracker [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:04:23 compute-0 nova_compute[192079]: 2025-10-02 12:04:23.454 2 DEBUG nova.compute.resource_tracker [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:04:23 compute-0 nova_compute[192079]: 2025-10-02 12:04:23.498 2 DEBUG nova.compute.provider_tree [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:04:23 compute-0 nova_compute[192079]: 2025-10-02 12:04:23.544 2 DEBUG nova.scheduler.client.report [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:04:23 compute-0 nova_compute[192079]: 2025-10-02 12:04:23.599 2 DEBUG nova.compute.resource_tracker [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:04:23 compute-0 nova_compute[192079]: 2025-10-02 12:04:23.600 2 DEBUG oslo_concurrency.lockutils [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.507s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:23 compute-0 nova_compute[192079]: 2025-10-02 12:04:23.691 2 INFO nova.compute.manager [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Migrating instance to compute-2.ctlplane.example.com finished successfully.
Oct 02 12:04:24 compute-0 nova_compute[192079]: 2025-10-02 12:04:24.088 2 INFO nova.scheduler.client.report [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Deleted allocation for migration 8b92b020-fef8-4e24-b417-6318a75e3466
Oct 02 12:04:24 compute-0 nova_compute[192079]: 2025-10-02 12:04:24.088 2 DEBUG nova.virt.libvirt.driver [None req-0a34d7c2-6d84-4b74-ba5b-49c921863df7 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: 356bc6d6-1101-467e-a020-65876724c955] Live migration monitoring is all done _live_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10662
Oct 02 12:04:24 compute-0 nova_compute[192079]: 2025-10-02 12:04:24.589 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.164 2 DEBUG oslo_concurrency.lockutils [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Acquiring lock "c3b78b62-b1f8-477d-8ae4-9af540dc72ff" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.165 2 DEBUG oslo_concurrency.lockutils [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lock "c3b78b62-b1f8-477d-8ae4-9af540dc72ff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.184 2 DEBUG nova.compute.manager [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.273 2 DEBUG oslo_concurrency.lockutils [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.274 2 DEBUG oslo_concurrency.lockutils [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.280 2 DEBUG nova.virt.hardware [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.281 2 INFO nova.compute.claims [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.385 2 DEBUG nova.compute.provider_tree [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.421 2 DEBUG nova.scheduler.client.report [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.442 2 DEBUG oslo_concurrency.lockutils [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.168s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.442 2 DEBUG nova.compute.manager [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.494 2 DEBUG nova.compute.manager [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Not allocating networking since 'none' was specified. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1948
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.516 2 INFO nova.virt.libvirt.driver [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.538 2 DEBUG nova.compute.manager [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.552 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.720 2 DEBUG nova.compute.manager [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.721 2 DEBUG nova.virt.libvirt.driver [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.722 2 INFO nova.virt.libvirt.driver [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Creating image(s)
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.722 2 DEBUG oslo_concurrency.lockutils [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Acquiring lock "/var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.723 2 DEBUG oslo_concurrency.lockutils [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lock "/var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.723 2 DEBUG oslo_concurrency.lockutils [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lock "/var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.735 2 DEBUG oslo_concurrency.processutils [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.792 2 DEBUG oslo_concurrency.processutils [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.057s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.793 2 DEBUG oslo_concurrency.lockutils [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.794 2 DEBUG oslo_concurrency.lockutils [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.804 2 DEBUG oslo_concurrency.processutils [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.857 2 DEBUG oslo_concurrency.processutils [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.053s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.858 2 DEBUG oslo_concurrency.processutils [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.895 2 DEBUG oslo_concurrency.processutils [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk 1073741824" returned: 0 in 0.037s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.896 2 DEBUG oslo_concurrency.lockutils [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.102s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.896 2 DEBUG oslo_concurrency.processutils [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.948 2 DEBUG oslo_concurrency.processutils [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.052s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.949 2 DEBUG nova.virt.disk.api [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Checking if we can resize image /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:04:25 compute-0 nova_compute[192079]: 2025-10-02 12:04:25.950 2 DEBUG oslo_concurrency.processutils [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.002 2 DEBUG oslo_concurrency.processutils [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk --force-share --output=json" returned: 0 in 0.052s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.002 2 DEBUG nova.virt.disk.api [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Cannot resize image /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.003 2 DEBUG nova.objects.instance [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lazy-loading 'migration_context' on Instance uuid c3b78b62-b1f8-477d-8ae4-9af540dc72ff obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.017 2 DEBUG nova.virt.libvirt.driver [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.018 2 DEBUG nova.virt.libvirt.driver [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Ensure instance console log exists: /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.018 2 DEBUG oslo_concurrency.lockutils [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.018 2 DEBUG oslo_concurrency.lockutils [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.019 2 DEBUG oslo_concurrency.lockutils [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.020 2 DEBUG nova.virt.libvirt.driver [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Start _get_guest_xml network_info=[] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.024 2 WARNING nova.virt.libvirt.driver [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.027 2 DEBUG nova.virt.libvirt.host [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.028 2 DEBUG nova.virt.libvirt.host [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.030 2 DEBUG nova.virt.libvirt.host [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.031 2 DEBUG nova.virt.libvirt.host [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.032 2 DEBUG nova.virt.libvirt.driver [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.032 2 DEBUG nova.virt.hardware [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.033 2 DEBUG nova.virt.hardware [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.033 2 DEBUG nova.virt.hardware [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.033 2 DEBUG nova.virt.hardware [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.033 2 DEBUG nova.virt.hardware [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.033 2 DEBUG nova.virt.hardware [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.034 2 DEBUG nova.virt.hardware [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.034 2 DEBUG nova.virt.hardware [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.034 2 DEBUG nova.virt.hardware [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.034 2 DEBUG nova.virt.hardware [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.034 2 DEBUG nova.virt.hardware [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.037 2 DEBUG nova.objects.instance [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lazy-loading 'pci_devices' on Instance uuid c3b78b62-b1f8-477d-8ae4-9af540dc72ff obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.050 2 DEBUG nova.virt.libvirt.driver [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:04:26 compute-0 nova_compute[192079]:   <uuid>c3b78b62-b1f8-477d-8ae4-9af540dc72ff</uuid>
Oct 02 12:04:26 compute-0 nova_compute[192079]:   <name>instance-0000001b</name>
Oct 02 12:04:26 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:04:26 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:04:26 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:04:26 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:       <nova:name>tempest-ServersAdmin275Test-server-1693910444</nova:name>
Oct 02 12:04:26 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:04:26</nova:creationTime>
Oct 02 12:04:26 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:04:26 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:04:26 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:04:26 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:04:26 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:04:26 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:04:26 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:04:26 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:04:26 compute-0 nova_compute[192079]:         <nova:user uuid="e88312197d9e4d24a0c49002f36053ba">tempest-ServersAdmin275Test-1657122363-project-member</nova:user>
Oct 02 12:04:26 compute-0 nova_compute[192079]:         <nova:project uuid="dbe4d58d976745ec9575dfbfcfb67333">tempest-ServersAdmin275Test-1657122363</nova:project>
Oct 02 12:04:26 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:04:26 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:       <nova:ports/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:04:26 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:04:26 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <system>
Oct 02 12:04:26 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:04:26 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:04:26 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:04:26 compute-0 nova_compute[192079]:       <entry name="serial">c3b78b62-b1f8-477d-8ae4-9af540dc72ff</entry>
Oct 02 12:04:26 compute-0 nova_compute[192079]:       <entry name="uuid">c3b78b62-b1f8-477d-8ae4-9af540dc72ff</entry>
Oct 02 12:04:26 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     </system>
Oct 02 12:04:26 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:04:26 compute-0 nova_compute[192079]:   <os>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:   </os>
Oct 02 12:04:26 compute-0 nova_compute[192079]:   <features>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:   </features>
Oct 02 12:04:26 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:04:26 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:04:26 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:04:26 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:04:26 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk.config"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:04:26 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/console.log" append="off"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <video>
Oct 02 12:04:26 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     </video>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:04:26 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:04:26 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:04:26 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:04:26 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:04:26 compute-0 nova_compute[192079]: </domain>
Oct 02 12:04:26 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.095 2 DEBUG nova.virt.libvirt.driver [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.096 2 DEBUG nova.virt.libvirt.driver [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.096 2 INFO nova.virt.libvirt.driver [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Using config drive
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.267 2 INFO nova.virt.libvirt.driver [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Creating config drive at /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk.config
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.271 2 DEBUG oslo_concurrency.processutils [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmptz8epc91 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:26 compute-0 nova_compute[192079]: 2025-10-02 12:04:26.393 2 DEBUG oslo_concurrency.processutils [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmptz8epc91" returned: 0 in 0.122s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:26 compute-0 systemd-machined[152150]: New machine qemu-15-instance-0000001b.
Oct 02 12:04:26 compute-0 systemd[1]: Started Virtual Machine qemu-15-instance-0000001b.
Oct 02 12:04:27 compute-0 nova_compute[192079]: 2025-10-02 12:04:27.087 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406667.0870595, c3b78b62-b1f8-477d-8ae4-9af540dc72ff => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:04:27 compute-0 nova_compute[192079]: 2025-10-02 12:04:27.088 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] VM Resumed (Lifecycle Event)
Oct 02 12:04:27 compute-0 nova_compute[192079]: 2025-10-02 12:04:27.090 2 DEBUG nova.compute.manager [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:04:27 compute-0 nova_compute[192079]: 2025-10-02 12:04:27.091 2 DEBUG nova.virt.libvirt.driver [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:04:27 compute-0 nova_compute[192079]: 2025-10-02 12:04:27.094 2 INFO nova.virt.libvirt.driver [-] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Instance spawned successfully.
Oct 02 12:04:27 compute-0 nova_compute[192079]: 2025-10-02 12:04:27.095 2 DEBUG nova.virt.libvirt.driver [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:04:27 compute-0 nova_compute[192079]: 2025-10-02 12:04:27.126 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:04:27 compute-0 nova_compute[192079]: 2025-10-02 12:04:27.129 2 DEBUG nova.virt.libvirt.driver [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:04:27 compute-0 nova_compute[192079]: 2025-10-02 12:04:27.130 2 DEBUG nova.virt.libvirt.driver [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:04:27 compute-0 nova_compute[192079]: 2025-10-02 12:04:27.130 2 DEBUG nova.virt.libvirt.driver [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:04:27 compute-0 nova_compute[192079]: 2025-10-02 12:04:27.130 2 DEBUG nova.virt.libvirt.driver [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:04:27 compute-0 nova_compute[192079]: 2025-10-02 12:04:27.131 2 DEBUG nova.virt.libvirt.driver [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:04:27 compute-0 nova_compute[192079]: 2025-10-02 12:04:27.131 2 DEBUG nova.virt.libvirt.driver [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:04:27 compute-0 nova_compute[192079]: 2025-10-02 12:04:27.135 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:04:27 compute-0 nova_compute[192079]: 2025-10-02 12:04:27.181 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:04:27 compute-0 nova_compute[192079]: 2025-10-02 12:04:27.181 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406667.087162, c3b78b62-b1f8-477d-8ae4-9af540dc72ff => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:04:27 compute-0 nova_compute[192079]: 2025-10-02 12:04:27.181 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] VM Started (Lifecycle Event)
Oct 02 12:04:27 compute-0 nova_compute[192079]: 2025-10-02 12:04:27.214 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:04:27 compute-0 nova_compute[192079]: 2025-10-02 12:04:27.217 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:04:27 compute-0 nova_compute[192079]: 2025-10-02 12:04:27.241 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:04:27 compute-0 nova_compute[192079]: 2025-10-02 12:04:27.287 2 INFO nova.compute.manager [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Took 1.57 seconds to spawn the instance on the hypervisor.
Oct 02 12:04:27 compute-0 nova_compute[192079]: 2025-10-02 12:04:27.288 2 DEBUG nova.compute.manager [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:04:27 compute-0 nova_compute[192079]: 2025-10-02 12:04:27.420 2 INFO nova.compute.manager [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Took 2.18 seconds to build instance.
Oct 02 12:04:27 compute-0 nova_compute[192079]: 2025-10-02 12:04:27.443 2 DEBUG oslo_concurrency.lockutils [None req-c9cefffe-3697-4eac-8d5a-bf3d2c4f573b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lock "c3b78b62-b1f8-477d-8ae4-9af540dc72ff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 2.279s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:28 compute-0 nova_compute[192079]: 2025-10-02 12:04:28.612 2 INFO nova.compute.manager [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Rebuilding instance
Oct 02 12:04:28 compute-0 nova_compute[192079]: 2025-10-02 12:04:28.927 2 DEBUG nova.compute.manager [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:04:29 compute-0 nova_compute[192079]: 2025-10-02 12:04:29.001 2 DEBUG nova.objects.instance [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lazy-loading 'pci_requests' on Instance uuid c3b78b62-b1f8-477d-8ae4-9af540dc72ff obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:04:29 compute-0 nova_compute[192079]: 2025-10-02 12:04:29.012 2 DEBUG nova.objects.instance [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lazy-loading 'pci_devices' on Instance uuid c3b78b62-b1f8-477d-8ae4-9af540dc72ff obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:04:29 compute-0 nova_compute[192079]: 2025-10-02 12:04:29.026 2 DEBUG nova.objects.instance [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lazy-loading 'resources' on Instance uuid c3b78b62-b1f8-477d-8ae4-9af540dc72ff obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:04:29 compute-0 nova_compute[192079]: 2025-10-02 12:04:29.039 2 DEBUG nova.objects.instance [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lazy-loading 'migration_context' on Instance uuid c3b78b62-b1f8-477d-8ae4-9af540dc72ff obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:04:29 compute-0 nova_compute[192079]: 2025-10-02 12:04:29.050 2 DEBUG nova.objects.instance [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Trying to apply a migration context that does not seem to be set for this instance apply_migration_context /usr/lib/python3.9/site-packages/nova/objects/instance.py:1032
Oct 02 12:04:29 compute-0 nova_compute[192079]: 2025-10-02 12:04:29.053 2 DEBUG nova.virt.libvirt.driver [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Shutting down instance from state 1 _clean_shutdown /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4071
Oct 02 12:04:29 compute-0 podman[223371]: 2025-10-02 12:04:29.146261518 +0000 UTC m=+0.058703747 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, container_name=ceilometer_agent_compute, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, tcib_managed=true, 
config_id=edpm, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:04:29 compute-0 nova_compute[192079]: 2025-10-02 12:04:29.591 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:29 compute-0 nova_compute[192079]: 2025-10-02 12:04:29.630 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759406654.6299384, 356bc6d6-1101-467e-a020-65876724c955 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:04:29 compute-0 nova_compute[192079]: 2025-10-02 12:04:29.631 2 INFO nova.compute.manager [-] [instance: 356bc6d6-1101-467e-a020-65876724c955] VM Stopped (Lifecycle Event)
Oct 02 12:04:29 compute-0 nova_compute[192079]: 2025-10-02 12:04:29.654 2 DEBUG nova.compute.manager [None req-a087bcea-e6e8-4719-a2e6-e8d979ebd153 - - - - - -] [instance: 356bc6d6-1101-467e-a020-65876724c955] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:04:30 compute-0 nova_compute[192079]: 2025-10-02 12:04:30.615 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:34 compute-0 podman[223391]: 2025-10-02 12:04:34.141831153 +0000 UTC m=+0.058947523 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, name=ubi9-minimal, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, version=9.6, build-date=2025-08-20T13:12:41, com.redhat.component=ubi9-minimal-container, url=https://catalog.redhat.com/en/search?searchType=containers, vendor=Red Hat, Inc., container_name=openstack_network_exporter, managed_by=edpm_ansible, release=1755695350, io.openshift.tags=minimal rhel9, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., distribution-scope=public, io.buildah.version=1.33.7, io.openshift.expose-services=, architecture=x86_64, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, config_id=edpm, vcs-type=git, maintainer=Red Hat, Inc., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b)
Oct 02 12:04:34 compute-0 podman[223392]: 2025-10-02 12:04:34.143287374 +0000 UTC m=+0.056016684 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, config_id=multipathd, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:04:34 compute-0 nova_compute[192079]: 2025-10-02 12:04:34.592 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:35 compute-0 nova_compute[192079]: 2025-10-02 12:04:35.622 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:39 compute-0 nova_compute[192079]: 2025-10-02 12:04:39.099 2 DEBUG nova.virt.libvirt.driver [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Instance in state 1 after 10 seconds - resending shutdown _clean_shutdown /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4101
Oct 02 12:04:39 compute-0 nova_compute[192079]: 2025-10-02 12:04:39.593 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:40 compute-0 nova_compute[192079]: 2025-10-02 12:04:40.625 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:41 compute-0 podman[223450]: 2025-10-02 12:04:41.153256849 +0000 UTC m=+0.063801196 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:04:41 compute-0 podman[223451]: 2025-10-02 12:04:41.17776738 +0000 UTC m=+0.074305404 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, container_name=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid, io.buildah.version=1.41.3, managed_by=edpm_ansible, tcib_managed=true)
Oct 02 12:04:42 compute-0 systemd[1]: machine-qemu\x2d15\x2dinstance\x2d0000001b.scope: Deactivated successfully.
Oct 02 12:04:42 compute-0 systemd[1]: machine-qemu\x2d15\x2dinstance\x2d0000001b.scope: Consumed 12.988s CPU time.
Oct 02 12:04:42 compute-0 systemd-machined[152150]: Machine qemu-15-instance-0000001b terminated.
Oct 02 12:04:42 compute-0 nova_compute[192079]: 2025-10-02 12:04:42.296 2 INFO nova.virt.libvirt.driver [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Instance shutdown successfully after 13 seconds.
Oct 02 12:04:42 compute-0 nova_compute[192079]: 2025-10-02 12:04:42.304 2 INFO nova.virt.libvirt.driver [-] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Instance destroyed successfully.
Oct 02 12:04:42 compute-0 nova_compute[192079]: 2025-10-02 12:04:42.309 2 INFO nova.virt.libvirt.driver [-] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Instance destroyed successfully.
Oct 02 12:04:42 compute-0 nova_compute[192079]: 2025-10-02 12:04:42.310 2 INFO nova.virt.libvirt.driver [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Deleting instance files /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff_del
Oct 02 12:04:42 compute-0 nova_compute[192079]: 2025-10-02 12:04:42.311 2 INFO nova.virt.libvirt.driver [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Deletion of /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff_del complete
Oct 02 12:04:43 compute-0 nova_compute[192079]: 2025-10-02 12:04:43.473 2 DEBUG nova.virt.libvirt.driver [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Creating tmpfile /var/lib/nova/instances/tmp1_i3q1h5 to notify to other compute nodes that they should mount the same storage. _create_shared_storage_test_file /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10041
Oct 02 12:04:43 compute-0 nova_compute[192079]: 2025-10-02 12:04:43.474 2 DEBUG nova.compute.manager [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] destination check data is LibvirtLiveMigrateData(bdms=<?>,block_migration=True,disk_available_mb=74752,disk_over_commit=False,dst_numa_info=<?>,dst_supports_numa_live_migration=<?>,dst_wants_file_backed_memory=False,file_backed_memory_discard=<?>,filename='tmp1_i3q1h5',graphics_listen_addr_spice=127.0.0.1,graphics_listen_addr_vnc=::,image_type='qcow2',instance_relative_path=<?>,is_shared_block_storage=<?>,is_shared_instance_path=<?>,is_volume_backed=<?>,migration=<?>,old_vol_attachment_ids=<?>,serial_listen_addr=None,serial_listen_ports=<?>,src_supports_native_luks=<?>,src_supports_numa_live_migration=<?>,supported_perf_events=<?>,target_connect_addr=<?>,vifs=[VIFMigrateData],wait_for_vif_plugged=<?>) check_can_live_migrate_destination /usr/lib/python3.9/site-packages/nova/compute/manager.py:8476
Oct 02 12:04:43 compute-0 nova_compute[192079]: 2025-10-02 12:04:43.861 2 DEBUG nova.virt.libvirt.driver [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:04:43 compute-0 nova_compute[192079]: 2025-10-02 12:04:43.862 2 INFO nova.virt.libvirt.driver [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Creating image(s)
Oct 02 12:04:43 compute-0 nova_compute[192079]: 2025-10-02 12:04:43.863 2 DEBUG oslo_concurrency.lockutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Acquiring lock "/var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:43 compute-0 nova_compute[192079]: 2025-10-02 12:04:43.864 2 DEBUG oslo_concurrency.lockutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lock "/var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:43 compute-0 nova_compute[192079]: 2025-10-02 12:04:43.865 2 DEBUG oslo_concurrency.lockutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lock "/var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:43 compute-0 nova_compute[192079]: 2025-10-02 12:04:43.866 2 DEBUG oslo_concurrency.lockutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Acquiring lock "d7f074efa852dc950deac120296f6eecf48a40d2" by "nova.virt.libvirt.imagebackend.Image.cache.<locals>.fetch_func_sync" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:43 compute-0 nova_compute[192079]: 2025-10-02 12:04:43.866 2 DEBUG oslo_concurrency.lockutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lock "d7f074efa852dc950deac120296f6eecf48a40d2" acquired by "nova.virt.libvirt.imagebackend.Image.cache.<locals>.fetch_func_sync" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:44 compute-0 nova_compute[192079]: 2025-10-02 12:04:44.595 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:45 compute-0 nova_compute[192079]: 2025-10-02 12:04:45.720 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:47 compute-0 nova_compute[192079]: 2025-10-02 12:04:47.700 2 DEBUG oslo_concurrency.processutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2.part --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:47 compute-0 nova_compute[192079]: 2025-10-02 12:04:47.757 2 DEBUG oslo_concurrency.processutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2.part --force-share --output=json" returned: 0 in 0.057s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:47 compute-0 nova_compute[192079]: 2025-10-02 12:04:47.758 2 DEBUG nova.virt.images [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] 062d9f80-76b6-42ce-bee7-0fb82a008353 was qcow2, converting to raw fetch_to_raw /usr/lib/python3.9/site-packages/nova/virt/images.py:242
Oct 02 12:04:47 compute-0 nova_compute[192079]: 2025-10-02 12:04:47.760 2 DEBUG nova.privsep.utils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Path '/var/lib/nova/instances' supports direct I/O supports_direct_io /usr/lib/python3.9/site-packages/nova/privsep/utils.py:63
Oct 02 12:04:47 compute-0 nova_compute[192079]: 2025-10-02 12:04:47.760 2 DEBUG oslo_concurrency.processutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Running cmd (subprocess): qemu-img convert -t none -O raw -f qcow2 /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2.part /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2.converted execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.043 2 DEBUG oslo_concurrency.processutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] CMD "qemu-img convert -t none -O raw -f qcow2 /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2.part /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2.converted" returned: 0 in 0.282s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.047 2 DEBUG oslo_concurrency.processutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2.converted --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.104 2 DEBUG oslo_concurrency.processutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2.converted --force-share --output=json" returned: 0 in 0.057s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.105 2 DEBUG oslo_concurrency.lockutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lock "d7f074efa852dc950deac120296f6eecf48a40d2" "released" by "nova.virt.libvirt.imagebackend.Image.cache.<locals>.fetch_func_sync" :: held 4.239s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.118 2 DEBUG oslo_concurrency.processutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.177 2 DEBUG oslo_concurrency.processutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2 --force-share --output=json" returned: 0 in 0.059s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.178 2 DEBUG oslo_concurrency.lockutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Acquiring lock "d7f074efa852dc950deac120296f6eecf48a40d2" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.179 2 DEBUG oslo_concurrency.lockutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lock "d7f074efa852dc950deac120296f6eecf48a40d2" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.190 2 DEBUG oslo_concurrency.processutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.247 2 DEBUG oslo_concurrency.processutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2 --force-share --output=json" returned: 0 in 0.057s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.248 2 DEBUG oslo_concurrency.processutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2,backing_fmt=raw /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.281 2 DEBUG oslo_concurrency.processutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2,backing_fmt=raw /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk 1073741824" returned: 0 in 0.032s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.282 2 DEBUG oslo_concurrency.lockutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lock "d7f074efa852dc950deac120296f6eecf48a40d2" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.103s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.282 2 DEBUG oslo_concurrency.processutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.337 2 DEBUG oslo_concurrency.processutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2 --force-share --output=json" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.338 2 DEBUG nova.virt.disk.api [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Checking if we can resize image /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.338 2 DEBUG oslo_concurrency.processutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.395 2 DEBUG oslo_concurrency.processutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk --force-share --output=json" returned: 0 in 0.057s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.396 2 DEBUG nova.virt.disk.api [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Cannot resize image /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.397 2 DEBUG nova.virt.libvirt.driver [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.397 2 DEBUG nova.virt.libvirt.driver [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Ensure instance console log exists: /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.397 2 DEBUG oslo_concurrency.lockutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.398 2 DEBUG oslo_concurrency.lockutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.398 2 DEBUG oslo_concurrency.lockutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.399 2 DEBUG nova.virt.libvirt.driver [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Start _get_guest_xml network_info=[] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:28Z,direct_url=<?>,disk_format='qcow2',id=062d9f80-76b6-42ce-bee7-0fb82a008353,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img_alt',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:29Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.404 2 WARNING nova.virt.libvirt.driver [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.: NotImplementedError
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.410 2 DEBUG nova.virt.libvirt.host [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.410 2 DEBUG nova.virt.libvirt.host [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.413 2 DEBUG nova.virt.libvirt.host [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.414 2 DEBUG nova.virt.libvirt.host [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.415 2 DEBUG nova.virt.libvirt.driver [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.415 2 DEBUG nova.virt.hardware [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:28Z,direct_url=<?>,disk_format='qcow2',id=062d9f80-76b6-42ce-bee7-0fb82a008353,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img_alt',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:29Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.415 2 DEBUG nova.virt.hardware [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.416 2 DEBUG nova.virt.hardware [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.416 2 DEBUG nova.virt.hardware [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.416 2 DEBUG nova.virt.hardware [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.416 2 DEBUG nova.virt.hardware [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.417 2 DEBUG nova.virt.hardware [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.417 2 DEBUG nova.virt.hardware [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.417 2 DEBUG nova.virt.hardware [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.417 2 DEBUG nova.virt.hardware [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.417 2 DEBUG nova.virt.hardware [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.418 2 DEBUG nova.objects.instance [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lazy-loading 'vcpu_model' on Instance uuid c3b78b62-b1f8-477d-8ae4-9af540dc72ff obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.438 2 DEBUG nova.virt.libvirt.driver [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:04:48 compute-0 nova_compute[192079]:   <uuid>c3b78b62-b1f8-477d-8ae4-9af540dc72ff</uuid>
Oct 02 12:04:48 compute-0 nova_compute[192079]:   <name>instance-0000001b</name>
Oct 02 12:04:48 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:04:48 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:04:48 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:04:48 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:       <nova:name>tempest-ServersAdmin275Test-server-1693910444</nova:name>
Oct 02 12:04:48 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:04:48</nova:creationTime>
Oct 02 12:04:48 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:04:48 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:04:48 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:04:48 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:04:48 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:04:48 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:04:48 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:04:48 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:04:48 compute-0 nova_compute[192079]:         <nova:user uuid="e88312197d9e4d24a0c49002f36053ba">tempest-ServersAdmin275Test-1657122363-project-member</nova:user>
Oct 02 12:04:48 compute-0 nova_compute[192079]:         <nova:project uuid="dbe4d58d976745ec9575dfbfcfb67333">tempest-ServersAdmin275Test-1657122363</nova:project>
Oct 02 12:04:48 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:04:48 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="062d9f80-76b6-42ce-bee7-0fb82a008353"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:       <nova:ports/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:04:48 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:04:48 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <system>
Oct 02 12:04:48 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:04:48 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:04:48 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:04:48 compute-0 nova_compute[192079]:       <entry name="serial">c3b78b62-b1f8-477d-8ae4-9af540dc72ff</entry>
Oct 02 12:04:48 compute-0 nova_compute[192079]:       <entry name="uuid">c3b78b62-b1f8-477d-8ae4-9af540dc72ff</entry>
Oct 02 12:04:48 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     </system>
Oct 02 12:04:48 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:04:48 compute-0 nova_compute[192079]:   <os>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:   </os>
Oct 02 12:04:48 compute-0 nova_compute[192079]:   <features>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:   </features>
Oct 02 12:04:48 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:04:48 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:04:48 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:04:48 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:04:48 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk.config"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:04:48 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/console.log" append="off"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <video>
Oct 02 12:04:48 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     </video>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:04:48 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:04:48 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:04:48 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:04:48 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:04:48 compute-0 nova_compute[192079]: </domain>
Oct 02 12:04:48 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.497 2 DEBUG nova.virt.libvirt.driver [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.498 2 DEBUG nova.virt.libvirt.driver [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.498 2 INFO nova.virt.libvirt.driver [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Using config drive
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.524 2 DEBUG nova.objects.instance [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lazy-loading 'ec2_ids' on Instance uuid c3b78b62-b1f8-477d-8ae4-9af540dc72ff obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.594 2 DEBUG nova.compute.manager [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] pre_live_migration data is LibvirtLiveMigrateData(bdms=<?>,block_migration=True,disk_available_mb=74752,disk_over_commit=False,dst_numa_info=<?>,dst_supports_numa_live_migration=<?>,dst_wants_file_backed_memory=False,file_backed_memory_discard=<?>,filename='tmp1_i3q1h5',graphics_listen_addr_spice=127.0.0.1,graphics_listen_addr_vnc=::,image_type='qcow2',instance_relative_path='a6bb5263-b0c7-4282-8e02-3503fd778e6f',is_shared_block_storage=False,is_shared_instance_path=False,is_volume_backed=False,migration=<?>,old_vol_attachment_ids=<?>,serial_listen_addr=None,serial_listen_ports=<?>,src_supports_native_luks=<?>,src_supports_numa_live_migration=<?>,supported_perf_events=<?>,target_connect_addr=<?>,vifs=[VIFMigrateData],wait_for_vif_plugged=<?>) pre_live_migration /usr/lib/python3.9/site-packages/nova/compute/manager.py:8604
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.618 2 DEBUG nova.objects.instance [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lazy-loading 'keypairs' on Instance uuid c3b78b62-b1f8-477d-8ae4-9af540dc72ff obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.663 2 DEBUG oslo_concurrency.lockutils [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Acquiring lock "refresh_cache-a6bb5263-b0c7-4282-8e02-3503fd778e6f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.663 2 DEBUG oslo_concurrency.lockutils [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Acquired lock "refresh_cache-a6bb5263-b0c7-4282-8e02-3503fd778e6f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:04:48 compute-0 nova_compute[192079]: 2025-10-02 12:04:48.664 2 DEBUG nova.network.neutron [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:04:49 compute-0 nova_compute[192079]: 2025-10-02 12:04:49.087 2 INFO nova.virt.libvirt.driver [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Creating config drive at /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk.config
Oct 02 12:04:49 compute-0 nova_compute[192079]: 2025-10-02 12:04:49.092 2 DEBUG oslo_concurrency.processutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpckyfw1k3 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:49 compute-0 nova_compute[192079]: 2025-10-02 12:04:49.216 2 DEBUG oslo_concurrency.processutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpckyfw1k3" returned: 0 in 0.124s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:49 compute-0 systemd-machined[152150]: New machine qemu-16-instance-0000001b.
Oct 02 12:04:49 compute-0 systemd[1]: Started Virtual Machine qemu-16-instance-0000001b.
Oct 02 12:04:49 compute-0 nova_compute[192079]: 2025-10-02 12:04:49.597 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.006 2 DEBUG nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Removed pending event for c3b78b62-b1f8-477d-8ae4-9af540dc72ff due to event _event_emit_delayed /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:438
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.007 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406690.0053124, c3b78b62-b1f8-477d-8ae4-9af540dc72ff => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.008 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] VM Resumed (Lifecycle Event)
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.012 2 DEBUG nova.compute.manager [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.012 2 DEBUG nova.virt.libvirt.driver [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.017 2 INFO nova.virt.libvirt.driver [-] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Instance spawned successfully.
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.019 2 DEBUG nova.virt.libvirt.driver [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.041 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.045 2 DEBUG nova.virt.libvirt.driver [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.046 2 DEBUG nova.virt.libvirt.driver [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.046 2 DEBUG nova.virt.libvirt.driver [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.047 2 DEBUG nova.virt.libvirt.driver [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.047 2 DEBUG nova.virt.libvirt.driver [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.048 2 DEBUG nova.virt.libvirt.driver [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.052 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: active, current task_state: rebuild_spawning, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.089 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] During sync_power_state the instance has a pending task (rebuild_spawning). Skip.
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.090 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406690.0081818, c3b78b62-b1f8-477d-8ae4-9af540dc72ff => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.090 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] VM Started (Lifecycle Event)
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.113 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.117 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Synchronizing instance power state after lifecycle event "Started"; current vm_state: active, current task_state: rebuild_spawning, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.145 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] During sync_power_state the instance has a pending task (rebuild_spawning). Skip.
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.147 2 DEBUG nova.compute.manager [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.234 2 DEBUG oslo_concurrency.lockutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.235 2 DEBUG oslo_concurrency.lockutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.235 2 DEBUG nova.objects.instance [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Trying to apply a migration context that does not seem to be set for this instance apply_migration_context /usr/lib/python3.9/site-packages/nova/objects/instance.py:1032
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.321 2 DEBUG oslo_concurrency.lockutils [None req-fa07e5d7-74df-4dea-a8a2-19b83625a46b e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 0.086s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.533 2 DEBUG nova.network.neutron [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Updating instance_info_cache with network_info: [{"id": "5e772b33-6577-4ba1-b187-e4779ef49ed6", "address": "fa:16:3e:09:b4:ad", "network": {"id": "664b6526-6df1-4024-9bab-37218e6c18bd", "bridge": "br-int", "label": "tempest-LiveMigrationTest-2017832683-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f7cb78d24d1a4511a59ced45ccc4a1c7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5e772b33-65", "ovs_interfaceid": "5e772b33-6577-4ba1-b187-e4779ef49ed6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.571 2 DEBUG oslo_concurrency.lockutils [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Releasing lock "refresh_cache-a6bb5263-b0c7-4282-8e02-3503fd778e6f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.589 2 DEBUG nova.virt.libvirt.driver [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] migrate_data in pre_live_migration: LibvirtLiveMigrateData(bdms=<?>,block_migration=True,disk_available_mb=74752,disk_over_commit=False,dst_numa_info=<?>,dst_supports_numa_live_migration=<?>,dst_wants_file_backed_memory=False,file_backed_memory_discard=<?>,filename='tmp1_i3q1h5',graphics_listen_addr_spice=127.0.0.1,graphics_listen_addr_vnc=::,image_type='qcow2',instance_relative_path='a6bb5263-b0c7-4282-8e02-3503fd778e6f',is_shared_block_storage=False,is_shared_instance_path=False,is_volume_backed=False,migration=<?>,old_vol_attachment_ids={},serial_listen_addr=None,serial_listen_ports=<?>,src_supports_native_luks=<?>,src_supports_numa_live_migration=<?>,supported_perf_events=<?>,target_connect_addr=<?>,vifs=[VIFMigrateData],wait_for_vif_plugged=<?>) pre_live_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10827
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.590 2 DEBUG nova.virt.libvirt.driver [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Creating instance directory: /var/lib/nova/instances/a6bb5263-b0c7-4282-8e02-3503fd778e6f pre_live_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10840
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.591 2 DEBUG nova.virt.libvirt.driver [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Creating disk.info with the contents: {'/var/lib/nova/instances/a6bb5263-b0c7-4282-8e02-3503fd778e6f/disk': 'qcow2', '/var/lib/nova/instances/a6bb5263-b0c7-4282-8e02-3503fd778e6f/disk.config': 'raw'} pre_live_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10854
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.592 2 DEBUG nova.virt.libvirt.driver [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Checking to make sure images and backing files are present before live migration. pre_live_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10864
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.593 2 DEBUG nova.objects.instance [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Lazy-loading 'trusted_certs' on Instance uuid a6bb5263-b0c7-4282-8e02-3503fd778e6f obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.622 2 DEBUG oslo_concurrency.processutils [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.714 2 DEBUG oslo_concurrency.processutils [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.092s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.715 2 DEBUG oslo_concurrency.lockutils [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.716 2 DEBUG oslo_concurrency.lockutils [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.726 2 DEBUG oslo_concurrency.processutils [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.744 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.796 2 DEBUG oslo_concurrency.processutils [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.070s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.797 2 DEBUG oslo_concurrency.processutils [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/a6bb5263-b0c7-4282-8e02-3503fd778e6f/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.840 2 DEBUG oslo_concurrency.processutils [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/a6bb5263-b0c7-4282-8e02-3503fd778e6f/disk 1073741824" returned: 0 in 0.043s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.841 2 DEBUG oslo_concurrency.lockutils [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.126s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.842 2 DEBUG oslo_concurrency.processutils [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.932 2 DEBUG oslo_concurrency.processutils [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.090s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.934 2 DEBUG nova.virt.disk.api [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Checking if we can resize image /var/lib/nova/instances/a6bb5263-b0c7-4282-8e02-3503fd778e6f/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.935 2 DEBUG oslo_concurrency.processutils [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a6bb5263-b0c7-4282-8e02-3503fd778e6f/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.997 2 DEBUG oslo_concurrency.processutils [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a6bb5263-b0c7-4282-8e02-3503fd778e6f/disk --force-share --output=json" returned: 0 in 0.062s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.998 2 DEBUG nova.virt.disk.api [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Cannot resize image /var/lib/nova/instances/a6bb5263-b0c7-4282-8e02-3503fd778e6f/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:04:50 compute-0 nova_compute[192079]: 2025-10-02 12:04:50.999 2 DEBUG nova.objects.instance [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Lazy-loading 'migration_context' on Instance uuid a6bb5263-b0c7-4282-8e02-3503fd778e6f obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:04:51 compute-0 nova_compute[192079]: 2025-10-02 12:04:51.020 2 DEBUG oslo_concurrency.processutils [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f raw /var/lib/nova/instances/a6bb5263-b0c7-4282-8e02-3503fd778e6f/disk.config 485376 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:51 compute-0 nova_compute[192079]: 2025-10-02 12:04:51.045 2 DEBUG oslo_concurrency.processutils [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f raw /var/lib/nova/instances/a6bb5263-b0c7-4282-8e02-3503fd778e6f/disk.config 485376" returned: 0 in 0.024s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:51 compute-0 nova_compute[192079]: 2025-10-02 12:04:51.047 2 DEBUG nova.virt.libvirt.volume.remotefs [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Copying file compute-1.ctlplane.example.com:/var/lib/nova/instances/a6bb5263-b0c7-4282-8e02-3503fd778e6f/disk.config to /var/lib/nova/instances/a6bb5263-b0c7-4282-8e02-3503fd778e6f copy_file /usr/lib/python3.9/site-packages/nova/virt/libvirt/volume/remotefs.py:103
Oct 02 12:04:51 compute-0 nova_compute[192079]: 2025-10-02 12:04:51.048 2 DEBUG oslo_concurrency.processutils [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Running cmd (subprocess): scp -C -r compute-1.ctlplane.example.com:/var/lib/nova/instances/a6bb5263-b0c7-4282-8e02-3503fd778e6f/disk.config /var/lib/nova/instances/a6bb5263-b0c7-4282-8e02-3503fd778e6f execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:04:51 compute-0 podman[223579]: 2025-10-02 12:04:51.222427654 +0000 UTC m=+0.127002985 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_id=ovn_controller, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3)
Oct 02 12:04:51 compute-0 nova_compute[192079]: 2025-10-02 12:04:51.520 2 DEBUG oslo_concurrency.processutils [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] CMD "scp -C -r compute-1.ctlplane.example.com:/var/lib/nova/instances/a6bb5263-b0c7-4282-8e02-3503fd778e6f/disk.config /var/lib/nova/instances/a6bb5263-b0c7-4282-8e02-3503fd778e6f" returned: 0 in 0.472s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:04:51 compute-0 nova_compute[192079]: 2025-10-02 12:04:51.522 2 DEBUG nova.virt.libvirt.driver [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Plugging VIFs using destination host port bindings before live migration. _pre_live_migration_plug_vifs /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10794
Oct 02 12:04:51 compute-0 nova_compute[192079]: 2025-10-02 12:04:51.524 2 DEBUG nova.virt.libvirt.vif [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:04:30Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-LiveMigrationTest-server-1169459074',display_name='tempest-LiveMigrationTest-server-1169459074',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-1.ctlplane.example.com',hostname='tempest-livemigrationtest-server-1169459074',id=28,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:04:37Z,launched_on='compute-1.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-1.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='f7cb78d24d1a4511a59ced45ccc4a1c7',ramdisk_id='',reservation_id='r-b554y970',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min
_disk='1',image_min_ram='0',owner_project_name='tempest-LiveMigrationTest-1666170212',owner_user_name='tempest-LiveMigrationTest-1666170212-project-member'},tags=<?>,task_state='migrating',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:04:37Z,user_data=None,user_id='5f75195e56504673bd403ce69cbc28ca',uuid=a6bb5263-b0c7-4282-8e02-3503fd778e6f,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "5e772b33-6577-4ba1-b187-e4779ef49ed6", "address": "fa:16:3e:09:b4:ad", "network": {"id": "664b6526-6df1-4024-9bab-37218e6c18bd", "bridge": "br-int", "label": "tempest-LiveMigrationTest-2017832683-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f7cb78d24d1a4511a59ced45ccc4a1c7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system"}, "devname": "tap5e772b33-65", "ovs_interfaceid": "5e772b33-6577-4ba1-b187-e4779ef49ed6", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {"os_vif_delegation": true}, "preserve_on_delete": true, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:04:51 compute-0 nova_compute[192079]: 2025-10-02 12:04:51.525 2 DEBUG nova.network.os_vif_util [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Converting VIF {"id": "5e772b33-6577-4ba1-b187-e4779ef49ed6", "address": "fa:16:3e:09:b4:ad", "network": {"id": "664b6526-6df1-4024-9bab-37218e6c18bd", "bridge": "br-int", "label": "tempest-LiveMigrationTest-2017832683-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f7cb78d24d1a4511a59ced45ccc4a1c7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system"}, "devname": "tap5e772b33-65", "ovs_interfaceid": "5e772b33-6577-4ba1-b187-e4779ef49ed6", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {"os_vif_delegation": true}, "preserve_on_delete": true, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:04:51 compute-0 nova_compute[192079]: 2025-10-02 12:04:51.527 2 DEBUG nova.network.os_vif_util [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:09:b4:ad,bridge_name='br-int',has_traffic_filtering=True,id=5e772b33-6577-4ba1-b187-e4779ef49ed6,network=Network(664b6526-6df1-4024-9bab-37218e6c18bd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=True,vif_name='tap5e772b33-65') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:04:51 compute-0 nova_compute[192079]: 2025-10-02 12:04:51.527 2 DEBUG os_vif [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:09:b4:ad,bridge_name='br-int',has_traffic_filtering=True,id=5e772b33-6577-4ba1-b187-e4779ef49ed6,network=Network(664b6526-6df1-4024-9bab-37218e6c18bd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=True,vif_name='tap5e772b33-65') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:04:51 compute-0 nova_compute[192079]: 2025-10-02 12:04:51.528 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:51 compute-0 nova_compute[192079]: 2025-10-02 12:04:51.529 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:04:51 compute-0 nova_compute[192079]: 2025-10-02 12:04:51.530 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:04:51 compute-0 nova_compute[192079]: 2025-10-02 12:04:51.532 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:51 compute-0 nova_compute[192079]: 2025-10-02 12:04:51.532 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap5e772b33-65, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:04:51 compute-0 nova_compute[192079]: 2025-10-02 12:04:51.533 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap5e772b33-65, col_values=(('external_ids', {'iface-id': '5e772b33-6577-4ba1-b187-e4779ef49ed6', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:09:b4:ad', 'vm-uuid': 'a6bb5263-b0c7-4282-8e02-3503fd778e6f'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:04:51 compute-0 NetworkManager[51160]: <info>  [1759406691.5369] manager: (tap5e772b33-65): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/54)
Oct 02 12:04:51 compute-0 nova_compute[192079]: 2025-10-02 12:04:51.535 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:51 compute-0 nova_compute[192079]: 2025-10-02 12:04:51.541 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:04:51 compute-0 nova_compute[192079]: 2025-10-02 12:04:51.542 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:51 compute-0 nova_compute[192079]: 2025-10-02 12:04:51.543 2 INFO os_vif [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:09:b4:ad,bridge_name='br-int',has_traffic_filtering=True,id=5e772b33-6577-4ba1-b187-e4779ef49ed6,network=Network(664b6526-6df1-4024-9bab-37218e6c18bd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=True,vif_name='tap5e772b33-65')
Oct 02 12:04:51 compute-0 nova_compute[192079]: 2025-10-02 12:04:51.543 2 DEBUG nova.virt.libvirt.driver [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] No dst_numa_info in migrate_data, no cores to power up in pre_live_migration. pre_live_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:10954
Oct 02 12:04:51 compute-0 nova_compute[192079]: 2025-10-02 12:04:51.544 2 DEBUG nova.compute.manager [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] driver pre_live_migration data is LibvirtLiveMigrateData(bdms=[],block_migration=True,disk_available_mb=74752,disk_over_commit=False,dst_numa_info=<?>,dst_supports_numa_live_migration=<?>,dst_wants_file_backed_memory=False,file_backed_memory_discard=<?>,filename='tmp1_i3q1h5',graphics_listen_addr_spice=127.0.0.1,graphics_listen_addr_vnc=::,image_type='qcow2',instance_relative_path='a6bb5263-b0c7-4282-8e02-3503fd778e6f',is_shared_block_storage=False,is_shared_instance_path=False,is_volume_backed=False,migration=<?>,old_vol_attachment_ids={},serial_listen_addr=None,serial_listen_ports=[],src_supports_native_luks=<?>,src_supports_numa_live_migration=<?>,supported_perf_events=[],target_connect_addr=None,vifs=[VIFMigrateData],wait_for_vif_plugged=<?>) pre_live_migration /usr/lib/python3.9/site-packages/nova/compute/manager.py:8668
Oct 02 12:04:52 compute-0 nova_compute[192079]: 2025-10-02 12:04:52.643 2 INFO nova.compute.manager [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Rebuilding instance
Oct 02 12:04:53 compute-0 nova_compute[192079]: 2025-10-02 12:04:53.004 2 DEBUG nova.compute.manager [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:04:53 compute-0 nova_compute[192079]: 2025-10-02 12:04:53.075 2 DEBUG nova.objects.instance [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Lazy-loading 'pci_requests' on Instance uuid c3b78b62-b1f8-477d-8ae4-9af540dc72ff obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:04:53 compute-0 nova_compute[192079]: 2025-10-02 12:04:53.088 2 DEBUG nova.objects.instance [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Lazy-loading 'pci_devices' on Instance uuid c3b78b62-b1f8-477d-8ae4-9af540dc72ff obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:04:53 compute-0 nova_compute[192079]: 2025-10-02 12:04:53.099 2 DEBUG nova.objects.instance [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Lazy-loading 'resources' on Instance uuid c3b78b62-b1f8-477d-8ae4-9af540dc72ff obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:04:53 compute-0 nova_compute[192079]: 2025-10-02 12:04:53.110 2 DEBUG nova.objects.instance [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Lazy-loading 'migration_context' on Instance uuid c3b78b62-b1f8-477d-8ae4-9af540dc72ff obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:04:53 compute-0 nova_compute[192079]: 2025-10-02 12:04:53.122 2 DEBUG nova.objects.instance [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Trying to apply a migration context that does not seem to be set for this instance apply_migration_context /usr/lib/python3.9/site-packages/nova/objects/instance.py:1032
Oct 02 12:04:53 compute-0 nova_compute[192079]: 2025-10-02 12:04:53.134 2 DEBUG nova.virt.libvirt.driver [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Shutting down instance from state 1 _clean_shutdown /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4071
Oct 02 12:04:53 compute-0 podman[223609]: 2025-10-02 12:04:53.141909389 +0000 UTC m=+0.053707831 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, container_name=ovn_metadata_agent, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes 
Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true)
Oct 02 12:04:53 compute-0 podman[223610]: 2025-10-02 12:04:53.153121446 +0000 UTC m=+0.058935603 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:04:54 compute-0 nova_compute[192079]: 2025-10-02 12:04:54.599 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:56 compute-0 nova_compute[192079]: 2025-10-02 12:04:56.179 2 DEBUG nova.network.neutron [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Port 5e772b33-6577-4ba1-b187-e4779ef49ed6 updated with migration profile {'migrating_to': 'compute-0.ctlplane.example.com'} successfully _setup_migration_port_profile /usr/lib/python3.9/site-packages/nova/network/neutron.py:354
Oct 02 12:04:56 compute-0 nova_compute[192079]: 2025-10-02 12:04:56.263 2 DEBUG nova.compute.manager [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] pre_live_migration result data is LibvirtLiveMigrateData(bdms=[],block_migration=True,disk_available_mb=74752,disk_over_commit=False,dst_numa_info=<?>,dst_supports_numa_live_migration=<?>,dst_wants_file_backed_memory=False,file_backed_memory_discard=<?>,filename='tmp1_i3q1h5',graphics_listen_addr_spice=127.0.0.1,graphics_listen_addr_vnc=::,image_type='qcow2',instance_relative_path='a6bb5263-b0c7-4282-8e02-3503fd778e6f',is_shared_block_storage=False,is_shared_instance_path=False,is_volume_backed=False,migration=<?>,old_vol_attachment_ids={},serial_listen_addr=None,serial_listen_ports=[],src_supports_native_luks=<?>,src_supports_numa_live_migration=<?>,supported_perf_events=[],target_connect_addr=None,vifs=[VIFMigrateData],wait_for_vif_plugged=True) pre_live_migration /usr/lib/python3.9/site-packages/nova/compute/manager.py:8723
Oct 02 12:04:56 compute-0 kernel: tap5e772b33-65: entered promiscuous mode
Oct 02 12:04:56 compute-0 ovn_controller[94336]: 2025-10-02T12:04:56Z|00097|binding|INFO|Claiming lport 5e772b33-6577-4ba1-b187-e4779ef49ed6 for this additional chassis.
Oct 02 12:04:56 compute-0 ovn_controller[94336]: 2025-10-02T12:04:56Z|00098|binding|INFO|5e772b33-6577-4ba1-b187-e4779ef49ed6: Claiming fa:16:3e:09:b4:ad 10.100.0.5
Oct 02 12:04:56 compute-0 ovn_controller[94336]: 2025-10-02T12:04:56Z|00099|binding|INFO|Claiming lport 6ca21a4d-cad8-4eff-bb5a-78e0705eaf1f for this additional chassis.
Oct 02 12:04:56 compute-0 ovn_controller[94336]: 2025-10-02T12:04:56Z|00100|binding|INFO|6ca21a4d-cad8-4eff-bb5a-78e0705eaf1f: Claiming fa:16:3e:90:47:36 19.80.0.218
Oct 02 12:04:56 compute-0 nova_compute[192079]: 2025-10-02 12:04:56.533 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:56 compute-0 NetworkManager[51160]: <info>  [1759406696.5370] manager: (tap5e772b33-65): new Tun device (/org/freedesktop/NetworkManager/Devices/55)
Oct 02 12:04:56 compute-0 systemd-udevd[223662]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:04:56 compute-0 nova_compute[192079]: 2025-10-02 12:04:56.571 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:56 compute-0 NetworkManager[51160]: <info>  [1759406696.5737] device (tap5e772b33-65): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:04:56 compute-0 NetworkManager[51160]: <info>  [1759406696.5748] device (tap5e772b33-65): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:04:56 compute-0 ovn_controller[94336]: 2025-10-02T12:04:56Z|00101|binding|INFO|Setting lport 5e772b33-6577-4ba1-b187-e4779ef49ed6 ovn-installed in OVS
Oct 02 12:04:56 compute-0 nova_compute[192079]: 2025-10-02 12:04:56.579 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:04:56 compute-0 systemd-machined[152150]: New machine qemu-17-instance-0000001c.
Oct 02 12:04:56 compute-0 systemd[1]: Started Virtual Machine qemu-17-instance-0000001c.
Oct 02 12:04:57 compute-0 nova_compute[192079]: 2025-10-02 12:04:57.508 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406697.5076597, a6bb5263-b0c7-4282-8e02-3503fd778e6f => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:04:57 compute-0 nova_compute[192079]: 2025-10-02 12:04:57.510 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] VM Started (Lifecycle Event)
Oct 02 12:04:57 compute-0 nova_compute[192079]: 2025-10-02 12:04:57.538 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:04:58 compute-0 nova_compute[192079]: 2025-10-02 12:04:58.314 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406698.3145003, a6bb5263-b0c7-4282-8e02-3503fd778e6f => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:04:58 compute-0 nova_compute[192079]: 2025-10-02 12:04:58.315 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] VM Resumed (Lifecycle Event)
Oct 02 12:04:58 compute-0 nova_compute[192079]: 2025-10-02 12:04:58.336 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:04:58 compute-0 nova_compute[192079]: 2025-10-02 12:04:58.338 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: active, current task_state: migrating, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:04:58 compute-0 nova_compute[192079]: 2025-10-02 12:04:58.357 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] During the sync_power process the instance has moved from host compute-1.ctlplane.example.com to host compute-0.ctlplane.example.com
Oct 02 12:04:59 compute-0 nova_compute[192079]: 2025-10-02 12:04:59.608 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:00 compute-0 podman[223696]: 2025-10-02 12:05:00.146554559 +0000 UTC m=+0.059227651 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, container_name=ceilometer_agent_compute, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, managed_by=edpm_ansible, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', 
'/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm, org.label-schema.schema-version=1.0)
Oct 02 12:05:00 compute-0 ovn_controller[94336]: 2025-10-02T12:05:00Z|00102|binding|INFO|Claiming lport 5e772b33-6577-4ba1-b187-e4779ef49ed6 for this chassis.
Oct 02 12:05:00 compute-0 ovn_controller[94336]: 2025-10-02T12:05:00Z|00103|binding|INFO|5e772b33-6577-4ba1-b187-e4779ef49ed6: Claiming fa:16:3e:09:b4:ad 10.100.0.5
Oct 02 12:05:00 compute-0 ovn_controller[94336]: 2025-10-02T12:05:00Z|00104|binding|INFO|Claiming lport 6ca21a4d-cad8-4eff-bb5a-78e0705eaf1f for this chassis.
Oct 02 12:05:00 compute-0 ovn_controller[94336]: 2025-10-02T12:05:00Z|00105|binding|INFO|6ca21a4d-cad8-4eff-bb5a-78e0705eaf1f: Claiming fa:16:3e:90:47:36 19.80.0.218
Oct 02 12:05:00 compute-0 ovn_controller[94336]: 2025-10-02T12:05:00Z|00106|binding|INFO|Setting lport 5e772b33-6577-4ba1-b187-e4779ef49ed6 up in Southbound
Oct 02 12:05:00 compute-0 ovn_controller[94336]: 2025-10-02T12:05:00Z|00107|binding|INFO|Setting lport 6ca21a4d-cad8-4eff-bb5a-78e0705eaf1f up in Southbound
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:00.400 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:90:47:36 19.80.0.218'], port_security=['fa:16:3e:90:47:36 19.80.0.218'], type=, nat_addresses=[], virtual_parent=[], up=[True], options={'requested-chassis': ''}, parent_port=['5e772b33-6577-4ba1-b187-e4779ef49ed6'], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'name': 'tempest-subport-890758388', 'neutron:cidrs': '19.80.0.218/24', 'neutron:device_id': '', 'neutron:device_owner': 'trunk:subport', 'neutron:mtu': '', 'neutron:network_name': 'neutron-c91b95f8-b43d-450e-bf75-7418a7f0c3c0', 'neutron:port_capabilities': '', 'neutron:port_name': 'tempest-subport-890758388', 'neutron:project_id': 'f7cb78d24d1a4511a59ced45ccc4a1c7', 'neutron:revision_number': '4', 'neutron:security_group_ids': 'a459d514-aab4-4030-9850-e066abdeaccc', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[42], additional_encap=[], encap=[], mirror_rules=[], datapath=a6f25993-8956-421b-9333-413f987f6201, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=2, gateway_chassis=[], requested_chassis=[], logical_port=6ca21a4d-cad8-4eff-bb5a-78e0705eaf1f) old=Port_Binding(up=[False], additional_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:00.402 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:09:b4:ad 10.100.0.5'], port_security=['fa:16:3e:09:b4:ad 10.100.0.5'], type=, nat_addresses=[], virtual_parent=[], up=[True], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'name': 'tempest-parent-983948384', 'neutron:cidrs': '10.100.0.5/28', 'neutron:device_id': 'a6bb5263-b0c7-4282-8e02-3503fd778e6f', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-664b6526-6df1-4024-9bab-37218e6c18bd', 'neutron:port_capabilities': '', 'neutron:port_name': 'tempest-parent-983948384', 'neutron:project_id': 'f7cb78d24d1a4511a59ced45ccc4a1c7', 'neutron:revision_number': '11', 'neutron:security_group_ids': 'a459d514-aab4-4030-9850-e066abdeaccc', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=eddfb51e-1095-4b3d-a2dc-f2557cf13b11, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=5e772b33-6577-4ba1-b187-e4779ef49ed6) old=Port_Binding(up=[False], additional_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:00.403 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 6ca21a4d-cad8-4eff-bb5a-78e0705eaf1f in datapath c91b95f8-b43d-450e-bf75-7418a7f0c3c0 bound to our chassis
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:00.404 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network c91b95f8-b43d-450e-bf75-7418a7f0c3c0
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:00.415 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[95f0b3eb-57f6-4340-b253-b8ecdaa0d6e9]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:00.416 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tapc91b95f8-b1 in ovnmeta-c91b95f8-b43d-450e-bf75-7418a7f0c3c0 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:00.420 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tapc91b95f8-b0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:00.420 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[27288749-6dbc-40e9-a52d-081605d7e6f0]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:00.421 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f876270f-f9aa-4d3e-ae58-a638300a44e6]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:00.432 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[a52151b4-110e-4151-b01e-76cfeb036889]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:00.454 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c46fd9ae-a576-4b3b-912a-24b353893509]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:00.483 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[6e3be101-adf1-42e1-9782-c4a24597751c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:00.487 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[72775beb-cb9c-451b-be9e-8f0302a7b086]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:00 compute-0 NetworkManager[51160]: <info>  [1759406700.4889] manager: (tapc91b95f8-b0): new Veth device (/org/freedesktop/NetworkManager/Devices/56)
Oct 02 12:05:00 compute-0 systemd-udevd[223723]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:00.517 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[169e278a-230e-4163-9ad5-d19aca675e6d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:00.522 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[abebca0b-36ac-4c1f-a487-66bdc2f589a7]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:00 compute-0 NetworkManager[51160]: <info>  [1759406700.5464] device (tapc91b95f8-b0): carrier: link connected
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:00.551 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[5a59fb93-f74c-4491-a2d5-fb3ce5955af5]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:00.566 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[39cf9f9c-1faa-4a3d-b8bd-91e4ec2f4a32]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapc91b95f8-b1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:02:23:93'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 33], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 469817, 'reachable_time': 31564, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 223742, 'error': None, 'target': 'ovnmeta-c91b95f8-b43d-450e-bf75-7418a7f0c3c0', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:00.581 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9260c7d2-3611-4bce-be89-fa0c6e4af365]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe02:2393'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 469817, 'tstamp': 469817}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 223743, 'error': None, 'target': 'ovnmeta-c91b95f8-b43d-450e-bf75-7418a7f0c3c0', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:00.597 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ab8ed7fe-adac-431d-8a6d-069388195955]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapc91b95f8-b1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:02:23:93'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 33], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 469817, 'reachable_time': 31564, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 223744, 'error': None, 'target': 'ovnmeta-c91b95f8-b43d-450e-bf75-7418a7f0c3c0', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:00.627 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[02c6b725-b7b8-4a77-b047-bd59142ca18a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:00 compute-0 nova_compute[192079]: 2025-10-02 12:05:00.642 2 INFO nova.compute.manager [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Post operation of migration started
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:00.684 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[30cf4d5a-d811-4021-a646-926d92ae8fc5]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:00.686 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapc91b95f8-b0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:00.686 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:00.686 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapc91b95f8-b0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:05:00 compute-0 nova_compute[192079]: 2025-10-02 12:05:00.688 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:00 compute-0 kernel: tapc91b95f8-b0: entered promiscuous mode
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:00.691 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tapc91b95f8-b0, col_values=(('external_ids', {'iface-id': 'efb84ecd-545f-42a1-ad69-585d2998efac'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:05:00 compute-0 NetworkManager[51160]: <info>  [1759406700.6928] manager: (tapc91b95f8-b0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/57)
Oct 02 12:05:00 compute-0 ovn_controller[94336]: 2025-10-02T12:05:00Z|00108|binding|INFO|Releasing lport efb84ecd-545f-42a1-ad69-585d2998efac from this chassis (sb_readonly=0)
Oct 02 12:05:00 compute-0 nova_compute[192079]: 2025-10-02 12:05:00.693 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:00 compute-0 nova_compute[192079]: 2025-10-02 12:05:00.704 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:00.705 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/c91b95f8-b43d-450e-bf75-7418a7f0c3c0.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/c91b95f8-b43d-450e-bf75-7418a7f0c3c0.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:00.706 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[42e08d7c-9b93-4f28-92b1-b3eabdb9686b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:00.707 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-c91b95f8-b43d-450e-bf75-7418a7f0c3c0
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/c91b95f8-b43d-450e-bf75-7418a7f0c3c0.pid.haproxy
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID c91b95f8-b43d-450e-bf75-7418a7f0c3c0
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:05:00 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:00.708 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-c91b95f8-b43d-450e-bf75-7418a7f0c3c0', 'env', 'PROCESS_TAG=haproxy-c91b95f8-b43d-450e-bf75-7418a7f0c3c0', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/c91b95f8-b43d-450e-bf75-7418a7f0c3c0.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:05:01 compute-0 podman[223778]: 2025-10-02 12:05:01.061139927 +0000 UTC m=+0.049014243 container create c129b48b89b4473725b80abec3728191c5d88e34c287ac27973f7c19eaffd1cc (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-c91b95f8-b43d-450e-bf75-7418a7f0c3c0, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:05:01 compute-0 systemd[1]: Started libpod-conmon-c129b48b89b4473725b80abec3728191c5d88e34c287ac27973f7c19eaffd1cc.scope.
Oct 02 12:05:01 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:05:01 compute-0 podman[223778]: 2025-10-02 12:05:01.031480755 +0000 UTC m=+0.019355101 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:05:01 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/68511f20dd6bf659b11358109da6b0e08440bf88638ee227c3361e85b9009417/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:05:01 compute-0 podman[223778]: 2025-10-02 12:05:01.142211643 +0000 UTC m=+0.130085989 container init c129b48b89b4473725b80abec3728191c5d88e34c287ac27973f7c19eaffd1cc (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-c91b95f8-b43d-450e-bf75-7418a7f0c3c0, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.build-date=20251001)
Oct 02 12:05:01 compute-0 podman[223778]: 2025-10-02 12:05:01.14791869 +0000 UTC m=+0.135793006 container start c129b48b89b4473725b80abec3728191c5d88e34c287ac27973f7c19eaffd1cc (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-c91b95f8-b43d-450e-bf75-7418a7f0c3c0, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, org.label-schema.build-date=20251001)
Oct 02 12:05:01 compute-0 neutron-haproxy-ovnmeta-c91b95f8-b43d-450e-bf75-7418a7f0c3c0[223793]: [NOTICE]   (223797) : New worker (223799) forked
Oct 02 12:05:01 compute-0 neutron-haproxy-ovnmeta-c91b95f8-b43d-450e-bf75-7418a7f0c3c0[223793]: [NOTICE]   (223797) : Loading success.
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:01.204 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 5e772b33-6577-4ba1-b187-e4779ef49ed6 in datapath 664b6526-6df1-4024-9bab-37218e6c18bd unbound from our chassis
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:01.206 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 664b6526-6df1-4024-9bab-37218e6c18bd
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:01.215 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[571abe89-96fd-4742-a2c5-9eba07a3e4e1]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:01.216 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap664b6526-61 in ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:01.218 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap664b6526-60 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:01.218 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1b7eedc6-7ce2-499b-8e90-e46b7914edc8]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:01.219 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ef4778ca-58a8-44d1-8a84-dfed6dbc58e7]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:01.230 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[84cce5db-0d06-4faf-b902-a82bd620be7a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:01.254 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[676874d3-4a81-40cd-9dd3-ecc2d170b1a5]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:01.277 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[a213548d-2eb3-418b-b839-a76c4bdb101e]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:01 compute-0 systemd-udevd[223726]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:01.285 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b717d2c5-7282-48a3-8795-99c838d37cdb]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:01 compute-0 NetworkManager[51160]: <info>  [1759406701.2860] manager: (tap664b6526-60): new Veth device (/org/freedesktop/NetworkManager/Devices/58)
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:01.319 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[0d2cb040-6c1f-4192-9fe2-1d41a79e74d8]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:01 compute-0 nova_compute[192079]: 2025-10-02 12:05:01.322 2 DEBUG oslo_concurrency.lockutils [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Acquiring lock "refresh_cache-a6bb5263-b0c7-4282-8e02-3503fd778e6f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:05:01 compute-0 nova_compute[192079]: 2025-10-02 12:05:01.322 2 DEBUG oslo_concurrency.lockutils [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Acquired lock "refresh_cache-a6bb5263-b0c7-4282-8e02-3503fd778e6f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:05:01 compute-0 nova_compute[192079]: 2025-10-02 12:05:01.323 2 DEBUG nova.network.neutron [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:01.323 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[53aedcb5-0d4e-408d-90a8-aa83cd6dd51c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:01 compute-0 NetworkManager[51160]: <info>  [1759406701.3473] device (tap664b6526-60): carrier: link connected
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:01.353 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[9e312ef6-a1be-4047-9c52-9301bc6d9e11]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:01.369 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[93b28ab0-8b95-41db-a723-5274a75c9a00]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap664b6526-61'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:5c:8c:2f'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 34], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 469897, 'reachable_time': 32811, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 223825, 'error': None, 'target': 'ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:01.386 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c87af1ab-8033-4e3a-a8aa-b9cbd9339469]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe5c:8c2f'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 469897, 'tstamp': 469897}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 223826, 'error': None, 'target': 'ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:01.402 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6447a23e-a1f9-47b4-8293-15e1de87d6e2]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap664b6526-61'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:5c:8c:2f'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 34], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 469897, 'reachable_time': 32811, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 223827, 'error': None, 'target': 'ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:01.434 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[fc3dc137-b77d-4308-8095-f4bf8ac703eb]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:01.486 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[de96a211-d57a-462c-ab28-0ba0fdeac429]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:01.487 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap664b6526-60, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:01.488 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:01.488 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap664b6526-60, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:05:01 compute-0 nova_compute[192079]: 2025-10-02 12:05:01.490 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:01 compute-0 NetworkManager[51160]: <info>  [1759406701.4909] manager: (tap664b6526-60): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/59)
Oct 02 12:05:01 compute-0 kernel: tap664b6526-60: entered promiscuous mode
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:01.498 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap664b6526-60, col_values=(('external_ids', {'iface-id': '2f7dc774-b718-4d9e-9655-fbc5ffa141e8'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:05:01 compute-0 nova_compute[192079]: 2025-10-02 12:05:01.500 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:01 compute-0 ovn_controller[94336]: 2025-10-02T12:05:01Z|00109|binding|INFO|Releasing lport 2f7dc774-b718-4d9e-9655-fbc5ffa141e8 from this chassis (sb_readonly=0)
Oct 02 12:05:01 compute-0 nova_compute[192079]: 2025-10-02 12:05:01.500 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:01.503 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/664b6526-6df1-4024-9bab-37218e6c18bd.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/664b6526-6df1-4024-9bab-37218e6c18bd.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:01.504 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[909d3b19-2dbd-4ca5-91dc-07ee80165d6b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:01.505 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-664b6526-6df1-4024-9bab-37218e6c18bd
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/664b6526-6df1-4024-9bab-37218e6c18bd.pid.haproxy
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 664b6526-6df1-4024-9bab-37218e6c18bd
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:05:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:01.505 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd', 'env', 'PROCESS_TAG=haproxy-664b6526-6df1-4024-9bab-37218e6c18bd', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/664b6526-6df1-4024-9bab-37218e6c18bd.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:05:01 compute-0 nova_compute[192079]: 2025-10-02 12:05:01.513 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:01 compute-0 nova_compute[192079]: 2025-10-02 12:05:01.535 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:01 compute-0 podman[223877]: 2025-10-02 12:05:01.839152597 +0000 UTC m=+0.021035566 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:05:02 compute-0 podman[223877]: 2025-10-02 12:05:02.118847108 +0000 UTC m=+0.300730057 container create 643c6aa59b0a86c66ac8c41e2d338fb0ac7682a44815333054c23c070d2a2e09 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:05:02 compute-0 systemd[1]: Started libpod-conmon-643c6aa59b0a86c66ac8c41e2d338fb0ac7682a44815333054c23c070d2a2e09.scope.
Oct 02 12:05:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:02.206 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:05:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:02.207 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:05:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:02.208 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:05:02 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:05:02 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/27c90b63ff0b33d76dedf732f0a465fce46088ecd124dd067abc0b9b01ed9a98/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:05:02 compute-0 podman[223877]: 2025-10-02 12:05:02.238564652 +0000 UTC m=+0.420447621 container init 643c6aa59b0a86c66ac8c41e2d338fb0ac7682a44815333054c23c070d2a2e09 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:05:02 compute-0 podman[223877]: 2025-10-02 12:05:02.24506378 +0000 UTC m=+0.426946729 container start 643c6aa59b0a86c66ac8c41e2d338fb0ac7682a44815333054c23c070d2a2e09 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:05:02 compute-0 neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd[223893]: [NOTICE]   (223897) : New worker (223899) forked
Oct 02 12:05:02 compute-0 neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd[223893]: [NOTICE]   (223897) : Loading success.
Oct 02 12:05:03 compute-0 nova_compute[192079]: 2025-10-02 12:05:03.178 2 DEBUG nova.virt.libvirt.driver [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Instance in state 1 after 10 seconds - resending shutdown _clean_shutdown /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4101
Oct 02 12:05:03 compute-0 nova_compute[192079]: 2025-10-02 12:05:03.280 2 DEBUG nova.network.neutron [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Updating instance_info_cache with network_info: [{"id": "5e772b33-6577-4ba1-b187-e4779ef49ed6", "address": "fa:16:3e:09:b4:ad", "network": {"id": "664b6526-6df1-4024-9bab-37218e6c18bd", "bridge": "br-int", "label": "tempest-LiveMigrationTest-2017832683-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f7cb78d24d1a4511a59ced45ccc4a1c7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5e772b33-65", "ovs_interfaceid": "5e772b33-6577-4ba1-b187-e4779ef49ed6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {"os_vif_delegation": true}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:05:03 compute-0 nova_compute[192079]: 2025-10-02 12:05:03.324 2 DEBUG oslo_concurrency.lockutils [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Releasing lock "refresh_cache-a6bb5263-b0c7-4282-8e02-3503fd778e6f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:05:03 compute-0 nova_compute[192079]: 2025-10-02 12:05:03.460 2 DEBUG oslo_concurrency.lockutils [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.allocate_pci_devices_for_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:05:03 compute-0 nova_compute[192079]: 2025-10-02 12:05:03.460 2 DEBUG oslo_concurrency.lockutils [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.allocate_pci_devices_for_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:05:03 compute-0 nova_compute[192079]: 2025-10-02 12:05:03.461 2 DEBUG oslo_concurrency.lockutils [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.allocate_pci_devices_for_instance" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:05:03 compute-0 nova_compute[192079]: 2025-10-02 12:05:03.464 2 INFO nova.virt.libvirt.driver [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Sending announce-self command to QEMU monitor. Attempt 1 of 3
Oct 02 12:05:03 compute-0 virtqemud[191807]: Domain id=17 name='instance-0000001c' uuid=a6bb5263-b0c7-4282-8e02-3503fd778e6f is tainted: custom-monitor
Oct 02 12:05:04 compute-0 nova_compute[192079]: 2025-10-02 12:05:04.479 2 INFO nova.virt.libvirt.driver [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Sending announce-self command to QEMU monitor. Attempt 2 of 3
Oct 02 12:05:04 compute-0 nova_compute[192079]: 2025-10-02 12:05:04.610 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:05 compute-0 podman[223908]: 2025-10-02 12:05:05.203090221 +0000 UTC m=+0.104200291 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, name=ubi9-minimal, vendor=Red Hat, Inc., url=https://catalog.redhat.com/en/search?searchType=containers, container_name=openstack_network_exporter, maintainer=Red Hat, Inc., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, architecture=x86_64, io.openshift.expose-services=, release=1755695350, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, build-date=2025-08-20T13:12:41, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., io.buildah.version=1.33.7, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, config_id=edpm, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., vcs-type=git, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, com.redhat.component=ubi9-minimal-container, version=9.6, distribution-scope=public, io.openshift.tags=minimal rhel9)
Oct 02 12:05:05 compute-0 podman[223909]: 2025-10-02 12:05:05.203112441 +0000 UTC m=+0.087596027 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_id=multipathd, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']})
Oct 02 12:05:05 compute-0 nova_compute[192079]: 2025-10-02 12:05:05.484 2 INFO nova.virt.libvirt.driver [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Sending announce-self command to QEMU monitor. Attempt 3 of 3
Oct 02 12:05:05 compute-0 nova_compute[192079]: 2025-10-02 12:05:05.490 2 DEBUG nova.compute.manager [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:05:05 compute-0 nova_compute[192079]: 2025-10-02 12:05:05.546 2 DEBUG nova.objects.instance [None req-3cd747b3-fa97-4d66-8972-f85e23e31784 ba082148882647d48482e0be9e06c582 9ec1cf31f7044579b02c7077aa7d0973 - - default default] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Trying to apply a migration context that does not seem to be set for this instance apply_migration_context /usr/lib/python3.9/site-packages/nova/objects/instance.py:1032
Oct 02 12:05:06 compute-0 systemd[1]: machine-qemu\x2d16\x2dinstance\x2d0000001b.scope: Deactivated successfully.
Oct 02 12:05:06 compute-0 systemd[1]: machine-qemu\x2d16\x2dinstance\x2d0000001b.scope: Consumed 12.916s CPU time.
Oct 02 12:05:06 compute-0 systemd-machined[152150]: Machine qemu-16-instance-0000001b terminated.
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.326 2 INFO nova.virt.libvirt.driver [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Instance shutdown successfully after 13 seconds.
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.334 2 INFO nova.virt.libvirt.driver [-] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Instance destroyed successfully.
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.340 2 INFO nova.virt.libvirt.driver [-] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Instance destroyed successfully.
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.340 2 INFO nova.virt.libvirt.driver [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Deleting instance files /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff_del
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.341 2 INFO nova.virt.libvirt.driver [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Deletion of /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff_del complete
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.537 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.589 2 DEBUG nova.virt.libvirt.driver [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.590 2 INFO nova.virt.libvirt.driver [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Creating image(s)
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.591 2 DEBUG oslo_concurrency.lockutils [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Acquiring lock "/var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.591 2 DEBUG oslo_concurrency.lockutils [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Lock "/var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.592 2 DEBUG oslo_concurrency.lockutils [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Lock "/var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.617 2 DEBUG oslo_concurrency.processutils [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.711 2 DEBUG oslo_concurrency.processutils [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.094s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.713 2 DEBUG oslo_concurrency.lockutils [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.714 2 DEBUG oslo_concurrency.lockutils [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.742 2 DEBUG oslo_concurrency.processutils [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.826 2 DEBUG oslo_concurrency.processutils [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.084s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.828 2 DEBUG oslo_concurrency.processutils [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.869 2 DEBUG oslo_concurrency.processutils [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk 1073741824" returned: 0 in 0.041s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.871 2 DEBUG oslo_concurrency.lockutils [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.157s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.871 2 DEBUG oslo_concurrency.processutils [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.925 2 DEBUG oslo_concurrency.processutils [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.053s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.926 2 DEBUG nova.virt.disk.api [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Checking if we can resize image /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.927 2 DEBUG oslo_concurrency.processutils [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.993 2 DEBUG oslo_concurrency.processutils [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk --force-share --output=json" returned: 0 in 0.066s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.994 2 DEBUG nova.virt.disk.api [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Cannot resize image /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.995 2 DEBUG nova.virt.libvirt.driver [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.996 2 DEBUG nova.virt.libvirt.driver [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Ensure instance console log exists: /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.997 2 DEBUG oslo_concurrency.lockutils [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.997 2 DEBUG oslo_concurrency.lockutils [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:05:06 compute-0 nova_compute[192079]: 2025-10-02 12:05:06.998 2 DEBUG oslo_concurrency.lockutils [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:05:07 compute-0 nova_compute[192079]: 2025-10-02 12:05:07.001 2 DEBUG nova.virt.libvirt.driver [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Start _get_guest_xml network_info=[] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:05:07 compute-0 nova_compute[192079]: 2025-10-02 12:05:07.007 2 WARNING nova.virt.libvirt.driver [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.: NotImplementedError
Oct 02 12:05:07 compute-0 nova_compute[192079]: 2025-10-02 12:05:07.019 2 DEBUG nova.virt.libvirt.host [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:05:07 compute-0 nova_compute[192079]: 2025-10-02 12:05:07.020 2 DEBUG nova.virt.libvirt.host [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:05:07 compute-0 nova_compute[192079]: 2025-10-02 12:05:07.024 2 DEBUG nova.virt.libvirt.host [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:05:07 compute-0 nova_compute[192079]: 2025-10-02 12:05:07.024 2 DEBUG nova.virt.libvirt.host [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:05:07 compute-0 nova_compute[192079]: 2025-10-02 12:05:07.026 2 DEBUG nova.virt.libvirt.driver [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:05:07 compute-0 nova_compute[192079]: 2025-10-02 12:05:07.026 2 DEBUG nova.virt.hardware [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:05:07 compute-0 nova_compute[192079]: 2025-10-02 12:05:07.026 2 DEBUG nova.virt.hardware [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:05:07 compute-0 nova_compute[192079]: 2025-10-02 12:05:07.027 2 DEBUG nova.virt.hardware [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:05:07 compute-0 nova_compute[192079]: 2025-10-02 12:05:07.027 2 DEBUG nova.virt.hardware [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:05:07 compute-0 nova_compute[192079]: 2025-10-02 12:05:07.027 2 DEBUG nova.virt.hardware [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:05:07 compute-0 nova_compute[192079]: 2025-10-02 12:05:07.027 2 DEBUG nova.virt.hardware [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:05:07 compute-0 nova_compute[192079]: 2025-10-02 12:05:07.027 2 DEBUG nova.virt.hardware [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:05:07 compute-0 nova_compute[192079]: 2025-10-02 12:05:07.028 2 DEBUG nova.virt.hardware [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:05:07 compute-0 nova_compute[192079]: 2025-10-02 12:05:07.028 2 DEBUG nova.virt.hardware [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:05:07 compute-0 nova_compute[192079]: 2025-10-02 12:05:07.028 2 DEBUG nova.virt.hardware [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:05:07 compute-0 nova_compute[192079]: 2025-10-02 12:05:07.028 2 DEBUG nova.virt.hardware [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:05:07 compute-0 nova_compute[192079]: 2025-10-02 12:05:07.029 2 DEBUG nova.objects.instance [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Lazy-loading 'vcpu_model' on Instance uuid c3b78b62-b1f8-477d-8ae4-9af540dc72ff obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:05:07 compute-0 nova_compute[192079]: 2025-10-02 12:05:07.683 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:05:09 compute-0 nova_compute[192079]: 2025-10-02 12:05:09.613 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:09 compute-0 nova_compute[192079]: 2025-10-02 12:05:09.733 2 DEBUG nova.virt.libvirt.driver [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:05:09 compute-0 nova_compute[192079]:   <uuid>c3b78b62-b1f8-477d-8ae4-9af540dc72ff</uuid>
Oct 02 12:05:09 compute-0 nova_compute[192079]:   <name>instance-0000001b</name>
Oct 02 12:05:09 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:05:09 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:05:09 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:05:09 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:       <nova:name>tempest-ServersAdmin275Test-server-1693910444</nova:name>
Oct 02 12:05:09 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:05:07</nova:creationTime>
Oct 02 12:05:09 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:05:09 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:05:09 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:05:09 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:05:09 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:05:09 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:05:09 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:05:09 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:05:09 compute-0 nova_compute[192079]:         <nova:user uuid="e88312197d9e4d24a0c49002f36053ba">tempest-ServersAdmin275Test-1657122363-project-member</nova:user>
Oct 02 12:05:09 compute-0 nova_compute[192079]:         <nova:project uuid="dbe4d58d976745ec9575dfbfcfb67333">tempest-ServersAdmin275Test-1657122363</nova:project>
Oct 02 12:05:09 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:05:09 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:       <nova:ports/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:05:09 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:05:09 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <system>
Oct 02 12:05:09 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:05:09 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:05:09 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:05:09 compute-0 nova_compute[192079]:       <entry name="serial">c3b78b62-b1f8-477d-8ae4-9af540dc72ff</entry>
Oct 02 12:05:09 compute-0 nova_compute[192079]:       <entry name="uuid">c3b78b62-b1f8-477d-8ae4-9af540dc72ff</entry>
Oct 02 12:05:09 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     </system>
Oct 02 12:05:09 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:05:09 compute-0 nova_compute[192079]:   <os>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:   </os>
Oct 02 12:05:09 compute-0 nova_compute[192079]:   <features>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:   </features>
Oct 02 12:05:09 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:05:09 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:05:09 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:05:09 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:05:09 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk.config"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:05:09 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/console.log" append="off"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <video>
Oct 02 12:05:09 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     </video>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:05:09 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:05:09 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:05:09 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:05:09 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:05:09 compute-0 nova_compute[192079]: </domain>
Oct 02 12:05:09 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:05:09 compute-0 nova_compute[192079]: 2025-10-02 12:05:09.810 2 DEBUG oslo_concurrency.lockutils [None req-c6683d03-b790-46f1-8332-207174d80bfb 5f75195e56504673bd403ce69cbc28ca f7cb78d24d1a4511a59ced45ccc4a1c7 - - default default] Acquiring lock "a6bb5263-b0c7-4282-8e02-3503fd778e6f" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:05:09 compute-0 nova_compute[192079]: 2025-10-02 12:05:09.811 2 DEBUG oslo_concurrency.lockutils [None req-c6683d03-b790-46f1-8332-207174d80bfb 5f75195e56504673bd403ce69cbc28ca f7cb78d24d1a4511a59ced45ccc4a1c7 - - default default] Lock "a6bb5263-b0c7-4282-8e02-3503fd778e6f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:05:09 compute-0 nova_compute[192079]: 2025-10-02 12:05:09.811 2 DEBUG oslo_concurrency.lockutils [None req-c6683d03-b790-46f1-8332-207174d80bfb 5f75195e56504673bd403ce69cbc28ca f7cb78d24d1a4511a59ced45ccc4a1c7 - - default default] Acquiring lock "a6bb5263-b0c7-4282-8e02-3503fd778e6f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:05:09 compute-0 nova_compute[192079]: 2025-10-02 12:05:09.811 2 DEBUG oslo_concurrency.lockutils [None req-c6683d03-b790-46f1-8332-207174d80bfb 5f75195e56504673bd403ce69cbc28ca f7cb78d24d1a4511a59ced45ccc4a1c7 - - default default] Lock "a6bb5263-b0c7-4282-8e02-3503fd778e6f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:05:09 compute-0 nova_compute[192079]: 2025-10-02 12:05:09.811 2 DEBUG oslo_concurrency.lockutils [None req-c6683d03-b790-46f1-8332-207174d80bfb 5f75195e56504673bd403ce69cbc28ca f7cb78d24d1a4511a59ced45ccc4a1c7 - - default default] Lock "a6bb5263-b0c7-4282-8e02-3503fd778e6f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:05:09 compute-0 nova_compute[192079]: 2025-10-02 12:05:09.817 2 DEBUG nova.virt.libvirt.driver [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:05:09 compute-0 nova_compute[192079]: 2025-10-02 12:05:09.818 2 DEBUG nova.virt.libvirt.driver [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:05:09 compute-0 nova_compute[192079]: 2025-10-02 12:05:09.818 2 INFO nova.virt.libvirt.driver [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Using config drive
Oct 02 12:05:09 compute-0 nova_compute[192079]: 2025-10-02 12:05:09.837 2 INFO nova.compute.manager [None req-c6683d03-b790-46f1-8332-207174d80bfb 5f75195e56504673bd403ce69cbc28ca f7cb78d24d1a4511a59ced45ccc4a1c7 - - default default] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Terminating instance
Oct 02 12:05:09 compute-0 nova_compute[192079]: 2025-10-02 12:05:09.870 2 DEBUG nova.compute.manager [None req-c6683d03-b790-46f1-8332-207174d80bfb 5f75195e56504673bd403ce69cbc28ca f7cb78d24d1a4511a59ced45ccc4a1c7 - - default default] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:05:09 compute-0 nova_compute[192079]: 2025-10-02 12:05:09.871 2 DEBUG nova.objects.instance [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Lazy-loading 'ec2_ids' on Instance uuid c3b78b62-b1f8-477d-8ae4-9af540dc72ff obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:05:09 compute-0 kernel: tap5e772b33-65 (unregistering): left promiscuous mode
Oct 02 12:05:09 compute-0 NetworkManager[51160]: <info>  [1759406709.9111] device (tap5e772b33-65): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:05:09 compute-0 nova_compute[192079]: 2025-10-02 12:05:09.913 2 DEBUG nova.objects.instance [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Lazy-loading 'keypairs' on Instance uuid c3b78b62-b1f8-477d-8ae4-9af540dc72ff obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:05:09 compute-0 nova_compute[192079]: 2025-10-02 12:05:09.928 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:09 compute-0 ovn_controller[94336]: 2025-10-02T12:05:09Z|00110|binding|INFO|Releasing lport 5e772b33-6577-4ba1-b187-e4779ef49ed6 from this chassis (sb_readonly=0)
Oct 02 12:05:09 compute-0 ovn_controller[94336]: 2025-10-02T12:05:09Z|00111|binding|INFO|Setting lport 5e772b33-6577-4ba1-b187-e4779ef49ed6 down in Southbound
Oct 02 12:05:09 compute-0 ovn_controller[94336]: 2025-10-02T12:05:09Z|00112|binding|INFO|Releasing lport 6ca21a4d-cad8-4eff-bb5a-78e0705eaf1f from this chassis (sb_readonly=0)
Oct 02 12:05:09 compute-0 ovn_controller[94336]: 2025-10-02T12:05:09Z|00113|binding|INFO|Setting lport 6ca21a4d-cad8-4eff-bb5a-78e0705eaf1f down in Southbound
Oct 02 12:05:09 compute-0 ovn_controller[94336]: 2025-10-02T12:05:09Z|00114|binding|INFO|Removing iface tap5e772b33-65 ovn-installed in OVS
Oct 02 12:05:09 compute-0 nova_compute[192079]: 2025-10-02 12:05:09.932 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:09.946 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:90:47:36 19.80.0.218'], port_security=['fa:16:3e:90:47:36 19.80.0.218'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': ''}, parent_port=['5e772b33-6577-4ba1-b187-e4779ef49ed6'], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'name': 'tempest-subport-890758388', 'neutron:cidrs': '19.80.0.218/24', 'neutron:device_id': '', 'neutron:device_owner': 'trunk:subport', 'neutron:mtu': '', 'neutron:network_name': 'neutron-c91b95f8-b43d-450e-bf75-7418a7f0c3c0', 'neutron:port_capabilities': '', 'neutron:port_name': 'tempest-subport-890758388', 'neutron:project_id': 'f7cb78d24d1a4511a59ced45ccc4a1c7', 'neutron:revision_number': '5', 'neutron:security_group_ids': 'a459d514-aab4-4030-9850-e066abdeaccc', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[42], additional_encap=[], encap=[], mirror_rules=[], datapath=a6f25993-8956-421b-9333-413f987f6201, chassis=[], tunnel_key=2, gateway_chassis=[], requested_chassis=[], logical_port=6ca21a4d-cad8-4eff-bb5a-78e0705eaf1f) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:05:09 compute-0 ovn_controller[94336]: 2025-10-02T12:05:09Z|00115|binding|INFO|Releasing lport 2f7dc774-b718-4d9e-9655-fbc5ffa141e8 from this chassis (sb_readonly=0)
Oct 02 12:05:09 compute-0 ovn_controller[94336]: 2025-10-02T12:05:09Z|00116|binding|INFO|Releasing lport efb84ecd-545f-42a1-ad69-585d2998efac from this chassis (sb_readonly=0)
Oct 02 12:05:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:09.947 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:09:b4:ad 10.100.0.5'], port_security=['fa:16:3e:09:b4:ad 10.100.0.5'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'name': 'tempest-parent-983948384', 'neutron:cidrs': '10.100.0.5/28', 'neutron:device_id': 'a6bb5263-b0c7-4282-8e02-3503fd778e6f', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-664b6526-6df1-4024-9bab-37218e6c18bd', 'neutron:port_capabilities': '', 'neutron:port_name': 'tempest-parent-983948384', 'neutron:project_id': 'f7cb78d24d1a4511a59ced45ccc4a1c7', 'neutron:revision_number': '13', 'neutron:security_group_ids': 'a459d514-aab4-4030-9850-e066abdeaccc', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=eddfb51e-1095-4b3d-a2dc-f2557cf13b11, chassis=[], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=5e772b33-6577-4ba1-b187-e4779ef49ed6) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:05:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:09.948 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 6ca21a4d-cad8-4eff-bb5a-78e0705eaf1f in datapath c91b95f8-b43d-450e-bf75-7418a7f0c3c0 unbound from our chassis
Oct 02 12:05:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:09.950 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network c91b95f8-b43d-450e-bf75-7418a7f0c3c0, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:05:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:09.950 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d8bd819d-d3a6-4c0c-bb8b-e33968bd5b36]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:09.951 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-c91b95f8-b43d-450e-bf75-7418a7f0c3c0 namespace which is not needed anymore
Oct 02 12:05:09 compute-0 nova_compute[192079]: 2025-10-02 12:05:09.970 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.037 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:10 compute-0 systemd[1]: machine-qemu\x2d17\x2dinstance\x2d0000001c.scope: Deactivated successfully.
Oct 02 12:05:10 compute-0 systemd[1]: machine-qemu\x2d17\x2dinstance\x2d0000001c.scope: Consumed 1.857s CPU time.
Oct 02 12:05:10 compute-0 systemd-machined[152150]: Machine qemu-17-instance-0000001c terminated.
Oct 02 12:05:10 compute-0 neutron-haproxy-ovnmeta-c91b95f8-b43d-450e-bf75-7418a7f0c3c0[223793]: [NOTICE]   (223797) : haproxy version is 2.8.14-c23fe91
Oct 02 12:05:10 compute-0 neutron-haproxy-ovnmeta-c91b95f8-b43d-450e-bf75-7418a7f0c3c0[223793]: [NOTICE]   (223797) : path to executable is /usr/sbin/haproxy
Oct 02 12:05:10 compute-0 neutron-haproxy-ovnmeta-c91b95f8-b43d-450e-bf75-7418a7f0c3c0[223793]: [WARNING]  (223797) : Exiting Master process...
Oct 02 12:05:10 compute-0 neutron-haproxy-ovnmeta-c91b95f8-b43d-450e-bf75-7418a7f0c3c0[223793]: [WARNING]  (223797) : Exiting Master process...
Oct 02 12:05:10 compute-0 neutron-haproxy-ovnmeta-c91b95f8-b43d-450e-bf75-7418a7f0c3c0[223793]: [ALERT]    (223797) : Current worker (223799) exited with code 143 (Terminated)
Oct 02 12:05:10 compute-0 neutron-haproxy-ovnmeta-c91b95f8-b43d-450e-bf75-7418a7f0c3c0[223793]: [WARNING]  (223797) : All workers exited. Exiting... (0)
Oct 02 12:05:10 compute-0 systemd[1]: libpod-c129b48b89b4473725b80abec3728191c5d88e34c287ac27973f7c19eaffd1cc.scope: Deactivated successfully.
Oct 02 12:05:10 compute-0 podman[223996]: 2025-10-02 12:05:10.087867666 +0000 UTC m=+0.043546562 container died c129b48b89b4473725b80abec3728191c5d88e34c287ac27973f7c19eaffd1cc (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-c91b95f8-b43d-450e-bf75-7418a7f0c3c0, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, org.label-schema.schema-version=1.0)
Oct 02 12:05:10 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-c129b48b89b4473725b80abec3728191c5d88e34c287ac27973f7c19eaffd1cc-userdata-shm.mount: Deactivated successfully.
Oct 02 12:05:10 compute-0 systemd[1]: var-lib-containers-storage-overlay-68511f20dd6bf659b11358109da6b0e08440bf88638ee227c3361e85b9009417-merged.mount: Deactivated successfully.
Oct 02 12:05:10 compute-0 podman[223996]: 2025-10-02 12:05:10.12859327 +0000 UTC m=+0.084272186 container cleanup c129b48b89b4473725b80abec3728191c5d88e34c287ac27973f7c19eaffd1cc (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-c91b95f8-b43d-450e-bf75-7418a7f0c3c0, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:05:10 compute-0 systemd[1]: libpod-conmon-c129b48b89b4473725b80abec3728191c5d88e34c287ac27973f7c19eaffd1cc.scope: Deactivated successfully.
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.161 2 INFO nova.virt.libvirt.driver [-] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Instance destroyed successfully.
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.162 2 DEBUG nova.objects.instance [None req-c6683d03-b790-46f1-8332-207174d80bfb 5f75195e56504673bd403ce69cbc28ca f7cb78d24d1a4511a59ced45ccc4a1c7 - - default default] Lazy-loading 'resources' on Instance uuid a6bb5263-b0c7-4282-8e02-3503fd778e6f obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:05:10 compute-0 podman[224037]: 2025-10-02 12:05:10.193064983 +0000 UTC m=+0.038273307 container remove c129b48b89b4473725b80abec3728191c5d88e34c287ac27973f7c19eaffd1cc (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-c91b95f8-b43d-450e-bf75-7418a7f0c3c0, org.label-schema.vendor=CentOS, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:05:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:10.198 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e20dce78-5e85-469c-951d-ce554f829a97]: (4, ('Thu Oct  2 12:05:10 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-c91b95f8-b43d-450e-bf75-7418a7f0c3c0 (c129b48b89b4473725b80abec3728191c5d88e34c287ac27973f7c19eaffd1cc)\nc129b48b89b4473725b80abec3728191c5d88e34c287ac27973f7c19eaffd1cc\nThu Oct  2 12:05:10 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-c91b95f8-b43d-450e-bf75-7418a7f0c3c0 (c129b48b89b4473725b80abec3728191c5d88e34c287ac27973f7c19eaffd1cc)\nc129b48b89b4473725b80abec3728191c5d88e34c287ac27973f7c19eaffd1cc\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:10.199 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1814c9c3-f41f-46e9-8645-0fecf5d8f617]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:10.200 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapc91b95f8-b0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.241 2 INFO nova.virt.libvirt.driver [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Creating config drive at /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk.config
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.245 2 DEBUG oslo_concurrency.processutils [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpcowxe9ri execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:05:10 compute-0 kernel: tapc91b95f8-b0: left promiscuous mode
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.262 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.266 2 DEBUG nova.virt.libvirt.vif [None req-c6683d03-b790-46f1-8332-207174d80bfb 5f75195e56504673bd403ce69cbc28ca f7cb78d24d1a4511a59ced45ccc4a1c7 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=True,config_drive='True',created_at=2025-10-02T12:04:30Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-LiveMigrationTest-server-1169459074',display_name='tempest-LiveMigrationTest-server-1169459074',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-livemigrationtest-server-1169459074',id=28,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:04:37Z,launched_on='compute-1.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='f7cb78d24d1a4511a59ced45ccc4a1c7',ramdisk_id='',reservation_id='r-b554y970',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',clean_attempts='1',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_mi
n_disk='1',image_min_ram='0',owner_project_name='tempest-LiveMigrationTest-1666170212',owner_user_name='tempest-LiveMigrationTest-1666170212-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:05:05Z,user_data=None,user_id='5f75195e56504673bd403ce69cbc28ca',uuid=a6bb5263-b0c7-4282-8e02-3503fd778e6f,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "5e772b33-6577-4ba1-b187-e4779ef49ed6", "address": "fa:16:3e:09:b4:ad", "network": {"id": "664b6526-6df1-4024-9bab-37218e6c18bd", "bridge": "br-int", "label": "tempest-LiveMigrationTest-2017832683-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f7cb78d24d1a4511a59ced45ccc4a1c7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5e772b33-65", "ovs_interfaceid": "5e772b33-6577-4ba1-b187-e4779ef49ed6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {"os_vif_delegation": true}, "preserve_on_delete": true, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.267 2 DEBUG nova.network.os_vif_util [None req-c6683d03-b790-46f1-8332-207174d80bfb 5f75195e56504673bd403ce69cbc28ca f7cb78d24d1a4511a59ced45ccc4a1c7 - - default default] Converting VIF {"id": "5e772b33-6577-4ba1-b187-e4779ef49ed6", "address": "fa:16:3e:09:b4:ad", "network": {"id": "664b6526-6df1-4024-9bab-37218e6c18bd", "bridge": "br-int", "label": "tempest-LiveMigrationTest-2017832683-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f7cb78d24d1a4511a59ced45ccc4a1c7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5e772b33-65", "ovs_interfaceid": "5e772b33-6577-4ba1-b187-e4779ef49ed6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {"os_vif_delegation": true}, "preserve_on_delete": true, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.268 2 DEBUG nova.network.os_vif_util [None req-c6683d03-b790-46f1-8332-207174d80bfb 5f75195e56504673bd403ce69cbc28ca f7cb78d24d1a4511a59ced45ccc4a1c7 - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:09:b4:ad,bridge_name='br-int',has_traffic_filtering=True,id=5e772b33-6577-4ba1-b187-e4779ef49ed6,network=Network(664b6526-6df1-4024-9bab-37218e6c18bd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=True,vif_name='tap5e772b33-65') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.268 2 DEBUG os_vif [None req-c6683d03-b790-46f1-8332-207174d80bfb 5f75195e56504673bd403ce69cbc28ca f7cb78d24d1a4511a59ced45ccc4a1c7 - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:09:b4:ad,bridge_name='br-int',has_traffic_filtering=True,id=5e772b33-6577-4ba1-b187-e4779ef49ed6,network=Network(664b6526-6df1-4024-9bab-37218e6c18bd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=True,vif_name='tap5e772b33-65') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.270 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.270 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap5e772b33-65, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.271 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:10.271 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0ba147ef-b537-4e28-a647-54422aaae886]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.272 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.277 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.279 2 INFO os_vif [None req-c6683d03-b790-46f1-8332-207174d80bfb 5f75195e56504673bd403ce69cbc28ca f7cb78d24d1a4511a59ced45ccc4a1c7 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:09:b4:ad,bridge_name='br-int',has_traffic_filtering=True,id=5e772b33-6577-4ba1-b187-e4779ef49ed6,network=Network(664b6526-6df1-4024-9bab-37218e6c18bd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=True,vif_name='tap5e772b33-65')
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.280 2 INFO nova.virt.libvirt.driver [None req-c6683d03-b790-46f1-8332-207174d80bfb 5f75195e56504673bd403ce69cbc28ca f7cb78d24d1a4511a59ced45ccc4a1c7 - - default default] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Deleting instance files /var/lib/nova/instances/a6bb5263-b0c7-4282-8e02-3503fd778e6f_del
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.281 2 INFO nova.virt.libvirt.driver [None req-c6683d03-b790-46f1-8332-207174d80bfb 5f75195e56504673bd403ce69cbc28ca f7cb78d24d1a4511a59ced45ccc4a1c7 - - default default] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Deletion of /var/lib/nova/instances/a6bb5263-b0c7-4282-8e02-3503fd778e6f_del complete
Oct 02 12:05:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:10.299 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[bedc4df8-faae-4f1d-9fa3-62162f34346a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:10.300 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e702f7f4-4991-4297-b384-bb6ce0331868]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:10.312 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a3e73ff2-41e4-441e-bc21-f72019d5d78d]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 469810, 'reachable_time': 34280, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 224060, 'error': None, 'target': 'ovnmeta-c91b95f8-b43d-450e-bf75-7418a7f0c3c0', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:10 compute-0 systemd[1]: run-netns-ovnmeta\x2dc91b95f8\x2db43d\x2d450e\x2dbf75\x2d7418a7f0c3c0.mount: Deactivated successfully.
Oct 02 12:05:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:10.314 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-c91b95f8-b43d-450e-bf75-7418a7f0c3c0 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:05:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:10.314 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[ae8e45ab-ceba-48dc-8a9b-2aa8ba7f6d71]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:10.316 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 5e772b33-6577-4ba1-b187-e4779ef49ed6 in datapath 664b6526-6df1-4024-9bab-37218e6c18bd unbound from our chassis
Oct 02 12:05:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:10.317 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 664b6526-6df1-4024-9bab-37218e6c18bd, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:05:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:10.318 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ce143226-21b6-4ca3-8be5-39223086b83d]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:10.318 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd namespace which is not needed anymore
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.368 2 DEBUG oslo_concurrency.processutils [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpcowxe9ri" returned: 0 in 0.124s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.428 2 INFO nova.compute.manager [None req-c6683d03-b790-46f1-8332-207174d80bfb 5f75195e56504673bd403ce69cbc28ca f7cb78d24d1a4511a59ced45ccc4a1c7 - - default default] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Took 0.56 seconds to destroy the instance on the hypervisor.
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.429 2 DEBUG oslo.service.loopingcall [None req-c6683d03-b790-46f1-8332-207174d80bfb 5f75195e56504673bd403ce69cbc28ca f7cb78d24d1a4511a59ced45ccc4a1c7 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.429 2 DEBUG nova.compute.manager [-] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.429 2 DEBUG nova.network.neutron [-] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:05:10 compute-0 neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd[223893]: [NOTICE]   (223897) : haproxy version is 2.8.14-c23fe91
Oct 02 12:05:10 compute-0 systemd-machined[152150]: New machine qemu-18-instance-0000001b.
Oct 02 12:05:10 compute-0 neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd[223893]: [NOTICE]   (223897) : path to executable is /usr/sbin/haproxy
Oct 02 12:05:10 compute-0 neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd[223893]: [WARNING]  (223897) : Exiting Master process...
Oct 02 12:05:10 compute-0 neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd[223893]: [WARNING]  (223897) : Exiting Master process...
Oct 02 12:05:10 compute-0 neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd[223893]: [ALERT]    (223897) : Current worker (223899) exited with code 143 (Terminated)
Oct 02 12:05:10 compute-0 neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd[223893]: [WARNING]  (223897) : All workers exited. Exiting... (0)
Oct 02 12:05:10 compute-0 podman[224079]: 2025-10-02 12:05:10.44700764 +0000 UTC m=+0.054849092 container died 643c6aa59b0a86c66ac8c41e2d338fb0ac7682a44815333054c23c070d2a2e09 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true)
Oct 02 12:05:10 compute-0 systemd[1]: Started Virtual Machine qemu-18-instance-0000001b.
Oct 02 12:05:10 compute-0 systemd[1]: libpod-643c6aa59b0a86c66ac8c41e2d338fb0ac7682a44815333054c23c070d2a2e09.scope: Deactivated successfully.
Oct 02 12:05:10 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-643c6aa59b0a86c66ac8c41e2d338fb0ac7682a44815333054c23c070d2a2e09-userdata-shm.mount: Deactivated successfully.
Oct 02 12:05:10 compute-0 systemd[1]: var-lib-containers-storage-overlay-27c90b63ff0b33d76dedf732f0a465fce46088ecd124dd067abc0b9b01ed9a98-merged.mount: Deactivated successfully.
Oct 02 12:05:10 compute-0 podman[224079]: 2025-10-02 12:05:10.486780388 +0000 UTC m=+0.094621830 container cleanup 643c6aa59b0a86c66ac8c41e2d338fb0ac7682a44815333054c23c070d2a2e09 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0)
Oct 02 12:05:10 compute-0 systemd[1]: libpod-conmon-643c6aa59b0a86c66ac8c41e2d338fb0ac7682a44815333054c23c070d2a2e09.scope: Deactivated successfully.
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.502 2 DEBUG nova.compute.manager [req-e176f983-cfa1-4530-ac8a-117922f6b7b6 req-67dae16b-e610-4f2a-980a-1597410ca44f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Received event network-vif-unplugged-5e772b33-6577-4ba1-b187-e4779ef49ed6 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.502 2 DEBUG oslo_concurrency.lockutils [req-e176f983-cfa1-4530-ac8a-117922f6b7b6 req-67dae16b-e610-4f2a-980a-1597410ca44f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "a6bb5263-b0c7-4282-8e02-3503fd778e6f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.502 2 DEBUG oslo_concurrency.lockutils [req-e176f983-cfa1-4530-ac8a-117922f6b7b6 req-67dae16b-e610-4f2a-980a-1597410ca44f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a6bb5263-b0c7-4282-8e02-3503fd778e6f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.502 2 DEBUG oslo_concurrency.lockutils [req-e176f983-cfa1-4530-ac8a-117922f6b7b6 req-67dae16b-e610-4f2a-980a-1597410ca44f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a6bb5263-b0c7-4282-8e02-3503fd778e6f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.503 2 DEBUG nova.compute.manager [req-e176f983-cfa1-4530-ac8a-117922f6b7b6 req-67dae16b-e610-4f2a-980a-1597410ca44f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] No waiting events found dispatching network-vif-unplugged-5e772b33-6577-4ba1-b187-e4779ef49ed6 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.503 2 DEBUG nova.compute.manager [req-e176f983-cfa1-4530-ac8a-117922f6b7b6 req-67dae16b-e610-4f2a-980a-1597410ca44f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Received event network-vif-unplugged-5e772b33-6577-4ba1-b187-e4779ef49ed6 for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:05:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:10.527 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=8, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=7) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.527 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:10 compute-0 podman[224123]: 2025-10-02 12:05:10.549572786 +0000 UTC m=+0.041127556 container remove 643c6aa59b0a86c66ac8c41e2d338fb0ac7682a44815333054c23c070d2a2e09 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:05:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:10.555 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a3095c72-2b70-4355-b78a-cbcc28be76b6]: (4, ('Thu Oct  2 12:05:10 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd (643c6aa59b0a86c66ac8c41e2d338fb0ac7682a44815333054c23c070d2a2e09)\n643c6aa59b0a86c66ac8c41e2d338fb0ac7682a44815333054c23c070d2a2e09\nThu Oct  2 12:05:10 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd (643c6aa59b0a86c66ac8c41e2d338fb0ac7682a44815333054c23c070d2a2e09)\n643c6aa59b0a86c66ac8c41e2d338fb0ac7682a44815333054c23c070d2a2e09\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:10.557 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b5826c0d-4a89-4909-9c64-65f7e2b34412]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:10.558 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap664b6526-60, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.559 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:10 compute-0 kernel: tap664b6526-60: left promiscuous mode
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.571 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:10.575 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[08756dc0-0950-49f7-9d62-2bac99a4ed14]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:10.603 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f534fbf2-2329-4d61-8c8a-02bba339ef5a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:10.605 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[aa86933e-0c83-4fbb-b3c2-0bdb1e87b6fc]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:10.622 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[498d2e8f-620c-4fc1-b653-a797d2ad122a]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 469890, 'reachable_time': 15545, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 224138, 'error': None, 'target': 'ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:10.624 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-664b6526-6df1-4024-9bab-37218e6c18bd deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:05:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:10.624 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[ba023702-84a4-4260-9f69-2c5ea1d8db30]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:05:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:10.625 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 1 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.686 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.686 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.686 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.687 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.761 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.823 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk --force-share --output=json" returned: 0 in 0.062s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.824 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:05:10 compute-0 nova_compute[192079]: 2025-10-02 12:05:10.887 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff/disk --force-share --output=json" returned: 0 in 0.063s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.002 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.003 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5696MB free_disk=73.4250259399414GB free_vcpus=7 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.003 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.003 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:05:11 compute-0 systemd[1]: run-netns-ovnmeta\x2d664b6526\x2d6df1\x2d4024\x2d9bab\x2d37218e6c18bd.mount: Deactivated successfully.
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.233 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance c3b78b62-b1f8-477d-8ae4-9af540dc72ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.233 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance a6bb5263-b0c7-4282-8e02-3503fd778e6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.233 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 2 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.234 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=768MB phys_disk=79GB used_disk=2GB total_vcpus=8 used_vcpus=2 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.370 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.395 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.407 2 DEBUG nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Removed pending event for c3b78b62-b1f8-477d-8ae4-9af540dc72ff due to event _event_emit_delayed /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:438
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.408 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406711.4076495, c3b78b62-b1f8-477d-8ae4-9af540dc72ff => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.408 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] VM Resumed (Lifecycle Event)
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.410 2 DEBUG nova.compute.manager [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.410 2 DEBUG nova.virt.libvirt.driver [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.412 2 INFO nova.virt.libvirt.driver [-] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Instance spawned successfully.
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.413 2 DEBUG nova.virt.libvirt.driver [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.421 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.421 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.417s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.439 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.446 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: active, current task_state: rebuild_spawning, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.448 2 DEBUG nova.virt.libvirt.driver [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.449 2 DEBUG nova.virt.libvirt.driver [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.449 2 DEBUG nova.virt.libvirt.driver [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.449 2 DEBUG nova.virt.libvirt.driver [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.450 2 DEBUG nova.virt.libvirt.driver [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.450 2 DEBUG nova.virt.libvirt.driver [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.501 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] During sync_power_state the instance has a pending task (rebuild_spawning). Skip.
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.501 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406711.408527, c3b78b62-b1f8-477d-8ae4-9af540dc72ff => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.502 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] VM Started (Lifecycle Event)
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.535 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.539 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Synchronizing instance power state after lifecycle event "Started"; current vm_state: active, current task_state: rebuild_spawning, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.565 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] During sync_power_state the instance has a pending task (rebuild_spawning). Skip.
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.578 2 DEBUG nova.compute.manager [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:05:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:05:11.626 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '8'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.844 2 DEBUG oslo_concurrency.lockutils [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.846 2 DEBUG oslo_concurrency.lockutils [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.846 2 DEBUG nova.objects.instance [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Trying to apply a migration context that does not seem to be set for this instance apply_migration_context /usr/lib/python3.9/site-packages/nova/objects/instance.py:1032
Oct 02 12:05:11 compute-0 nova_compute[192079]: 2025-10-02 12:05:11.960 2 DEBUG oslo_concurrency.lockutils [None req-a616a0f3-c6a1-4c39-9490-d58c86d77105 dd6991e245a64695a82e5e66c3de6940 2646aa8e808d482a85b4fd98f9e46a26 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 0.114s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:05:12 compute-0 podman[224151]: 2025-10-02 12:05:12.156565572 +0000 UTC m=+0.059403636 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:05:12 compute-0 podman[224152]: 2025-10-02 12:05:12.157383925 +0000 UTC m=+0.066018437 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, config_id=iscsid, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3)
Oct 02 12:05:12 compute-0 nova_compute[192079]: 2025-10-02 12:05:12.416 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:05:12 compute-0 nova_compute[192079]: 2025-10-02 12:05:12.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:05:13 compute-0 nova_compute[192079]: 2025-10-02 12:05:13.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:05:13 compute-0 nova_compute[192079]: 2025-10-02 12:05:13.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:05:13 compute-0 nova_compute[192079]: 2025-10-02 12:05:13.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:05:13 compute-0 nova_compute[192079]: 2025-10-02 12:05:13.684 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Skipping network cache update for instance because it is being deleted. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9875
Oct 02 12:05:14 compute-0 nova_compute[192079]: 2025-10-02 12:05:14.230 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "refresh_cache-c3b78b62-b1f8-477d-8ae4-9af540dc72ff" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:05:14 compute-0 nova_compute[192079]: 2025-10-02 12:05:14.231 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquired lock "refresh_cache-c3b78b62-b1f8-477d-8ae4-9af540dc72ff" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:05:14 compute-0 nova_compute[192079]: 2025-10-02 12:05:14.231 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Forcefully refreshing network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2004
Oct 02 12:05:14 compute-0 nova_compute[192079]: 2025-10-02 12:05:14.231 2 DEBUG nova.objects.instance [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lazy-loading 'info_cache' on Instance uuid c3b78b62-b1f8-477d-8ae4-9af540dc72ff obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:05:14 compute-0 nova_compute[192079]: 2025-10-02 12:05:14.347 2 DEBUG nova.compute.manager [req-7b15ce24-3299-46ce-ab31-0aa48622a5d5 req-ae724a6e-b2a4-4d60-85b3-ba8d1458bb43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Received event network-vif-plugged-5e772b33-6577-4ba1-b187-e4779ef49ed6 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:05:14 compute-0 nova_compute[192079]: 2025-10-02 12:05:14.348 2 DEBUG oslo_concurrency.lockutils [req-7b15ce24-3299-46ce-ab31-0aa48622a5d5 req-ae724a6e-b2a4-4d60-85b3-ba8d1458bb43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "a6bb5263-b0c7-4282-8e02-3503fd778e6f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:05:14 compute-0 nova_compute[192079]: 2025-10-02 12:05:14.348 2 DEBUG oslo_concurrency.lockutils [req-7b15ce24-3299-46ce-ab31-0aa48622a5d5 req-ae724a6e-b2a4-4d60-85b3-ba8d1458bb43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a6bb5263-b0c7-4282-8e02-3503fd778e6f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:05:14 compute-0 nova_compute[192079]: 2025-10-02 12:05:14.348 2 DEBUG oslo_concurrency.lockutils [req-7b15ce24-3299-46ce-ab31-0aa48622a5d5 req-ae724a6e-b2a4-4d60-85b3-ba8d1458bb43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a6bb5263-b0c7-4282-8e02-3503fd778e6f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:05:14 compute-0 nova_compute[192079]: 2025-10-02 12:05:14.349 2 DEBUG nova.compute.manager [req-7b15ce24-3299-46ce-ab31-0aa48622a5d5 req-ae724a6e-b2a4-4d60-85b3-ba8d1458bb43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] No waiting events found dispatching network-vif-plugged-5e772b33-6577-4ba1-b187-e4779ef49ed6 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:05:14 compute-0 nova_compute[192079]: 2025-10-02 12:05:14.349 2 WARNING nova.compute.manager [req-7b15ce24-3299-46ce-ab31-0aa48622a5d5 req-ae724a6e-b2a4-4d60-85b3-ba8d1458bb43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Received unexpected event network-vif-plugged-5e772b33-6577-4ba1-b187-e4779ef49ed6 for instance with vm_state active and task_state deleting.
Oct 02 12:05:14 compute-0 nova_compute[192079]: 2025-10-02 12:05:14.614 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:14 compute-0 nova_compute[192079]: 2025-10-02 12:05:14.768 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:05:14 compute-0 nova_compute[192079]: 2025-10-02 12:05:14.784 2 DEBUG oslo_concurrency.lockutils [None req-bd9b5ce2-0ac4-4e7a-8cb7-1ca42f13bc7c e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Acquiring lock "c3b78b62-b1f8-477d-8ae4-9af540dc72ff" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:05:14 compute-0 nova_compute[192079]: 2025-10-02 12:05:14.785 2 DEBUG oslo_concurrency.lockutils [None req-bd9b5ce2-0ac4-4e7a-8cb7-1ca42f13bc7c e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lock "c3b78b62-b1f8-477d-8ae4-9af540dc72ff" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:05:14 compute-0 nova_compute[192079]: 2025-10-02 12:05:14.785 2 DEBUG oslo_concurrency.lockutils [None req-bd9b5ce2-0ac4-4e7a-8cb7-1ca42f13bc7c e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Acquiring lock "c3b78b62-b1f8-477d-8ae4-9af540dc72ff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:05:14 compute-0 nova_compute[192079]: 2025-10-02 12:05:14.785 2 DEBUG oslo_concurrency.lockutils [None req-bd9b5ce2-0ac4-4e7a-8cb7-1ca42f13bc7c e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lock "c3b78b62-b1f8-477d-8ae4-9af540dc72ff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:05:14 compute-0 nova_compute[192079]: 2025-10-02 12:05:14.786 2 DEBUG oslo_concurrency.lockutils [None req-bd9b5ce2-0ac4-4e7a-8cb7-1ca42f13bc7c e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lock "c3b78b62-b1f8-477d-8ae4-9af540dc72ff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:05:14 compute-0 nova_compute[192079]: 2025-10-02 12:05:14.940 2 DEBUG nova.network.neutron [-] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:05:15 compute-0 nova_compute[192079]: 2025-10-02 12:05:15.050 2 INFO nova.compute.manager [None req-bd9b5ce2-0ac4-4e7a-8cb7-1ca42f13bc7c e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Terminating instance
Oct 02 12:05:15 compute-0 nova_compute[192079]: 2025-10-02 12:05:15.180 2 INFO nova.compute.manager [-] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Took 4.75 seconds to deallocate network for instance.
Oct 02 12:05:15 compute-0 nova_compute[192079]: 2025-10-02 12:05:15.184 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:05:15 compute-0 nova_compute[192079]: 2025-10-02 12:05:15.255 2 DEBUG oslo_concurrency.lockutils [None req-bd9b5ce2-0ac4-4e7a-8cb7-1ca42f13bc7c e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Acquiring lock "refresh_cache-c3b78b62-b1f8-477d-8ae4-9af540dc72ff" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:05:15 compute-0 nova_compute[192079]: 2025-10-02 12:05:15.263 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Releasing lock "refresh_cache-c3b78b62-b1f8-477d-8ae4-9af540dc72ff" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:05:15 compute-0 nova_compute[192079]: 2025-10-02 12:05:15.263 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Updated the network info_cache for instance _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9929
Oct 02 12:05:15 compute-0 nova_compute[192079]: 2025-10-02 12:05:15.263 2 DEBUG oslo_concurrency.lockutils [None req-bd9b5ce2-0ac4-4e7a-8cb7-1ca42f13bc7c e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Acquired lock "refresh_cache-c3b78b62-b1f8-477d-8ae4-9af540dc72ff" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:05:15 compute-0 nova_compute[192079]: 2025-10-02 12:05:15.264 2 DEBUG nova.network.neutron [None req-bd9b5ce2-0ac4-4e7a-8cb7-1ca42f13bc7c e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:05:15 compute-0 nova_compute[192079]: 2025-10-02 12:05:15.264 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:05:15 compute-0 nova_compute[192079]: 2025-10-02 12:05:15.315 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:15 compute-0 nova_compute[192079]: 2025-10-02 12:05:15.492 2 DEBUG oslo_concurrency.lockutils [None req-c6683d03-b790-46f1-8332-207174d80bfb 5f75195e56504673bd403ce69cbc28ca f7cb78d24d1a4511a59ced45ccc4a1c7 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:05:15 compute-0 nova_compute[192079]: 2025-10-02 12:05:15.494 2 DEBUG oslo_concurrency.lockutils [None req-c6683d03-b790-46f1-8332-207174d80bfb 5f75195e56504673bd403ce69cbc28ca f7cb78d24d1a4511a59ced45ccc4a1c7 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:05:15 compute-0 nova_compute[192079]: 2025-10-02 12:05:15.565 2 DEBUG nova.compute.provider_tree [None req-c6683d03-b790-46f1-8332-207174d80bfb 5f75195e56504673bd403ce69cbc28ca f7cb78d24d1a4511a59ced45ccc4a1c7 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:05:15 compute-0 nova_compute[192079]: 2025-10-02 12:05:15.589 2 DEBUG nova.scheduler.client.report [None req-c6683d03-b790-46f1-8332-207174d80bfb 5f75195e56504673bd403ce69cbc28ca f7cb78d24d1a4511a59ced45ccc4a1c7 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:05:15 compute-0 nova_compute[192079]: 2025-10-02 12:05:15.613 2 DEBUG oslo_concurrency.lockutils [None req-c6683d03-b790-46f1-8332-207174d80bfb 5f75195e56504673bd403ce69cbc28ca f7cb78d24d1a4511a59ced45ccc4a1c7 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.119s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:05:15 compute-0 nova_compute[192079]: 2025-10-02 12:05:15.651 2 INFO nova.scheduler.client.report [None req-c6683d03-b790-46f1-8332-207174d80bfb 5f75195e56504673bd403ce69cbc28ca f7cb78d24d1a4511a59ced45ccc4a1c7 - - default default] Deleted allocations for instance a6bb5263-b0c7-4282-8e02-3503fd778e6f
Oct 02 12:05:15 compute-0 nova_compute[192079]: 2025-10-02 12:05:15.681 2 DEBUG nova.network.neutron [None req-bd9b5ce2-0ac4-4e7a-8cb7-1ca42f13bc7c e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:05:15 compute-0 nova_compute[192079]: 2025-10-02 12:05:15.762 2 DEBUG oslo_concurrency.lockutils [None req-c6683d03-b790-46f1-8332-207174d80bfb 5f75195e56504673bd403ce69cbc28ca f7cb78d24d1a4511a59ced45ccc4a1c7 - - default default] Lock "a6bb5263-b0c7-4282-8e02-3503fd778e6f" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.951s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:05:16 compute-0 nova_compute[192079]: 2025-10-02 12:05:16.001 2 DEBUG nova.network.neutron [None req-bd9b5ce2-0ac4-4e7a-8cb7-1ca42f13bc7c e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:05:16 compute-0 nova_compute[192079]: 2025-10-02 12:05:16.031 2 DEBUG oslo_concurrency.lockutils [None req-bd9b5ce2-0ac4-4e7a-8cb7-1ca42f13bc7c e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Releasing lock "refresh_cache-c3b78b62-b1f8-477d-8ae4-9af540dc72ff" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:05:16 compute-0 nova_compute[192079]: 2025-10-02 12:05:16.032 2 DEBUG nova.compute.manager [None req-bd9b5ce2-0ac4-4e7a-8cb7-1ca42f13bc7c e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:05:16 compute-0 systemd[1]: machine-qemu\x2d18\x2dinstance\x2d0000001b.scope: Deactivated successfully.
Oct 02 12:05:16 compute-0 systemd[1]: machine-qemu\x2d18\x2dinstance\x2d0000001b.scope: Consumed 5.416s CPU time.
Oct 02 12:05:16 compute-0 systemd-machined[152150]: Machine qemu-18-instance-0000001b terminated.
Oct 02 12:05:16 compute-0 nova_compute[192079]: 2025-10-02 12:05:16.281 2 INFO nova.virt.libvirt.driver [-] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Instance destroyed successfully.
Oct 02 12:05:16 compute-0 nova_compute[192079]: 2025-10-02 12:05:16.282 2 DEBUG nova.objects.instance [None req-bd9b5ce2-0ac4-4e7a-8cb7-1ca42f13bc7c e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lazy-loading 'resources' on Instance uuid c3b78b62-b1f8-477d-8ae4-9af540dc72ff obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:05:16 compute-0 nova_compute[192079]: 2025-10-02 12:05:16.301 2 INFO nova.virt.libvirt.driver [None req-bd9b5ce2-0ac4-4e7a-8cb7-1ca42f13bc7c e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Deleting instance files /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff_del
Oct 02 12:05:16 compute-0 nova_compute[192079]: 2025-10-02 12:05:16.302 2 INFO nova.virt.libvirt.driver [None req-bd9b5ce2-0ac4-4e7a-8cb7-1ca42f13bc7c e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Deletion of /var/lib/nova/instances/c3b78b62-b1f8-477d-8ae4-9af540dc72ff_del complete
Oct 02 12:05:16 compute-0 nova_compute[192079]: 2025-10-02 12:05:16.386 2 INFO nova.compute.manager [None req-bd9b5ce2-0ac4-4e7a-8cb7-1ca42f13bc7c e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Took 0.35 seconds to destroy the instance on the hypervisor.
Oct 02 12:05:16 compute-0 nova_compute[192079]: 2025-10-02 12:05:16.387 2 DEBUG oslo.service.loopingcall [None req-bd9b5ce2-0ac4-4e7a-8cb7-1ca42f13bc7c e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:05:16 compute-0 nova_compute[192079]: 2025-10-02 12:05:16.388 2 DEBUG nova.compute.manager [-] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:05:16 compute-0 nova_compute[192079]: 2025-10-02 12:05:16.388 2 DEBUG nova.network.neutron [-] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:05:17.098 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:05:17.099 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:05:17.099 12 DEBUG ceilometer.polling.manager [-] Skip pollster cpu, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:05:17.099 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:05:17.099 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:05:17.099 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.capacity, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:05:17.099 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:05:17.099 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:05:17.099 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:05:17.100 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:05:17.100 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:05:17.100 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:05:17.100 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:05:17.100 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:05:17.100 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.allocation, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:05:17.100 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:05:17.100 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:05:17.100 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:05:17.100 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:05:17.100 12 DEBUG ceilometer.polling.manager [-] Skip pollster memory.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:05:17.100 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:05:17.101 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:05:17.101 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.iops, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:05:17.101 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:05:17.101 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:05:17 compute-0 nova_compute[192079]: 2025-10-02 12:05:17.371 2 DEBUG nova.network.neutron [-] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:05:18 compute-0 nova_compute[192079]: 2025-10-02 12:05:18.475 2 DEBUG nova.network.neutron [-] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:05:18 compute-0 nova_compute[192079]: 2025-10-02 12:05:18.563 2 INFO nova.compute.manager [-] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Took 2.17 seconds to deallocate network for instance.
Oct 02 12:05:18 compute-0 nova_compute[192079]: 2025-10-02 12:05:18.646 2 DEBUG oslo_concurrency.lockutils [None req-bd9b5ce2-0ac4-4e7a-8cb7-1ca42f13bc7c e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:05:18 compute-0 nova_compute[192079]: 2025-10-02 12:05:18.647 2 DEBUG oslo_concurrency.lockutils [None req-bd9b5ce2-0ac4-4e7a-8cb7-1ca42f13bc7c e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:05:18 compute-0 nova_compute[192079]: 2025-10-02 12:05:18.698 2 DEBUG nova.compute.provider_tree [None req-bd9b5ce2-0ac4-4e7a-8cb7-1ca42f13bc7c e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:05:18 compute-0 nova_compute[192079]: 2025-10-02 12:05:18.722 2 DEBUG nova.scheduler.client.report [None req-bd9b5ce2-0ac4-4e7a-8cb7-1ca42f13bc7c e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:05:18 compute-0 nova_compute[192079]: 2025-10-02 12:05:18.808 2 DEBUG oslo_concurrency.lockutils [None req-bd9b5ce2-0ac4-4e7a-8cb7-1ca42f13bc7c e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.161s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:05:18 compute-0 nova_compute[192079]: 2025-10-02 12:05:18.861 2 INFO nova.scheduler.client.report [None req-bd9b5ce2-0ac4-4e7a-8cb7-1ca42f13bc7c e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Deleted allocations for instance c3b78b62-b1f8-477d-8ae4-9af540dc72ff
Oct 02 12:05:19 compute-0 nova_compute[192079]: 2025-10-02 12:05:19.081 2 DEBUG oslo_concurrency.lockutils [None req-bd9b5ce2-0ac4-4e7a-8cb7-1ca42f13bc7c e88312197d9e4d24a0c49002f36053ba dbe4d58d976745ec9575dfbfcfb67333 - - default default] Lock "c3b78b62-b1f8-477d-8ae4-9af540dc72ff" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 4.297s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:05:19 compute-0 nova_compute[192079]: 2025-10-02 12:05:19.616 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:20 compute-0 nova_compute[192079]: 2025-10-02 12:05:20.318 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:22 compute-0 podman[224205]: 2025-10-02 12:05:22.213775359 +0000 UTC m=+0.129518023 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, org.label-schema.schema-version=1.0, tcib_managed=true, container_name=ovn_controller, io.buildah.version=1.41.3)
Oct 02 12:05:24 compute-0 podman[224231]: 2025-10-02 12:05:24.139878285 +0000 UTC m=+0.052340583 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, tcib_managed=true, container_name=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_id=ovn_metadata_agent)
Oct 02 12:05:24 compute-0 podman[224232]: 2025-10-02 12:05:24.140161193 +0000 UTC m=+0.051284594 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>)
Oct 02 12:05:24 compute-0 nova_compute[192079]: 2025-10-02 12:05:24.617 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:25 compute-0 nova_compute[192079]: 2025-10-02 12:05:25.160 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759406710.1588757, a6bb5263-b0c7-4282-8e02-3503fd778e6f => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:05:25 compute-0 nova_compute[192079]: 2025-10-02 12:05:25.160 2 INFO nova.compute.manager [-] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] VM Stopped (Lifecycle Event)
Oct 02 12:05:25 compute-0 nova_compute[192079]: 2025-10-02 12:05:25.185 2 DEBUG nova.compute.manager [None req-2d65bb8b-3896-4ff1-b0c1-6bd0308a3c58 - - - - - -] [instance: a6bb5263-b0c7-4282-8e02-3503fd778e6f] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:05:25 compute-0 nova_compute[192079]: 2025-10-02 12:05:25.319 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:29 compute-0 nova_compute[192079]: 2025-10-02 12:05:29.619 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:30 compute-0 nova_compute[192079]: 2025-10-02 12:05:30.321 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:31 compute-0 podman[224273]: 2025-10-02 12:05:31.173705133 +0000 UTC m=+0.082921019 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_managed=true, config_id=edpm, 
io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, container_name=ceilometer_agent_compute)
Oct 02 12:05:31 compute-0 nova_compute[192079]: 2025-10-02 12:05:31.279 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759406716.2778761, c3b78b62-b1f8-477d-8ae4-9af540dc72ff => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:05:31 compute-0 nova_compute[192079]: 2025-10-02 12:05:31.279 2 INFO nova.compute.manager [-] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] VM Stopped (Lifecycle Event)
Oct 02 12:05:31 compute-0 nova_compute[192079]: 2025-10-02 12:05:31.303 2 DEBUG nova.compute.manager [None req-aeca97c2-4d57-446c-9127-f56bb1d9ff15 - - - - - -] [instance: c3b78b62-b1f8-477d-8ae4-9af540dc72ff] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:05:34 compute-0 nova_compute[192079]: 2025-10-02 12:05:34.620 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:35 compute-0 nova_compute[192079]: 2025-10-02 12:05:35.323 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:36 compute-0 podman[224294]: 2025-10-02 12:05:36.138857226 +0000 UTC m=+0.052612890 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, io.buildah.version=1.41.3, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_id=multipathd, org.label-schema.schema-version=1.0, tcib_managed=true)
Oct 02 12:05:36 compute-0 podman[224293]: 2025-10-02 12:05:36.138188768 +0000 UTC m=+0.055958271 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., name=ubi9-minimal, io.buildah.version=1.33.7, io.openshift.tags=minimal rhel9, release=1755695350, url=https://catalog.redhat.com/en/search?searchType=containers, vcs-type=git, version=9.6, com.redhat.component=ubi9-minimal-container, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. 
This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., maintainer=Red Hat, Inc., managed_by=edpm_ansible, architecture=x86_64, distribution-scope=public, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, config_id=edpm, build-date=2025-08-20T13:12:41, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, vendor=Red Hat, Inc., container_name=openstack_network_exporter, io.openshift.expose-services=, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b)
Oct 02 12:05:39 compute-0 nova_compute[192079]: 2025-10-02 12:05:39.622 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:40 compute-0 nova_compute[192079]: 2025-10-02 12:05:40.326 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:43 compute-0 podman[224334]: 2025-10-02 12:05:43.140580326 +0000 UTC m=+0.054569823 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:05:43 compute-0 podman[224335]: 2025-10-02 12:05:43.167866983 +0000 UTC m=+0.070938511 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=iscsid, org.label-schema.name=CentOS Stream 9 Base Image, container_name=iscsid, managed_by=edpm_ansible, org.label-schema.build-date=20251001, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, org.label-schema.schema-version=1.0, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:05:44 compute-0 nova_compute[192079]: 2025-10-02 12:05:44.623 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:45 compute-0 nova_compute[192079]: 2025-10-02 12:05:45.367 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:49 compute-0 nova_compute[192079]: 2025-10-02 12:05:49.625 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:50 compute-0 nova_compute[192079]: 2025-10-02 12:05:50.422 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:52 compute-0 nova_compute[192079]: 2025-10-02 12:05:52.526 2 DEBUG oslo_concurrency.lockutils [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Acquiring lock "7c66bab5-e0cd-40db-b080-086245f15c4f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:05:52 compute-0 nova_compute[192079]: 2025-10-02 12:05:52.527 2 DEBUG oslo_concurrency.lockutils [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Lock "7c66bab5-e0cd-40db-b080-086245f15c4f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:05:52 compute-0 nova_compute[192079]: 2025-10-02 12:05:52.563 2 DEBUG nova.compute.manager [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:05:52 compute-0 nova_compute[192079]: 2025-10-02 12:05:52.706 2 DEBUG oslo_concurrency.lockutils [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:05:52 compute-0 nova_compute[192079]: 2025-10-02 12:05:52.707 2 DEBUG oslo_concurrency.lockutils [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:05:52 compute-0 nova_compute[192079]: 2025-10-02 12:05:52.713 2 DEBUG nova.virt.hardware [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:05:52 compute-0 nova_compute[192079]: 2025-10-02 12:05:52.713 2 INFO nova.compute.claims [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:05:52 compute-0 nova_compute[192079]: 2025-10-02 12:05:52.822 2 DEBUG nova.scheduler.client.report [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Refreshing inventories for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708 _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:804
Oct 02 12:05:52 compute-0 nova_compute[192079]: 2025-10-02 12:05:52.852 2 DEBUG nova.scheduler.client.report [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Updating ProviderTree inventory for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 from _refresh_and_get_inventory using data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} _refresh_and_get_inventory /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:768
Oct 02 12:05:52 compute-0 nova_compute[192079]: 2025-10-02 12:05:52.853 2 DEBUG nova.compute.provider_tree [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Updating inventory in ProviderTree for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 with inventory: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:176
Oct 02 12:05:52 compute-0 nova_compute[192079]: 2025-10-02 12:05:52.874 2 DEBUG nova.scheduler.client.report [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Refreshing aggregate associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, aggregates: None _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:813
Oct 02 12:05:52 compute-0 nova_compute[192079]: 2025-10-02 12:05:52.892 2 DEBUG nova.scheduler.client.report [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Refreshing trait associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, traits: COMPUTE_SECURITY_UEFI_SECURE_BOOT,COMPUTE_VIOMMU_MODEL_VIRTIO,COMPUTE_VIOMMU_MODEL_AUTO,COMPUTE_IMAGE_TYPE_AKI,COMPUTE_GRAPHICS_MODEL_VIRTIO,COMPUTE_NET_VIF_MODEL_PCNET,HW_CPU_X86_SSE42,COMPUTE_RESCUE_BFV,COMPUTE_VOLUME_EXTEND,COMPUTE_IMAGE_TYPE_QCOW2,COMPUTE_TRUSTED_CERTS,COMPUTE_SOCKET_PCI_NUMA_AFFINITY,COMPUTE_GRAPHICS_MODEL_CIRRUS,HW_CPU_X86_MMX,COMPUTE_STORAGE_BUS_VIRTIO,COMPUTE_NET_ATTACH_INTERFACE_WITH_TAG,COMPUTE_STORAGE_BUS_FDC,COMPUTE_STORAGE_BUS_USB,COMPUTE_NODE,HW_CPU_X86_SSSE3,HW_CPU_X86_SSE2,COMPUTE_GRAPHICS_MODEL_BOCHS,COMPUTE_NET_VIF_MODEL_E1000E,COMPUTE_IMAGE_TYPE_RAW,COMPUTE_NET_VIF_MODEL_NE2K_PCI,COMPUTE_IMAGE_TYPE_AMI,COMPUTE_VIOMMU_MODEL_INTEL,COMPUTE_SECURITY_TPM_2_0,COMPUTE_STORAGE_BUS_SCSI,COMPUTE_IMAGE_TYPE_ARI,COMPUTE_NET_VIF_MODEL_VMXNET3,COMPUTE_SECURITY_TPM_1_2,COMPUTE_NET_VIF_MODEL_E1000,HW_CPU_X86_SSE,COMPUTE_VOLUME_MULTI_ATTACH,COMPUTE_STORAGE_BUS_IDE,COMPUTE_GRAPHICS_MODEL_NONE,COMPUTE_VOLUME_ATTACH_WITH_TAG,COMPUTE_NET_VIF_MODEL_VIRTIO,HW_CPU_X86_SSE41,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_DEVICE_TAGGING,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_ACCELERATORS,COMPUTE_NET_VIF_MODEL_RTL8139,COMPUTE_GRAPHICS_MODEL_VGA,COMPUTE_STORAGE_BUS_SATA,COMPUTE_NET_VIF_MODEL_SPAPR_VLAN _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:825
Oct 02 12:05:52 compute-0 nova_compute[192079]: 2025-10-02 12:05:52.965 2 DEBUG nova.compute.provider_tree [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:05:52 compute-0 nova_compute[192079]: 2025-10-02 12:05:52.990 2 DEBUG nova.scheduler.client.report [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.020 2 DEBUG oslo_concurrency.lockutils [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.314s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.021 2 DEBUG nova.compute.manager [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.104 2 DEBUG nova.compute.manager [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.105 2 DEBUG nova.network.neutron [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.139 2 INFO nova.virt.libvirt.driver [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:05:53 compute-0 podman[224379]: 2025-10-02 12:05:53.167664672 +0000 UTC m=+0.086807726 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_controller, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, config_id=ovn_controller, managed_by=edpm_ansible, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0)
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.168 2 DEBUG nova.compute.manager [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.374 2 DEBUG nova.compute.manager [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.375 2 DEBUG nova.virt.libvirt.driver [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.376 2 INFO nova.virt.libvirt.driver [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Creating image(s)
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.376 2 DEBUG oslo_concurrency.lockutils [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Acquiring lock "/var/lib/nova/instances/7c66bab5-e0cd-40db-b080-086245f15c4f/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.377 2 DEBUG oslo_concurrency.lockutils [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Lock "/var/lib/nova/instances/7c66bab5-e0cd-40db-b080-086245f15c4f/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.377 2 DEBUG oslo_concurrency.lockutils [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Lock "/var/lib/nova/instances/7c66bab5-e0cd-40db-b080-086245f15c4f/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.394 2 DEBUG oslo_concurrency.processutils [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.474 2 DEBUG oslo_concurrency.processutils [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.080s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.475 2 DEBUG oslo_concurrency.lockutils [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.476 2 DEBUG oslo_concurrency.lockutils [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.491 2 DEBUG oslo_concurrency.processutils [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.527 2 DEBUG nova.network.neutron [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] No network configured allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1188
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.528 2 DEBUG nova.compute.manager [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Instance network_info: |[]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.545 2 DEBUG oslo_concurrency.processutils [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.546 2 DEBUG oslo_concurrency.processutils [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/7c66bab5-e0cd-40db-b080-086245f15c4f/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.581 2 DEBUG oslo_concurrency.processutils [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/7c66bab5-e0cd-40db-b080-086245f15c4f/disk 1073741824" returned: 0 in 0.035s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.582 2 DEBUG oslo_concurrency.lockutils [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.106s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.583 2 DEBUG oslo_concurrency.processutils [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.658 2 DEBUG oslo_concurrency.processutils [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.076s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.660 2 DEBUG nova.virt.disk.api [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Checking if we can resize image /var/lib/nova/instances/7c66bab5-e0cd-40db-b080-086245f15c4f/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.660 2 DEBUG oslo_concurrency.processutils [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/7c66bab5-e0cd-40db-b080-086245f15c4f/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.717 2 DEBUG oslo_concurrency.processutils [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/7c66bab5-e0cd-40db-b080-086245f15c4f/disk --force-share --output=json" returned: 0 in 0.057s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.718 2 DEBUG nova.virt.disk.api [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Cannot resize image /var/lib/nova/instances/7c66bab5-e0cd-40db-b080-086245f15c4f/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.719 2 DEBUG nova.objects.instance [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Lazy-loading 'migration_context' on Instance uuid 7c66bab5-e0cd-40db-b080-086245f15c4f obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.763 2 DEBUG nova.virt.libvirt.driver [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.764 2 DEBUG nova.virt.libvirt.driver [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Ensure instance console log exists: /var/lib/nova/instances/7c66bab5-e0cd-40db-b080-086245f15c4f/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.764 2 DEBUG oslo_concurrency.lockutils [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.765 2 DEBUG oslo_concurrency.lockutils [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.765 2 DEBUG oslo_concurrency.lockutils [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.767 2 DEBUG nova.virt.libvirt.driver [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Start _get_guest_xml network_info=[] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.772 2 WARNING nova.virt.libvirt.driver [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.780 2 DEBUG nova.virt.libvirt.host [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.781 2 DEBUG nova.virt.libvirt.host [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.785 2 DEBUG nova.virt.libvirt.host [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.786 2 DEBUG nova.virt.libvirt.host [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.787 2 DEBUG nova.virt.libvirt.driver [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.788 2 DEBUG nova.virt.hardware [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.788 2 DEBUG nova.virt.hardware [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.788 2 DEBUG nova.virt.hardware [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.789 2 DEBUG nova.virt.hardware [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.789 2 DEBUG nova.virt.hardware [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.789 2 DEBUG nova.virt.hardware [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.790 2 DEBUG nova.virt.hardware [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.790 2 DEBUG nova.virt.hardware [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.790 2 DEBUG nova.virt.hardware [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.790 2 DEBUG nova.virt.hardware [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.791 2 DEBUG nova.virt.hardware [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.795 2 DEBUG nova.objects.instance [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Lazy-loading 'pci_devices' on Instance uuid 7c66bab5-e0cd-40db-b080-086245f15c4f obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.832 2 DEBUG nova.virt.libvirt.driver [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:05:53 compute-0 nova_compute[192079]:   <uuid>7c66bab5-e0cd-40db-b080-086245f15c4f</uuid>
Oct 02 12:05:53 compute-0 nova_compute[192079]:   <name>instance-0000001f</name>
Oct 02 12:05:53 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:05:53 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:05:53 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:05:53 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:       <nova:name>tempest-LiveMigrationNegativeTest-server-1115992339</nova:name>
Oct 02 12:05:53 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:05:53</nova:creationTime>
Oct 02 12:05:53 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:05:53 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:05:53 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:05:53 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:05:53 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:05:53 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:05:53 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:05:53 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:05:53 compute-0 nova_compute[192079]:         <nova:user uuid="c8270c33bced4c1b806e47efa970c01e">tempest-LiveMigrationNegativeTest-50793384-project-member</nova:user>
Oct 02 12:05:53 compute-0 nova_compute[192079]:         <nova:project uuid="fb361251cae94d42aaec252513e2f05c">tempest-LiveMigrationNegativeTest-50793384</nova:project>
Oct 02 12:05:53 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:05:53 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:       <nova:ports/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:05:53 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:05:53 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <system>
Oct 02 12:05:53 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:05:53 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:05:53 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:05:53 compute-0 nova_compute[192079]:       <entry name="serial">7c66bab5-e0cd-40db-b080-086245f15c4f</entry>
Oct 02 12:05:53 compute-0 nova_compute[192079]:       <entry name="uuid">7c66bab5-e0cd-40db-b080-086245f15c4f</entry>
Oct 02 12:05:53 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     </system>
Oct 02 12:05:53 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:05:53 compute-0 nova_compute[192079]:   <os>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:   </os>
Oct 02 12:05:53 compute-0 nova_compute[192079]:   <features>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:   </features>
Oct 02 12:05:53 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:05:53 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:05:53 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:05:53 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/7c66bab5-e0cd-40db-b080-086245f15c4f/disk"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:05:53 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/7c66bab5-e0cd-40db-b080-086245f15c4f/disk.config"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:05:53 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/7c66bab5-e0cd-40db-b080-086245f15c4f/console.log" append="off"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <video>
Oct 02 12:05:53 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     </video>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:05:53 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:05:53 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:05:53 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:05:53 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:05:53 compute-0 nova_compute[192079]: </domain>
Oct 02 12:05:53 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.941 2 DEBUG nova.virt.libvirt.driver [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.942 2 DEBUG nova.virt.libvirt.driver [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:05:53 compute-0 nova_compute[192079]: 2025-10-02 12:05:53.942 2 INFO nova.virt.libvirt.driver [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Using config drive
Oct 02 12:05:54 compute-0 nova_compute[192079]: 2025-10-02 12:05:54.180 2 INFO nova.virt.libvirt.driver [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Creating config drive at /var/lib/nova/instances/7c66bab5-e0cd-40db-b080-086245f15c4f/disk.config
Oct 02 12:05:54 compute-0 nova_compute[192079]: 2025-10-02 12:05:54.187 2 DEBUG oslo_concurrency.processutils [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/7c66bab5-e0cd-40db-b080-086245f15c4f/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpd__r9wjz execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:05:54 compute-0 nova_compute[192079]: 2025-10-02 12:05:54.329 2 DEBUG oslo_concurrency.processutils [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/7c66bab5-e0cd-40db-b080-086245f15c4f/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpd__r9wjz" returned: 0 in 0.142s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:05:54 compute-0 systemd-machined[152150]: New machine qemu-19-instance-0000001f.
Oct 02 12:05:54 compute-0 systemd[1]: Started Virtual Machine qemu-19-instance-0000001f.
Oct 02 12:05:54 compute-0 podman[224429]: 2025-10-02 12:05:54.47931797 +0000 UTC m=+0.064457265 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator 
team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, managed_by=edpm_ansible, config_id=ovn_metadata_agent, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, container_name=ovn_metadata_agent)
Oct 02 12:05:54 compute-0 podman[224430]: 2025-10-02 12:05:54.506864783 +0000 UTC m=+0.076807842 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:05:54 compute-0 nova_compute[192079]: 2025-10-02 12:05:54.627 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:55 compute-0 nova_compute[192079]: 2025-10-02 12:05:55.319 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406755.3187466, 7c66bab5-e0cd-40db-b080-086245f15c4f => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:05:55 compute-0 nova_compute[192079]: 2025-10-02 12:05:55.319 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] VM Resumed (Lifecycle Event)
Oct 02 12:05:55 compute-0 nova_compute[192079]: 2025-10-02 12:05:55.322 2 DEBUG nova.compute.manager [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:05:55 compute-0 nova_compute[192079]: 2025-10-02 12:05:55.323 2 DEBUG nova.virt.libvirt.driver [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:05:55 compute-0 nova_compute[192079]: 2025-10-02 12:05:55.327 2 INFO nova.virt.libvirt.driver [-] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Instance spawned successfully.
Oct 02 12:05:55 compute-0 nova_compute[192079]: 2025-10-02 12:05:55.327 2 DEBUG nova.virt.libvirt.driver [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:05:55 compute-0 nova_compute[192079]: 2025-10-02 12:05:55.372 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:05:55 compute-0 nova_compute[192079]: 2025-10-02 12:05:55.378 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:05:55 compute-0 nova_compute[192079]: 2025-10-02 12:05:55.382 2 DEBUG nova.virt.libvirt.driver [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:05:55 compute-0 nova_compute[192079]: 2025-10-02 12:05:55.382 2 DEBUG nova.virt.libvirt.driver [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:05:55 compute-0 nova_compute[192079]: 2025-10-02 12:05:55.383 2 DEBUG nova.virt.libvirt.driver [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:05:55 compute-0 nova_compute[192079]: 2025-10-02 12:05:55.383 2 DEBUG nova.virt.libvirt.driver [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:05:55 compute-0 nova_compute[192079]: 2025-10-02 12:05:55.383 2 DEBUG nova.virt.libvirt.driver [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:05:55 compute-0 nova_compute[192079]: 2025-10-02 12:05:55.384 2 DEBUG nova.virt.libvirt.driver [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:05:55 compute-0 nova_compute[192079]: 2025-10-02 12:05:55.421 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:05:55 compute-0 nova_compute[192079]: 2025-10-02 12:05:55.422 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406755.3193634, 7c66bab5-e0cd-40db-b080-086245f15c4f => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:05:55 compute-0 nova_compute[192079]: 2025-10-02 12:05:55.422 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] VM Started (Lifecycle Event)
Oct 02 12:05:55 compute-0 nova_compute[192079]: 2025-10-02 12:05:55.424 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:05:55 compute-0 nova_compute[192079]: 2025-10-02 12:05:55.445 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:05:55 compute-0 nova_compute[192079]: 2025-10-02 12:05:55.447 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:05:55 compute-0 nova_compute[192079]: 2025-10-02 12:05:55.494 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:05:55 compute-0 nova_compute[192079]: 2025-10-02 12:05:55.530 2 INFO nova.compute.manager [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Took 2.15 seconds to spawn the instance on the hypervisor.
Oct 02 12:05:55 compute-0 nova_compute[192079]: 2025-10-02 12:05:55.530 2 DEBUG nova.compute.manager [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:05:55 compute-0 nova_compute[192079]: 2025-10-02 12:05:55.719 2 INFO nova.compute.manager [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Took 3.07 seconds to build instance.
Oct 02 12:05:55 compute-0 nova_compute[192079]: 2025-10-02 12:05:55.747 2 DEBUG oslo_concurrency.lockutils [None req-7ca968c3-cfc3-4286-81da-9599bdb75956 c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Lock "7c66bab5-e0cd-40db-b080-086245f15c4f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 3.221s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:05:59 compute-0 nova_compute[192079]: 2025-10-02 12:05:59.671 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:00 compute-0 nova_compute[192079]: 2025-10-02 12:06:00.426 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:02 compute-0 podman[224489]: 2025-10-02 12:06:02.171839316 +0000 UTC m=+0.072550206 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, config_id=edpm, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', 
'/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, managed_by=edpm_ansible)
Oct 02 12:06:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:02.207 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:06:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:02.208 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:06:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:02.208 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:06:03 compute-0 ovn_controller[94336]: 2025-10-02T12:06:03Z|00117|memory_trim|INFO|Detected inactivity (last active 30004 ms ago): trimming memory
Oct 02 12:06:04 compute-0 nova_compute[192079]: 2025-10-02 12:06:04.674 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:05 compute-0 nova_compute[192079]: 2025-10-02 12:06:05.428 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:06 compute-0 nova_compute[192079]: 2025-10-02 12:06:06.070 2 DEBUG oslo_concurrency.lockutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Acquiring lock "6068f987-bbd4-4dac-a691-169dcb4570a8" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:06:06 compute-0 nova_compute[192079]: 2025-10-02 12:06:06.070 2 DEBUG oslo_concurrency.lockutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Lock "6068f987-bbd4-4dac-a691-169dcb4570a8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:06:06 compute-0 nova_compute[192079]: 2025-10-02 12:06:06.089 2 DEBUG nova.compute.manager [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:06:06 compute-0 nova_compute[192079]: 2025-10-02 12:06:06.275 2 DEBUG oslo_concurrency.lockutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:06:06 compute-0 nova_compute[192079]: 2025-10-02 12:06:06.276 2 DEBUG oslo_concurrency.lockutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:06:06 compute-0 nova_compute[192079]: 2025-10-02 12:06:06.284 2 DEBUG nova.virt.hardware [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:06:06 compute-0 nova_compute[192079]: 2025-10-02 12:06:06.285 2 INFO nova.compute.claims [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:06:06 compute-0 nova_compute[192079]: 2025-10-02 12:06:06.497 2 DEBUG nova.compute.provider_tree [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:06:06 compute-0 nova_compute[192079]: 2025-10-02 12:06:06.514 2 DEBUG nova.scheduler.client.report [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:06:06 compute-0 nova_compute[192079]: 2025-10-02 12:06:06.562 2 DEBUG oslo_concurrency.lockutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.286s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:06:06 compute-0 nova_compute[192079]: 2025-10-02 12:06:06.562 2 DEBUG nova.compute.manager [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:06:06 compute-0 nova_compute[192079]: 2025-10-02 12:06:06.624 2 DEBUG nova.compute.manager [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:06:06 compute-0 nova_compute[192079]: 2025-10-02 12:06:06.625 2 DEBUG nova.network.neutron [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:06:06 compute-0 nova_compute[192079]: 2025-10-02 12:06:06.660 2 INFO nova.virt.libvirt.driver [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:06:06 compute-0 nova_compute[192079]: 2025-10-02 12:06:06.687 2 DEBUG nova.compute.manager [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:06:06 compute-0 nova_compute[192079]: 2025-10-02 12:06:06.828 2 DEBUG nova.compute.manager [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:06:06 compute-0 nova_compute[192079]: 2025-10-02 12:06:06.830 2 DEBUG nova.virt.libvirt.driver [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:06:06 compute-0 nova_compute[192079]: 2025-10-02 12:06:06.831 2 INFO nova.virt.libvirt.driver [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Creating image(s)
Oct 02 12:06:06 compute-0 nova_compute[192079]: 2025-10-02 12:06:06.831 2 DEBUG oslo_concurrency.lockutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Acquiring lock "/var/lib/nova/instances/6068f987-bbd4-4dac-a691-169dcb4570a8/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:06:06 compute-0 nova_compute[192079]: 2025-10-02 12:06:06.832 2 DEBUG oslo_concurrency.lockutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Lock "/var/lib/nova/instances/6068f987-bbd4-4dac-a691-169dcb4570a8/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:06:06 compute-0 nova_compute[192079]: 2025-10-02 12:06:06.833 2 DEBUG oslo_concurrency.lockutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Lock "/var/lib/nova/instances/6068f987-bbd4-4dac-a691-169dcb4570a8/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:06:06 compute-0 nova_compute[192079]: 2025-10-02 12:06:06.853 2 DEBUG oslo_concurrency.processutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:06:06 compute-0 nova_compute[192079]: 2025-10-02 12:06:06.949 2 DEBUG oslo_concurrency.processutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.095s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:06:06 compute-0 nova_compute[192079]: 2025-10-02 12:06:06.950 2 DEBUG oslo_concurrency.lockutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:06:06 compute-0 nova_compute[192079]: 2025-10-02 12:06:06.951 2 DEBUG oslo_concurrency.lockutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:06:06 compute-0 nova_compute[192079]: 2025-10-02 12:06:06.965 2 DEBUG oslo_concurrency.processutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:06:07 compute-0 nova_compute[192079]: 2025-10-02 12:06:07.033 2 DEBUG oslo_concurrency.processutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.067s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:06:07 compute-0 nova_compute[192079]: 2025-10-02 12:06:07.035 2 DEBUG oslo_concurrency.processutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/6068f987-bbd4-4dac-a691-169dcb4570a8/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:06:07 compute-0 podman[224520]: 2025-10-02 12:06:07.165705905 +0000 UTC m=+0.065978456 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., distribution-scope=public, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., config_id=edpm, vendor=Red Hat, Inc., vcs-type=git, build-date=2025-08-20T13:12:41, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, managed_by=edpm_ansible, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, name=ubi9-minimal, com.redhat.component=ubi9-minimal-container, container_name=openstack_network_exporter, io.openshift.tags=minimal rhel9, maintainer=Red Hat, Inc., release=1755695350, architecture=x86_64, url=https://catalog.redhat.com/en/search?searchType=containers, io.buildah.version=1.33.7, io.openshift.expose-services=, version=9.6)
Oct 02 12:06:07 compute-0 podman[224521]: 2025-10-02 12:06:07.182780152 +0000 UTC m=+0.078841357 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, container_name=multipathd, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS)
Oct 02 12:06:07 compute-0 nova_compute[192079]: 2025-10-02 12:06:07.190 2 DEBUG oslo_concurrency.processutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/6068f987-bbd4-4dac-a691-169dcb4570a8/disk 1073741824" returned: 0 in 0.156s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:06:07 compute-0 nova_compute[192079]: 2025-10-02 12:06:07.191 2 DEBUG oslo_concurrency.lockutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.240s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:06:07 compute-0 nova_compute[192079]: 2025-10-02 12:06:07.192 2 DEBUG oslo_concurrency.processutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:06:07 compute-0 nova_compute[192079]: 2025-10-02 12:06:07.242 2 DEBUG oslo_concurrency.processutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.050s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:06:07 compute-0 nova_compute[192079]: 2025-10-02 12:06:07.243 2 DEBUG nova.virt.disk.api [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Checking if we can resize image /var/lib/nova/instances/6068f987-bbd4-4dac-a691-169dcb4570a8/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:06:07 compute-0 nova_compute[192079]: 2025-10-02 12:06:07.243 2 DEBUG oslo_concurrency.processutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/6068f987-bbd4-4dac-a691-169dcb4570a8/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:06:07 compute-0 nova_compute[192079]: 2025-10-02 12:06:07.261 2 DEBUG nova.policy [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:06:07 compute-0 nova_compute[192079]: 2025-10-02 12:06:07.294 2 DEBUG oslo_concurrency.processutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/6068f987-bbd4-4dac-a691-169dcb4570a8/disk --force-share --output=json" returned: 0 in 0.051s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:06:07 compute-0 nova_compute[192079]: 2025-10-02 12:06:07.295 2 DEBUG nova.virt.disk.api [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Cannot resize image /var/lib/nova/instances/6068f987-bbd4-4dac-a691-169dcb4570a8/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:06:07 compute-0 nova_compute[192079]: 2025-10-02 12:06:07.296 2 DEBUG nova.objects.instance [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Lazy-loading 'migration_context' on Instance uuid 6068f987-bbd4-4dac-a691-169dcb4570a8 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:06:07 compute-0 nova_compute[192079]: 2025-10-02 12:06:07.329 2 DEBUG nova.virt.libvirt.driver [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:06:07 compute-0 nova_compute[192079]: 2025-10-02 12:06:07.329 2 DEBUG nova.virt.libvirt.driver [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Ensure instance console log exists: /var/lib/nova/instances/6068f987-bbd4-4dac-a691-169dcb4570a8/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:06:07 compute-0 nova_compute[192079]: 2025-10-02 12:06:07.330 2 DEBUG oslo_concurrency.lockutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:06:07 compute-0 nova_compute[192079]: 2025-10-02 12:06:07.331 2 DEBUG oslo_concurrency.lockutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:06:07 compute-0 nova_compute[192079]: 2025-10-02 12:06:07.331 2 DEBUG oslo_concurrency.lockutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:06:07 compute-0 nova_compute[192079]: 2025-10-02 12:06:07.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:06:09 compute-0 nova_compute[192079]: 2025-10-02 12:06:09.677 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:09 compute-0 nova_compute[192079]: 2025-10-02 12:06:09.973 2 DEBUG nova.network.neutron [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Successfully created port: 876740d6-da80-4b19-9afb-af6d9bf00f50 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:06:10 compute-0 nova_compute[192079]: 2025-10-02 12:06:10.430 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:10 compute-0 nova_compute[192079]: 2025-10-02 12:06:10.660 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:06:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:12.372 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=9, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=8) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:06:12 compute-0 nova_compute[192079]: 2025-10-02 12:06:12.372 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:12.373 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 2 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:06:12 compute-0 nova_compute[192079]: 2025-10-02 12:06:12.716 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:06:12 compute-0 nova_compute[192079]: 2025-10-02 12:06:12.717 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:06:12 compute-0 nova_compute[192079]: 2025-10-02 12:06:12.718 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:06:12 compute-0 nova_compute[192079]: 2025-10-02 12:06:12.718 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:06:12 compute-0 nova_compute[192079]: 2025-10-02 12:06:12.718 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:06:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:12.756 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=10, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=9) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:06:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:12.757 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 5 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:06:12 compute-0 nova_compute[192079]: 2025-10-02 12:06:12.757 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:12 compute-0 nova_compute[192079]: 2025-10-02 12:06:12.777 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:06:12 compute-0 nova_compute[192079]: 2025-10-02 12:06:12.778 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:06:12 compute-0 nova_compute[192079]: 2025-10-02 12:06:12.778 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:06:12 compute-0 nova_compute[192079]: 2025-10-02 12:06:12.778 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:06:12 compute-0 nova_compute[192079]: 2025-10-02 12:06:12.869 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/7c66bab5-e0cd-40db-b080-086245f15c4f/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:06:12 compute-0 nova_compute[192079]: 2025-10-02 12:06:12.983 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/7c66bab5-e0cd-40db-b080-086245f15c4f/disk --force-share --output=json" returned: 0 in 0.114s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:06:12 compute-0 nova_compute[192079]: 2025-10-02 12:06:12.985 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/7c66bab5-e0cd-40db-b080-086245f15c4f/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:06:13 compute-0 nova_compute[192079]: 2025-10-02 12:06:13.041 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/7c66bab5-e0cd-40db-b080-086245f15c4f/disk --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:06:13 compute-0 nova_compute[192079]: 2025-10-02 12:06:13.172 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:06:13 compute-0 nova_compute[192079]: 2025-10-02 12:06:13.173 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5571MB free_disk=73.3969841003418GB free_vcpus=7 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:06:13 compute-0 nova_compute[192079]: 2025-10-02 12:06:13.173 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:06:13 compute-0 nova_compute[192079]: 2025-10-02 12:06:13.174 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:06:13 compute-0 nova_compute[192079]: 2025-10-02 12:06:13.307 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance 7c66bab5-e0cd-40db-b080-086245f15c4f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:06:13 compute-0 nova_compute[192079]: 2025-10-02 12:06:13.308 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance 6068f987-bbd4-4dac-a691-169dcb4570a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:06:13 compute-0 nova_compute[192079]: 2025-10-02 12:06:13.309 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 2 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:06:13 compute-0 nova_compute[192079]: 2025-10-02 12:06:13.309 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=768MB phys_disk=79GB used_disk=2GB total_vcpus=8 used_vcpus=2 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:06:13 compute-0 nova_compute[192079]: 2025-10-02 12:06:13.459 2 DEBUG nova.network.neutron [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Successfully updated port: 876740d6-da80-4b19-9afb-af6d9bf00f50 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:06:13 compute-0 nova_compute[192079]: 2025-10-02 12:06:13.499 2 DEBUG oslo_concurrency.lockutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Acquiring lock "refresh_cache-6068f987-bbd4-4dac-a691-169dcb4570a8" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:06:13 compute-0 nova_compute[192079]: 2025-10-02 12:06:13.499 2 DEBUG oslo_concurrency.lockutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Acquired lock "refresh_cache-6068f987-bbd4-4dac-a691-169dcb4570a8" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:06:13 compute-0 nova_compute[192079]: 2025-10-02 12:06:13.499 2 DEBUG nova.network.neutron [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:06:13 compute-0 nova_compute[192079]: 2025-10-02 12:06:13.558 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:06:13 compute-0 nova_compute[192079]: 2025-10-02 12:06:13.577 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:06:13 compute-0 nova_compute[192079]: 2025-10-02 12:06:13.622 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:06:13 compute-0 nova_compute[192079]: 2025-10-02 12:06:13.623 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.449s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:06:13 compute-0 nova_compute[192079]: 2025-10-02 12:06:13.714 2 DEBUG nova.network.neutron [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:06:14 compute-0 podman[224595]: 2025-10-02 12:06:14.156269559 +0000 UTC m=+0.063820287 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:06:14 compute-0 podman[224596]: 2025-10-02 12:06:14.168973387 +0000 UTC m=+0.071445606 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, config_id=iscsid, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=iscsid, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, io.buildah.version=1.41.3)
Oct 02 12:06:14 compute-0 nova_compute[192079]: 2025-10-02 12:06:14.254 2 DEBUG nova.compute.manager [req-500f6677-f915-44dc-88cb-fda37847f807 req-8624b4cc-31c6-4fdb-9ee7-a4bd56296e96 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Received event network-changed-876740d6-da80-4b19-9afb-af6d9bf00f50 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:06:14 compute-0 nova_compute[192079]: 2025-10-02 12:06:14.255 2 DEBUG nova.compute.manager [req-500f6677-f915-44dc-88cb-fda37847f807 req-8624b4cc-31c6-4fdb-9ee7-a4bd56296e96 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Refreshing instance network info cache due to event network-changed-876740d6-da80-4b19-9afb-af6d9bf00f50. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:06:14 compute-0 nova_compute[192079]: 2025-10-02 12:06:14.255 2 DEBUG oslo_concurrency.lockutils [req-500f6677-f915-44dc-88cb-fda37847f807 req-8624b4cc-31c6-4fdb-9ee7-a4bd56296e96 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-6068f987-bbd4-4dac-a691-169dcb4570a8" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:06:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:14.375 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '9'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:06:14 compute-0 nova_compute[192079]: 2025-10-02 12:06:14.571 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:06:14 compute-0 nova_compute[192079]: 2025-10-02 12:06:14.572 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:06:14 compute-0 nova_compute[192079]: 2025-10-02 12:06:14.572 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:06:14 compute-0 nova_compute[192079]: 2025-10-02 12:06:14.572 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:06:14 compute-0 nova_compute[192079]: 2025-10-02 12:06:14.593 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Skipping network cache update for instance because it is Building. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9871
Oct 02 12:06:14 compute-0 nova_compute[192079]: 2025-10-02 12:06:14.782 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "refresh_cache-7c66bab5-e0cd-40db-b080-086245f15c4f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:06:14 compute-0 nova_compute[192079]: 2025-10-02 12:06:14.782 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquired lock "refresh_cache-7c66bab5-e0cd-40db-b080-086245f15c4f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:06:14 compute-0 nova_compute[192079]: 2025-10-02 12:06:14.782 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Forcefully refreshing network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2004
Oct 02 12:06:14 compute-0 nova_compute[192079]: 2025-10-02 12:06:14.783 2 DEBUG nova.objects.instance [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lazy-loading 'info_cache' on Instance uuid 7c66bab5-e0cd-40db-b080-086245f15c4f obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:06:14 compute-0 nova_compute[192079]: 2025-10-02 12:06:14.792 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.002 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.108 2 DEBUG nova.network.neutron [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Updating instance_info_cache with network_info: [{"id": "876740d6-da80-4b19-9afb-af6d9bf00f50", "address": "fa:16:3e:4f:35:f0", "network": {"id": "66b5a7c3-fe3e-42b0-aea6-19534bca6e0e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1726703238-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "db3f04a20fd740c1af3139196dc928d2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap876740d6-da", "ovs_interfaceid": "876740d6-da80-4b19-9afb-af6d9bf00f50", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.126 2 DEBUG oslo_concurrency.lockutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Releasing lock "refresh_cache-6068f987-bbd4-4dac-a691-169dcb4570a8" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.126 2 DEBUG nova.compute.manager [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Instance network_info: |[{"id": "876740d6-da80-4b19-9afb-af6d9bf00f50", "address": "fa:16:3e:4f:35:f0", "network": {"id": "66b5a7c3-fe3e-42b0-aea6-19534bca6e0e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1726703238-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "db3f04a20fd740c1af3139196dc928d2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap876740d6-da", "ovs_interfaceid": "876740d6-da80-4b19-9afb-af6d9bf00f50", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.126 2 DEBUG oslo_concurrency.lockutils [req-500f6677-f915-44dc-88cb-fda37847f807 req-8624b4cc-31c6-4fdb-9ee7-a4bd56296e96 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-6068f987-bbd4-4dac-a691-169dcb4570a8" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.127 2 DEBUG nova.network.neutron [req-500f6677-f915-44dc-88cb-fda37847f807 req-8624b4cc-31c6-4fdb-9ee7-a4bd56296e96 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Refreshing network info cache for port 876740d6-da80-4b19-9afb-af6d9bf00f50 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.130 2 DEBUG nova.virt.libvirt.driver [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Start _get_guest_xml network_info=[{"id": "876740d6-da80-4b19-9afb-af6d9bf00f50", "address": "fa:16:3e:4f:35:f0", "network": {"id": "66b5a7c3-fe3e-42b0-aea6-19534bca6e0e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1726703238-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "db3f04a20fd740c1af3139196dc928d2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap876740d6-da", "ovs_interfaceid": "876740d6-da80-4b19-9afb-af6d9bf00f50", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.135 2 WARNING nova.virt.libvirt.driver [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.141 2 DEBUG nova.virt.libvirt.host [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.141 2 DEBUG nova.virt.libvirt.host [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.150 2 DEBUG nova.virt.libvirt.host [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.151 2 DEBUG nova.virt.libvirt.host [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.152 2 DEBUG nova.virt.libvirt.driver [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.153 2 DEBUG nova.virt.hardware [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.153 2 DEBUG nova.virt.hardware [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.154 2 DEBUG nova.virt.hardware [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.154 2 DEBUG nova.virt.hardware [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.154 2 DEBUG nova.virt.hardware [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.155 2 DEBUG nova.virt.hardware [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.155 2 DEBUG nova.virt.hardware [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.156 2 DEBUG nova.virt.hardware [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.156 2 DEBUG nova.virt.hardware [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.156 2 DEBUG nova.virt.hardware [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.157 2 DEBUG nova.virt.hardware [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.162 2 DEBUG nova.virt.libvirt.vif [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:06:03Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ServersAdminTestJSON-server-537172074',display_name='tempest-ServersAdminTestJSON-server-537172074',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serversadmintestjson-server-537172074',id=34,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='db3f04a20fd740c1af3139196dc928d2',ramdisk_id='',reservation_id='r-04yxjkba',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServersAdminTestJSON-1782354187',owner_user_name='tempest-ServersAdminTestJSON-1782354187-pr
oject-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:06:06Z,user_data=None,user_id='9258efa4511c4bb3813eca27b75b1008',uuid=6068f987-bbd4-4dac-a691-169dcb4570a8,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "876740d6-da80-4b19-9afb-af6d9bf00f50", "address": "fa:16:3e:4f:35:f0", "network": {"id": "66b5a7c3-fe3e-42b0-aea6-19534bca6e0e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1726703238-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "db3f04a20fd740c1af3139196dc928d2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap876740d6-da", "ovs_interfaceid": "876740d6-da80-4b19-9afb-af6d9bf00f50", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.162 2 DEBUG nova.network.os_vif_util [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Converting VIF {"id": "876740d6-da80-4b19-9afb-af6d9bf00f50", "address": "fa:16:3e:4f:35:f0", "network": {"id": "66b5a7c3-fe3e-42b0-aea6-19534bca6e0e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1726703238-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "db3f04a20fd740c1af3139196dc928d2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap876740d6-da", "ovs_interfaceid": "876740d6-da80-4b19-9afb-af6d9bf00f50", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.164 2 DEBUG nova.network.os_vif_util [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:4f:35:f0,bridge_name='br-int',has_traffic_filtering=True,id=876740d6-da80-4b19-9afb-af6d9bf00f50,network=Network(66b5a7c3-fe3e-42b0-aea6-19534bca6e0e),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap876740d6-da') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.165 2 DEBUG nova.objects.instance [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Lazy-loading 'pci_devices' on Instance uuid 6068f987-bbd4-4dac-a691-169dcb4570a8 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.186 2 DEBUG nova.virt.libvirt.driver [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:06:15 compute-0 nova_compute[192079]:   <uuid>6068f987-bbd4-4dac-a691-169dcb4570a8</uuid>
Oct 02 12:06:15 compute-0 nova_compute[192079]:   <name>instance-00000022</name>
Oct 02 12:06:15 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:06:15 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:06:15 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:06:15 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:       <nova:name>tempest-ServersAdminTestJSON-server-537172074</nova:name>
Oct 02 12:06:15 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:06:15</nova:creationTime>
Oct 02 12:06:15 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:06:15 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:06:15 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:06:15 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:06:15 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:06:15 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:06:15 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:06:15 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:06:15 compute-0 nova_compute[192079]:         <nova:user uuid="9258efa4511c4bb3813eca27b75b1008">tempest-ServersAdminTestJSON-1782354187-project-member</nova:user>
Oct 02 12:06:15 compute-0 nova_compute[192079]:         <nova:project uuid="db3f04a20fd740c1af3139196dc928d2">tempest-ServersAdminTestJSON-1782354187</nova:project>
Oct 02 12:06:15 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:06:15 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:06:15 compute-0 nova_compute[192079]:         <nova:port uuid="876740d6-da80-4b19-9afb-af6d9bf00f50">
Oct 02 12:06:15 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.14" ipVersion="4"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:06:15 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:06:15 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:06:15 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <system>
Oct 02 12:06:15 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:06:15 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:06:15 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:06:15 compute-0 nova_compute[192079]:       <entry name="serial">6068f987-bbd4-4dac-a691-169dcb4570a8</entry>
Oct 02 12:06:15 compute-0 nova_compute[192079]:       <entry name="uuid">6068f987-bbd4-4dac-a691-169dcb4570a8</entry>
Oct 02 12:06:15 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     </system>
Oct 02 12:06:15 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:06:15 compute-0 nova_compute[192079]:   <os>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:   </os>
Oct 02 12:06:15 compute-0 nova_compute[192079]:   <features>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:   </features>
Oct 02 12:06:15 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:06:15 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:06:15 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:06:15 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/6068f987-bbd4-4dac-a691-169dcb4570a8/disk"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:06:15 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/6068f987-bbd4-4dac-a691-169dcb4570a8/disk.config"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:06:15 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:4f:35:f0"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:       <target dev="tap876740d6-da"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:06:15 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/6068f987-bbd4-4dac-a691-169dcb4570a8/console.log" append="off"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <video>
Oct 02 12:06:15 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     </video>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:06:15 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:06:15 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:06:15 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:06:15 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:06:15 compute-0 nova_compute[192079]: </domain>
Oct 02 12:06:15 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.186 2 DEBUG nova.compute.manager [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Preparing to wait for external event network-vif-plugged-876740d6-da80-4b19-9afb-af6d9bf00f50 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.187 2 DEBUG oslo_concurrency.lockutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Acquiring lock "6068f987-bbd4-4dac-a691-169dcb4570a8-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.187 2 DEBUG oslo_concurrency.lockutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Lock "6068f987-bbd4-4dac-a691-169dcb4570a8-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.187 2 DEBUG oslo_concurrency.lockutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Lock "6068f987-bbd4-4dac-a691-169dcb4570a8-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.189 2 DEBUG nova.virt.libvirt.vif [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:06:03Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ServersAdminTestJSON-server-537172074',display_name='tempest-ServersAdminTestJSON-server-537172074',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serversadmintestjson-server-537172074',id=34,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='db3f04a20fd740c1af3139196dc928d2',ramdisk_id='',reservation_id='r-04yxjkba',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServersAdminTestJSON-1782354187',owner_user_name='tempest-ServersAdminTestJSON-178
2354187-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:06:06Z,user_data=None,user_id='9258efa4511c4bb3813eca27b75b1008',uuid=6068f987-bbd4-4dac-a691-169dcb4570a8,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "876740d6-da80-4b19-9afb-af6d9bf00f50", "address": "fa:16:3e:4f:35:f0", "network": {"id": "66b5a7c3-fe3e-42b0-aea6-19534bca6e0e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1726703238-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "db3f04a20fd740c1af3139196dc928d2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap876740d6-da", "ovs_interfaceid": "876740d6-da80-4b19-9afb-af6d9bf00f50", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.189 2 DEBUG nova.network.os_vif_util [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Converting VIF {"id": "876740d6-da80-4b19-9afb-af6d9bf00f50", "address": "fa:16:3e:4f:35:f0", "network": {"id": "66b5a7c3-fe3e-42b0-aea6-19534bca6e0e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1726703238-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "db3f04a20fd740c1af3139196dc928d2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap876740d6-da", "ovs_interfaceid": "876740d6-da80-4b19-9afb-af6d9bf00f50", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.190 2 DEBUG nova.network.os_vif_util [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:4f:35:f0,bridge_name='br-int',has_traffic_filtering=True,id=876740d6-da80-4b19-9afb-af6d9bf00f50,network=Network(66b5a7c3-fe3e-42b0-aea6-19534bca6e0e),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap876740d6-da') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.191 2 DEBUG os_vif [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:4f:35:f0,bridge_name='br-int',has_traffic_filtering=True,id=876740d6-da80-4b19-9afb-af6d9bf00f50,network=Network(66b5a7c3-fe3e-42b0-aea6-19534bca6e0e),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap876740d6-da') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.191 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.192 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.192 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.196 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.196 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap876740d6-da, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.197 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap876740d6-da, col_values=(('external_ids', {'iface-id': '876740d6-da80-4b19-9afb-af6d9bf00f50', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:4f:35:f0', 'vm-uuid': '6068f987-bbd4-4dac-a691-169dcb4570a8'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.199 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:15 compute-0 NetworkManager[51160]: <info>  [1759406775.2005] manager: (tap876740d6-da): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/60)
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.204 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.208 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.209 2 INFO os_vif [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:4f:35:f0,bridge_name='br-int',has_traffic_filtering=True,id=876740d6-da80-4b19-9afb-af6d9bf00f50,network=Network(66b5a7c3-fe3e-42b0-aea6-19534bca6e0e),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap876740d6-da')
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.352 2 DEBUG nova.virt.libvirt.driver [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.353 2 DEBUG nova.virt.libvirt.driver [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.353 2 DEBUG nova.virt.libvirt.driver [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] No VIF found with MAC fa:16:3e:4f:35:f0, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.353 2 INFO nova.virt.libvirt.driver [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Using config drive
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.365 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.382 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Releasing lock "refresh_cache-7c66bab5-e0cd-40db-b080-086245f15c4f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.383 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Updated the network info_cache for instance _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9929
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.383 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:06:15 compute-0 nova_compute[192079]: 2025-10-02 12:06:15.384 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:06:16 compute-0 nova_compute[192079]: 2025-10-02 12:06:16.842 2 INFO nova.virt.libvirt.driver [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Creating config drive at /var/lib/nova/instances/6068f987-bbd4-4dac-a691-169dcb4570a8/disk.config
Oct 02 12:06:16 compute-0 nova_compute[192079]: 2025-10-02 12:06:16.851 2 DEBUG oslo_concurrency.processutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/6068f987-bbd4-4dac-a691-169dcb4570a8/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpqf769jmq execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:06:16 compute-0 nova_compute[192079]: 2025-10-02 12:06:16.979 2 DEBUG oslo_concurrency.processutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/6068f987-bbd4-4dac-a691-169dcb4570a8/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpqf769jmq" returned: 0 in 0.128s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:06:17 compute-0 kernel: tap876740d6-da: entered promiscuous mode
Oct 02 12:06:17 compute-0 NetworkManager[51160]: <info>  [1759406777.0317] manager: (tap876740d6-da): new Tun device (/org/freedesktop/NetworkManager/Devices/61)
Oct 02 12:06:17 compute-0 ovn_controller[94336]: 2025-10-02T12:06:17Z|00118|binding|INFO|Claiming lport 876740d6-da80-4b19-9afb-af6d9bf00f50 for this chassis.
Oct 02 12:06:17 compute-0 ovn_controller[94336]: 2025-10-02T12:06:17Z|00119|binding|INFO|876740d6-da80-4b19-9afb-af6d9bf00f50: Claiming fa:16:3e:4f:35:f0 10.100.0.14
Oct 02 12:06:17 compute-0 nova_compute[192079]: 2025-10-02 12:06:17.032 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:17 compute-0 nova_compute[192079]: 2025-10-02 12:06:17.038 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:17 compute-0 systemd-udevd[224656]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:17.056 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:4f:35:f0 10.100.0.14'], port_security=['fa:16:3e:4f:35:f0 10.100.0.14'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.14/28', 'neutron:device_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-66b5a7c3-fe3e-42b0-aea6-19534bca6e0e', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'db3f04a20fd740c1af3139196dc928d2', 'neutron:revision_number': '2', 'neutron:security_group_ids': 'c69e6497-c2d4-4cc0-a1d9-2c5055cc5d77', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=5dc739b2-072d-4dd4-b9d2-9724145d12f5, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=876740d6-da80-4b19-9afb-af6d9bf00f50) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:17.058 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 876740d6-da80-4b19-9afb-af6d9bf00f50 in datapath 66b5a7c3-fe3e-42b0-aea6-19534bca6e0e bound to our chassis
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:17.061 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 66b5a7c3-fe3e-42b0-aea6-19534bca6e0e
Oct 02 12:06:17 compute-0 NetworkManager[51160]: <info>  [1759406777.0697] device (tap876740d6-da): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:06:17 compute-0 NetworkManager[51160]: <info>  [1759406777.0705] device (tap876740d6-da): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:17.074 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[06182665-e368-4ff5-a226-8aa99ccd9f95]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:17.077 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap66b5a7c3-f1 in ovnmeta-66b5a7c3-fe3e-42b0-aea6-19534bca6e0e namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:17.079 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap66b5a7c3-f0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:17.079 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f3276c81-cdcc-48df-8615-3dbb13911cc2]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:17.080 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4559c985-d2cb-4e78-813d-4e9e71c2c763]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:17.093 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[aaa693cb-6092-488b-8560-afc033156210]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:06:17 compute-0 systemd-machined[152150]: New machine qemu-20-instance-00000022.
Oct 02 12:06:17 compute-0 systemd[1]: Started Virtual Machine qemu-20-instance-00000022.
Oct 02 12:06:17 compute-0 nova_compute[192079]: 2025-10-02 12:06:17.109 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:17 compute-0 ovn_controller[94336]: 2025-10-02T12:06:17Z|00120|binding|INFO|Setting lport 876740d6-da80-4b19-9afb-af6d9bf00f50 ovn-installed in OVS
Oct 02 12:06:17 compute-0 ovn_controller[94336]: 2025-10-02T12:06:17Z|00121|binding|INFO|Setting lport 876740d6-da80-4b19-9afb-af6d9bf00f50 up in Southbound
Oct 02 12:06:17 compute-0 nova_compute[192079]: 2025-10-02 12:06:17.114 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:17.116 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[48188bb9-7670-447e-bb9d-6489053604ab]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:17.144 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[19466a61-5507-44cc-bc92-eed829f15d15]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:06:17 compute-0 systemd-udevd[224659]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:17.152 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[dd659bd6-0265-4036-8ec1-b997e9b83552]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:06:17 compute-0 NetworkManager[51160]: <info>  [1759406777.1532] manager: (tap66b5a7c3-f0): new Veth device (/org/freedesktop/NetworkManager/Devices/62)
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:17.185 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[b666550e-0554-449b-8c8f-9ab181737a6e]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:17.188 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[7dc2e31e-729b-43b1-9b15-027f0c06d441]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:06:17 compute-0 NetworkManager[51160]: <info>  [1759406777.2182] device (tap66b5a7c3-f0): carrier: link connected
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:17.224 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[3c659a73-fe28-4a4e-a58c-1171cb67329c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:17.247 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d6fcb139-297c-497e-9707-5fc14bc6d149]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap66b5a7c3-f1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:79:7b:77'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 37], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 477484, 'reachable_time': 34017, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 224692, 'error': None, 'target': 'ovnmeta-66b5a7c3-fe3e-42b0-aea6-19534bca6e0e', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:17.268 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[de5a7f4c-6fe9-4919-b38e-3de13b346510]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe79:7b77'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 477484, 'tstamp': 477484}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 224693, 'error': None, 'target': 'ovnmeta-66b5a7c3-fe3e-42b0-aea6-19534bca6e0e', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:17.284 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[485ce232-b305-4e53-8e99-8b037f6f4c9c]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap66b5a7c3-f1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:79:7b:77'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 37], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 477484, 'reachable_time': 34017, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 224694, 'error': None, 'target': 'ovnmeta-66b5a7c3-fe3e-42b0-aea6-19534bca6e0e', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:17.312 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[11db7820-a5fd-4e3c-9492-4a39dbfd6eee]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:17.389 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5023452e-1d05-4d6f-aca1-24981a02bf35]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:17.390 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap66b5a7c3-f0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:17.390 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:17.391 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap66b5a7c3-f0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:06:17 compute-0 nova_compute[192079]: 2025-10-02 12:06:17.412 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:17 compute-0 NetworkManager[51160]: <info>  [1759406777.4132] manager: (tap66b5a7c3-f0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/63)
Oct 02 12:06:17 compute-0 kernel: tap66b5a7c3-f0: entered promiscuous mode
Oct 02 12:06:17 compute-0 nova_compute[192079]: 2025-10-02 12:06:17.415 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:17.416 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap66b5a7c3-f0, col_values=(('external_ids', {'iface-id': 'a0163170-212d-4aba-9028-3d5fb4d45c5b'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:06:17 compute-0 nova_compute[192079]: 2025-10-02 12:06:17.417 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:17 compute-0 ovn_controller[94336]: 2025-10-02T12:06:17Z|00122|binding|INFO|Releasing lport a0163170-212d-4aba-9028-3d5fb4d45c5b from this chassis (sb_readonly=0)
Oct 02 12:06:17 compute-0 nova_compute[192079]: 2025-10-02 12:06:17.442 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:17.443 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/66b5a7c3-fe3e-42b0-aea6-19534bca6e0e.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/66b5a7c3-fe3e-42b0-aea6-19534bca6e0e.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:17.444 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d85b08d8-dce2-4d6a-9a31-829bd86430f6]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:17.445 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-66b5a7c3-fe3e-42b0-aea6-19534bca6e0e
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/66b5a7c3-fe3e-42b0-aea6-19534bca6e0e.pid.haproxy
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 66b5a7c3-fe3e-42b0-aea6-19534bca6e0e
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:17.445 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-66b5a7c3-fe3e-42b0-aea6-19534bca6e0e', 'env', 'PROCESS_TAG=haproxy-66b5a7c3-fe3e-42b0-aea6-19534bca6e0e', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/66b5a7c3-fe3e-42b0-aea6-19534bca6e0e.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:06:17 compute-0 nova_compute[192079]: 2025-10-02 12:06:17.591 2 DEBUG oslo_concurrency.lockutils [None req-7d707f03-f879-4164-8cc4-8b869a5b9fcd c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Acquiring lock "7c66bab5-e0cd-40db-b080-086245f15c4f" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:06:17 compute-0 nova_compute[192079]: 2025-10-02 12:06:17.592 2 DEBUG oslo_concurrency.lockutils [None req-7d707f03-f879-4164-8cc4-8b869a5b9fcd c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Lock "7c66bab5-e0cd-40db-b080-086245f15c4f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:06:17 compute-0 nova_compute[192079]: 2025-10-02 12:06:17.592 2 DEBUG oslo_concurrency.lockutils [None req-7d707f03-f879-4164-8cc4-8b869a5b9fcd c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Acquiring lock "7c66bab5-e0cd-40db-b080-086245f15c4f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:06:17 compute-0 nova_compute[192079]: 2025-10-02 12:06:17.593 2 DEBUG oslo_concurrency.lockutils [None req-7d707f03-f879-4164-8cc4-8b869a5b9fcd c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Lock "7c66bab5-e0cd-40db-b080-086245f15c4f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:06:17 compute-0 nova_compute[192079]: 2025-10-02 12:06:17.593 2 DEBUG oslo_concurrency.lockutils [None req-7d707f03-f879-4164-8cc4-8b869a5b9fcd c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Lock "7c66bab5-e0cd-40db-b080-086245f15c4f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:06:17 compute-0 nova_compute[192079]: 2025-10-02 12:06:17.610 2 INFO nova.compute.manager [None req-7d707f03-f879-4164-8cc4-8b869a5b9fcd c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Terminating instance
Oct 02 12:06:17 compute-0 nova_compute[192079]: 2025-10-02 12:06:17.628 2 DEBUG oslo_concurrency.lockutils [None req-7d707f03-f879-4164-8cc4-8b869a5b9fcd c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Acquiring lock "refresh_cache-7c66bab5-e0cd-40db-b080-086245f15c4f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:06:17 compute-0 nova_compute[192079]: 2025-10-02 12:06:17.629 2 DEBUG oslo_concurrency.lockutils [None req-7d707f03-f879-4164-8cc4-8b869a5b9fcd c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Acquired lock "refresh_cache-7c66bab5-e0cd-40db-b080-086245f15c4f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:06:17 compute-0 nova_compute[192079]: 2025-10-02 12:06:17.629 2 DEBUG nova.network.neutron [None req-7d707f03-f879-4164-8cc4-8b869a5b9fcd c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:06:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:06:17.759 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '10'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:06:17 compute-0 podman[224733]: 2025-10-02 12:06:17.770594135 +0000 UTC m=+0.025778068 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:06:17 compute-0 nova_compute[192079]: 2025-10-02 12:06:17.881 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406777.8807538, 6068f987-bbd4-4dac-a691-169dcb4570a8 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:06:17 compute-0 nova_compute[192079]: 2025-10-02 12:06:17.881 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] VM Started (Lifecycle Event)
Oct 02 12:06:17 compute-0 nova_compute[192079]: 2025-10-02 12:06:17.914 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:06:17 compute-0 nova_compute[192079]: 2025-10-02 12:06:17.918 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406777.8809571, 6068f987-bbd4-4dac-a691-169dcb4570a8 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:06:17 compute-0 nova_compute[192079]: 2025-10-02 12:06:17.918 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] VM Paused (Lifecycle Event)
Oct 02 12:06:17 compute-0 nova_compute[192079]: 2025-10-02 12:06:17.956 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:06:17 compute-0 nova_compute[192079]: 2025-10-02 12:06:17.958 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:06:17 compute-0 nova_compute[192079]: 2025-10-02 12:06:17.986 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:06:18 compute-0 podman[224733]: 2025-10-02 12:06:18.007693631 +0000 UTC m=+0.262877554 container create 374f3597b1df4b6c3ab6e6a93f6484ae5e03e96275688834703adbc15516e371 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-66b5a7c3-fe3e-42b0-aea6-19534bca6e0e, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS)
Oct 02 12:06:18 compute-0 systemd[1]: Started libpod-conmon-374f3597b1df4b6c3ab6e6a93f6484ae5e03e96275688834703adbc15516e371.scope.
Oct 02 12:06:18 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:06:18 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/36da2daddb38e03a8f113c3cc4c452e37dea82ff45e194633b1629c6075e28a8/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:06:18 compute-0 podman[224733]: 2025-10-02 12:06:18.355436756 +0000 UTC m=+0.610620709 container init 374f3597b1df4b6c3ab6e6a93f6484ae5e03e96275688834703adbc15516e371 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-66b5a7c3-fe3e-42b0-aea6-19534bca6e0e, tcib_managed=true, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2)
Oct 02 12:06:18 compute-0 podman[224733]: 2025-10-02 12:06:18.361348616 +0000 UTC m=+0.616532549 container start 374f3597b1df4b6c3ab6e6a93f6484ae5e03e96275688834703adbc15516e371 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-66b5a7c3-fe3e-42b0-aea6-19534bca6e0e, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS)
Oct 02 12:06:18 compute-0 neutron-haproxy-ovnmeta-66b5a7c3-fe3e-42b0-aea6-19534bca6e0e[224749]: [NOTICE]   (224753) : New worker (224755) forked
Oct 02 12:06:18 compute-0 neutron-haproxy-ovnmeta-66b5a7c3-fe3e-42b0-aea6-19534bca6e0e[224749]: [NOTICE]   (224753) : Loading success.
Oct 02 12:06:18 compute-0 nova_compute[192079]: 2025-10-02 12:06:18.418 2 DEBUG nova.network.neutron [None req-7d707f03-f879-4164-8cc4-8b869a5b9fcd c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:06:18 compute-0 nova_compute[192079]: 2025-10-02 12:06:18.867 2 DEBUG nova.network.neutron [req-500f6677-f915-44dc-88cb-fda37847f807 req-8624b4cc-31c6-4fdb-9ee7-a4bd56296e96 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Updated VIF entry in instance network info cache for port 876740d6-da80-4b19-9afb-af6d9bf00f50. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:06:18 compute-0 nova_compute[192079]: 2025-10-02 12:06:18.868 2 DEBUG nova.network.neutron [req-500f6677-f915-44dc-88cb-fda37847f807 req-8624b4cc-31c6-4fdb-9ee7-a4bd56296e96 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Updating instance_info_cache with network_info: [{"id": "876740d6-da80-4b19-9afb-af6d9bf00f50", "address": "fa:16:3e:4f:35:f0", "network": {"id": "66b5a7c3-fe3e-42b0-aea6-19534bca6e0e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1726703238-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "db3f04a20fd740c1af3139196dc928d2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap876740d6-da", "ovs_interfaceid": "876740d6-da80-4b19-9afb-af6d9bf00f50", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:06:18 compute-0 nova_compute[192079]: 2025-10-02 12:06:18.896 2 DEBUG oslo_concurrency.lockutils [req-500f6677-f915-44dc-88cb-fda37847f807 req-8624b4cc-31c6-4fdb-9ee7-a4bd56296e96 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-6068f987-bbd4-4dac-a691-169dcb4570a8" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.082 2 DEBUG nova.network.neutron [None req-7d707f03-f879-4164-8cc4-8b869a5b9fcd c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.097 2 DEBUG oslo_concurrency.lockutils [None req-7d707f03-f879-4164-8cc4-8b869a5b9fcd c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Releasing lock "refresh_cache-7c66bab5-e0cd-40db-b080-086245f15c4f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.097 2 DEBUG nova.compute.manager [None req-7d707f03-f879-4164-8cc4-8b869a5b9fcd c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:06:19 compute-0 systemd[1]: machine-qemu\x2d19\x2dinstance\x2d0000001f.scope: Deactivated successfully.
Oct 02 12:06:19 compute-0 systemd[1]: machine-qemu\x2d19\x2dinstance\x2d0000001f.scope: Consumed 14.091s CPU time.
Oct 02 12:06:19 compute-0 systemd-machined[152150]: Machine qemu-19-instance-0000001f terminated.
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.302 2 DEBUG nova.compute.manager [req-4e9e1f5c-e91b-459d-af64-658a70242355 req-0b1b3bd5-72a6-495f-a164-b06fe941a3e8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Received event network-vif-plugged-876740d6-da80-4b19-9afb-af6d9bf00f50 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.302 2 DEBUG oslo_concurrency.lockutils [req-4e9e1f5c-e91b-459d-af64-658a70242355 req-0b1b3bd5-72a6-495f-a164-b06fe941a3e8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "6068f987-bbd4-4dac-a691-169dcb4570a8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.303 2 DEBUG oslo_concurrency.lockutils [req-4e9e1f5c-e91b-459d-af64-658a70242355 req-0b1b3bd5-72a6-495f-a164-b06fe941a3e8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6068f987-bbd4-4dac-a691-169dcb4570a8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.303 2 DEBUG oslo_concurrency.lockutils [req-4e9e1f5c-e91b-459d-af64-658a70242355 req-0b1b3bd5-72a6-495f-a164-b06fe941a3e8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6068f987-bbd4-4dac-a691-169dcb4570a8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.304 2 DEBUG nova.compute.manager [req-4e9e1f5c-e91b-459d-af64-658a70242355 req-0b1b3bd5-72a6-495f-a164-b06fe941a3e8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Processing event network-vif-plugged-876740d6-da80-4b19-9afb-af6d9bf00f50 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.304 2 DEBUG nova.compute.manager [req-4e9e1f5c-e91b-459d-af64-658a70242355 req-0b1b3bd5-72a6-495f-a164-b06fe941a3e8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Received event network-vif-plugged-876740d6-da80-4b19-9afb-af6d9bf00f50 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.304 2 DEBUG oslo_concurrency.lockutils [req-4e9e1f5c-e91b-459d-af64-658a70242355 req-0b1b3bd5-72a6-495f-a164-b06fe941a3e8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "6068f987-bbd4-4dac-a691-169dcb4570a8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.305 2 DEBUG oslo_concurrency.lockutils [req-4e9e1f5c-e91b-459d-af64-658a70242355 req-0b1b3bd5-72a6-495f-a164-b06fe941a3e8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6068f987-bbd4-4dac-a691-169dcb4570a8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.305 2 DEBUG oslo_concurrency.lockutils [req-4e9e1f5c-e91b-459d-af64-658a70242355 req-0b1b3bd5-72a6-495f-a164-b06fe941a3e8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6068f987-bbd4-4dac-a691-169dcb4570a8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.305 2 DEBUG nova.compute.manager [req-4e9e1f5c-e91b-459d-af64-658a70242355 req-0b1b3bd5-72a6-495f-a164-b06fe941a3e8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] No waiting events found dispatching network-vif-plugged-876740d6-da80-4b19-9afb-af6d9bf00f50 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.305 2 WARNING nova.compute.manager [req-4e9e1f5c-e91b-459d-af64-658a70242355 req-0b1b3bd5-72a6-495f-a164-b06fe941a3e8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Received unexpected event network-vif-plugged-876740d6-da80-4b19-9afb-af6d9bf00f50 for instance with vm_state building and task_state spawning.
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.306 2 DEBUG nova.compute.manager [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Instance event wait completed in 1 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.312 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406779.312248, 6068f987-bbd4-4dac-a691-169dcb4570a8 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.313 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] VM Resumed (Lifecycle Event)
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.315 2 DEBUG nova.virt.libvirt.driver [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.324 2 INFO nova.virt.libvirt.driver [-] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Instance spawned successfully.
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.324 2 DEBUG nova.virt.libvirt.driver [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.339 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.343 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.351 2 INFO nova.virt.libvirt.driver [-] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Instance destroyed successfully.
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.352 2 DEBUG nova.objects.instance [None req-7d707f03-f879-4164-8cc4-8b869a5b9fcd c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Lazy-loading 'resources' on Instance uuid 7c66bab5-e0cd-40db-b080-086245f15c4f obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.370 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.370 2 INFO nova.virt.libvirt.driver [None req-7d707f03-f879-4164-8cc4-8b869a5b9fcd c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Deleting instance files /var/lib/nova/instances/7c66bab5-e0cd-40db-b080-086245f15c4f_del
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.371 2 INFO nova.virt.libvirt.driver [None req-7d707f03-f879-4164-8cc4-8b869a5b9fcd c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Deletion of /var/lib/nova/instances/7c66bab5-e0cd-40db-b080-086245f15c4f_del complete
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.379 2 DEBUG nova.virt.libvirt.driver [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.379 2 DEBUG nova.virt.libvirt.driver [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.379 2 DEBUG nova.virt.libvirt.driver [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.380 2 DEBUG nova.virt.libvirt.driver [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.380 2 DEBUG nova.virt.libvirt.driver [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.380 2 DEBUG nova.virt.libvirt.driver [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.504 2 INFO nova.compute.manager [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Took 12.68 seconds to spawn the instance on the hypervisor.
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.504 2 DEBUG nova.compute.manager [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.507 2 INFO nova.compute.manager [None req-7d707f03-f879-4164-8cc4-8b869a5b9fcd c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Took 0.41 seconds to destroy the instance on the hypervisor.
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.508 2 DEBUG oslo.service.loopingcall [None req-7d707f03-f879-4164-8cc4-8b869a5b9fcd c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.509 2 DEBUG nova.compute.manager [-] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.509 2 DEBUG nova.network.neutron [-] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.649 2 INFO nova.compute.manager [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Took 13.44 seconds to build instance.
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.687 2 DEBUG oslo_concurrency.lockutils [None req-04483a16-d98b-4804-a6c0-37644e29b8ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Lock "6068f987-bbd4-4dac-a691-169dcb4570a8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 13.617s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.729 2 DEBUG nova.network.neutron [-] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.742 2 DEBUG nova.network.neutron [-] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.757 2 INFO nova.compute.manager [-] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Took 0.25 seconds to deallocate network for instance.
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.794 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.918 2 DEBUG oslo_concurrency.lockutils [None req-7d707f03-f879-4164-8cc4-8b869a5b9fcd c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:06:19 compute-0 nova_compute[192079]: 2025-10-02 12:06:19.918 2 DEBUG oslo_concurrency.lockutils [None req-7d707f03-f879-4164-8cc4-8b869a5b9fcd c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:06:20 compute-0 nova_compute[192079]: 2025-10-02 12:06:20.018 2 DEBUG nova.compute.provider_tree [None req-7d707f03-f879-4164-8cc4-8b869a5b9fcd c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:06:20 compute-0 nova_compute[192079]: 2025-10-02 12:06:20.040 2 DEBUG nova.scheduler.client.report [None req-7d707f03-f879-4164-8cc4-8b869a5b9fcd c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:06:20 compute-0 nova_compute[192079]: 2025-10-02 12:06:20.073 2 DEBUG oslo_concurrency.lockutils [None req-7d707f03-f879-4164-8cc4-8b869a5b9fcd c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.154s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:06:20 compute-0 nova_compute[192079]: 2025-10-02 12:06:20.122 2 INFO nova.scheduler.client.report [None req-7d707f03-f879-4164-8cc4-8b869a5b9fcd c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Deleted allocations for instance 7c66bab5-e0cd-40db-b080-086245f15c4f
Oct 02 12:06:20 compute-0 nova_compute[192079]: 2025-10-02 12:06:20.199 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:20 compute-0 nova_compute[192079]: 2025-10-02 12:06:20.241 2 DEBUG oslo_concurrency.lockutils [None req-7d707f03-f879-4164-8cc4-8b869a5b9fcd c8270c33bced4c1b806e47efa970c01e fb361251cae94d42aaec252513e2f05c - - default default] Lock "7c66bab5-e0cd-40db-b080-086245f15c4f" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 2.648s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:06:24 compute-0 podman[224772]: 2025-10-02 12:06:24.168769939 +0000 UTC m=+0.082565441 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, config_id=ovn_controller, container_name=ovn_controller, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2)
Oct 02 12:06:24 compute-0 nova_compute[192079]: 2025-10-02 12:06:24.828 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:25 compute-0 podman[224800]: 2025-10-02 12:06:25.144487441 +0000 UTC m=+0.056769375 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:06:25 compute-0 podman[224799]: 2025-10-02 12:06:25.144585763 +0000 UTC m=+0.059380025 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_metadata_agent, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, config_id=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', 
'/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:06:25 compute-0 nova_compute[192079]: 2025-10-02 12:06:25.201 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:29 compute-0 nova_compute[192079]: 2025-10-02 12:06:29.860 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:30 compute-0 nova_compute[192079]: 2025-10-02 12:06:30.203 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:32 compute-0 ovn_controller[94336]: 2025-10-02T12:06:32Z|00010|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:4f:35:f0 10.100.0.14
Oct 02 12:06:32 compute-0 ovn_controller[94336]: 2025-10-02T12:06:32Z|00011|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:4f:35:f0 10.100.0.14
Oct 02 12:06:33 compute-0 podman[224853]: 2025-10-02 12:06:33.148555355 +0000 UTC m=+0.055579143 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, 
org.label-schema.license=GPLv2, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:06:34 compute-0 nova_compute[192079]: 2025-10-02 12:06:34.372 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759406779.349042, 7c66bab5-e0cd-40db-b080-086245f15c4f => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:06:34 compute-0 nova_compute[192079]: 2025-10-02 12:06:34.372 2 INFO nova.compute.manager [-] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] VM Stopped (Lifecycle Event)
Oct 02 12:06:34 compute-0 nova_compute[192079]: 2025-10-02 12:06:34.398 2 DEBUG nova.compute.manager [None req-d24a5d96-7d29-4976-b7a1-63218c644fe9 - - - - - -] [instance: 7c66bab5-e0cd-40db-b080-086245f15c4f] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:06:34 compute-0 nova_compute[192079]: 2025-10-02 12:06:34.914 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:35 compute-0 nova_compute[192079]: 2025-10-02 12:06:35.205 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:38 compute-0 podman[224873]: 2025-10-02 12:06:38.13585585 +0000 UTC m=+0.052001036 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, io.openshift.tags=minimal rhel9, io.buildah.version=1.33.7, io.openshift.expose-services=, vendor=Red Hat, Inc., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, vcs-type=git, build-date=2025-08-20T13:12:41, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, container_name=openstack_network_exporter, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., version=9.6, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, url=https://catalog.redhat.com/en/search?searchType=containers, name=ubi9-minimal, release=1755695350, com.redhat.component=ubi9-minimal-container, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., maintainer=Red Hat, Inc., architecture=x86_64, config_id=edpm, distribution-scope=public)
Oct 02 12:06:38 compute-0 podman[224874]: 2025-10-02 12:06:38.171776491 +0000 UTC m=+0.084463563 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=multipathd, container_name=multipathd)
Oct 02 12:06:39 compute-0 nova_compute[192079]: 2025-10-02 12:06:39.916 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:40 compute-0 nova_compute[192079]: 2025-10-02 12:06:40.208 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:44 compute-0 nova_compute[192079]: 2025-10-02 12:06:44.918 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:45 compute-0 podman[224915]: 2025-10-02 12:06:45.139041552 +0000 UTC m=+0.054378011 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:06:45 compute-0 podman[224916]: 2025-10-02 12:06:45.144006937 +0000 UTC m=+0.057309171 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, tcib_managed=true, container_name=iscsid, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, config_id=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001)
Oct 02 12:06:45 compute-0 nova_compute[192079]: 2025-10-02 12:06:45.210 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:49 compute-0 nova_compute[192079]: 2025-10-02 12:06:49.938 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:50 compute-0 nova_compute[192079]: 2025-10-02 12:06:50.211 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:54 compute-0 nova_compute[192079]: 2025-10-02 12:06:54.950 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:55 compute-0 podman[224957]: 2025-10-02 12:06:55.187956071 +0000 UTC m=+0.098437891 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_id=ovn_controller, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_managed=true, container_name=ovn_controller, managed_by=edpm_ansible)
Oct 02 12:06:55 compute-0 nova_compute[192079]: 2025-10-02 12:06:55.213 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:55 compute-0 podman[224983]: 2025-10-02 12:06:55.266817421 +0000 UTC m=+0.054752399 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_metadata_agent, managed_by=edpm_ansible, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', 
'/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0)
Oct 02 12:06:55 compute-0 podman[224984]: 2025-10-02 12:06:55.267404107 +0000 UTC m=+0.049553169 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 12:06:57 compute-0 nova_compute[192079]: 2025-10-02 12:06:57.532 2 DEBUG oslo_concurrency.lockutils [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Acquiring lock "854d86e2-3388-4709-a32c-15f8658aa41f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:06:57 compute-0 nova_compute[192079]: 2025-10-02 12:06:57.532 2 DEBUG oslo_concurrency.lockutils [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Lock "854d86e2-3388-4709-a32c-15f8658aa41f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:06:57 compute-0 nova_compute[192079]: 2025-10-02 12:06:57.561 2 DEBUG nova.compute.manager [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:06:57 compute-0 nova_compute[192079]: 2025-10-02 12:06:57.661 2 DEBUG oslo_concurrency.lockutils [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:06:57 compute-0 nova_compute[192079]: 2025-10-02 12:06:57.662 2 DEBUG oslo_concurrency.lockutils [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:06:57 compute-0 nova_compute[192079]: 2025-10-02 12:06:57.667 2 DEBUG nova.virt.hardware [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:06:57 compute-0 nova_compute[192079]: 2025-10-02 12:06:57.667 2 INFO nova.compute.claims [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:06:57 compute-0 nova_compute[192079]: 2025-10-02 12:06:57.798 2 DEBUG nova.compute.provider_tree [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:06:57 compute-0 nova_compute[192079]: 2025-10-02 12:06:57.810 2 DEBUG nova.scheduler.client.report [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:06:57 compute-0 nova_compute[192079]: 2025-10-02 12:06:57.831 2 DEBUG oslo_concurrency.lockutils [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.170s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:06:57 compute-0 nova_compute[192079]: 2025-10-02 12:06:57.832 2 DEBUG nova.compute.manager [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:06:57 compute-0 nova_compute[192079]: 2025-10-02 12:06:57.906 2 DEBUG nova.compute.manager [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:06:57 compute-0 nova_compute[192079]: 2025-10-02 12:06:57.906 2 DEBUG nova.network.neutron [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:06:57 compute-0 nova_compute[192079]: 2025-10-02 12:06:57.927 2 INFO nova.virt.libvirt.driver [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:06:57 compute-0 nova_compute[192079]: 2025-10-02 12:06:57.956 2 DEBUG nova.compute.manager [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.061 2 DEBUG nova.compute.manager [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.062 2 DEBUG nova.virt.libvirt.driver [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.062 2 INFO nova.virt.libvirt.driver [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Creating image(s)
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.063 2 DEBUG oslo_concurrency.lockutils [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Acquiring lock "/var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.063 2 DEBUG oslo_concurrency.lockutils [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Lock "/var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.064 2 DEBUG oslo_concurrency.lockutils [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Lock "/var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.075 2 DEBUG oslo_concurrency.processutils [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.159 2 DEBUG oslo_concurrency.processutils [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.085s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.160 2 DEBUG oslo_concurrency.lockutils [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.161 2 DEBUG oslo_concurrency.lockutils [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.172 2 DEBUG oslo_concurrency.processutils [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.219 2 DEBUG nova.network.neutron [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] No network configured allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1188
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.219 2 DEBUG nova.compute.manager [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Instance network_info: |[]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.236 2 DEBUG oslo_concurrency.processutils [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.064s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.236 2 DEBUG oslo_concurrency.processutils [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.287 2 DEBUG oslo_concurrency.processutils [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f/disk 1073741824" returned: 0 in 0.051s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.288 2 DEBUG oslo_concurrency.lockutils [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.127s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.288 2 DEBUG oslo_concurrency.processutils [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.357 2 DEBUG oslo_concurrency.processutils [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.069s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.358 2 DEBUG nova.virt.disk.api [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Checking if we can resize image /var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.358 2 DEBUG oslo_concurrency.processutils [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.413 2 DEBUG oslo_concurrency.processutils [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f/disk --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.414 2 DEBUG nova.virt.disk.api [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Cannot resize image /var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.415 2 DEBUG nova.objects.instance [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Lazy-loading 'migration_context' on Instance uuid 854d86e2-3388-4709-a32c-15f8658aa41f obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.435 2 DEBUG nova.virt.libvirt.driver [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.436 2 DEBUG nova.virt.libvirt.driver [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Ensure instance console log exists: /var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.436 2 DEBUG oslo_concurrency.lockutils [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.437 2 DEBUG oslo_concurrency.lockutils [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.437 2 DEBUG oslo_concurrency.lockutils [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.440 2 DEBUG nova.virt.libvirt.driver [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Start _get_guest_xml network_info=[] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.447 2 WARNING nova.virt.libvirt.driver [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.453 2 DEBUG nova.virt.libvirt.host [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.454 2 DEBUG nova.virt.libvirt.host [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.458 2 DEBUG nova.virt.libvirt.host [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.459 2 DEBUG nova.virt.libvirt.host [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.461 2 DEBUG nova.virt.libvirt.driver [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.461 2 DEBUG nova.virt.hardware [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.462 2 DEBUG nova.virt.hardware [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.462 2 DEBUG nova.virt.hardware [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.463 2 DEBUG nova.virt.hardware [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.463 2 DEBUG nova.virt.hardware [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.463 2 DEBUG nova.virt.hardware [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.464 2 DEBUG nova.virt.hardware [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.464 2 DEBUG nova.virt.hardware [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.465 2 DEBUG nova.virt.hardware [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.465 2 DEBUG nova.virt.hardware [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.465 2 DEBUG nova.virt.hardware [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.471 2 DEBUG nova.objects.instance [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Lazy-loading 'pci_devices' on Instance uuid 854d86e2-3388-4709-a32c-15f8658aa41f obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.490 2 DEBUG nova.virt.libvirt.driver [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:06:58 compute-0 nova_compute[192079]:   <uuid>854d86e2-3388-4709-a32c-15f8658aa41f</uuid>
Oct 02 12:06:58 compute-0 nova_compute[192079]:   <name>instance-00000027</name>
Oct 02 12:06:58 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:06:58 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:06:58 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:06:58 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:       <nova:name>tempest-ListImageFiltersTestJSON-server-2079629969</nova:name>
Oct 02 12:06:58 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:06:58</nova:creationTime>
Oct 02 12:06:58 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:06:58 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:06:58 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:06:58 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:06:58 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:06:58 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:06:58 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:06:58 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:06:58 compute-0 nova_compute[192079]:         <nova:user uuid="c6a7a530a085472d8ace0b41fc888e26">tempest-ListImageFiltersTestJSON-730197453-project-member</nova:user>
Oct 02 12:06:58 compute-0 nova_compute[192079]:         <nova:project uuid="8993ff2640584165964db6af518beb94">tempest-ListImageFiltersTestJSON-730197453</nova:project>
Oct 02 12:06:58 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:06:58 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:       <nova:ports/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:06:58 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:06:58 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <system>
Oct 02 12:06:58 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:06:58 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:06:58 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:06:58 compute-0 nova_compute[192079]:       <entry name="serial">854d86e2-3388-4709-a32c-15f8658aa41f</entry>
Oct 02 12:06:58 compute-0 nova_compute[192079]:       <entry name="uuid">854d86e2-3388-4709-a32c-15f8658aa41f</entry>
Oct 02 12:06:58 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     </system>
Oct 02 12:06:58 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:06:58 compute-0 nova_compute[192079]:   <os>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:   </os>
Oct 02 12:06:58 compute-0 nova_compute[192079]:   <features>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:   </features>
Oct 02 12:06:58 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:06:58 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:06:58 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:06:58 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f/disk"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:06:58 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f/disk.config"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:06:58 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f/console.log" append="off"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <video>
Oct 02 12:06:58 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     </video>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:06:58 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:06:58 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:06:58 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:06:58 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:06:58 compute-0 nova_compute[192079]: </domain>
Oct 02 12:06:58 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.602 2 DEBUG nova.virt.libvirt.driver [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.603 2 DEBUG nova.virt.libvirt.driver [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.603 2 INFO nova.virt.libvirt.driver [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Using config drive
Oct 02 12:06:58 compute-0 systemd[1]: virtproxyd.service: Deactivated successfully.
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.889 2 INFO nova.virt.libvirt.driver [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Creating config drive at /var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f/disk.config
Oct 02 12:06:58 compute-0 nova_compute[192079]: 2025-10-02 12:06:58.893 2 DEBUG oslo_concurrency.processutils [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp6cmruygl execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:06:59 compute-0 nova_compute[192079]: 2025-10-02 12:06:59.039 2 DEBUG oslo_concurrency.processutils [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp6cmruygl" returned: 0 in 0.146s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:06:59 compute-0 systemd-machined[152150]: New machine qemu-21-instance-00000027.
Oct 02 12:06:59 compute-0 systemd[1]: Started Virtual Machine qemu-21-instance-00000027.
Oct 02 12:06:59 compute-0 nova_compute[192079]: 2025-10-02 12:06:59.837 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406819.8370829, 854d86e2-3388-4709-a32c-15f8658aa41f => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:06:59 compute-0 nova_compute[192079]: 2025-10-02 12:06:59.839 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] VM Resumed (Lifecycle Event)
Oct 02 12:06:59 compute-0 nova_compute[192079]: 2025-10-02 12:06:59.844 2 DEBUG nova.compute.manager [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:06:59 compute-0 nova_compute[192079]: 2025-10-02 12:06:59.845 2 DEBUG nova.virt.libvirt.driver [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:06:59 compute-0 nova_compute[192079]: 2025-10-02 12:06:59.850 2 INFO nova.virt.libvirt.driver [-] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Instance spawned successfully.
Oct 02 12:06:59 compute-0 nova_compute[192079]: 2025-10-02 12:06:59.851 2 DEBUG nova.virt.libvirt.driver [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:06:59 compute-0 nova_compute[192079]: 2025-10-02 12:06:59.862 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:06:59 compute-0 nova_compute[192079]: 2025-10-02 12:06:59.866 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:06:59 compute-0 nova_compute[192079]: 2025-10-02 12:06:59.885 2 DEBUG nova.virt.libvirt.driver [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:06:59 compute-0 nova_compute[192079]: 2025-10-02 12:06:59.885 2 DEBUG nova.virt.libvirt.driver [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:06:59 compute-0 nova_compute[192079]: 2025-10-02 12:06:59.886 2 DEBUG nova.virt.libvirt.driver [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:06:59 compute-0 nova_compute[192079]: 2025-10-02 12:06:59.887 2 DEBUG nova.virt.libvirt.driver [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:06:59 compute-0 nova_compute[192079]: 2025-10-02 12:06:59.888 2 DEBUG nova.virt.libvirt.driver [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:06:59 compute-0 nova_compute[192079]: 2025-10-02 12:06:59.888 2 DEBUG nova.virt.libvirt.driver [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:06:59 compute-0 nova_compute[192079]: 2025-10-02 12:06:59.894 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:06:59 compute-0 nova_compute[192079]: 2025-10-02 12:06:59.894 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406819.8386147, 854d86e2-3388-4709-a32c-15f8658aa41f => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:06:59 compute-0 nova_compute[192079]: 2025-10-02 12:06:59.894 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] VM Started (Lifecycle Event)
Oct 02 12:06:59 compute-0 nova_compute[192079]: 2025-10-02 12:06:59.941 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:06:59 compute-0 nova_compute[192079]: 2025-10-02 12:06:59.945 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:06:59 compute-0 nova_compute[192079]: 2025-10-02 12:06:59.953 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:06:59 compute-0 nova_compute[192079]: 2025-10-02 12:06:59.967 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:06:59 compute-0 nova_compute[192079]: 2025-10-02 12:06:59.983 2 INFO nova.compute.manager [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Took 1.92 seconds to spawn the instance on the hypervisor.
Oct 02 12:06:59 compute-0 nova_compute[192079]: 2025-10-02 12:06:59.983 2 DEBUG nova.compute.manager [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:07:00 compute-0 nova_compute[192079]: 2025-10-02 12:07:00.105 2 INFO nova.compute.manager [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Took 2.48 seconds to build instance.
Oct 02 12:07:00 compute-0 nova_compute[192079]: 2025-10-02 12:07:00.152 2 DEBUG oslo_concurrency.lockutils [None req-a3d1d73b-007b-4d6f-b84c-bc5b25e2504c c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Lock "854d86e2-3388-4709-a32c-15f8658aa41f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 2.620s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:07:00 compute-0 nova_compute[192079]: 2025-10-02 12:07:00.215 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:07:02.208 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:07:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:07:02.209 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:07:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:07:02.210 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:07:04 compute-0 podman[225072]: 2025-10-02 12:07:04.14757395 +0000 UTC m=+0.061230986 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, config_id=edpm, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_managed=true, 
org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible)
Oct 02 12:07:04 compute-0 nova_compute[192079]: 2025-10-02 12:07:04.430 2 DEBUG nova.compute.manager [None req-d4bf266d-eaab-43a9-91df-fc4ea6651484 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:07:04 compute-0 nova_compute[192079]: 2025-10-02 12:07:04.495 2 INFO nova.compute.manager [None req-d4bf266d-eaab-43a9-91df-fc4ea6651484 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] instance snapshotting
Oct 02 12:07:04 compute-0 nova_compute[192079]: 2025-10-02 12:07:04.732 2 INFO nova.virt.libvirt.driver [None req-d4bf266d-eaab-43a9-91df-fc4ea6651484 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Beginning live snapshot process
Oct 02 12:07:04 compute-0 virtqemud[191807]: invalid argument: disk vda does not have an active block job
Oct 02 12:07:04 compute-0 nova_compute[192079]: 2025-10-02 12:07:04.929 2 DEBUG oslo_concurrency.processutils [None req-d4bf266d-eaab-43a9-91df-fc4ea6651484 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f/disk --force-share --output=json -f qcow2 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:07:04 compute-0 nova_compute[192079]: 2025-10-02 12:07:04.954 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:05 compute-0 nova_compute[192079]: 2025-10-02 12:07:05.023 2 DEBUG oslo_concurrency.processutils [None req-d4bf266d-eaab-43a9-91df-fc4ea6651484 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f/disk --force-share --output=json -f qcow2" returned: 0 in 0.094s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:07:05 compute-0 nova_compute[192079]: 2025-10-02 12:07:05.024 2 DEBUG oslo_concurrency.processutils [None req-d4bf266d-eaab-43a9-91df-fc4ea6651484 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f/disk --force-share --output=json -f qcow2 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:07:05 compute-0 nova_compute[192079]: 2025-10-02 12:07:05.078 2 DEBUG oslo_concurrency.processutils [None req-d4bf266d-eaab-43a9-91df-fc4ea6651484 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f/disk --force-share --output=json -f qcow2" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:07:05 compute-0 nova_compute[192079]: 2025-10-02 12:07:05.090 2 DEBUG oslo_concurrency.processutils [None req-d4bf266d-eaab-43a9-91df-fc4ea6651484 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:07:05 compute-0 nova_compute[192079]: 2025-10-02 12:07:05.146 2 DEBUG oslo_concurrency.processutils [None req-d4bf266d-eaab-43a9-91df-fc4ea6651484 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:07:05 compute-0 nova_compute[192079]: 2025-10-02 12:07:05.147 2 DEBUG oslo_concurrency.processutils [None req-d4bf266d-eaab-43a9-91df-fc4ea6651484 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/snapshots/tmphe6g3mr8/cb0cd78c6eef4221a196cdc616a2ba32.delta 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:07:05 compute-0 nova_compute[192079]: 2025-10-02 12:07:05.185 2 DEBUG oslo_concurrency.processutils [None req-d4bf266d-eaab-43a9-91df-fc4ea6651484 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/snapshots/tmphe6g3mr8/cb0cd78c6eef4221a196cdc616a2ba32.delta 1073741824" returned: 0 in 0.038s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:07:05 compute-0 nova_compute[192079]: 2025-10-02 12:07:05.186 2 INFO nova.virt.libvirt.driver [None req-d4bf266d-eaab-43a9-91df-fc4ea6651484 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Quiescing instance not available: QEMU guest agent is not enabled.
Oct 02 12:07:05 compute-0 nova_compute[192079]: 2025-10-02 12:07:05.216 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:05 compute-0 nova_compute[192079]: 2025-10-02 12:07:05.233 2 DEBUG nova.virt.libvirt.guest [None req-d4bf266d-eaab-43a9-91df-fc4ea6651484 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] COPY block job progress, current cursor: 1 final cursor: 1 is_job_complete /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:846
Oct 02 12:07:05 compute-0 nova_compute[192079]: 2025-10-02 12:07:05.236 2 INFO nova.virt.libvirt.driver [None req-d4bf266d-eaab-43a9-91df-fc4ea6651484 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Skipping quiescing instance: QEMU guest agent is not enabled.
Oct 02 12:07:05 compute-0 nova_compute[192079]: 2025-10-02 12:07:05.275 2 DEBUG nova.privsep.utils [None req-d4bf266d-eaab-43a9-91df-fc4ea6651484 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Path '/var/lib/nova/instances' supports direct I/O supports_direct_io /usr/lib/python3.9/site-packages/nova/privsep/utils.py:63
Oct 02 12:07:05 compute-0 nova_compute[192079]: 2025-10-02 12:07:05.276 2 DEBUG oslo_concurrency.processutils [None req-d4bf266d-eaab-43a9-91df-fc4ea6651484 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Running cmd (subprocess): qemu-img convert -t none -O qcow2 -f qcow2 /var/lib/nova/instances/snapshots/tmphe6g3mr8/cb0cd78c6eef4221a196cdc616a2ba32.delta /var/lib/nova/instances/snapshots/tmphe6g3mr8/cb0cd78c6eef4221a196cdc616a2ba32 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:07:05 compute-0 nova_compute[192079]: 2025-10-02 12:07:05.481 2 DEBUG oslo_concurrency.processutils [None req-d4bf266d-eaab-43a9-91df-fc4ea6651484 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] CMD "qemu-img convert -t none -O qcow2 -f qcow2 /var/lib/nova/instances/snapshots/tmphe6g3mr8/cb0cd78c6eef4221a196cdc616a2ba32.delta /var/lib/nova/instances/snapshots/tmphe6g3mr8/cb0cd78c6eef4221a196cdc616a2ba32" returned: 0 in 0.205s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:07:05 compute-0 nova_compute[192079]: 2025-10-02 12:07:05.481 2 INFO nova.virt.libvirt.driver [None req-d4bf266d-eaab-43a9-91df-fc4ea6651484 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Snapshot extracted, beginning image upload
Oct 02 12:07:08 compute-0 nova_compute[192079]: 2025-10-02 12:07:08.018 2 INFO nova.virt.libvirt.driver [None req-d4bf266d-eaab-43a9-91df-fc4ea6651484 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Snapshot image upload complete
Oct 02 12:07:08 compute-0 nova_compute[192079]: 2025-10-02 12:07:08.018 2 INFO nova.compute.manager [None req-d4bf266d-eaab-43a9-91df-fc4ea6651484 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Took 3.51 seconds to snapshot the instance on the hypervisor.
Oct 02 12:07:09 compute-0 podman[225120]: 2025-10-02 12:07:09.154537837 +0000 UTC m=+0.061648026 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, managed_by=edpm_ansible, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, tcib_managed=true, config_id=multipathd, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:07:09 compute-0 podman[225119]: 2025-10-02 12:07:09.18684739 +0000 UTC m=+0.095394738 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., container_name=openstack_network_exporter, release=1755695350, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., build-date=2025-08-20T13:12:41, io.openshift.expose-services=, vendor=Red Hat, Inc., io.buildah.version=1.33.7, version=9.6, managed_by=edpm_ansible, name=ubi9-minimal, com.redhat.component=ubi9-minimal-container, vcs-type=git, architecture=x86_64, config_id=edpm, io.openshift.tags=minimal rhel9, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', 
'/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, distribution-scope=public, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, url=https://catalog.redhat.com/en/search?searchType=containers, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., maintainer=Red Hat, Inc.)
Oct 02 12:07:09 compute-0 nova_compute[192079]: 2025-10-02 12:07:09.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:07:09 compute-0 nova_compute[192079]: 2025-10-02 12:07:09.956 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:10 compute-0 nova_compute[192079]: 2025-10-02 12:07:10.219 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:11 compute-0 nova_compute[192079]: 2025-10-02 12:07:11.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:07:11 compute-0 nova_compute[192079]: 2025-10-02 12:07:11.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:07:11 compute-0 nova_compute[192079]: 2025-10-02 12:07:11.684 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:07:11 compute-0 nova_compute[192079]: 2025-10-02 12:07:11.684 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:07:11 compute-0 nova_compute[192079]: 2025-10-02 12:07:11.684 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:07:11 compute-0 nova_compute[192079]: 2025-10-02 12:07:11.685 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:07:11 compute-0 nova_compute[192079]: 2025-10-02 12:07:11.739 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:07:11 compute-0 nova_compute[192079]: 2025-10-02 12:07:11.810 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f/disk --force-share --output=json" returned: 0 in 0.071s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:07:11 compute-0 nova_compute[192079]: 2025-10-02 12:07:11.812 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:07:11 compute-0 nova_compute[192079]: 2025-10-02 12:07:11.900 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f/disk --force-share --output=json" returned: 0 in 0.089s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:07:11 compute-0 nova_compute[192079]: 2025-10-02 12:07:11.907 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/6068f987-bbd4-4dac-a691-169dcb4570a8/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:07:11 compute-0 nova_compute[192079]: 2025-10-02 12:07:11.977 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/6068f987-bbd4-4dac-a691-169dcb4570a8/disk --force-share --output=json" returned: 0 in 0.069s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:07:11 compute-0 nova_compute[192079]: 2025-10-02 12:07:11.978 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/6068f987-bbd4-4dac-a691-169dcb4570a8/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:07:12 compute-0 nova_compute[192079]: 2025-10-02 12:07:12.031 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/6068f987-bbd4-4dac-a691-169dcb4570a8/disk --force-share --output=json" returned: 0 in 0.053s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:07:12 compute-0 nova_compute[192079]: 2025-10-02 12:07:12.174 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:07:12 compute-0 nova_compute[192079]: 2025-10-02 12:07:12.175 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5429MB free_disk=73.36777114868164GB free_vcpus=6 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:07:12 compute-0 nova_compute[192079]: 2025-10-02 12:07:12.176 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:07:12 compute-0 nova_compute[192079]: 2025-10-02 12:07:12.176 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:07:12 compute-0 nova_compute[192079]: 2025-10-02 12:07:12.289 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance 6068f987-bbd4-4dac-a691-169dcb4570a8 actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:07:12 compute-0 nova_compute[192079]: 2025-10-02 12:07:12.289 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance 854d86e2-3388-4709-a32c-15f8658aa41f actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:07:12 compute-0 nova_compute[192079]: 2025-10-02 12:07:12.290 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 2 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:07:12 compute-0 nova_compute[192079]: 2025-10-02 12:07:12.290 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=768MB phys_disk=79GB used_disk=2GB total_vcpus=8 used_vcpus=2 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:07:12 compute-0 nova_compute[192079]: 2025-10-02 12:07:12.347 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:07:12 compute-0 nova_compute[192079]: 2025-10-02 12:07:12.360 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:07:12 compute-0 nova_compute[192079]: 2025-10-02 12:07:12.388 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:07:12 compute-0 nova_compute[192079]: 2025-10-02 12:07:12.388 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.212s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:07:13 compute-0 nova_compute[192079]: 2025-10-02 12:07:13.384 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:07:13 compute-0 nova_compute[192079]: 2025-10-02 12:07:13.384 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:07:13 compute-0 nova_compute[192079]: 2025-10-02 12:07:13.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:07:13 compute-0 nova_compute[192079]: 2025-10-02 12:07:13.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:07:13 compute-0 nova_compute[192079]: 2025-10-02 12:07:13.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:07:14 compute-0 nova_compute[192079]: 2025-10-02 12:07:14.217 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "refresh_cache-6068f987-bbd4-4dac-a691-169dcb4570a8" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:07:14 compute-0 nova_compute[192079]: 2025-10-02 12:07:14.218 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquired lock "refresh_cache-6068f987-bbd4-4dac-a691-169dcb4570a8" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:07:14 compute-0 nova_compute[192079]: 2025-10-02 12:07:14.218 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Forcefully refreshing network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2004
Oct 02 12:07:14 compute-0 nova_compute[192079]: 2025-10-02 12:07:14.218 2 DEBUG nova.objects.instance [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lazy-loading 'info_cache' on Instance uuid 6068f987-bbd4-4dac-a691-169dcb4570a8 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:07:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:07:14.454 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=11, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=10) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:07:14 compute-0 nova_compute[192079]: 2025-10-02 12:07:14.455 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:07:14.455 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 0 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:07:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:07:14.456 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '11'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:07:14 compute-0 nova_compute[192079]: 2025-10-02 12:07:14.958 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:15 compute-0 nova_compute[192079]: 2025-10-02 12:07:15.223 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:15 compute-0 nova_compute[192079]: 2025-10-02 12:07:15.418 2 DEBUG nova.compute.manager [None req-1b824321-af59-4620-b081-a4b0e87684b5 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:07:15 compute-0 nova_compute[192079]: 2025-10-02 12:07:15.420 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Updating instance_info_cache with network_info: [{"id": "876740d6-da80-4b19-9afb-af6d9bf00f50", "address": "fa:16:3e:4f:35:f0", "network": {"id": "66b5a7c3-fe3e-42b0-aea6-19534bca6e0e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1726703238-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "db3f04a20fd740c1af3139196dc928d2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap876740d6-da", "ovs_interfaceid": "876740d6-da80-4b19-9afb-af6d9bf00f50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:07:15 compute-0 nova_compute[192079]: 2025-10-02 12:07:15.468 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Releasing lock "refresh_cache-6068f987-bbd4-4dac-a691-169dcb4570a8" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:07:15 compute-0 nova_compute[192079]: 2025-10-02 12:07:15.468 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Updated the network info_cache for instance _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9929
Oct 02 12:07:15 compute-0 nova_compute[192079]: 2025-10-02 12:07:15.469 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:07:15 compute-0 nova_compute[192079]: 2025-10-02 12:07:15.470 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:07:15 compute-0 nova_compute[192079]: 2025-10-02 12:07:15.527 2 INFO nova.compute.manager [None req-1b824321-af59-4620-b081-a4b0e87684b5 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] instance snapshotting
Oct 02 12:07:15 compute-0 nova_compute[192079]: 2025-10-02 12:07:15.808 2 INFO nova.virt.libvirt.driver [None req-1b824321-af59-4620-b081-a4b0e87684b5 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Beginning live snapshot process
Oct 02 12:07:15 compute-0 virtqemud[191807]: invalid argument: disk vda does not have an active block job
Oct 02 12:07:15 compute-0 nova_compute[192079]: 2025-10-02 12:07:15.994 2 DEBUG oslo_concurrency.processutils [None req-1b824321-af59-4620-b081-a4b0e87684b5 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f/disk --force-share --output=json -f qcow2 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:07:16 compute-0 nova_compute[192079]: 2025-10-02 12:07:16.047 2 DEBUG oslo_concurrency.processutils [None req-1b824321-af59-4620-b081-a4b0e87684b5 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f/disk --force-share --output=json -f qcow2" returned: 0 in 0.053s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:07:16 compute-0 nova_compute[192079]: 2025-10-02 12:07:16.048 2 DEBUG oslo_concurrency.processutils [None req-1b824321-af59-4620-b081-a4b0e87684b5 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f/disk --force-share --output=json -f qcow2 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:07:16 compute-0 nova_compute[192079]: 2025-10-02 12:07:16.122 2 DEBUG oslo_concurrency.processutils [None req-1b824321-af59-4620-b081-a4b0e87684b5 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f/disk --force-share --output=json -f qcow2" returned: 0 in 0.074s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:07:16 compute-0 nova_compute[192079]: 2025-10-02 12:07:16.134 2 DEBUG oslo_concurrency.processutils [None req-1b824321-af59-4620-b081-a4b0e87684b5 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:07:16 compute-0 podman[225191]: 2025-10-02 12:07:16.137974614 +0000 UTC m=+0.053073735 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:07:16 compute-0 podman[225192]: 2025-10-02 12:07:16.148666583 +0000 UTC m=+0.056691842 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_id=iscsid, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:07:16 compute-0 nova_compute[192079]: 2025-10-02 12:07:16.187 2 DEBUG oslo_concurrency.processutils [None req-1b824321-af59-4620-b081-a4b0e87684b5 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.053s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:07:16 compute-0 nova_compute[192079]: 2025-10-02 12:07:16.188 2 DEBUG oslo_concurrency.processutils [None req-1b824321-af59-4620-b081-a4b0e87684b5 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/snapshots/tmpfx249vbp/aa90b880ef9c48418d8c5996bdc24e93.delta 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:07:16 compute-0 nova_compute[192079]: 2025-10-02 12:07:16.224 2 DEBUG oslo_concurrency.processutils [None req-1b824321-af59-4620-b081-a4b0e87684b5 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/snapshots/tmpfx249vbp/aa90b880ef9c48418d8c5996bdc24e93.delta 1073741824" returned: 0 in 0.036s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:07:16 compute-0 nova_compute[192079]: 2025-10-02 12:07:16.225 2 INFO nova.virt.libvirt.driver [None req-1b824321-af59-4620-b081-a4b0e87684b5 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Quiescing instance not available: QEMU guest agent is not enabled.
Oct 02 12:07:16 compute-0 nova_compute[192079]: 2025-10-02 12:07:16.274 2 DEBUG nova.virt.libvirt.guest [None req-1b824321-af59-4620-b081-a4b0e87684b5 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] COPY block job progress, current cursor: 0 final cursor: 1 is_job_complete /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:846
Oct 02 12:07:16 compute-0 nova_compute[192079]: 2025-10-02 12:07:16.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:07:16 compute-0 nova_compute[192079]: 2025-10-02 12:07:16.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:07:16 compute-0 nova_compute[192079]: 2025-10-02 12:07:16.778 2 DEBUG nova.virt.libvirt.guest [None req-1b824321-af59-4620-b081-a4b0e87684b5 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] COPY block job progress, current cursor: 75235328 final cursor: 75235328 is_job_complete /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:846
Oct 02 12:07:16 compute-0 nova_compute[192079]: 2025-10-02 12:07:16.782 2 INFO nova.virt.libvirt.driver [None req-1b824321-af59-4620-b081-a4b0e87684b5 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Skipping quiescing instance: QEMU guest agent is not enabled.
Oct 02 12:07:16 compute-0 nova_compute[192079]: 2025-10-02 12:07:16.858 2 DEBUG nova.privsep.utils [None req-1b824321-af59-4620-b081-a4b0e87684b5 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Path '/var/lib/nova/instances' supports direct I/O supports_direct_io /usr/lib/python3.9/site-packages/nova/privsep/utils.py:63
Oct 02 12:07:16 compute-0 nova_compute[192079]: 2025-10-02 12:07:16.858 2 DEBUG oslo_concurrency.processutils [None req-1b824321-af59-4620-b081-a4b0e87684b5 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Running cmd (subprocess): qemu-img convert -t none -O qcow2 -f qcow2 /var/lib/nova/instances/snapshots/tmpfx249vbp/aa90b880ef9c48418d8c5996bdc24e93.delta /var/lib/nova/instances/snapshots/tmpfx249vbp/aa90b880ef9c48418d8c5996bdc24e93 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.101 12 DEBUG novaclient.v2.client [-] REQ: curl -g -i -X GET https://nova-internal.openstack.svc:8774/v2.1/flavors?is_public=None -H "Accept: application/json" -H "User-Agent: python-novaclient" -H "X-Auth-Token: {SHA256}71356153e9cf84f21025dfc4736dd696f1dc7f2f65609442b5b4aacce068ebfe" -H "X-OpenStack-Nova-API-Version: 2.1" _http_log_request /usr/lib/python3.9/site-packages/keystoneauth1/session.py:519
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.244 12 DEBUG novaclient.v2.client [-] RESP: [200] Connection: Keep-Alive Content-Length: 644 Content-Type: application/json Date: Thu, 02 Oct 2025 12:07:17 GMT Keep-Alive: timeout=5, max=100 OpenStack-API-Version: compute 2.1 Server: Apache Vary: OpenStack-API-Version,X-OpenStack-Nova-API-Version X-OpenStack-Nova-API-Version: 2.1 x-compute-request-id: req-b1f01f19-6c1a-40a7-946e-d5c31edc491b x-openstack-request-id: req-b1f01f19-6c1a-40a7-946e-d5c31edc491b _http_log_response /usr/lib/python3.9/site-packages/keystoneauth1/session.py:550
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.244 12 DEBUG novaclient.v2.client [-] RESP BODY: {"flavors": [{"id": "9949d9da-6314-4ede-8797-6f2f0a6a64fc", "name": "m1.micro", "links": [{"rel": "self", "href": "https://nova-internal.openstack.svc:8774/v2.1/flavors/9949d9da-6314-4ede-8797-6f2f0a6a64fc"}, {"rel": "bookmark", "href": "https://nova-internal.openstack.svc:8774/flavors/9949d9da-6314-4ede-8797-6f2f0a6a64fc"}]}, {"id": "9ac83da7-f31e-4467-8569-d28002f6aeed", "name": "m1.nano", "links": [{"rel": "self", "href": "https://nova-internal.openstack.svc:8774/v2.1/flavors/9ac83da7-f31e-4467-8569-d28002f6aeed"}, {"rel": "bookmark", "href": "https://nova-internal.openstack.svc:8774/flavors/9ac83da7-f31e-4467-8569-d28002f6aeed"}]}]} _http_log_response /usr/lib/python3.9/site-packages/keystoneauth1/session.py:582
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.244 12 DEBUG novaclient.v2.client [-] GET call to compute for https://nova-internal.openstack.svc:8774/v2.1/flavors?is_public=None used request id req-b1f01f19-6c1a-40a7-946e-d5c31edc491b request /usr/lib/python3.9/site-packages/keystoneauth1/session.py:954
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.245 12 DEBUG novaclient.v2.client [-] REQ: curl -g -i -X GET https://nova-internal.openstack.svc:8774/v2.1/flavors/9ac83da7-f31e-4467-8569-d28002f6aeed -H "Accept: application/json" -H "User-Agent: python-novaclient" -H "X-Auth-Token: {SHA256}71356153e9cf84f21025dfc4736dd696f1dc7f2f65609442b5b4aacce068ebfe" -H "X-OpenStack-Nova-API-Version: 2.1" _http_log_request /usr/lib/python3.9/site-packages/keystoneauth1/session.py:519
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.311 12 DEBUG novaclient.v2.client [-] RESP: [200] Connection: Keep-Alive Content-Length: 495 Content-Type: application/json Date: Thu, 02 Oct 2025 12:07:17 GMT Keep-Alive: timeout=5, max=99 OpenStack-API-Version: compute 2.1 Server: Apache Vary: OpenStack-API-Version,X-OpenStack-Nova-API-Version X-OpenStack-Nova-API-Version: 2.1 x-compute-request-id: req-9fe24ef6-71e9-40cd-af84-9b4c6b976285 x-openstack-request-id: req-9fe24ef6-71e9-40cd-af84-9b4c6b976285 _http_log_response /usr/lib/python3.9/site-packages/keystoneauth1/session.py:550
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.311 12 DEBUG novaclient.v2.client [-] RESP BODY: {"flavor": {"id": "9ac83da7-f31e-4467-8569-d28002f6aeed", "name": "m1.nano", "ram": 128, "disk": 1, "swap": "", "OS-FLV-EXT-DATA:ephemeral": 0, "OS-FLV-DISABLED:disabled": false, "vcpus": 1, "os-flavor-access:is_public": true, "rxtx_factor": 1.0, "links": [{"rel": "self", "href": "https://nova-internal.openstack.svc:8774/v2.1/flavors/9ac83da7-f31e-4467-8569-d28002f6aeed"}, {"rel": "bookmark", "href": "https://nova-internal.openstack.svc:8774/flavors/9ac83da7-f31e-4467-8569-d28002f6aeed"}]}} _http_log_response /usr/lib/python3.9/site-packages/keystoneauth1/session.py:582
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.311 12 DEBUG novaclient.v2.client [-] GET call to compute for https://nova-internal.openstack.svc:8774/v2.1/flavors/9ac83da7-f31e-4467-8569-d28002f6aeed used request id req-9fe24ef6-71e9-40cd-af84-9b4c6b976285 request /usr/lib/python3.9/site-packages/keystoneauth1/session.py:954
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.312 12 DEBUG ceilometer.compute.discovery [-] instance data: {'id': '854d86e2-3388-4709-a32c-15f8658aa41f', 'name': 'tempest-ListImageFiltersTestJSON-server-2079629969', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'os_type': 'hvm', 'architecture': 'x86_64', 'OS-EXT-SRV-ATTR:instance_name': 'instance-00000027', 'OS-EXT-SRV-ATTR:host': 'compute-0.ctlplane.example.com', 'OS-EXT-STS:vm_state': 'running', 'tenant_id': '8993ff2640584165964db6af518beb94', 'user_id': 'c6a7a530a085472d8ace0b41fc888e26', 'hostId': 'd32dcae84b6b8026816be8d3fc1fd66558e76b17c904e0f11eab4222', 'status': 'active', 'metadata': {}} discover_libvirt_polling /usr/lib/python3.9/site-packages/ceilometer/compute/discovery.py:228
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.314 12 DEBUG ceilometer.compute.discovery [-] instance data: {'id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'name': 'tempest-ServersAdminTestJSON-server-537172074', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'os_type': 'hvm', 'architecture': 'x86_64', 'OS-EXT-SRV-ATTR:instance_name': 'instance-00000022', 'OS-EXT-SRV-ATTR:host': 'compute-0.ctlplane.example.com', 'OS-EXT-STS:vm_state': 'running', 'tenant_id': 'db3f04a20fd740c1af3139196dc928d2', 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'hostId': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'status': 'active', 'metadata': {}} discover_libvirt_polling /usr/lib/python3.9/site-packages/ceilometer/compute/discovery.py:228
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.315 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.bytes in the context of pollsters
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.335 12 DEBUG ceilometer.compute.pollsters [-] 854d86e2-3388-4709-a32c-15f8658aa41f/disk.device.read.bytes volume: 30734848 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.335 12 DEBUG ceilometer.compute.pollsters [-] 854d86e2-3388-4709-a32c-15f8658aa41f/disk.device.read.bytes volume: 274750 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.358 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/disk.device.read.bytes volume: 31005184 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.358 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/disk.device.read.bytes volume: 274750 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'c4399754-3c23-43d1-a054-81a04085b39a', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 30734848, 'user_id': 'c6a7a530a085472d8ace0b41fc888e26', 'user_name': None, 'project_id': '8993ff2640584165964db6af518beb94', 'project_name': None, 'resource_id': '854d86e2-3388-4709-a32c-15f8658aa41f-vda', 'timestamp': '2025-10-02T12:07:17.315262', 'resource_metadata': {'display_name': 'tempest-ListImageFiltersTestJSON-server-2079629969', 'name': 'instance-00000027', 'instance_id': '854d86e2-3388-4709-a32c-15f8658aa41f', 'instance_type': 'm1.nano', 'host': 'd32dcae84b6b8026816be8d3fc1fd66558e76b17c904e0f11eab4222', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '570d7df4-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.002336431, 'message_signature': 'a26e284e9b5d1ebd16e1a61c2003111016c650c2d321f94f720a709465d1b1f7'}, {'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 274750, 'user_id': 'c6a7a530a085472d8ace0b41fc888e26', 'user_name': None, 'project_id': '8993ff2640584165964db6af518beb94', 'project_name': None, 
'resource_id': '854d86e2-3388-4709-a32c-15f8658aa41f-sda', 'timestamp': '2025-10-02T12:07:17.315262', 'resource_metadata': {'display_name': 'tempest-ListImageFiltersTestJSON-server-2079629969', 'name': 'instance-00000027', 'instance_id': '854d86e2-3388-4709-a32c-15f8658aa41f', 'instance_type': 'm1.nano', 'host': 'd32dcae84b6b8026816be8d3fc1fd66558e76b17c904e0f11eab4222', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '570d89a2-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.002336431, 'message_signature': '55bdf9aafe56e4c2aec4219b97f91d1f8f11da7996cc93cbf6b8ec3070eb5e5a'}, {'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 31005184, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': '6068f987-bbd4-4dac-a691-169dcb4570a8-vda', 'timestamp': '2025-10-02T12:07:17.315262', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'instance-00000022', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 
'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '5710f6be-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.023100892, 'message_signature': '2c05eeca5073922aa2ad9353ed6656a877f3ce71b418f9e1c85c35973d47175d'}, {'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 274750, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': '6068f987-bbd4-4dac-a691-169dcb4570a8-sda', 'timestamp': '2025-10-02T12:07:17.315262', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'instance-00000022', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '5711023a-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.023100892, 'message_signature': '77900941b328ba25e1af494c126f384508c6c5f2ec21c17459b2e2ecb9f28535'}]}, 'timestamp': '2025-10-02 12:07:17.358762', '_unique_id': 'c92447abfb4c47c79937e8e234208c19'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.359 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.360 12 INFO ceilometer.polling.manager [-] Polling pollster memory.usage in the context of pollsters
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.375 12 DEBUG ceilometer.compute.pollsters [-] 854d86e2-3388-4709-a32c-15f8658aa41f/memory.usage volume: 40.36328125 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.391 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/memory.usage volume: 46.64453125 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '72e876f5-22b2-4aac-aa7c-4550cdd75e1b', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'memory.usage', 'counter_type': 'gauge', 'counter_unit': 'MB', 'counter_volume': 40.36328125, 'user_id': 'c6a7a530a085472d8ace0b41fc888e26', 'user_name': None, 'project_id': '8993ff2640584165964db6af518beb94', 'project_name': None, 'resource_id': '854d86e2-3388-4709-a32c-15f8658aa41f', 'timestamp': '2025-10-02T12:07:17.360643', 'resource_metadata': {'display_name': 'tempest-ListImageFiltersTestJSON-server-2079629969', 'name': 'instance-00000027', 'instance_id': '854d86e2-3388-4709-a32c-15f8658aa41f', 'instance_type': 'm1.nano', 'host': 'd32dcae84b6b8026816be8d3fc1fd66558e76b17c904e0f11eab4222', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1}, 'message_id': '5713918a-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.062149547, 'message_signature': '75195dc3d3618582d9b1b3e2655247881992ae066d45dde6156d200ad56eb5be'}, {'source': 'openstack', 'counter_name': 'memory.usage', 'counter_type': 'gauge', 'counter_unit': 'MB', 'counter_volume': 46.64453125, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': 
'6068f987-bbd4-4dac-a691-169dcb4570a8', 'timestamp': '2025-10-02T12:07:17.360643', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'instance-00000022', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1}, 'message_id': '57160316-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.078055807, 'message_signature': '2ebe196514429e1d72e6691d4f5fdd2f4a2db3c740dd681f9f64d44f07df1350'}]}, 'timestamp': '2025-10-02 12:07:17.391624', '_unique_id': 'b4685ea4e3dc40f7b27c882dec562431'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.392 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.393 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes.rate in the context of pollsters
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.393 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for IncomingBytesRatePollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.393 12 ERROR ceilometer.polling.manager [-] Prevent pollster network.incoming.bytes.rate from polling [<NovaLikeServer: tempest-ListImageFiltersTestJSON-server-2079629969>, <NovaLikeServer: tempest-ServersAdminTestJSON-server-537172074>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-ListImageFiltersTestJSON-server-2079629969>, <NovaLikeServer: tempest-ServersAdminTestJSON-server-537172074>]
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.393 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.bytes in the context of pollsters
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.393 12 DEBUG ceilometer.compute.pollsters [-] 854d86e2-3388-4709-a32c-15f8658aa41f/disk.device.write.bytes volume: 72851456 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.393 12 DEBUG ceilometer.compute.pollsters [-] 854d86e2-3388-4709-a32c-15f8658aa41f/disk.device.write.bytes volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.394 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/disk.device.write.bytes volume: 72888320 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.394 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/disk.device.write.bytes volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '4ba74725-462c-42fe-9df7-b3a58500a4ef', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 72851456, 'user_id': 'c6a7a530a085472d8ace0b41fc888e26', 'user_name': None, 'project_id': '8993ff2640584165964db6af518beb94', 'project_name': None, 'resource_id': '854d86e2-3388-4709-a32c-15f8658aa41f-vda', 'timestamp': '2025-10-02T12:07:17.393646', 'resource_metadata': {'display_name': 'tempest-ListImageFiltersTestJSON-server-2079629969', 'name': 'instance-00000027', 'instance_id': '854d86e2-3388-4709-a32c-15f8658aa41f', 'instance_type': 'm1.nano', 'host': 'd32dcae84b6b8026816be8d3fc1fd66558e76b17c904e0f11eab4222', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '57165f78-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.002336431, 'message_signature': '31915c56cfc9de3a2a687836d65a4e11944f036f6585809f9277199d59f414a7'}, {'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': 'c6a7a530a085472d8ace0b41fc888e26', 'user_name': None, 'project_id': '8993ff2640584165964db6af518beb94', 'project_name': None, 
'resource_id': '854d86e2-3388-4709-a32c-15f8658aa41f-sda', 'timestamp': '2025-10-02T12:07:17.393646', 'resource_metadata': {'display_name': 'tempest-ListImageFiltersTestJSON-server-2079629969', 'name': 'instance-00000027', 'instance_id': '854d86e2-3388-4709-a32c-15f8658aa41f', 'instance_type': 'm1.nano', 'host': 'd32dcae84b6b8026816be8d3fc1fd66558e76b17c904e0f11eab4222', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '5716698c-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.002336431, 'message_signature': 'eb14d51ab9956ab02664db45a72304f2b8b2e14ba05d6f4383a057c7c22a0cb8'}, {'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 72888320, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': '6068f987-bbd4-4dac-a691-169dcb4570a8-vda', 'timestamp': '2025-10-02T12:07:17.393646', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'instance-00000022', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 
'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '5716765c-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.023100892, 'message_signature': '532ee70fa0467e94dcb6ed5d5db072441593628427bfb05f681f734218946536'}, {'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': '6068f987-bbd4-4dac-a691-169dcb4570a8-sda', 'timestamp': '2025-10-02T12:07:17.393646', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'instance-00000022', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '57167e36-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.023100892, 'message_signature': '05df15a33665ac024f8a9ba28fb5d5abf87e6fc840a27178b60b886713a2c74f'}]}, 'timestamp': '2025-10-02 12:07:17.394689', '_unique_id': 'f15c69c4f03b404295880c9615dc8e85'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.395 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes.delta in the context of pollsters
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.399 12 DEBUG ceilometer.compute.virt.libvirt.inspector [-] No delta meter predecessor for 6068f987-bbd4-4dac-a691-169dcb4570a8 / tap876740d6-da inspect_vnics /usr/lib/python3.9/site-packages/ceilometer/compute/virt/libvirt/inspector.py:136
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.399 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/network.incoming.bytes.delta volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '92b477be-04f3-4db2-9952-12dd4977fc79', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.bytes.delta', 'counter_type': 'delta', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': 'instance-00000022-6068f987-bbd4-4dac-a691-169dcb4570a8-tap876740d6-da', 'timestamp': '2025-10-02T12:07:17.396002', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'tap876740d6-da', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:4f:35:f0', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap876740d6-da'}, 'message_id': '57175180-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.084790528, 'message_signature': '6abc653f71c762b9c7b41adf8104ebd4d6abde98951c18ba1ac91e01110ad93e'}]}, 'timestamp': '2025-10-02 12:07:17.400120', '_unique_id': '972b300ec5dd408397ab378eb57d74f8'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.400 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.401 12 INFO ceilometer.polling.manager [-] Polling pollster cpu in the context of pollsters
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.401 12 DEBUG ceilometer.compute.pollsters [-] 854d86e2-3388-4709-a32c-15f8658aa41f/cpu volume: 10620000000 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.401 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/cpu volume: 12420000000 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '8f49b3a2-0339-447a-997e-8db20fb6732c', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'cpu', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 10620000000, 'user_id': 'c6a7a530a085472d8ace0b41fc888e26', 'user_name': None, 'project_id': '8993ff2640584165964db6af518beb94', 'project_name': None, 'resource_id': '854d86e2-3388-4709-a32c-15f8658aa41f', 'timestamp': '2025-10-02T12:07:17.401361', 'resource_metadata': {'display_name': 'tempest-ListImageFiltersTestJSON-server-2079629969', 'name': 'instance-00000027', 'instance_id': '854d86e2-3388-4709-a32c-15f8658aa41f', 'instance_type': 'm1.nano', 'host': 'd32dcae84b6b8026816be8d3fc1fd66558e76b17c904e0f11eab4222', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'cpu_number': 1}, 'message_id': '57178bc8-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.062149547, 'message_signature': '30f01312517312e9d90a4725301945f037b4f7630062949d016ffec62ce6f42c'}, {'source': 'openstack', 'counter_name': 'cpu', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 12420000000, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': 
'6068f987-bbd4-4dac-a691-169dcb4570a8', 'timestamp': '2025-10-02T12:07:17.401361', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'instance-00000022', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'cpu_number': 1}, 'message_id': '571793de-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.078055807, 'message_signature': '6bf045df909388c90a58b4a8d5e1d3cb70a7a668386eaa7054234a5a2808cf24'}]}, 'timestamp': '2025-10-02 12:07:17.401794', '_unique_id': '6b604e94722942d6820928fd8a7f2e57'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets.error in the context of pollsters
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.402 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/network.outgoing.packets.error volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'b4b62450-7a94-4b6a-a3e5-affb4f102d4f', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets.error', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': 'instance-00000022-6068f987-bbd4-4dac-a691-169dcb4570a8-tap876740d6-da', 'timestamp': '2025-10-02T12:07:17.402918', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'tap876740d6-da', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:4f:35:f0', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap876740d6-da'}, 'message_id': '5717c9f8-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.084790528, 'message_signature': '56b5a2782f4bc190bd94c7cc8280035142fa97499e7e6492d50e7c8ab157986c'}]}, 'timestamp': '2025-10-02 12:07:17.403202', '_unique_id': '4bde9e4cb66742a480fa0142c7febe41'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.403 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets.error in the context of pollsters
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/network.incoming.packets.error volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '651085d4-e9b3-4ffb-abdb-9927df9b2a74', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets.error', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': 'instance-00000022-6068f987-bbd4-4dac-a691-169dcb4570a8-tap876740d6-da', 'timestamp': '2025-10-02T12:07:17.404311', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'tap876740d6-da', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:4f:35:f0', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap876740d6-da'}, 'message_id': '5717febe-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.084790528, 'message_signature': 'a9dd55b8223a7e16cb5bf3a5dee84c9528907a92c684f453460a7679f61ff404'}]}, 'timestamp': '2025-10-02 12:07:17.404543', '_unique_id': 'aa939705bfbc41f086b19b1a72c85987'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.404 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.405 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes.delta in the context of pollsters
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.405 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/network.outgoing.bytes.delta volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '532d01b0-eaa1-41b5-b459-0bf6c066d9ee', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.bytes.delta', 'counter_type': 'delta', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': 'instance-00000022-6068f987-bbd4-4dac-a691-169dcb4570a8-tap876740d6-da', 'timestamp': '2025-10-02T12:07:17.405614', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'tap876740d6-da', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:4f:35:f0', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap876740d6-da'}, 'message_id': '5718317c-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.084790528, 'message_signature': 'ab29406b98a6bb1ff24d672ad7e79b7797d12272ba7724f7db40e6ee51d9e68c'}]}, 'timestamp': '2025-10-02 12:07:17.405840', '_unique_id': '834239290151478f8e6012af7340ec64'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets.drop in the context of pollsters
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.406 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/network.outgoing.packets.drop volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'd553d479-bc02-44b9-9e10-577b61f8089d', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets.drop', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': 'instance-00000022-6068f987-bbd4-4dac-a691-169dcb4570a8-tap876740d6-da', 'timestamp': '2025-10-02T12:07:17.406916', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'tap876740d6-da', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:4f:35:f0', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap876740d6-da'}, 'message_id': '57186534-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.084790528, 'message_signature': '1a3374dd750f3d82a763247299093aa008eafc125b0d86dd14c21ed7e57be1f3'}]}, 'timestamp': '2025-10-02 12:07:17.407166', '_unique_id': '88f8b07a90754b8b88fd98c7d46d7308'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.407 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.408 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.requests in the context of pollsters
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.408 12 DEBUG ceilometer.compute.pollsters [-] 854d86e2-3388-4709-a32c-15f8658aa41f/disk.device.read.requests volume: 1105 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.408 12 DEBUG ceilometer.compute.pollsters [-] 854d86e2-3388-4709-a32c-15f8658aa41f/disk.device.read.requests volume: 108 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.408 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/disk.device.read.requests volume: 1132 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/disk.device.read.requests volume: 108 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '83b8569d-5b6b-4807-a690-a6912c7d5218', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 1105, 'user_id': 'c6a7a530a085472d8ace0b41fc888e26', 'user_name': None, 'project_id': '8993ff2640584165964db6af518beb94', 'project_name': None, 'resource_id': '854d86e2-3388-4709-a32c-15f8658aa41f-vda', 'timestamp': '2025-10-02T12:07:17.408477', 'resource_metadata': {'display_name': 'tempest-ListImageFiltersTestJSON-server-2079629969', 'name': 'instance-00000027', 'instance_id': '854d86e2-3388-4709-a32c-15f8658aa41f', 'instance_type': 'm1.nano', 'host': 'd32dcae84b6b8026816be8d3fc1fd66558e76b17c904e0f11eab4222', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '5718a13e-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.002336431, 'message_signature': '1005f6e187194025d5a30a40304e4773cfd400873305c961728e6338769b6b25'}, {'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 108, 'user_id': 'c6a7a530a085472d8ace0b41fc888e26', 'user_name': None, 'project_id': '8993ff2640584165964db6af518beb94', 
'project_name': None, 'resource_id': '854d86e2-3388-4709-a32c-15f8658aa41f-sda', 'timestamp': '2025-10-02T12:07:17.408477', 'resource_metadata': {'display_name': 'tempest-ListImageFiltersTestJSON-server-2079629969', 'name': 'instance-00000027', 'instance_id': '854d86e2-3388-4709-a32c-15f8658aa41f', 'instance_type': 'm1.nano', 'host': 'd32dcae84b6b8026816be8d3fc1fd66558e76b17c904e0f11eab4222', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '5718aa08-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.002336431, 'message_signature': 'ebfe6432630f520cf4df9f79b463825524c22922781281b488796952a781a453'}, {'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 1132, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': '6068f987-bbd4-4dac-a691-169dcb4570a8-vda', 'timestamp': '2025-10-02T12:07:17.408477', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'instance-00000022', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': 
'', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '5718b2d2-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.023100892, 'message_signature': '6db5b2acb6c41aa409ebc99d3100e345b59726746e79ddef4948706d637ba8b0'}, {'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 108, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': '6068f987-bbd4-4dac-a691-169dcb4570a8-sda', 'timestamp': '2025-10-02T12:07:17.408477', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'instance-00000022', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '5718bb38-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.023100892, 'message_signature': '56974e666db59829c2d346698a27902938c9a5f908e48d7e1cb16586fd480c23'}]}, 'timestamp': '2025-10-02 12:07:17.409362', '_unique_id': 'd9fe614157a748b480feae87754754d1'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.409 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.410 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.capacity in the context of pollsters
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.425 12 DEBUG ceilometer.compute.pollsters [-] 854d86e2-3388-4709-a32c-15f8658aa41f/disk.device.capacity volume: 1073741824 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.425 12 DEBUG ceilometer.compute.pollsters [-] 854d86e2-3388-4709-a32c-15f8658aa41f/disk.device.capacity volume: 485376 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.437 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/disk.device.capacity volume: 1073741824 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.437 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/disk.device.capacity volume: 485376 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'ddac163f-13f0-40c9-ab93-48cf66a888a9', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 1073741824, 'user_id': 'c6a7a530a085472d8ace0b41fc888e26', 'user_name': None, 'project_id': '8993ff2640584165964db6af518beb94', 'project_name': None, 'resource_id': '854d86e2-3388-4709-a32c-15f8658aa41f-vda', 'timestamp': '2025-10-02T12:07:17.410655', 'resource_metadata': {'display_name': 'tempest-ListImageFiltersTestJSON-server-2079629969', 'name': 'instance-00000027', 'instance_id': '854d86e2-3388-4709-a32c-15f8658aa41f', 'instance_type': 'm1.nano', 'host': 'd32dcae84b6b8026816be8d3fc1fd66558e76b17c904e0f11eab4222', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '571b3a16-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.097714368, 'message_signature': '94cef124e98e70844fda3c6c8417c647518fdd3eec8d7817dec4c8c558931c2a'}, {'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 485376, 'user_id': 'c6a7a530a085472d8ace0b41fc888e26', 'user_name': None, 'project_id': '8993ff2640584165964db6af518beb94', 'project_name': None, 'resource_id': 
'854d86e2-3388-4709-a32c-15f8658aa41f-sda', 'timestamp': '2025-10-02T12:07:17.410655', 'resource_metadata': {'display_name': 'tempest-ListImageFiltersTestJSON-server-2079629969', 'name': 'instance-00000027', 'instance_id': '854d86e2-3388-4709-a32c-15f8658aa41f', 'instance_type': 'm1.nano', 'host': 'd32dcae84b6b8026816be8d3fc1fd66558e76b17c904e0f11eab4222', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '571b457e-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.097714368, 'message_signature': '8d7322a7067c647ebb267618a36d7dcc53146b9f3d2fd736bf44ded2161444b1'}, {'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 1073741824, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': '6068f987-bbd4-4dac-a691-169dcb4570a8-vda', 'timestamp': '2025-10-02T12:07:17.410655', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'instance-00000022', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 
'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '571d0f62-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.113140514, 'message_signature': 'fd015ead1290a7462fe92c14fbb4d91215e8fd0ccacc39c41a2baba569dab6c7'}, {'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 485376, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': '6068f987-bbd4-4dac-a691-169dcb4570a8-sda', 'timestamp': '2025-10-02T12:07:17.410655', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'instance-00000022', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '571d1868-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.113140514, 'message_signature': '27d671c34fe78f13d8d03210a12371b27ad22a95687f1ee4adb5fbf270879271'}]}, 'timestamp': '2025-10-02 12:07:17.437969', '_unique_id': '3e7cc74ccfa94bfa96c99d1e8063677f'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.438 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.439 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets.drop in the context of pollsters
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.439 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/network.incoming.packets.drop volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'ed722b6e-a94e-41df-8f4f-321ffa55cecd', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets.drop', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': 'instance-00000022-6068f987-bbd4-4dac-a691-169dcb4570a8-tap876740d6-da', 'timestamp': '2025-10-02T12:07:17.439763', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'tap876740d6-da', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:4f:35:f0', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap876740d6-da'}, 'message_id': '571d680e-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.084790528, 'message_signature': 'c4e255293e4d1afe9fdafe742622d886cabbf3ca2891137d33b7fa19bb57f92b'}]}, 'timestamp': '2025-10-02 12:07:17.440031', '_unique_id': '6242df9020f44018af08cc019b9e8835'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.440 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.441 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.iops in the context of pollsters
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.441 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for PerDeviceDiskIOPSPollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.441 12 ERROR ceilometer.polling.manager [-] Prevent pollster disk.device.iops from polling [<NovaLikeServer: tempest-ListImageFiltersTestJSON-server-2079629969>, <NovaLikeServer: tempest-ServersAdminTestJSON-server-537172074>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-ListImageFiltersTestJSON-server-2079629969>, <NovaLikeServer: tempest-ServersAdminTestJSON-server-537172074>]
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.441 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets in the context of pollsters
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.441 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/network.incoming.packets volume: 17 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '1c2a219f-0b83-4dc5-9da7-aae852025c8b', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 17, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': 'instance-00000022-6068f987-bbd4-4dac-a691-169dcb4570a8-tap876740d6-da', 'timestamp': '2025-10-02T12:07:17.441420', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'tap876740d6-da', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:4f:35:f0', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap876740d6-da'}, 'message_id': '571da850-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.084790528, 'message_signature': '190f138d0a51d8434775667adf2599a1447193dc0913da8add700a8905867e56'}]}, 'timestamp': '2025-10-02 12:07:17.441657', '_unique_id': 'deaf5f0d0e8940d0a38d4f411f6b25d7'}: kombu.exceptions.OperationalError: [Errno 111] Connection 
refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.latency in the context of pollsters
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 DEBUG ceilometer.compute.pollsters [-] 854d86e2-3388-4709-a32c-15f8658aa41f/disk.device.write.latency volume: 3264806530 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.442 12 DEBUG ceilometer.compute.pollsters [-] 854d86e2-3388-4709-a32c-15f8658aa41f/disk.device.write.latency volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.443 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/disk.device.write.latency volume: 3670507112 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.443 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/disk.device.write.latency volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '942e66fd-541b-416a-943b-dad68d998af9', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 3264806530, 'user_id': 'c6a7a530a085472d8ace0b41fc888e26', 'user_name': None, 'project_id': '8993ff2640584165964db6af518beb94', 'project_name': None, 'resource_id': '854d86e2-3388-4709-a32c-15f8658aa41f-vda', 'timestamp': '2025-10-02T12:07:17.442754', 'resource_metadata': {'display_name': 'tempest-ListImageFiltersTestJSON-server-2079629969', 'name': 'instance-00000027', 'instance_id': '854d86e2-3388-4709-a32c-15f8658aa41f', 'instance_type': 'm1.nano', 'host': 'd32dcae84b6b8026816be8d3fc1fd66558e76b17c904e0f11eab4222', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '571ddc12-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.002336431, 'message_signature': 'd1dc1fede44b4426be0ab4becf861abada80e0817d5036ea66febbdbeaca1127'}, {'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 0, 'user_id': 'c6a7a530a085472d8ace0b41fc888e26', 'user_name': None, 'project_id': '8993ff2640584165964db6af518beb94', 'project_name': 
None, 'resource_id': '854d86e2-3388-4709-a32c-15f8658aa41f-sda', 'timestamp': '2025-10-02T12:07:17.442754', 'resource_metadata': {'display_name': 'tempest-ListImageFiltersTestJSON-server-2079629969', 'name': 'instance-00000027', 'instance_id': '854d86e2-3388-4709-a32c-15f8658aa41f', 'instance_type': 'm1.nano', 'host': 'd32dcae84b6b8026816be8d3fc1fd66558e76b17c904e0f11eab4222', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '571de536-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.002336431, 'message_signature': '084e1891a43b16505bff9804e60b73f141287bf2e927327a98958faa00971238'}, {'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 3670507112, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': '6068f987-bbd4-4dac-a691-169dcb4570a8-vda', 'timestamp': '2025-10-02T12:07:17.442754', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'instance-00000022', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': 
{'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '571ded10-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.023100892, 'message_signature': '0e318718849c15b54e5e96e150115dc9604a6f58574c24bb024ca141cc2fdb01'}, {'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 0, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': '6068f987-bbd4-4dac-a691-169dcb4570a8-sda', 'timestamp': '2025-10-02T12:07:17.442754', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'instance-00000022', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '571df4ae-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.023100892, 'message_signature': 'f880bde9bcbd48f3e2f59450e005251dc3fb30ecb971410bf1dfa9cd6921f910'}]}, 'timestamp': '2025-10-02 12:07:17.443592', '_unique_id': '8493ac47e85d4c79ad1594a181eaec71'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.requests in the context of pollsters
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 DEBUG ceilometer.compute.pollsters [-] 854d86e2-3388-4709-a32c-15f8658aa41f/disk.device.write.requests volume: 310 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.444 12 DEBUG ceilometer.compute.pollsters [-] 854d86e2-3388-4709-a32c-15f8658aa41f/disk.device.write.requests volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.445 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/disk.device.write.requests volume: 317 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.445 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/disk.device.write.requests volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'bbdea090-a663-41d6-ba55-530085379e1a', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 310, 'user_id': 'c6a7a530a085472d8ace0b41fc888e26', 'user_name': None, 'project_id': '8993ff2640584165964db6af518beb94', 'project_name': None, 'resource_id': '854d86e2-3388-4709-a32c-15f8658aa41f-vda', 'timestamp': '2025-10-02T12:07:17.444723', 'resource_metadata': {'display_name': 'tempest-ListImageFiltersTestJSON-server-2079629969', 'name': 'instance-00000027', 'instance_id': '854d86e2-3388-4709-a32c-15f8658aa41f', 'instance_type': 'm1.nano', 'host': 'd32dcae84b6b8026816be8d3fc1fd66558e76b17c904e0f11eab4222', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '571e2910-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.002336431, 'message_signature': '57554e49ff86f6d3505d7d60a9107602b72bffc5948d4116b8e5871459cef55c'}, {'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 0, 'user_id': 'c6a7a530a085472d8ace0b41fc888e26', 'user_name': None, 'project_id': '8993ff2640584165964db6af518beb94', 'project_name': 
None, 'resource_id': '854d86e2-3388-4709-a32c-15f8658aa41f-sda', 'timestamp': '2025-10-02T12:07:17.444723', 'resource_metadata': {'display_name': 'tempest-ListImageFiltersTestJSON-server-2079629969', 'name': 'instance-00000027', 'instance_id': '854d86e2-3388-4709-a32c-15f8658aa41f', 'instance_type': 'm1.nano', 'host': 'd32dcae84b6b8026816be8d3fc1fd66558e76b17c904e0f11eab4222', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '571e3248-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.002336431, 'message_signature': 'c33c15bf54cebd6086081539b7b41d5b997eb1be9fbb581420f003606362b41a'}, {'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 317, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': '6068f987-bbd4-4dac-a691-169dcb4570a8-vda', 'timestamp': '2025-10-02T12:07:17.444723', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'instance-00000022', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': 
{'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '571e3a18-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.023100892, 'message_signature': 'd6dbe3a7d896e969ce984a3ad48c6431eee1bdc1e7fad2949afa7e50dbf42df8'}, {'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 0, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': '6068f987-bbd4-4dac-a691-169dcb4570a8-sda', 'timestamp': '2025-10-02T12:07:17.444723', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'instance-00000022', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '571e4170-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.023100892, 'message_signature': '7d1f5244497602981f2f8e1f02051c07630525e62bd010c34ad79fa4b2aa39b7'}]}, 'timestamp': '2025-10-02 12:07:17.445555', '_unique_id': 'a372ac6a72eb405084e36389b0847e67'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.usage in the context of pollsters
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.446 12 DEBUG ceilometer.compute.pollsters [-] 854d86e2-3388-4709-a32c-15f8658aa41f/disk.device.usage volume: 29884416 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.447 12 DEBUG ceilometer.compute.pollsters [-] 854d86e2-3388-4709-a32c-15f8658aa41f/disk.device.usage volume: 485376 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.447 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/disk.device.usage volume: 29949952 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.447 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/disk.device.usage volume: 485376 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'a502cde6-beee-4e8d-b2ff-63bdaf5412be', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 29884416, 'user_id': 'c6a7a530a085472d8ace0b41fc888e26', 'user_name': None, 'project_id': '8993ff2640584165964db6af518beb94', 'project_name': None, 'resource_id': '854d86e2-3388-4709-a32c-15f8658aa41f-vda', 'timestamp': '2025-10-02T12:07:17.446815', 'resource_metadata': {'display_name': 'tempest-ListImageFiltersTestJSON-server-2079629969', 'name': 'instance-00000027', 'instance_id': '854d86e2-3388-4709-a32c-15f8658aa41f', 'instance_type': 'm1.nano', 'host': 'd32dcae84b6b8026816be8d3fc1fd66558e76b17c904e0f11eab4222', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '571e7ad2-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.097714368, 'message_signature': 'b400b7781703f282665b5ed82cabe5442af0dd5a485f52d64d91323de0d13714'}, {'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 485376, 'user_id': 'c6a7a530a085472d8ace0b41fc888e26', 'user_name': None, 'project_id': '8993ff2640584165964db6af518beb94', 'project_name': None, 'resource_id': 
'854d86e2-3388-4709-a32c-15f8658aa41f-sda', 'timestamp': '2025-10-02T12:07:17.446815', 'resource_metadata': {'display_name': 'tempest-ListImageFiltersTestJSON-server-2079629969', 'name': 'instance-00000027', 'instance_id': '854d86e2-3388-4709-a32c-15f8658aa41f', 'instance_type': 'm1.nano', 'host': 'd32dcae84b6b8026816be8d3fc1fd66558e76b17c904e0f11eab4222', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '571e83ce-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.097714368, 'message_signature': 'c4d10d71cfb05f95555cade20e0e414d80f3a020b589eeb40d03809175a6b7f5'}, {'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 29949952, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': '6068f987-bbd4-4dac-a691-169dcb4570a8-vda', 'timestamp': '2025-10-02T12:07:17.446815', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'instance-00000022', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 
'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '571e8b80-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.113140514, 'message_signature': '0bf5ee10f563aea54d240d3e14f5c1929c8f4b875270d4fd00fcd324e86783dc'}, {'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 485376, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': '6068f987-bbd4-4dac-a691-169dcb4570a8-sda', 'timestamp': '2025-10-02T12:07:17.446815', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'instance-00000022', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '571e92ec-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.113140514, 'message_signature': '55e0830c0edf7810a8afcf8222d2d31874e4b3d758bd2111e2b2108a651ad509'}]}, 'timestamp': '2025-10-02 12:07:17.447645', '_unique_id': '70d1496c540646fbaa3ad2064728a02f'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes in the context of pollsters
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.448 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/network.incoming.bytes volume: 1904 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '6a9166ec-725d-41b2-a693-0d461a85913d', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 1904, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': 'instance-00000022-6068f987-bbd4-4dac-a691-169dcb4570a8-tap876740d6-da', 'timestamp': '2025-10-02T12:07:17.448801', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'tap876740d6-da', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:4f:35:f0', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap876740d6-da'}, 'message_id': '571ec88e-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.084790528, 'message_signature': '1ef011aa8c3fe7f07817a468608b6e212d5859f4dfe58ba9bc290143be1255ef'}]}, 'timestamp': '2025-10-02 12:07:17.449079', '_unique_id': '32123d5e0a7447448f8945be949ccd18'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.449 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.450 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.latency in the context of pollsters
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.450 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for PerDeviceDiskLatencyPollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.450 12 ERROR ceilometer.polling.manager [-] Prevent pollster disk.device.latency from polling [<NovaLikeServer: tempest-ListImageFiltersTestJSON-server-2079629969>, <NovaLikeServer: tempest-ServersAdminTestJSON-server-537172074>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-ListImageFiltersTestJSON-server-2079629969>, <NovaLikeServer: tempest-ServersAdminTestJSON-server-537172074>]
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.450 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes in the context of pollsters
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.450 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/network.outgoing.bytes volume: 1620 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '66be54f0-be8a-438e-bb0f-1ca5eb475414', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 1620, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': 'instance-00000022-6068f987-bbd4-4dac-a691-169dcb4570a8-tap876740d6-da', 'timestamp': '2025-10-02T12:07:17.450720', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'tap876740d6-da', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:4f:35:f0', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap876740d6-da'}, 'message_id': '571f1442-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.084790528, 'message_signature': '7babd49592b474c3d82c7bea473923b24ea9ebe623f5bf2058fa123ebfe09f41'}]}, 'timestamp': '2025-10-02 12:07:17.450970', '_unique_id': '34348eb26d0847cd9ddd5e34eac90792'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.451 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets in the context of pollsters
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/network.outgoing.packets volume: 16 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'fd8a9337-65bf-4cb3-ad77-1bd4a0c184c2', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 16, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': 'instance-00000022-6068f987-bbd4-4dac-a691-169dcb4570a8-tap876740d6-da', 'timestamp': '2025-10-02T12:07:17.452070', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'tap876740d6-da', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:4f:35:f0', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap876740d6-da'}, 'message_id': '571f480e-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.084790528, 'message_signature': '001bcf209c193aa8abe002728895cbd06891eb3bb415736edac3fc5fe7a60823'}]}, 'timestamp': '2025-10-02 12:07:17.452293', '_unique_id': 'e3bef6febdd24db289e99a6d2b3c07dd'}: kombu.exceptions.OperationalError: [Errno 111] Connection 
refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.452 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.453 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.allocation in the context of pollsters
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.453 12 DEBUG ceilometer.compute.pollsters [-] 854d86e2-3388-4709-a32c-15f8658aa41f/disk.device.allocation volume: 30482432 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.453 12 DEBUG ceilometer.compute.pollsters [-] 854d86e2-3388-4709-a32c-15f8658aa41f/disk.device.allocation volume: 487424 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.453 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/disk.device.allocation volume: 30547968 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/disk.device.allocation volume: 487424 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'aa8d1950-093d-4b12-b84e-71f591b82660', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 30482432, 'user_id': 'c6a7a530a085472d8ace0b41fc888e26', 'user_name': None, 'project_id': '8993ff2640584165964db6af518beb94', 'project_name': None, 'resource_id': '854d86e2-3388-4709-a32c-15f8658aa41f-vda', 'timestamp': '2025-10-02T12:07:17.453377', 'resource_metadata': {'display_name': 'tempest-ListImageFiltersTestJSON-server-2079629969', 'name': 'instance-00000027', 'instance_id': '854d86e2-3388-4709-a32c-15f8658aa41f', 'instance_type': 'm1.nano', 'host': 'd32dcae84b6b8026816be8d3fc1fd66558e76b17c904e0f11eab4222', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '571f7b12-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.097714368, 'message_signature': 'e3065e27d3c413522296ae3c34630439f498e2303f7e3290cabab5640f8505ba'}, {'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 487424, 'user_id': 'c6a7a530a085472d8ace0b41fc888e26', 'user_name': None, 'project_id': '8993ff2640584165964db6af518beb94', 'project_name': None, 
'resource_id': '854d86e2-3388-4709-a32c-15f8658aa41f-sda', 'timestamp': '2025-10-02T12:07:17.453377', 'resource_metadata': {'display_name': 'tempest-ListImageFiltersTestJSON-server-2079629969', 'name': 'instance-00000027', 'instance_id': '854d86e2-3388-4709-a32c-15f8658aa41f', 'instance_type': 'm1.nano', 'host': 'd32dcae84b6b8026816be8d3fc1fd66558e76b17c904e0f11eab4222', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '571f8314-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.097714368, 'message_signature': '0cd914310ed9d83644a13e48868f9fd9769c46d81b613c6623469a93c8d2317a'}, {'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 30547968, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': '6068f987-bbd4-4dac-a691-169dcb4570a8-vda', 'timestamp': '2025-10-02T12:07:17.453377', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'instance-00000022', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 
'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '571f8c42-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.113140514, 'message_signature': 'ecb7ba7ad2ad3eeff511cd19d064041734482f6de398a406992759264a123770'}, {'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 487424, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': '6068f987-bbd4-4dac-a691-169dcb4570a8-sda', 'timestamp': '2025-10-02T12:07:17.453377', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'instance-00000022', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '571f9494-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.113140514, 'message_signature': '8361f3e50cc6770b1752a3a0ab49ca188f99dbaf8435c1ee3041ac70ce287f9d'}]}, 'timestamp': '2025-10-02 12:07:17.454236', '_unique_id': '0e8e1e3a8bff49518122bba91e104581'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.454 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.455 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.latency in the context of pollsters
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.455 12 DEBUG ceilometer.compute.pollsters [-] 854d86e2-3388-4709-a32c-15f8658aa41f/disk.device.read.latency volume: 685014260 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.455 12 DEBUG ceilometer.compute.pollsters [-] 854d86e2-3388-4709-a32c-15f8658aa41f/disk.device.read.latency volume: 37622645 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.455 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/disk.device.read.latency volume: 1122987546 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.455 12 DEBUG ceilometer.compute.pollsters [-] 6068f987-bbd4-4dac-a691-169dcb4570a8/disk.device.read.latency volume: 42008028 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'e6044bc0-dfd2-43d0-b0a8-694887cfad27', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 685014260, 'user_id': 'c6a7a530a085472d8ace0b41fc888e26', 'user_name': None, 'project_id': '8993ff2640584165964db6af518beb94', 'project_name': None, 'resource_id': '854d86e2-3388-4709-a32c-15f8658aa41f-vda', 'timestamp': '2025-10-02T12:07:17.455352', 'resource_metadata': {'display_name': 'tempest-ListImageFiltersTestJSON-server-2079629969', 'name': 'instance-00000027', 'instance_id': '854d86e2-3388-4709-a32c-15f8658aa41f', 'instance_type': 'm1.nano', 'host': 'd32dcae84b6b8026816be8d3fc1fd66558e76b17c904e0f11eab4222', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '571fc838-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.002336431, 'message_signature': 'f3d25b445bf1c79579a12fc0aa660f399f5aa530a58c4a9efe0a03f01918593b'}, {'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 37622645, 'user_id': 'c6a7a530a085472d8ace0b41fc888e26', 'user_name': None, 'project_id': '8993ff2640584165964db6af518beb94', 'project_name': 
None, 'resource_id': '854d86e2-3388-4709-a32c-15f8658aa41f-sda', 'timestamp': '2025-10-02T12:07:17.455352', 'resource_metadata': {'display_name': 'tempest-ListImageFiltersTestJSON-server-2079629969', 'name': 'instance-00000027', 'instance_id': '854d86e2-3388-4709-a32c-15f8658aa41f', 'instance_type': 'm1.nano', 'host': 'd32dcae84b6b8026816be8d3fc1fd66558e76b17c904e0f11eab4222', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '571fcfb8-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.002336431, 'message_signature': '8f6cd3d1081d51285f84b027b072aff9ffb1295e5001073e67b1995266ac0959'}, {'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 1122987546, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': '6068f987-bbd4-4dac-a691-169dcb4570a8-vda', 'timestamp': '2025-10-02T12:07:17.455352', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'instance-00000022', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': 
{'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '571fd79c-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.023100892, 'message_signature': '2ec98aa25acfb27ec453ccbbada7f2d1d605544bfeb3afe43fb92a9b714b5614'}, {'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 42008028, 'user_id': '9258efa4511c4bb3813eca27b75b1008', 'user_name': None, 'project_id': 'db3f04a20fd740c1af3139196dc928d2', 'project_name': None, 'resource_id': '6068f987-bbd4-4dac-a691-169dcb4570a8-sda', 'timestamp': '2025-10-02T12:07:17.455352', 'resource_metadata': {'display_name': 'tempest-ServersAdminTestJSON-server-537172074', 'name': 'instance-00000022', 'instance_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'instance_type': 'm1.nano', 'host': 'f0cb68c513d32355220de7248bdeeb468a12f6dca93a7d65559543df', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '571fe020-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4835.023100892, 'message_signature': '6dd4d1501c518b526d8356811a07c1e0e87e21072da41ad43c7a4df8d4018341'}]}, 'timestamp': '2025-10-02 12:07:17.456172', '_unique_id': '67ac8955f25b427ba5515b83157cc188'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.456 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.457 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes.rate in the context of pollsters
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.457 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for OutgoingBytesRatePollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:07:17.457 12 ERROR ceilometer.polling.manager [-] Prevent pollster network.outgoing.bytes.rate from polling [<NovaLikeServer: tempest-ListImageFiltersTestJSON-server-2079629969>, <NovaLikeServer: tempest-ServersAdminTestJSON-server-537172074>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-ListImageFiltersTestJSON-server-2079629969>, <NovaLikeServer: tempest-ServersAdminTestJSON-server-537172074>]
Oct 02 12:07:17 compute-0 nova_compute[192079]: 2025-10-02 12:07:17.669 2 DEBUG oslo_concurrency.processutils [None req-1b824321-af59-4620-b081-a4b0e87684b5 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] CMD "qemu-img convert -t none -O qcow2 -f qcow2 /var/lib/nova/instances/snapshots/tmpfx249vbp/aa90b880ef9c48418d8c5996bdc24e93.delta /var/lib/nova/instances/snapshots/tmpfx249vbp/aa90b880ef9c48418d8c5996bdc24e93" returned: 0 in 0.811s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:07:17 compute-0 nova_compute[192079]: 2025-10-02 12:07:17.678 2 INFO nova.virt.libvirt.driver [None req-1b824321-af59-4620-b081-a4b0e87684b5 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Snapshot extracted, beginning image upload
Oct 02 12:07:18 compute-0 ovn_controller[94336]: 2025-10-02T12:07:18Z|00123|memory_trim|INFO|Detected inactivity (last active 30003 ms ago): trimming memory
Oct 02 12:07:19 compute-0 nova_compute[192079]: 2025-10-02 12:07:19.975 2 INFO nova.virt.libvirt.driver [None req-1b824321-af59-4620-b081-a4b0e87684b5 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Snapshot image upload complete
Oct 02 12:07:19 compute-0 nova_compute[192079]: 2025-10-02 12:07:19.976 2 INFO nova.compute.manager [None req-1b824321-af59-4620-b081-a4b0e87684b5 c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Took 4.43 seconds to snapshot the instance on the hypervisor.
Oct 02 12:07:19 compute-0 nova_compute[192079]: 2025-10-02 12:07:19.980 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:20 compute-0 nova_compute[192079]: 2025-10-02 12:07:20.225 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:24 compute-0 nova_compute[192079]: 2025-10-02 12:07:24.965 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:25 compute-0 nova_compute[192079]: 2025-10-02 12:07:25.227 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:26 compute-0 podman[225272]: 2025-10-02 12:07:26.146732361 +0000 UTC m=+0.060431673 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_id=ovn_metadata_agent, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, org.label-schema.name=CentOS Stream 9 Base Image, 
tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS)
Oct 02 12:07:26 compute-0 podman[225274]: 2025-10-02 12:07:26.148131539 +0000 UTC m=+0.060038123 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:07:26 compute-0 podman[225273]: 2025-10-02 12:07:26.168723315 +0000 UTC m=+0.081534993 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, tcib_managed=true, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller)
Oct 02 12:07:29 compute-0 nova_compute[192079]: 2025-10-02 12:07:29.967 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:30 compute-0 nova_compute[192079]: 2025-10-02 12:07:30.229 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:33 compute-0 nova_compute[192079]: 2025-10-02 12:07:33.327 2 DEBUG oslo_concurrency.lockutils [None req-0e2f902b-8f46-4aac-9f1a-20dd9cf964ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Acquiring lock "6068f987-bbd4-4dac-a691-169dcb4570a8" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:07:33 compute-0 nova_compute[192079]: 2025-10-02 12:07:33.327 2 DEBUG oslo_concurrency.lockutils [None req-0e2f902b-8f46-4aac-9f1a-20dd9cf964ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Lock "6068f987-bbd4-4dac-a691-169dcb4570a8" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:07:33 compute-0 nova_compute[192079]: 2025-10-02 12:07:33.328 2 DEBUG oslo_concurrency.lockutils [None req-0e2f902b-8f46-4aac-9f1a-20dd9cf964ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Acquiring lock "6068f987-bbd4-4dac-a691-169dcb4570a8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:07:33 compute-0 nova_compute[192079]: 2025-10-02 12:07:33.328 2 DEBUG oslo_concurrency.lockutils [None req-0e2f902b-8f46-4aac-9f1a-20dd9cf964ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Lock "6068f987-bbd4-4dac-a691-169dcb4570a8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:07:33 compute-0 nova_compute[192079]: 2025-10-02 12:07:33.328 2 DEBUG oslo_concurrency.lockutils [None req-0e2f902b-8f46-4aac-9f1a-20dd9cf964ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Lock "6068f987-bbd4-4dac-a691-169dcb4570a8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:07:33 compute-0 nova_compute[192079]: 2025-10-02 12:07:33.340 2 INFO nova.compute.manager [None req-0e2f902b-8f46-4aac-9f1a-20dd9cf964ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Terminating instance
Oct 02 12:07:33 compute-0 nova_compute[192079]: 2025-10-02 12:07:33.350 2 DEBUG nova.compute.manager [None req-0e2f902b-8f46-4aac-9f1a-20dd9cf964ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:07:33 compute-0 kernel: tap876740d6-da (unregistering): left promiscuous mode
Oct 02 12:07:33 compute-0 NetworkManager[51160]: <info>  [1759406853.3717] device (tap876740d6-da): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:07:33 compute-0 ovn_controller[94336]: 2025-10-02T12:07:33Z|00124|binding|INFO|Releasing lport 876740d6-da80-4b19-9afb-af6d9bf00f50 from this chassis (sb_readonly=0)
Oct 02 12:07:33 compute-0 nova_compute[192079]: 2025-10-02 12:07:33.383 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:33 compute-0 ovn_controller[94336]: 2025-10-02T12:07:33Z|00125|binding|INFO|Setting lport 876740d6-da80-4b19-9afb-af6d9bf00f50 down in Southbound
Oct 02 12:07:33 compute-0 ovn_controller[94336]: 2025-10-02T12:07:33Z|00126|binding|INFO|Removing iface tap876740d6-da ovn-installed in OVS
Oct 02 12:07:33 compute-0 nova_compute[192079]: 2025-10-02 12:07:33.385 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:07:33.393 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:4f:35:f0 10.100.0.14'], port_security=['fa:16:3e:4f:35:f0 10.100.0.14'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.14/28', 'neutron:device_id': '6068f987-bbd4-4dac-a691-169dcb4570a8', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-66b5a7c3-fe3e-42b0-aea6-19534bca6e0e', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'db3f04a20fd740c1af3139196dc928d2', 'neutron:revision_number': '4', 'neutron:security_group_ids': 'c69e6497-c2d4-4cc0-a1d9-2c5055cc5d77', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=5dc739b2-072d-4dd4-b9d2-9724145d12f5, chassis=[], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=876740d6-da80-4b19-9afb-af6d9bf00f50) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:07:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:07:33.394 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 876740d6-da80-4b19-9afb-af6d9bf00f50 in datapath 66b5a7c3-fe3e-42b0-aea6-19534bca6e0e unbound from our chassis
Oct 02 12:07:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:07:33.395 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 66b5a7c3-fe3e-42b0-aea6-19534bca6e0e, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:07:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:07:33.396 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0508059c-96a2-48bc-ae45-3ed0d7788ba6]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:07:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:07:33.397 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-66b5a7c3-fe3e-42b0-aea6-19534bca6e0e namespace which is not needed anymore
Oct 02 12:07:33 compute-0 nova_compute[192079]: 2025-10-02 12:07:33.405 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:33 compute-0 systemd[1]: machine-qemu\x2d20\x2dinstance\x2d00000022.scope: Deactivated successfully.
Oct 02 12:07:33 compute-0 systemd[1]: machine-qemu\x2d20\x2dinstance\x2d00000022.scope: Consumed 16.033s CPU time.
Oct 02 12:07:33 compute-0 systemd-machined[152150]: Machine qemu-20-instance-00000022 terminated.
Oct 02 12:07:33 compute-0 neutron-haproxy-ovnmeta-66b5a7c3-fe3e-42b0-aea6-19534bca6e0e[224749]: [NOTICE]   (224753) : haproxy version is 2.8.14-c23fe91
Oct 02 12:07:33 compute-0 neutron-haproxy-ovnmeta-66b5a7c3-fe3e-42b0-aea6-19534bca6e0e[224749]: [NOTICE]   (224753) : path to executable is /usr/sbin/haproxy
Oct 02 12:07:33 compute-0 neutron-haproxy-ovnmeta-66b5a7c3-fe3e-42b0-aea6-19534bca6e0e[224749]: [WARNING]  (224753) : Exiting Master process...
Oct 02 12:07:33 compute-0 neutron-haproxy-ovnmeta-66b5a7c3-fe3e-42b0-aea6-19534bca6e0e[224749]: [ALERT]    (224753) : Current worker (224755) exited with code 143 (Terminated)
Oct 02 12:07:33 compute-0 neutron-haproxy-ovnmeta-66b5a7c3-fe3e-42b0-aea6-19534bca6e0e[224749]: [WARNING]  (224753) : All workers exited. Exiting... (0)
Oct 02 12:07:33 compute-0 systemd[1]: libpod-374f3597b1df4b6c3ab6e6a93f6484ae5e03e96275688834703adbc15516e371.scope: Deactivated successfully.
Oct 02 12:07:33 compute-0 podman[225366]: 2025-10-02 12:07:33.541602244 +0000 UTC m=+0.050679070 container died 374f3597b1df4b6c3ab6e6a93f6484ae5e03e96275688834703adbc15516e371 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-66b5a7c3-fe3e-42b0-aea6-19534bca6e0e, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS)
Oct 02 12:07:33 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-374f3597b1df4b6c3ab6e6a93f6484ae5e03e96275688834703adbc15516e371-userdata-shm.mount: Deactivated successfully.
Oct 02 12:07:33 compute-0 systemd[1]: var-lib-containers-storage-overlay-36da2daddb38e03a8f113c3cc4c452e37dea82ff45e194633b1629c6075e28a8-merged.mount: Deactivated successfully.
Oct 02 12:07:33 compute-0 podman[225366]: 2025-10-02 12:07:33.616299603 +0000 UTC m=+0.125376409 container cleanup 374f3597b1df4b6c3ab6e6a93f6484ae5e03e96275688834703adbc15516e371 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-66b5a7c3-fe3e-42b0-aea6-19534bca6e0e, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:07:33 compute-0 systemd[1]: libpod-conmon-374f3597b1df4b6c3ab6e6a93f6484ae5e03e96275688834703adbc15516e371.scope: Deactivated successfully.
Oct 02 12:07:33 compute-0 nova_compute[192079]: 2025-10-02 12:07:33.640 2 INFO nova.virt.libvirt.driver [-] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Instance destroyed successfully.
Oct 02 12:07:33 compute-0 nova_compute[192079]: 2025-10-02 12:07:33.641 2 DEBUG nova.objects.instance [None req-0e2f902b-8f46-4aac-9f1a-20dd9cf964ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Lazy-loading 'resources' on Instance uuid 6068f987-bbd4-4dac-a691-169dcb4570a8 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:07:33 compute-0 nova_compute[192079]: 2025-10-02 12:07:33.656 2 DEBUG nova.virt.libvirt.vif [None req-0e2f902b-8f46-4aac-9f1a-20dd9cf964ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:06:03Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServersAdminTestJSON-server-537172074',display_name='tempest-ServersAdminTestJSON-server-537172074',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serversadmintestjson-server-537172074',id=34,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:06:19Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='db3f04a20fd740c1af3139196dc928d2',ramdisk_id='',reservation_id='r-04yxjkba',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',i
mage_min_ram='0',owner_project_name='tempest-ServersAdminTestJSON-1782354187',owner_user_name='tempest-ServersAdminTestJSON-1782354187-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:06:19Z,user_data=None,user_id='9258efa4511c4bb3813eca27b75b1008',uuid=6068f987-bbd4-4dac-a691-169dcb4570a8,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "876740d6-da80-4b19-9afb-af6d9bf00f50", "address": "fa:16:3e:4f:35:f0", "network": {"id": "66b5a7c3-fe3e-42b0-aea6-19534bca6e0e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1726703238-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "db3f04a20fd740c1af3139196dc928d2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap876740d6-da", "ovs_interfaceid": "876740d6-da80-4b19-9afb-af6d9bf00f50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:07:33 compute-0 nova_compute[192079]: 2025-10-02 12:07:33.656 2 DEBUG nova.network.os_vif_util [None req-0e2f902b-8f46-4aac-9f1a-20dd9cf964ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Converting VIF {"id": "876740d6-da80-4b19-9afb-af6d9bf00f50", "address": "fa:16:3e:4f:35:f0", "network": {"id": "66b5a7c3-fe3e-42b0-aea6-19534bca6e0e", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1726703238-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "db3f04a20fd740c1af3139196dc928d2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap876740d6-da", "ovs_interfaceid": "876740d6-da80-4b19-9afb-af6d9bf00f50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:07:33 compute-0 nova_compute[192079]: 2025-10-02 12:07:33.658 2 DEBUG nova.network.os_vif_util [None req-0e2f902b-8f46-4aac-9f1a-20dd9cf964ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:4f:35:f0,bridge_name='br-int',has_traffic_filtering=True,id=876740d6-da80-4b19-9afb-af6d9bf00f50,network=Network(66b5a7c3-fe3e-42b0-aea6-19534bca6e0e),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap876740d6-da') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:07:33 compute-0 nova_compute[192079]: 2025-10-02 12:07:33.658 2 DEBUG os_vif [None req-0e2f902b-8f46-4aac-9f1a-20dd9cf964ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:4f:35:f0,bridge_name='br-int',has_traffic_filtering=True,id=876740d6-da80-4b19-9afb-af6d9bf00f50,network=Network(66b5a7c3-fe3e-42b0-aea6-19534bca6e0e),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap876740d6-da') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:07:33 compute-0 nova_compute[192079]: 2025-10-02 12:07:33.661 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:33 compute-0 nova_compute[192079]: 2025-10-02 12:07:33.661 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap876740d6-da, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:07:33 compute-0 nova_compute[192079]: 2025-10-02 12:07:33.663 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:33 compute-0 nova_compute[192079]: 2025-10-02 12:07:33.665 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:33 compute-0 nova_compute[192079]: 2025-10-02 12:07:33.668 2 INFO os_vif [None req-0e2f902b-8f46-4aac-9f1a-20dd9cf964ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:4f:35:f0,bridge_name='br-int',has_traffic_filtering=True,id=876740d6-da80-4b19-9afb-af6d9bf00f50,network=Network(66b5a7c3-fe3e-42b0-aea6-19534bca6e0e),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap876740d6-da')
Oct 02 12:07:33 compute-0 nova_compute[192079]: 2025-10-02 12:07:33.669 2 INFO nova.virt.libvirt.driver [None req-0e2f902b-8f46-4aac-9f1a-20dd9cf964ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Deleting instance files /var/lib/nova/instances/6068f987-bbd4-4dac-a691-169dcb4570a8_del
Oct 02 12:07:33 compute-0 nova_compute[192079]: 2025-10-02 12:07:33.670 2 INFO nova.virt.libvirt.driver [None req-0e2f902b-8f46-4aac-9f1a-20dd9cf964ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Deletion of /var/lib/nova/instances/6068f987-bbd4-4dac-a691-169dcb4570a8_del complete
Oct 02 12:07:33 compute-0 podman[225410]: 2025-10-02 12:07:33.688222856 +0000 UTC m=+0.045500851 container remove 374f3597b1df4b6c3ab6e6a93f6484ae5e03e96275688834703adbc15516e371 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-66b5a7c3-fe3e-42b0-aea6-19534bca6e0e, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0)
Oct 02 12:07:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:07:33.694 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8b2e1651-b297-48de-ac9a-33f619f64278]: (4, ('Thu Oct  2 12:07:33 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-66b5a7c3-fe3e-42b0-aea6-19534bca6e0e (374f3597b1df4b6c3ab6e6a93f6484ae5e03e96275688834703adbc15516e371)\n374f3597b1df4b6c3ab6e6a93f6484ae5e03e96275688834703adbc15516e371\nThu Oct  2 12:07:33 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-66b5a7c3-fe3e-42b0-aea6-19534bca6e0e (374f3597b1df4b6c3ab6e6a93f6484ae5e03e96275688834703adbc15516e371)\n374f3597b1df4b6c3ab6e6a93f6484ae5e03e96275688834703adbc15516e371\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:07:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:07:33.696 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[fb56cc49-211c-4c78-bee5-7d7afbcf5991]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:07:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:07:33.697 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap66b5a7c3-f0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:07:33 compute-0 nova_compute[192079]: 2025-10-02 12:07:33.699 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:33 compute-0 kernel: tap66b5a7c3-f0: left promiscuous mode
Oct 02 12:07:33 compute-0 nova_compute[192079]: 2025-10-02 12:07:33.715 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:07:33.718 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ad0cdcf2-c61e-40b9-a5c0-05da77d5fed9]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:07:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:07:33.743 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2cebb4cd-df9e-4fad-893c-776c47a209ee]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:07:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:07:33.744 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f909a9bb-071e-4699-b6ea-b186cb9b47cb]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:07:33 compute-0 nova_compute[192079]: 2025-10-02 12:07:33.755 2 INFO nova.compute.manager [None req-0e2f902b-8f46-4aac-9f1a-20dd9cf964ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Took 0.40 seconds to destroy the instance on the hypervisor.
Oct 02 12:07:33 compute-0 nova_compute[192079]: 2025-10-02 12:07:33.755 2 DEBUG oslo.service.loopingcall [None req-0e2f902b-8f46-4aac-9f1a-20dd9cf964ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:07:33 compute-0 nova_compute[192079]: 2025-10-02 12:07:33.756 2 DEBUG nova.compute.manager [-] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:07:33 compute-0 nova_compute[192079]: 2025-10-02 12:07:33.756 2 DEBUG nova.network.neutron [-] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:07:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:07:33.760 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[21d8691b-ac32-473b-84b4-c8d1bd1c24aa]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 477476, 'reachable_time': 24775, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 225425, 'error': None, 'target': 'ovnmeta-66b5a7c3-fe3e-42b0-aea6-19534bca6e0e', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:07:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:07:33.762 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-66b5a7c3-fe3e-42b0-aea6-19534bca6e0e deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:07:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:07:33.762 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[ef2981f8-f85f-4fff-9aae-0be55ebff944]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:07:33 compute-0 systemd[1]: run-netns-ovnmeta\x2d66b5a7c3\x2dfe3e\x2d42b0\x2daea6\x2d19534bca6e0e.mount: Deactivated successfully.
Oct 02 12:07:34 compute-0 nova_compute[192079]: 2025-10-02 12:07:34.167 2 DEBUG nova.compute.manager [req-dffa0b5d-b65c-43b0-8d96-0ac088f9f6cc req-4a64e6dd-f399-4252-b2cb-4397c0322cbd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Received event network-vif-unplugged-876740d6-da80-4b19-9afb-af6d9bf00f50 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:07:34 compute-0 nova_compute[192079]: 2025-10-02 12:07:34.167 2 DEBUG oslo_concurrency.lockutils [req-dffa0b5d-b65c-43b0-8d96-0ac088f9f6cc req-4a64e6dd-f399-4252-b2cb-4397c0322cbd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "6068f987-bbd4-4dac-a691-169dcb4570a8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:07:34 compute-0 nova_compute[192079]: 2025-10-02 12:07:34.167 2 DEBUG oslo_concurrency.lockutils [req-dffa0b5d-b65c-43b0-8d96-0ac088f9f6cc req-4a64e6dd-f399-4252-b2cb-4397c0322cbd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6068f987-bbd4-4dac-a691-169dcb4570a8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:07:34 compute-0 nova_compute[192079]: 2025-10-02 12:07:34.167 2 DEBUG oslo_concurrency.lockutils [req-dffa0b5d-b65c-43b0-8d96-0ac088f9f6cc req-4a64e6dd-f399-4252-b2cb-4397c0322cbd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6068f987-bbd4-4dac-a691-169dcb4570a8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:07:34 compute-0 nova_compute[192079]: 2025-10-02 12:07:34.168 2 DEBUG nova.compute.manager [req-dffa0b5d-b65c-43b0-8d96-0ac088f9f6cc req-4a64e6dd-f399-4252-b2cb-4397c0322cbd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] No waiting events found dispatching network-vif-unplugged-876740d6-da80-4b19-9afb-af6d9bf00f50 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:07:34 compute-0 nova_compute[192079]: 2025-10-02 12:07:34.168 2 DEBUG nova.compute.manager [req-dffa0b5d-b65c-43b0-8d96-0ac088f9f6cc req-4a64e6dd-f399-4252-b2cb-4397c0322cbd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Received event network-vif-unplugged-876740d6-da80-4b19-9afb-af6d9bf00f50 for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:07:34 compute-0 nova_compute[192079]: 2025-10-02 12:07:34.168 2 DEBUG nova.compute.manager [req-dffa0b5d-b65c-43b0-8d96-0ac088f9f6cc req-4a64e6dd-f399-4252-b2cb-4397c0322cbd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Received event network-vif-plugged-876740d6-da80-4b19-9afb-af6d9bf00f50 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:07:34 compute-0 nova_compute[192079]: 2025-10-02 12:07:34.168 2 DEBUG oslo_concurrency.lockutils [req-dffa0b5d-b65c-43b0-8d96-0ac088f9f6cc req-4a64e6dd-f399-4252-b2cb-4397c0322cbd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "6068f987-bbd4-4dac-a691-169dcb4570a8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:07:34 compute-0 nova_compute[192079]: 2025-10-02 12:07:34.168 2 DEBUG oslo_concurrency.lockutils [req-dffa0b5d-b65c-43b0-8d96-0ac088f9f6cc req-4a64e6dd-f399-4252-b2cb-4397c0322cbd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6068f987-bbd4-4dac-a691-169dcb4570a8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:07:34 compute-0 nova_compute[192079]: 2025-10-02 12:07:34.169 2 DEBUG oslo_concurrency.lockutils [req-dffa0b5d-b65c-43b0-8d96-0ac088f9f6cc req-4a64e6dd-f399-4252-b2cb-4397c0322cbd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6068f987-bbd4-4dac-a691-169dcb4570a8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:07:34 compute-0 nova_compute[192079]: 2025-10-02 12:07:34.169 2 DEBUG nova.compute.manager [req-dffa0b5d-b65c-43b0-8d96-0ac088f9f6cc req-4a64e6dd-f399-4252-b2cb-4397c0322cbd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] No waiting events found dispatching network-vif-plugged-876740d6-da80-4b19-9afb-af6d9bf00f50 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:07:34 compute-0 nova_compute[192079]: 2025-10-02 12:07:34.169 2 WARNING nova.compute.manager [req-dffa0b5d-b65c-43b0-8d96-0ac088f9f6cc req-4a64e6dd-f399-4252-b2cb-4397c0322cbd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Received unexpected event network-vif-plugged-876740d6-da80-4b19-9afb-af6d9bf00f50 for instance with vm_state active and task_state deleting.
Oct 02 12:07:34 compute-0 nova_compute[192079]: 2025-10-02 12:07:34.402 2 DEBUG nova.network.neutron [-] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:07:34 compute-0 nova_compute[192079]: 2025-10-02 12:07:34.417 2 INFO nova.compute.manager [-] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Took 0.66 seconds to deallocate network for instance.
Oct 02 12:07:34 compute-0 nova_compute[192079]: 2025-10-02 12:07:34.495 2 DEBUG oslo_concurrency.lockutils [None req-0e2f902b-8f46-4aac-9f1a-20dd9cf964ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:07:34 compute-0 nova_compute[192079]: 2025-10-02 12:07:34.495 2 DEBUG oslo_concurrency.lockutils [None req-0e2f902b-8f46-4aac-9f1a-20dd9cf964ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:07:34 compute-0 nova_compute[192079]: 2025-10-02 12:07:34.567 2 DEBUG nova.compute.provider_tree [None req-0e2f902b-8f46-4aac-9f1a-20dd9cf964ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:07:34 compute-0 nova_compute[192079]: 2025-10-02 12:07:34.580 2 DEBUG nova.scheduler.client.report [None req-0e2f902b-8f46-4aac-9f1a-20dd9cf964ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:07:34 compute-0 nova_compute[192079]: 2025-10-02 12:07:34.598 2 DEBUG oslo_concurrency.lockutils [None req-0e2f902b-8f46-4aac-9f1a-20dd9cf964ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.103s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:07:34 compute-0 nova_compute[192079]: 2025-10-02 12:07:34.615 2 INFO nova.scheduler.client.report [None req-0e2f902b-8f46-4aac-9f1a-20dd9cf964ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Deleted allocations for instance 6068f987-bbd4-4dac-a691-169dcb4570a8
Oct 02 12:07:34 compute-0 nova_compute[192079]: 2025-10-02 12:07:34.684 2 DEBUG oslo_concurrency.lockutils [None req-0e2f902b-8f46-4aac-9f1a-20dd9cf964ef 9258efa4511c4bb3813eca27b75b1008 db3f04a20fd740c1af3139196dc928d2 - - default default] Lock "6068f987-bbd4-4dac-a691-169dcb4570a8" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.357s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:07:34 compute-0 nova_compute[192079]: 2025-10-02 12:07:34.969 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:35 compute-0 nova_compute[192079]: 2025-10-02 12:07:35.093 2 DEBUG oslo_concurrency.lockutils [None req-71d69911-c4b1-4a57-9540-314b3735f39b c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Acquiring lock "854d86e2-3388-4709-a32c-15f8658aa41f" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:07:35 compute-0 nova_compute[192079]: 2025-10-02 12:07:35.094 2 DEBUG oslo_concurrency.lockutils [None req-71d69911-c4b1-4a57-9540-314b3735f39b c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Lock "854d86e2-3388-4709-a32c-15f8658aa41f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:07:35 compute-0 nova_compute[192079]: 2025-10-02 12:07:35.094 2 DEBUG oslo_concurrency.lockutils [None req-71d69911-c4b1-4a57-9540-314b3735f39b c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Acquiring lock "854d86e2-3388-4709-a32c-15f8658aa41f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:07:35 compute-0 nova_compute[192079]: 2025-10-02 12:07:35.094 2 DEBUG oslo_concurrency.lockutils [None req-71d69911-c4b1-4a57-9540-314b3735f39b c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Lock "854d86e2-3388-4709-a32c-15f8658aa41f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:07:35 compute-0 nova_compute[192079]: 2025-10-02 12:07:35.095 2 DEBUG oslo_concurrency.lockutils [None req-71d69911-c4b1-4a57-9540-314b3735f39b c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Lock "854d86e2-3388-4709-a32c-15f8658aa41f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:07:35 compute-0 nova_compute[192079]: 2025-10-02 12:07:35.106 2 INFO nova.compute.manager [None req-71d69911-c4b1-4a57-9540-314b3735f39b c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Terminating instance
Oct 02 12:07:35 compute-0 nova_compute[192079]: 2025-10-02 12:07:35.115 2 DEBUG oslo_concurrency.lockutils [None req-71d69911-c4b1-4a57-9540-314b3735f39b c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Acquiring lock "refresh_cache-854d86e2-3388-4709-a32c-15f8658aa41f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:07:35 compute-0 nova_compute[192079]: 2025-10-02 12:07:35.115 2 DEBUG oslo_concurrency.lockutils [None req-71d69911-c4b1-4a57-9540-314b3735f39b c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Acquired lock "refresh_cache-854d86e2-3388-4709-a32c-15f8658aa41f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:07:35 compute-0 nova_compute[192079]: 2025-10-02 12:07:35.116 2 DEBUG nova.network.neutron [None req-71d69911-c4b1-4a57-9540-314b3735f39b c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:07:35 compute-0 podman[225426]: 2025-10-02 12:07:35.149864446 +0000 UTC m=+0.064746500 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=edpm, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, 
container_name=ceilometer_agent_compute, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:07:35 compute-0 nova_compute[192079]: 2025-10-02 12:07:35.327 2 DEBUG nova.network.neutron [None req-71d69911-c4b1-4a57-9540-314b3735f39b c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:07:35 compute-0 nova_compute[192079]: 2025-10-02 12:07:35.568 2 DEBUG nova.network.neutron [None req-71d69911-c4b1-4a57-9540-314b3735f39b c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:07:35 compute-0 nova_compute[192079]: 2025-10-02 12:07:35.588 2 DEBUG oslo_concurrency.lockutils [None req-71d69911-c4b1-4a57-9540-314b3735f39b c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Releasing lock "refresh_cache-854d86e2-3388-4709-a32c-15f8658aa41f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:07:35 compute-0 nova_compute[192079]: 2025-10-02 12:07:35.589 2 DEBUG nova.compute.manager [None req-71d69911-c4b1-4a57-9540-314b3735f39b c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:07:35 compute-0 systemd[1]: machine-qemu\x2d21\x2dinstance\x2d00000027.scope: Deactivated successfully.
Oct 02 12:07:35 compute-0 systemd[1]: machine-qemu\x2d21\x2dinstance\x2d00000027.scope: Consumed 12.921s CPU time.
Oct 02 12:07:35 compute-0 systemd-machined[152150]: Machine qemu-21-instance-00000027 terminated.
Oct 02 12:07:35 compute-0 nova_compute[192079]: 2025-10-02 12:07:35.841 2 INFO nova.virt.libvirt.driver [-] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Instance destroyed successfully.
Oct 02 12:07:35 compute-0 nova_compute[192079]: 2025-10-02 12:07:35.841 2 DEBUG nova.objects.instance [None req-71d69911-c4b1-4a57-9540-314b3735f39b c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Lazy-loading 'resources' on Instance uuid 854d86e2-3388-4709-a32c-15f8658aa41f obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:07:35 compute-0 nova_compute[192079]: 2025-10-02 12:07:35.854 2 INFO nova.virt.libvirt.driver [None req-71d69911-c4b1-4a57-9540-314b3735f39b c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Deleting instance files /var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f_del
Oct 02 12:07:35 compute-0 nova_compute[192079]: 2025-10-02 12:07:35.855 2 INFO nova.virt.libvirt.driver [None req-71d69911-c4b1-4a57-9540-314b3735f39b c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Deletion of /var/lib/nova/instances/854d86e2-3388-4709-a32c-15f8658aa41f_del complete
Oct 02 12:07:35 compute-0 nova_compute[192079]: 2025-10-02 12:07:35.936 2 INFO nova.compute.manager [None req-71d69911-c4b1-4a57-9540-314b3735f39b c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Took 0.35 seconds to destroy the instance on the hypervisor.
Oct 02 12:07:35 compute-0 nova_compute[192079]: 2025-10-02 12:07:35.936 2 DEBUG oslo.service.loopingcall [None req-71d69911-c4b1-4a57-9540-314b3735f39b c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:07:35 compute-0 nova_compute[192079]: 2025-10-02 12:07:35.937 2 DEBUG nova.compute.manager [-] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:07:35 compute-0 nova_compute[192079]: 2025-10-02 12:07:35.937 2 DEBUG nova.network.neutron [-] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:07:36 compute-0 nova_compute[192079]: 2025-10-02 12:07:36.127 2 DEBUG nova.network.neutron [-] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:07:36 compute-0 nova_compute[192079]: 2025-10-02 12:07:36.146 2 DEBUG nova.network.neutron [-] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:07:36 compute-0 nova_compute[192079]: 2025-10-02 12:07:36.161 2 INFO nova.compute.manager [-] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Took 0.22 seconds to deallocate network for instance.
Oct 02 12:07:36 compute-0 nova_compute[192079]: 2025-10-02 12:07:36.235 2 DEBUG oslo_concurrency.lockutils [None req-71d69911-c4b1-4a57-9540-314b3735f39b c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:07:36 compute-0 nova_compute[192079]: 2025-10-02 12:07:36.236 2 DEBUG oslo_concurrency.lockutils [None req-71d69911-c4b1-4a57-9540-314b3735f39b c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:07:36 compute-0 nova_compute[192079]: 2025-10-02 12:07:36.264 2 DEBUG nova.compute.manager [req-83ca202f-8808-4942-b7d6-ceeae1e22dc3 req-cd4da2ec-38e7-4ebc-bd5e-c1378bcd5f03 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Received event network-vif-deleted-876740d6-da80-4b19-9afb-af6d9bf00f50 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:07:36 compute-0 nova_compute[192079]: 2025-10-02 12:07:36.280 2 DEBUG nova.compute.provider_tree [None req-71d69911-c4b1-4a57-9540-314b3735f39b c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:07:36 compute-0 nova_compute[192079]: 2025-10-02 12:07:36.296 2 DEBUG nova.scheduler.client.report [None req-71d69911-c4b1-4a57-9540-314b3735f39b c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:07:36 compute-0 nova_compute[192079]: 2025-10-02 12:07:36.325 2 DEBUG oslo_concurrency.lockutils [None req-71d69911-c4b1-4a57-9540-314b3735f39b c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.089s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:07:36 compute-0 nova_compute[192079]: 2025-10-02 12:07:36.349 2 INFO nova.scheduler.client.report [None req-71d69911-c4b1-4a57-9540-314b3735f39b c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Deleted allocations for instance 854d86e2-3388-4709-a32c-15f8658aa41f
Oct 02 12:07:36 compute-0 nova_compute[192079]: 2025-10-02 12:07:36.466 2 DEBUG oslo_concurrency.lockutils [None req-71d69911-c4b1-4a57-9540-314b3735f39b c6a7a530a085472d8ace0b41fc888e26 8993ff2640584165964db6af518beb94 - - default default] Lock "854d86e2-3388-4709-a32c-15f8658aa41f" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.373s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:07:38 compute-0 nova_compute[192079]: 2025-10-02 12:07:38.665 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:39 compute-0 nova_compute[192079]: 2025-10-02 12:07:39.698 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:39 compute-0 nova_compute[192079]: 2025-10-02 12:07:39.971 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:40 compute-0 podman[225456]: 2025-10-02 12:07:40.135792135 +0000 UTC m=+0.052594293 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, tcib_managed=true, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, container_name=multipathd, org.label-schema.build-date=20251001)
Oct 02 12:07:40 compute-0 podman[225455]: 2025-10-02 12:07:40.136054012 +0000 UTC m=+0.054298138 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vendor=Red Hat, Inc., architecture=x86_64, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., managed_by=edpm_ansible, name=ubi9-minimal, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., io.openshift.tags=minimal rhel9, build-date=2025-08-20T13:12:41, com.redhat.component=ubi9-minimal-container, url=https://catalog.redhat.com/en/search?searchType=containers, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, config_id=edpm, release=1755695350, container_name=openstack_network_exporter, distribution-scope=public, io.openshift.expose-services=, version=9.6, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.buildah.version=1.33.7, maintainer=Red Hat, Inc., vcs-type=git)
Oct 02 12:07:43 compute-0 nova_compute[192079]: 2025-10-02 12:07:43.690 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:44 compute-0 nova_compute[192079]: 2025-10-02 12:07:44.972 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:47 compute-0 podman[225495]: 2025-10-02 12:07:47.159152851 +0000 UTC m=+0.073044025 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:07:47 compute-0 podman[225496]: 2025-10-02 12:07:47.17579169 +0000 UTC m=+0.080826955 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, tcib_managed=true, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=iscsid, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, container_name=iscsid, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']})
Oct 02 12:07:48 compute-0 nova_compute[192079]: 2025-10-02 12:07:48.640 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759406853.6385925, 6068f987-bbd4-4dac-a691-169dcb4570a8 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:07:48 compute-0 nova_compute[192079]: 2025-10-02 12:07:48.641 2 INFO nova.compute.manager [-] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] VM Stopped (Lifecycle Event)
Oct 02 12:07:48 compute-0 nova_compute[192079]: 2025-10-02 12:07:48.675 2 DEBUG nova.compute.manager [None req-3d093057-827e-443b-a113-b410f646e4f6 - - - - - -] [instance: 6068f987-bbd4-4dac-a691-169dcb4570a8] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:07:48 compute-0 nova_compute[192079]: 2025-10-02 12:07:48.739 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:49 compute-0 nova_compute[192079]: 2025-10-02 12:07:49.974 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:50 compute-0 nova_compute[192079]: 2025-10-02 12:07:50.841 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759406855.8396733, 854d86e2-3388-4709-a32c-15f8658aa41f => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:07:50 compute-0 nova_compute[192079]: 2025-10-02 12:07:50.842 2 INFO nova.compute.manager [-] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] VM Stopped (Lifecycle Event)
Oct 02 12:07:50 compute-0 nova_compute[192079]: 2025-10-02 12:07:50.871 2 DEBUG nova.compute.manager [None req-f9b6f879-b838-4909-84d3-0cc800dff2b9 - - - - - -] [instance: 854d86e2-3388-4709-a32c-15f8658aa41f] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:07:53 compute-0 nova_compute[192079]: 2025-10-02 12:07:53.782 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:54 compute-0 nova_compute[192079]: 2025-10-02 12:07:54.975 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:57 compute-0 podman[225543]: 2025-10-02 12:07:57.142446378 +0000 UTC m=+0.051887593 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']})
Oct 02 12:07:57 compute-0 podman[225541]: 2025-10-02 12:07:57.16622315 +0000 UTC m=+0.079161700 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, 
org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_metadata_agent, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true)
Oct 02 12:07:57 compute-0 podman[225542]: 2025-10-02 12:07:57.166737724 +0000 UTC m=+0.079022156 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, config_id=ovn_controller, container_name=ovn_controller, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.schema-version=1.0)
Oct 02 12:07:58 compute-0 nova_compute[192079]: 2025-10-02 12:07:58.786 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:07:59 compute-0 nova_compute[192079]: 2025-10-02 12:07:59.781 2 DEBUG oslo_concurrency.lockutils [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Acquiring lock "bfc65113-6eeb-464b-bbc8-f22f60c53782" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:07:59 compute-0 nova_compute[192079]: 2025-10-02 12:07:59.782 2 DEBUG oslo_concurrency.lockutils [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Lock "bfc65113-6eeb-464b-bbc8-f22f60c53782" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:07:59 compute-0 nova_compute[192079]: 2025-10-02 12:07:59.808 2 DEBUG nova.compute.manager [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:07:59 compute-0 nova_compute[192079]: 2025-10-02 12:07:59.931 2 DEBUG oslo_concurrency.lockutils [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:07:59 compute-0 nova_compute[192079]: 2025-10-02 12:07:59.932 2 DEBUG oslo_concurrency.lockutils [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:07:59 compute-0 nova_compute[192079]: 2025-10-02 12:07:59.938 2 DEBUG nova.virt.hardware [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:07:59 compute-0 nova_compute[192079]: 2025-10-02 12:07:59.938 2 INFO nova.compute.claims [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:07:59 compute-0 nova_compute[192079]: 2025-10-02 12:07:59.977 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.053 2 DEBUG nova.compute.provider_tree [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.070 2 DEBUG nova.scheduler.client.report [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.103 2 DEBUG oslo_concurrency.lockutils [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.171s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.104 2 DEBUG nova.compute.manager [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.161 2 DEBUG nova.compute.manager [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.162 2 DEBUG nova.network.neutron [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.199 2 INFO nova.virt.libvirt.driver [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.240 2 DEBUG nova.compute.manager [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.393 2 DEBUG nova.compute.manager [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.394 2 DEBUG nova.virt.libvirt.driver [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.394 2 INFO nova.virt.libvirt.driver [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Creating image(s)
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.395 2 DEBUG oslo_concurrency.lockutils [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Acquiring lock "/var/lib/nova/instances/bfc65113-6eeb-464b-bbc8-f22f60c53782/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.395 2 DEBUG oslo_concurrency.lockutils [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Lock "/var/lib/nova/instances/bfc65113-6eeb-464b-bbc8-f22f60c53782/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.395 2 DEBUG oslo_concurrency.lockutils [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Lock "/var/lib/nova/instances/bfc65113-6eeb-464b-bbc8-f22f60c53782/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.407 2 DEBUG oslo_concurrency.processutils [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.462 2 DEBUG oslo_concurrency.processutils [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.463 2 DEBUG oslo_concurrency.lockutils [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.464 2 DEBUG oslo_concurrency.lockutils [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.474 2 DEBUG oslo_concurrency.processutils [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.538 2 DEBUG oslo_concurrency.processutils [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.063s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.539 2 DEBUG oslo_concurrency.processutils [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/bfc65113-6eeb-464b-bbc8-f22f60c53782/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.582 2 DEBUG oslo_concurrency.processutils [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/bfc65113-6eeb-464b-bbc8-f22f60c53782/disk 1073741824" returned: 0 in 0.043s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.583 2 DEBUG oslo_concurrency.lockutils [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.119s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.583 2 DEBUG oslo_concurrency.processutils [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.650 2 DEBUG oslo_concurrency.processutils [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.067s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.651 2 DEBUG nova.virt.disk.api [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Checking if we can resize image /var/lib/nova/instances/bfc65113-6eeb-464b-bbc8-f22f60c53782/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.652 2 DEBUG oslo_concurrency.processutils [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/bfc65113-6eeb-464b-bbc8-f22f60c53782/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.670 2 DEBUG nova.network.neutron [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] No network configured allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1188
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.671 2 DEBUG nova.compute.manager [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Instance network_info: |[]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.706 2 DEBUG oslo_concurrency.processutils [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/bfc65113-6eeb-464b-bbc8-f22f60c53782/disk --force-share --output=json" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.706 2 DEBUG nova.virt.disk.api [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Cannot resize image /var/lib/nova/instances/bfc65113-6eeb-464b-bbc8-f22f60c53782/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.707 2 DEBUG nova.objects.instance [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Lazy-loading 'migration_context' on Instance uuid bfc65113-6eeb-464b-bbc8-f22f60c53782 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.736 2 DEBUG nova.virt.libvirt.driver [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.736 2 DEBUG nova.virt.libvirt.driver [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Ensure instance console log exists: /var/lib/nova/instances/bfc65113-6eeb-464b-bbc8-f22f60c53782/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.737 2 DEBUG oslo_concurrency.lockutils [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.737 2 DEBUG oslo_concurrency.lockutils [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.737 2 DEBUG oslo_concurrency.lockutils [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.739 2 DEBUG nova.virt.libvirt.driver [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Start _get_guest_xml network_info=[] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.742 2 WARNING nova.virt.libvirt.driver [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.745 2 DEBUG nova.virt.libvirt.host [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.746 2 DEBUG nova.virt.libvirt.host [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.749 2 DEBUG nova.virt.libvirt.host [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.750 2 DEBUG nova.virt.libvirt.host [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.751 2 DEBUG nova.virt.libvirt.driver [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.751 2 DEBUG nova.virt.hardware [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.752 2 DEBUG nova.virt.hardware [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.752 2 DEBUG nova.virt.hardware [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.752 2 DEBUG nova.virt.hardware [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.752 2 DEBUG nova.virt.hardware [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.752 2 DEBUG nova.virt.hardware [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.753 2 DEBUG nova.virt.hardware [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.753 2 DEBUG nova.virt.hardware [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.753 2 DEBUG nova.virt.hardware [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.753 2 DEBUG nova.virt.hardware [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.753 2 DEBUG nova.virt.hardware [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.756 2 DEBUG nova.objects.instance [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Lazy-loading 'pci_devices' on Instance uuid bfc65113-6eeb-464b-bbc8-f22f60c53782 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:08:00 compute-0 nova_compute[192079]: 2025-10-02 12:08:00.895 2 DEBUG nova.virt.libvirt.driver [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:08:00 compute-0 nova_compute[192079]:   <uuid>bfc65113-6eeb-464b-bbc8-f22f60c53782</uuid>
Oct 02 12:08:00 compute-0 nova_compute[192079]:   <name>instance-0000002b</name>
Oct 02 12:08:00 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:08:00 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:08:00 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:08:00 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:       <nova:name>tempest-TenantUsagesTestJSON-server-1357954911</nova:name>
Oct 02 12:08:00 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:08:00</nova:creationTime>
Oct 02 12:08:00 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:08:00 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:08:00 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:08:00 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:08:00 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:08:00 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:08:00 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:08:00 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:08:00 compute-0 nova_compute[192079]:         <nova:user uuid="bce5351e893c465da33db5556cb822d7">tempest-TenantUsagesTestJSON-240606218-project-member</nova:user>
Oct 02 12:08:00 compute-0 nova_compute[192079]:         <nova:project uuid="5777295bb4a741ba9f78ad7c8c208ff0">tempest-TenantUsagesTestJSON-240606218</nova:project>
Oct 02 12:08:00 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:08:00 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:       <nova:ports/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:08:00 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:08:00 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <system>
Oct 02 12:08:00 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:08:00 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:08:00 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:08:00 compute-0 nova_compute[192079]:       <entry name="serial">bfc65113-6eeb-464b-bbc8-f22f60c53782</entry>
Oct 02 12:08:00 compute-0 nova_compute[192079]:       <entry name="uuid">bfc65113-6eeb-464b-bbc8-f22f60c53782</entry>
Oct 02 12:08:00 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     </system>
Oct 02 12:08:00 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:08:00 compute-0 nova_compute[192079]:   <os>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:   </os>
Oct 02 12:08:00 compute-0 nova_compute[192079]:   <features>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:   </features>
Oct 02 12:08:00 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:08:00 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:08:00 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:08:00 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/bfc65113-6eeb-464b-bbc8-f22f60c53782/disk"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:08:00 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/bfc65113-6eeb-464b-bbc8-f22f60c53782/disk.config"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:08:00 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/bfc65113-6eeb-464b-bbc8-f22f60c53782/console.log" append="off"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <video>
Oct 02 12:08:00 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     </video>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:08:00 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:08:00 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:08:00 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:08:00 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:08:00 compute-0 nova_compute[192079]: </domain>
Oct 02 12:08:00 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:08:01 compute-0 nova_compute[192079]: 2025-10-02 12:08:01.002 2 DEBUG nova.virt.libvirt.driver [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:08:01 compute-0 nova_compute[192079]: 2025-10-02 12:08:01.002 2 DEBUG nova.virt.libvirt.driver [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:08:01 compute-0 nova_compute[192079]: 2025-10-02 12:08:01.003 2 INFO nova.virt.libvirt.driver [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Using config drive
Oct 02 12:08:01 compute-0 nova_compute[192079]: 2025-10-02 12:08:01.157 2 INFO nova.virt.libvirt.driver [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Creating config drive at /var/lib/nova/instances/bfc65113-6eeb-464b-bbc8-f22f60c53782/disk.config
Oct 02 12:08:01 compute-0 nova_compute[192079]: 2025-10-02 12:08:01.166 2 DEBUG oslo_concurrency.processutils [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/bfc65113-6eeb-464b-bbc8-f22f60c53782/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpqxux5qiq execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:08:01 compute-0 nova_compute[192079]: 2025-10-02 12:08:01.292 2 DEBUG oslo_concurrency.processutils [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/bfc65113-6eeb-464b-bbc8-f22f60c53782/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpqxux5qiq" returned: 0 in 0.126s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:08:01 compute-0 systemd-machined[152150]: New machine qemu-22-instance-0000002b.
Oct 02 12:08:01 compute-0 systemd[1]: Started Virtual Machine qemu-22-instance-0000002b.
Oct 02 12:08:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:02.209 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:02.210 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:02.210 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:02 compute-0 nova_compute[192079]: 2025-10-02 12:08:02.295 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406882.2947118, bfc65113-6eeb-464b-bbc8-f22f60c53782 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:08:02 compute-0 nova_compute[192079]: 2025-10-02 12:08:02.295 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] VM Resumed (Lifecycle Event)
Oct 02 12:08:02 compute-0 nova_compute[192079]: 2025-10-02 12:08:02.298 2 DEBUG nova.compute.manager [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:08:02 compute-0 nova_compute[192079]: 2025-10-02 12:08:02.298 2 DEBUG nova.virt.libvirt.driver [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:08:02 compute-0 nova_compute[192079]: 2025-10-02 12:08:02.300 2 INFO nova.virt.libvirt.driver [-] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Instance spawned successfully.
Oct 02 12:08:02 compute-0 nova_compute[192079]: 2025-10-02 12:08:02.301 2 DEBUG nova.virt.libvirt.driver [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:08:02 compute-0 nova_compute[192079]: 2025-10-02 12:08:02.344 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:08:02 compute-0 nova_compute[192079]: 2025-10-02 12:08:02.348 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:08:02 compute-0 nova_compute[192079]: 2025-10-02 12:08:02.380 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:08:02 compute-0 nova_compute[192079]: 2025-10-02 12:08:02.380 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406882.2957237, bfc65113-6eeb-464b-bbc8-f22f60c53782 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:08:02 compute-0 nova_compute[192079]: 2025-10-02 12:08:02.381 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] VM Started (Lifecycle Event)
Oct 02 12:08:02 compute-0 nova_compute[192079]: 2025-10-02 12:08:02.383 2 DEBUG nova.virt.libvirt.driver [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:08:02 compute-0 nova_compute[192079]: 2025-10-02 12:08:02.383 2 DEBUG nova.virt.libvirt.driver [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:08:02 compute-0 nova_compute[192079]: 2025-10-02 12:08:02.383 2 DEBUG nova.virt.libvirt.driver [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:08:02 compute-0 nova_compute[192079]: 2025-10-02 12:08:02.384 2 DEBUG nova.virt.libvirt.driver [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:08:02 compute-0 nova_compute[192079]: 2025-10-02 12:08:02.384 2 DEBUG nova.virt.libvirt.driver [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:08:02 compute-0 nova_compute[192079]: 2025-10-02 12:08:02.384 2 DEBUG nova.virt.libvirt.driver [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:08:02 compute-0 nova_compute[192079]: 2025-10-02 12:08:02.443 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:08:02 compute-0 nova_compute[192079]: 2025-10-02 12:08:02.445 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:08:02 compute-0 nova_compute[192079]: 2025-10-02 12:08:02.493 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:08:02 compute-0 nova_compute[192079]: 2025-10-02 12:08:02.588 2 INFO nova.compute.manager [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Took 2.20 seconds to spawn the instance on the hypervisor.
Oct 02 12:08:02 compute-0 nova_compute[192079]: 2025-10-02 12:08:02.589 2 DEBUG nova.compute.manager [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:08:02 compute-0 nova_compute[192079]: 2025-10-02 12:08:02.862 2 INFO nova.compute.manager [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Took 2.97 seconds to build instance.
Oct 02 12:08:02 compute-0 nova_compute[192079]: 2025-10-02 12:08:02.927 2 DEBUG oslo_concurrency.lockutils [None req-3f858fad-82ab-4334-8d29-1c5b99e8e32b bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Lock "bfc65113-6eeb-464b-bbc8-f22f60c53782" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 3.145s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:03 compute-0 nova_compute[192079]: 2025-10-02 12:08:03.668 2 DEBUG oslo_concurrency.lockutils [None req-439ca624-864c-4582-8850-c2109538112a bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Acquiring lock "bfc65113-6eeb-464b-bbc8-f22f60c53782" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:03 compute-0 nova_compute[192079]: 2025-10-02 12:08:03.669 2 DEBUG oslo_concurrency.lockutils [None req-439ca624-864c-4582-8850-c2109538112a bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Lock "bfc65113-6eeb-464b-bbc8-f22f60c53782" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:03 compute-0 nova_compute[192079]: 2025-10-02 12:08:03.669 2 DEBUG oslo_concurrency.lockutils [None req-439ca624-864c-4582-8850-c2109538112a bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Acquiring lock "bfc65113-6eeb-464b-bbc8-f22f60c53782-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:03 compute-0 nova_compute[192079]: 2025-10-02 12:08:03.669 2 DEBUG oslo_concurrency.lockutils [None req-439ca624-864c-4582-8850-c2109538112a bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Lock "bfc65113-6eeb-464b-bbc8-f22f60c53782-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:03 compute-0 nova_compute[192079]: 2025-10-02 12:08:03.669 2 DEBUG oslo_concurrency.lockutils [None req-439ca624-864c-4582-8850-c2109538112a bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Lock "bfc65113-6eeb-464b-bbc8-f22f60c53782-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:03 compute-0 nova_compute[192079]: 2025-10-02 12:08:03.728 2 INFO nova.compute.manager [None req-439ca624-864c-4582-8850-c2109538112a bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Terminating instance
Oct 02 12:08:03 compute-0 nova_compute[192079]: 2025-10-02 12:08:03.781 2 DEBUG oslo_concurrency.lockutils [None req-439ca624-864c-4582-8850-c2109538112a bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Acquiring lock "refresh_cache-bfc65113-6eeb-464b-bbc8-f22f60c53782" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:08:03 compute-0 nova_compute[192079]: 2025-10-02 12:08:03.781 2 DEBUG oslo_concurrency.lockutils [None req-439ca624-864c-4582-8850-c2109538112a bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Acquired lock "refresh_cache-bfc65113-6eeb-464b-bbc8-f22f60c53782" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:08:03 compute-0 nova_compute[192079]: 2025-10-02 12:08:03.782 2 DEBUG nova.network.neutron [None req-439ca624-864c-4582-8850-c2109538112a bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:08:03 compute-0 nova_compute[192079]: 2025-10-02 12:08:03.789 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:03 compute-0 nova_compute[192079]: 2025-10-02 12:08:03.964 2 DEBUG nova.network.neutron [None req-439ca624-864c-4582-8850-c2109538112a bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:08:04 compute-0 nova_compute[192079]: 2025-10-02 12:08:04.182 2 DEBUG nova.network.neutron [None req-439ca624-864c-4582-8850-c2109538112a bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:08:04 compute-0 nova_compute[192079]: 2025-10-02 12:08:04.227 2 DEBUG oslo_concurrency.lockutils [None req-439ca624-864c-4582-8850-c2109538112a bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Releasing lock "refresh_cache-bfc65113-6eeb-464b-bbc8-f22f60c53782" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:08:04 compute-0 nova_compute[192079]: 2025-10-02 12:08:04.227 2 DEBUG nova.compute.manager [None req-439ca624-864c-4582-8850-c2109538112a bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:08:04 compute-0 systemd[1]: machine-qemu\x2d22\x2dinstance\x2d0000002b.scope: Deactivated successfully.
Oct 02 12:08:04 compute-0 systemd[1]: machine-qemu\x2d22\x2dinstance\x2d0000002b.scope: Consumed 2.810s CPU time.
Oct 02 12:08:04 compute-0 systemd-machined[152150]: Machine qemu-22-instance-0000002b terminated.
Oct 02 12:08:04 compute-0 nova_compute[192079]: 2025-10-02 12:08:04.472 2 INFO nova.virt.libvirt.driver [-] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Instance destroyed successfully.
Oct 02 12:08:04 compute-0 nova_compute[192079]: 2025-10-02 12:08:04.472 2 DEBUG nova.objects.instance [None req-439ca624-864c-4582-8850-c2109538112a bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Lazy-loading 'resources' on Instance uuid bfc65113-6eeb-464b-bbc8-f22f60c53782 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:08:04 compute-0 nova_compute[192079]: 2025-10-02 12:08:04.524 2 INFO nova.virt.libvirt.driver [None req-439ca624-864c-4582-8850-c2109538112a bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Deleting instance files /var/lib/nova/instances/bfc65113-6eeb-464b-bbc8-f22f60c53782_del
Oct 02 12:08:04 compute-0 nova_compute[192079]: 2025-10-02 12:08:04.525 2 INFO nova.virt.libvirt.driver [None req-439ca624-864c-4582-8850-c2109538112a bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Deletion of /var/lib/nova/instances/bfc65113-6eeb-464b-bbc8-f22f60c53782_del complete
Oct 02 12:08:04 compute-0 nova_compute[192079]: 2025-10-02 12:08:04.872 2 INFO nova.compute.manager [None req-439ca624-864c-4582-8850-c2109538112a bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Took 0.64 seconds to destroy the instance on the hypervisor.
Oct 02 12:08:04 compute-0 nova_compute[192079]: 2025-10-02 12:08:04.872 2 DEBUG oslo.service.loopingcall [None req-439ca624-864c-4582-8850-c2109538112a bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:08:04 compute-0 nova_compute[192079]: 2025-10-02 12:08:04.873 2 DEBUG nova.compute.manager [-] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:08:04 compute-0 nova_compute[192079]: 2025-10-02 12:08:04.873 2 DEBUG nova.network.neutron [-] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:08:04 compute-0 nova_compute[192079]: 2025-10-02 12:08:04.979 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:05 compute-0 nova_compute[192079]: 2025-10-02 12:08:05.022 2 DEBUG nova.network.neutron [-] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:08:05 compute-0 nova_compute[192079]: 2025-10-02 12:08:05.043 2 DEBUG nova.network.neutron [-] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:08:05 compute-0 nova_compute[192079]: 2025-10-02 12:08:05.064 2 INFO nova.compute.manager [-] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Took 0.19 seconds to deallocate network for instance.
Oct 02 12:08:05 compute-0 nova_compute[192079]: 2025-10-02 12:08:05.156 2 DEBUG oslo_concurrency.lockutils [None req-439ca624-864c-4582-8850-c2109538112a bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:05 compute-0 nova_compute[192079]: 2025-10-02 12:08:05.156 2 DEBUG oslo_concurrency.lockutils [None req-439ca624-864c-4582-8850-c2109538112a bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:05 compute-0 nova_compute[192079]: 2025-10-02 12:08:05.211 2 DEBUG nova.compute.provider_tree [None req-439ca624-864c-4582-8850-c2109538112a bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:08:05 compute-0 nova_compute[192079]: 2025-10-02 12:08:05.226 2 DEBUG nova.scheduler.client.report [None req-439ca624-864c-4582-8850-c2109538112a bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:08:05 compute-0 nova_compute[192079]: 2025-10-02 12:08:05.302 2 DEBUG oslo_concurrency.lockutils [None req-439ca624-864c-4582-8850-c2109538112a bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.146s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:05 compute-0 nova_compute[192079]: 2025-10-02 12:08:05.335 2 INFO nova.scheduler.client.report [None req-439ca624-864c-4582-8850-c2109538112a bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Deleted allocations for instance bfc65113-6eeb-464b-bbc8-f22f60c53782
Oct 02 12:08:05 compute-0 nova_compute[192079]: 2025-10-02 12:08:05.419 2 DEBUG oslo_concurrency.lockutils [None req-439ca624-864c-4582-8850-c2109538112a bce5351e893c465da33db5556cb822d7 5777295bb4a741ba9f78ad7c8c208ff0 - - default default] Lock "bfc65113-6eeb-464b-bbc8-f22f60c53782" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.750s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:06 compute-0 podman[225659]: 2025-10-02 12:08:06.176612141 +0000 UTC m=+0.079869719 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_managed=true, config_id=edpm, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', 
'/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']})
Oct 02 12:08:07 compute-0 nova_compute[192079]: 2025-10-02 12:08:07.316 2 DEBUG oslo_concurrency.lockutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Acquiring lock "d55dd428-ae1c-4c43-8582-3a46d50f4822" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:07 compute-0 nova_compute[192079]: 2025-10-02 12:08:07.317 2 DEBUG oslo_concurrency.lockutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Lock "d55dd428-ae1c-4c43-8582-3a46d50f4822" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:07 compute-0 nova_compute[192079]: 2025-10-02 12:08:07.332 2 DEBUG nova.compute.manager [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:08:07 compute-0 nova_compute[192079]: 2025-10-02 12:08:07.431 2 DEBUG oslo_concurrency.lockutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:07 compute-0 nova_compute[192079]: 2025-10-02 12:08:07.431 2 DEBUG oslo_concurrency.lockutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:07 compute-0 nova_compute[192079]: 2025-10-02 12:08:07.440 2 DEBUG nova.virt.hardware [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:08:07 compute-0 nova_compute[192079]: 2025-10-02 12:08:07.440 2 INFO nova.compute.claims [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:08:07 compute-0 nova_compute[192079]: 2025-10-02 12:08:07.558 2 DEBUG nova.compute.provider_tree [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:08:07 compute-0 nova_compute[192079]: 2025-10-02 12:08:07.574 2 DEBUG nova.scheduler.client.report [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:08:07 compute-0 nova_compute[192079]: 2025-10-02 12:08:07.598 2 DEBUG oslo_concurrency.lockutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.167s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:07 compute-0 nova_compute[192079]: 2025-10-02 12:08:07.599 2 DEBUG nova.compute.manager [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:08:07 compute-0 nova_compute[192079]: 2025-10-02 12:08:07.671 2 DEBUG nova.compute.manager [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:08:07 compute-0 nova_compute[192079]: 2025-10-02 12:08:07.671 2 DEBUG nova.network.neutron [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:08:07 compute-0 nova_compute[192079]: 2025-10-02 12:08:07.688 2 INFO nova.virt.libvirt.driver [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:08:07 compute-0 nova_compute[192079]: 2025-10-02 12:08:07.704 2 DEBUG nova.compute.manager [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:08:07 compute-0 nova_compute[192079]: 2025-10-02 12:08:07.814 2 DEBUG nova.compute.manager [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:08:07 compute-0 nova_compute[192079]: 2025-10-02 12:08:07.816 2 DEBUG nova.virt.libvirt.driver [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:08:07 compute-0 nova_compute[192079]: 2025-10-02 12:08:07.817 2 INFO nova.virt.libvirt.driver [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Creating image(s)
Oct 02 12:08:07 compute-0 nova_compute[192079]: 2025-10-02 12:08:07.818 2 DEBUG oslo_concurrency.lockutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Acquiring lock "/var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:07 compute-0 nova_compute[192079]: 2025-10-02 12:08:07.818 2 DEBUG oslo_concurrency.lockutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Lock "/var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:07 compute-0 nova_compute[192079]: 2025-10-02 12:08:07.820 2 DEBUG oslo_concurrency.lockutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Lock "/var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:07 compute-0 nova_compute[192079]: 2025-10-02 12:08:07.851 2 DEBUG oslo_concurrency.processutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:08:07 compute-0 nova_compute[192079]: 2025-10-02 12:08:07.922 2 DEBUG oslo_concurrency.processutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.071s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:08:07 compute-0 nova_compute[192079]: 2025-10-02 12:08:07.924 2 DEBUG oslo_concurrency.lockutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:07 compute-0 nova_compute[192079]: 2025-10-02 12:08:07.925 2 DEBUG oslo_concurrency.lockutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:07 compute-0 nova_compute[192079]: 2025-10-02 12:08:07.940 2 DEBUG oslo_concurrency.processutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:08:07 compute-0 nova_compute[192079]: 2025-10-02 12:08:07.996 2 DEBUG oslo_concurrency.processutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:08:07 compute-0 nova_compute[192079]: 2025-10-02 12:08:07.997 2 DEBUG oslo_concurrency.processutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:08:08 compute-0 nova_compute[192079]: 2025-10-02 12:08:08.030 2 DEBUG nova.policy [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fbc7616089cb4f78832692487019c83d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ef4e3be787374d90a6a236c7f76bd940', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:08:08 compute-0 nova_compute[192079]: 2025-10-02 12:08:08.041 2 DEBUG oslo_concurrency.processutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822/disk 1073741824" returned: 0 in 0.044s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:08:08 compute-0 nova_compute[192079]: 2025-10-02 12:08:08.042 2 DEBUG oslo_concurrency.lockutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.117s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:08 compute-0 nova_compute[192079]: 2025-10-02 12:08:08.042 2 DEBUG oslo_concurrency.processutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:08:08 compute-0 nova_compute[192079]: 2025-10-02 12:08:08.096 2 DEBUG oslo_concurrency.processutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:08:08 compute-0 nova_compute[192079]: 2025-10-02 12:08:08.097 2 DEBUG nova.virt.disk.api [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Checking if we can resize image /var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:08:08 compute-0 nova_compute[192079]: 2025-10-02 12:08:08.098 2 DEBUG oslo_concurrency.processutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:08:08 compute-0 nova_compute[192079]: 2025-10-02 12:08:08.157 2 DEBUG oslo_concurrency.processutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822/disk --force-share --output=json" returned: 0 in 0.060s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:08:08 compute-0 nova_compute[192079]: 2025-10-02 12:08:08.159 2 DEBUG nova.virt.disk.api [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Cannot resize image /var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:08:08 compute-0 nova_compute[192079]: 2025-10-02 12:08:08.159 2 DEBUG nova.objects.instance [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Lazy-loading 'migration_context' on Instance uuid d55dd428-ae1c-4c43-8582-3a46d50f4822 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:08:08 compute-0 nova_compute[192079]: 2025-10-02 12:08:08.176 2 DEBUG nova.virt.libvirt.driver [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:08:08 compute-0 nova_compute[192079]: 2025-10-02 12:08:08.176 2 DEBUG nova.virt.libvirt.driver [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Ensure instance console log exists: /var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:08:08 compute-0 nova_compute[192079]: 2025-10-02 12:08:08.177 2 DEBUG oslo_concurrency.lockutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:08 compute-0 nova_compute[192079]: 2025-10-02 12:08:08.177 2 DEBUG oslo_concurrency.lockutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:08 compute-0 nova_compute[192079]: 2025-10-02 12:08:08.177 2 DEBUG oslo_concurrency.lockutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:08 compute-0 nova_compute[192079]: 2025-10-02 12:08:08.791 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:08 compute-0 nova_compute[192079]: 2025-10-02 12:08:08.904 2 DEBUG nova.network.neutron [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Successfully created port: d86c2c53-081c-4754-b070-2dd5028a4c08 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:08:09 compute-0 nova_compute[192079]: 2025-10-02 12:08:09.799 2 DEBUG nova.network.neutron [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Successfully updated port: d86c2c53-081c-4754-b070-2dd5028a4c08 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:08:09 compute-0 nova_compute[192079]: 2025-10-02 12:08:09.814 2 DEBUG oslo_concurrency.lockutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Acquiring lock "refresh_cache-d55dd428-ae1c-4c43-8582-3a46d50f4822" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:08:09 compute-0 nova_compute[192079]: 2025-10-02 12:08:09.815 2 DEBUG oslo_concurrency.lockutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Acquired lock "refresh_cache-d55dd428-ae1c-4c43-8582-3a46d50f4822" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:08:09 compute-0 nova_compute[192079]: 2025-10-02 12:08:09.816 2 DEBUG nova.network.neutron [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:08:09 compute-0 nova_compute[192079]: 2025-10-02 12:08:09.930 2 DEBUG nova.compute.manager [req-73c02a31-68ed-4e85-bcc8-48420e38d66f req-f9d74bce-5180-47cf-8ca4-997aab8a6104 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Received event network-changed-d86c2c53-081c-4754-b070-2dd5028a4c08 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:08:09 compute-0 nova_compute[192079]: 2025-10-02 12:08:09.930 2 DEBUG nova.compute.manager [req-73c02a31-68ed-4e85-bcc8-48420e38d66f req-f9d74bce-5180-47cf-8ca4-997aab8a6104 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Refreshing instance network info cache due to event network-changed-d86c2c53-081c-4754-b070-2dd5028a4c08. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:08:09 compute-0 nova_compute[192079]: 2025-10-02 12:08:09.930 2 DEBUG oslo_concurrency.lockutils [req-73c02a31-68ed-4e85-bcc8-48420e38d66f req-f9d74bce-5180-47cf-8ca4-997aab8a6104 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-d55dd428-ae1c-4c43-8582-3a46d50f4822" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:08:09 compute-0 nova_compute[192079]: 2025-10-02 12:08:09.981 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:10 compute-0 nova_compute[192079]: 2025-10-02 12:08:10.028 2 DEBUG nova.network.neutron [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:08:10 compute-0 nova_compute[192079]: 2025-10-02 12:08:10.660 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:08:11 compute-0 podman[225695]: 2025-10-02 12:08:11.153797854 +0000 UTC m=+0.061302318 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, container_name=multipathd, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2)
Oct 02 12:08:11 compute-0 podman[225694]: 2025-10-02 12:08:11.167232367 +0000 UTC m=+0.071025880 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, release=1755695350, maintainer=Red Hat, Inc., managed_by=edpm_ansible, vcs-type=git, distribution-scope=public, io.buildah.version=1.33.7, io.openshift.tags=minimal rhel9, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, url=https://catalog.redhat.com/en/search?searchType=containers, container_name=openstack_network_exporter, name=ubi9-minimal, config_id=edpm, version=9.6, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, build-date=2025-08-20T13:12:41, description=The Universal Base Image Minimal is a 
stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., architecture=x86_64, com.redhat.component=ubi9-minimal-container, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.expose-services=, vendor=Red Hat, Inc.)
Oct 02 12:08:11 compute-0 nova_compute[192079]: 2025-10-02 12:08:11.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:08:11 compute-0 nova_compute[192079]: 2025-10-02 12:08:11.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:08:11 compute-0 nova_compute[192079]: 2025-10-02 12:08:11.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:08:11 compute-0 nova_compute[192079]: 2025-10-02 12:08:11.718 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:11 compute-0 nova_compute[192079]: 2025-10-02 12:08:11.719 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:11 compute-0 nova_compute[192079]: 2025-10-02 12:08:11.719 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:11 compute-0 nova_compute[192079]: 2025-10-02 12:08:11.719 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:08:11 compute-0 nova_compute[192079]: 2025-10-02 12:08:11.881 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:08:11 compute-0 nova_compute[192079]: 2025-10-02 12:08:11.882 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5742MB free_disk=73.42570495605469GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:08:11 compute-0 nova_compute[192079]: 2025-10-02 12:08:11.882 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:11 compute-0 nova_compute[192079]: 2025-10-02 12:08:11.882 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:11 compute-0 nova_compute[192079]: 2025-10-02 12:08:11.917 2 DEBUG nova.network.neutron [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Updating instance_info_cache with network_info: [{"id": "d86c2c53-081c-4754-b070-2dd5028a4c08", "address": "fa:16:3e:4d:9f:a8", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd86c2c53-08", "ovs_interfaceid": "d86c2c53-081c-4754-b070-2dd5028a4c08", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.002 2 DEBUG oslo_concurrency.lockutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Releasing lock "refresh_cache-d55dd428-ae1c-4c43-8582-3a46d50f4822" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.003 2 DEBUG nova.compute.manager [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Instance network_info: |[{"id": "d86c2c53-081c-4754-b070-2dd5028a4c08", "address": "fa:16:3e:4d:9f:a8", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd86c2c53-08", "ovs_interfaceid": "d86c2c53-081c-4754-b070-2dd5028a4c08", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.005 2 DEBUG oslo_concurrency.lockutils [req-73c02a31-68ed-4e85-bcc8-48420e38d66f req-f9d74bce-5180-47cf-8ca4-997aab8a6104 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-d55dd428-ae1c-4c43-8582-3a46d50f4822" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.005 2 DEBUG nova.network.neutron [req-73c02a31-68ed-4e85-bcc8-48420e38d66f req-f9d74bce-5180-47cf-8ca4-997aab8a6104 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Refreshing network info cache for port d86c2c53-081c-4754-b070-2dd5028a4c08 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.010 2 DEBUG nova.virt.libvirt.driver [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Start _get_guest_xml network_info=[{"id": "d86c2c53-081c-4754-b070-2dd5028a4c08", "address": "fa:16:3e:4d:9f:a8", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd86c2c53-08", "ovs_interfaceid": "d86c2c53-081c-4754-b070-2dd5028a4c08", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.016 2 WARNING nova.virt.libvirt.driver [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.020 2 DEBUG nova.virt.libvirt.host [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.021 2 DEBUG nova.virt.libvirt.host [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.027 2 DEBUG nova.virt.libvirt.host [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.027 2 DEBUG nova.virt.libvirt.host [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.029 2 DEBUG nova.virt.libvirt.driver [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.030 2 DEBUG nova.virt.hardware [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.030 2 DEBUG nova.virt.hardware [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.031 2 DEBUG nova.virt.hardware [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.031 2 DEBUG nova.virt.hardware [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.032 2 DEBUG nova.virt.hardware [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.032 2 DEBUG nova.virt.hardware [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.033 2 DEBUG nova.virt.hardware [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.034 2 DEBUG nova.virt.hardware [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.034 2 DEBUG nova.virt.hardware [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.034 2 DEBUG nova.virt.hardware [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.035 2 DEBUG nova.virt.hardware [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.040 2 DEBUG nova.virt.libvirt.vif [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:08:06Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-AttachInterfacesTestJSON-server-1852145717',display_name='tempest-AttachInterfacesTestJSON-server-1852145717',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-attachinterfacestestjson-server-1852145717',id=44,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBOMAiNKx1nMsQWkjyoacitfMpSCECpXaL6jiwNift5lqyR8GB5bAw9OQhuD+NMppggB+YdsyU4EuF27p1sPXC0U7gBRRZIdIzuVGXUDvMEa8cZQfCNptjsHEFbvdeH21PA==',key_name='tempest-keypair-1558941664',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='ef4e3be787374d90a6a236c7f76bd940',ramdisk_id='',reservation_id='r-f02kral6',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-AttachInterfacesTestJSON-812274278',owner_user_name='tempest-AttachInterfacesTestJSON-812274278-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:08:07Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='fbc7616089cb4f78832692487019c83d',uuid=d55dd428-ae1c-4c43-8582-3a46d50f4822,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "d86c2c53-081c-4754-b070-2dd5028a4c08", "address": "fa:16:3e:4d:9f:a8", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", 
"type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd86c2c53-08", "ovs_interfaceid": "d86c2c53-081c-4754-b070-2dd5028a4c08", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.041 2 DEBUG nova.network.os_vif_util [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Converting VIF {"id": "d86c2c53-081c-4754-b070-2dd5028a4c08", "address": "fa:16:3e:4d:9f:a8", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd86c2c53-08", "ovs_interfaceid": "d86c2c53-081c-4754-b070-2dd5028a4c08", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.042 2 DEBUG nova.network.os_vif_util [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:4d:9f:a8,bridge_name='br-int',has_traffic_filtering=True,id=d86c2c53-081c-4754-b070-2dd5028a4c08,network=Network(7d845a33-56e0-4850-9f27-8a54095796f2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd86c2c53-08') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.044 2 DEBUG nova.objects.instance [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Lazy-loading 'pci_devices' on Instance uuid d55dd428-ae1c-4c43-8582-3a46d50f4822 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.089 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance d55dd428-ae1c-4c43-8582-3a46d50f4822 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.089 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 1 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.089 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=640MB phys_disk=79GB used_disk=1GB total_vcpus=8 used_vcpus=1 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.140 2 DEBUG nova.virt.libvirt.driver [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:08:12 compute-0 nova_compute[192079]:   <uuid>d55dd428-ae1c-4c43-8582-3a46d50f4822</uuid>
Oct 02 12:08:12 compute-0 nova_compute[192079]:   <name>instance-0000002c</name>
Oct 02 12:08:12 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:08:12 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:08:12 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:08:12 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:       <nova:name>tempest-AttachInterfacesTestJSON-server-1852145717</nova:name>
Oct 02 12:08:12 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:08:12</nova:creationTime>
Oct 02 12:08:12 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:08:12 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:08:12 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:08:12 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:08:12 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:08:12 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:08:12 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:08:12 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:08:12 compute-0 nova_compute[192079]:         <nova:user uuid="fbc7616089cb4f78832692487019c83d">tempest-AttachInterfacesTestJSON-812274278-project-member</nova:user>
Oct 02 12:08:12 compute-0 nova_compute[192079]:         <nova:project uuid="ef4e3be787374d90a6a236c7f76bd940">tempest-AttachInterfacesTestJSON-812274278</nova:project>
Oct 02 12:08:12 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:08:12 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:08:12 compute-0 nova_compute[192079]:         <nova:port uuid="d86c2c53-081c-4754-b070-2dd5028a4c08">
Oct 02 12:08:12 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.10" ipVersion="4"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:08:12 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:08:12 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:08:12 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <system>
Oct 02 12:08:12 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:08:12 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:08:12 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:08:12 compute-0 nova_compute[192079]:       <entry name="serial">d55dd428-ae1c-4c43-8582-3a46d50f4822</entry>
Oct 02 12:08:12 compute-0 nova_compute[192079]:       <entry name="uuid">d55dd428-ae1c-4c43-8582-3a46d50f4822</entry>
Oct 02 12:08:12 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     </system>
Oct 02 12:08:12 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:08:12 compute-0 nova_compute[192079]:   <os>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:   </os>
Oct 02 12:08:12 compute-0 nova_compute[192079]:   <features>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:   </features>
Oct 02 12:08:12 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:08:12 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:08:12 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:08:12 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822/disk"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:08:12 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822/disk.config"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:08:12 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:4d:9f:a8"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:       <target dev="tapd86c2c53-08"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:08:12 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822/console.log" append="off"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <video>
Oct 02 12:08:12 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     </video>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:08:12 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:08:12 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:08:12 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:08:12 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:08:12 compute-0 nova_compute[192079]: </domain>
Oct 02 12:08:12 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.141 2 DEBUG nova.compute.manager [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Preparing to wait for external event network-vif-plugged-d86c2c53-081c-4754-b070-2dd5028a4c08 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.141 2 DEBUG oslo_concurrency.lockutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Acquiring lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.141 2 DEBUG oslo_concurrency.lockutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.142 2 DEBUG oslo_concurrency.lockutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.143 2 DEBUG nova.virt.libvirt.vif [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:08:06Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-AttachInterfacesTestJSON-server-1852145717',display_name='tempest-AttachInterfacesTestJSON-server-1852145717',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-attachinterfacestestjson-server-1852145717',id=44,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBOMAiNKx1nMsQWkjyoacitfMpSCECpXaL6jiwNift5lqyR8GB5bAw9OQhuD+NMppggB+YdsyU4EuF27p1sPXC0U7gBRRZIdIzuVGXUDvMEa8cZQfCNptjsHEFbvdeH21PA==',key_name='tempest-keypair-1558941664',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='ef4e3be787374d90a6a236c7f76bd940',ramdisk_id='',reservation_id='r-f02kral6',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-AttachInterfacesTestJSON-812274278',owner_user_name='tempest-AttachInterfacesTestJSON-812274278-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:08:07Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='fbc7616089cb4f78832692487019c83d',uuid=d55dd428-ae1c-4c43-8582-3a46d50f4822,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "d86c2c53-081c-4754-b070-2dd5028a4c08", "address": "fa:16:3e:4d:9f:a8", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd86c2c53-08", "ovs_interfaceid": "d86c2c53-081c-4754-b070-2dd5028a4c08", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.143 2 DEBUG nova.network.os_vif_util [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Converting VIF {"id": "d86c2c53-081c-4754-b070-2dd5028a4c08", "address": "fa:16:3e:4d:9f:a8", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd86c2c53-08", "ovs_interfaceid": "d86c2c53-081c-4754-b070-2dd5028a4c08", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.144 2 DEBUG nova.network.os_vif_util [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:4d:9f:a8,bridge_name='br-int',has_traffic_filtering=True,id=d86c2c53-081c-4754-b070-2dd5028a4c08,network=Network(7d845a33-56e0-4850-9f27-8a54095796f2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd86c2c53-08') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.144 2 DEBUG os_vif [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:4d:9f:a8,bridge_name='br-int',has_traffic_filtering=True,id=d86c2c53-081c-4754-b070-2dd5028a4c08,network=Network(7d845a33-56e0-4850-9f27-8a54095796f2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd86c2c53-08') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.145 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.145 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.146 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.149 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.150 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapd86c2c53-08, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.151 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapd86c2c53-08, col_values=(('external_ids', {'iface-id': 'd86c2c53-081c-4754-b070-2dd5028a4c08', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:4d:9f:a8', 'vm-uuid': 'd55dd428-ae1c-4c43-8582-3a46d50f4822'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.152 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:12 compute-0 NetworkManager[51160]: <info>  [1759406892.1530] manager: (tapd86c2c53-08): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/64)
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.155 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.160 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.161 2 INFO os_vif [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:4d:9f:a8,bridge_name='br-int',has_traffic_filtering=True,id=d86c2c53-081c-4754-b070-2dd5028a4c08,network=Network(7d845a33-56e0-4850-9f27-8a54095796f2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd86c2c53-08')
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.165 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.283 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.398 2 DEBUG nova.virt.libvirt.driver [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.399 2 DEBUG nova.virt.libvirt.driver [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.399 2 DEBUG nova.virt.libvirt.driver [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] No VIF found with MAC fa:16:3e:4d:9f:a8, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.400 2 INFO nova.virt.libvirt.driver [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Using config drive
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.523 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:08:12 compute-0 nova_compute[192079]: 2025-10-02 12:08:12.523 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.641s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:12 compute-0 ovn_controller[94336]: 2025-10-02T12:08:12Z|00127|memory_trim|INFO|Detected inactivity (last active 30001 ms ago): trimming memory
Oct 02 12:08:13 compute-0 nova_compute[192079]: 2025-10-02 12:08:13.678 2 INFO nova.virt.libvirt.driver [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Creating config drive at /var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822/disk.config
Oct 02 12:08:13 compute-0 nova_compute[192079]: 2025-10-02 12:08:13.686 2 DEBUG oslo_concurrency.processutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpn813sqn9 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:08:13 compute-0 nova_compute[192079]: 2025-10-02 12:08:13.816 2 DEBUG oslo_concurrency.processutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpn813sqn9" returned: 0 in 0.131s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:08:13 compute-0 kernel: tapd86c2c53-08: entered promiscuous mode
Oct 02 12:08:13 compute-0 ovn_controller[94336]: 2025-10-02T12:08:13Z|00128|binding|INFO|Claiming lport d86c2c53-081c-4754-b070-2dd5028a4c08 for this chassis.
Oct 02 12:08:13 compute-0 ovn_controller[94336]: 2025-10-02T12:08:13Z|00129|binding|INFO|d86c2c53-081c-4754-b070-2dd5028a4c08: Claiming fa:16:3e:4d:9f:a8 10.100.0.10
Oct 02 12:08:13 compute-0 nova_compute[192079]: 2025-10-02 12:08:13.907 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:13 compute-0 NetworkManager[51160]: <info>  [1759406893.9101] manager: (tapd86c2c53-08): new Tun device (/org/freedesktop/NetworkManager/Devices/65)
Oct 02 12:08:13 compute-0 nova_compute[192079]: 2025-10-02 12:08:13.912 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:13 compute-0 nova_compute[192079]: 2025-10-02 12:08:13.915 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:13 compute-0 systemd-udevd[225755]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:08:13 compute-0 NetworkManager[51160]: <info>  [1759406893.9546] device (tapd86c2c53-08): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:08:13 compute-0 NetworkManager[51160]: <info>  [1759406893.9555] device (tapd86c2c53-08): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:08:13 compute-0 systemd-machined[152150]: New machine qemu-23-instance-0000002c.
Oct 02 12:08:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:13.974 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:4d:9f:a8 10.100.0.10'], port_security=['fa:16:3e:4d:9f:a8 10.100.0.10'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.10/28', 'neutron:device_id': 'd55dd428-ae1c-4c43-8582-3a46d50f4822', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-7d845a33-56e0-4850-9f27-8a54095796f2', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'ef4e3be787374d90a6a236c7f76bd940', 'neutron:revision_number': '2', 'neutron:security_group_ids': '97988f28-31b7-47ec-b097-a7d07047d94c', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=4583e9be-3cfa-4470-9e2e-4e943d469605, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=d86c2c53-081c-4754-b070-2dd5028a4c08) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:08:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:13.977 103294 INFO neutron.agent.ovn.metadata.agent [-] Port d86c2c53-081c-4754-b070-2dd5028a4c08 in datapath 7d845a33-56e0-4850-9f27-8a54095796f2 bound to our chassis
Oct 02 12:08:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:13.978 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 7d845a33-56e0-4850-9f27-8a54095796f2
Oct 02 12:08:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:13.989 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2bb8ba74-5a5e-402c-8c35-dfbd590a6c8d]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:13.990 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap7d845a33-51 in ovnmeta-7d845a33-56e0-4850-9f27-8a54095796f2 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:08:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:13.991 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap7d845a33-50 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:08:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:13.991 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[68d65bc2-32b2-47f3-ba6c-fe4d85dfea0f]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:13.992 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[baea7e60-2ef7-4a73-a6f4-5c8db49a776c]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:13 compute-0 systemd[1]: Started Virtual Machine qemu-23-instance-0000002c.
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:13.999 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:14 compute-0 ovn_controller[94336]: 2025-10-02T12:08:14Z|00130|binding|INFO|Setting lport d86c2c53-081c-4754-b070-2dd5028a4c08 ovn-installed in OVS
Oct 02 12:08:14 compute-0 ovn_controller[94336]: 2025-10-02T12:08:14Z|00131|binding|INFO|Setting lport d86c2c53-081c-4754-b070-2dd5028a4c08 up in Southbound
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.004 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:14.005 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[f10b4480-d912-45d6-ace6-cc6805d51799]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:14.031 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3a9a10d7-2c9d-4a42-ab5d-e3655e08f688]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:14.070 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[e2669796-5d25-471a-b03f-cd67abc1cc62]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:14.075 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f4a6a7a8-5871-4428-82c5-73dadb07e6f5]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:14 compute-0 NetworkManager[51160]: <info>  [1759406894.0761] manager: (tap7d845a33-50): new Veth device (/org/freedesktop/NetworkManager/Devices/66)
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:14.108 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[20717f3b-9af3-4063-bcba-a6a5b94e1f0f]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:14.110 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[ace80d47-f168-42f2-9d55-6773d7dbcb81]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:14 compute-0 NetworkManager[51160]: <info>  [1759406894.1387] device (tap7d845a33-50): carrier: link connected
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:14.149 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[59db48f7-4f5e-444a-a6ad-f0e754453845]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:14.172 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7f126c66-4b58-468a-984e-7ea0f7c66fea]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap7d845a33-51'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:8f:90:16'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 40], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 489176, 'reachable_time': 24583, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 225789, 'error': None, 'target': 'ovnmeta-7d845a33-56e0-4850-9f27-8a54095796f2', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:14.189 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a3ff1b6e-c107-4fe2-8845-e30088bcdd43]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe8f:9016'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 489176, 'tstamp': 489176}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 225791, 'error': None, 'target': 'ovnmeta-7d845a33-56e0-4850-9f27-8a54095796f2', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.192 2 DEBUG nova.network.neutron [req-73c02a31-68ed-4e85-bcc8-48420e38d66f req-f9d74bce-5180-47cf-8ca4-997aab8a6104 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Updated VIF entry in instance network info cache for port d86c2c53-081c-4754-b070-2dd5028a4c08. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.193 2 DEBUG nova.network.neutron [req-73c02a31-68ed-4e85-bcc8-48420e38d66f req-f9d74bce-5180-47cf-8ca4-997aab8a6104 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Updating instance_info_cache with network_info: [{"id": "d86c2c53-081c-4754-b070-2dd5028a4c08", "address": "fa:16:3e:4d:9f:a8", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd86c2c53-08", "ovs_interfaceid": "d86c2c53-081c-4754-b070-2dd5028a4c08", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:14.214 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ffd36873-ae82-4f34-850f-cd501cd5e202]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap7d845a33-51'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:8f:90:16'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 40], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 489176, 'reachable_time': 24583, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 225796, 'error': None, 'target': 'ovnmeta-7d845a33-56e0-4850-9f27-8a54095796f2', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:14.249 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a73dc485-efe2-4eeb-9de7-e150f6b05cc6]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.325 2 DEBUG oslo_concurrency.lockutils [req-73c02a31-68ed-4e85-bcc8-48420e38d66f req-f9d74bce-5180-47cf-8ca4-997aab8a6104 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-d55dd428-ae1c-4c43-8582-3a46d50f4822" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:14.334 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2639b2fc-d728-4a3d-8c8b-4dc35db70646]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:14.336 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap7d845a33-50, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:14.337 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:14.337 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap7d845a33-50, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.339 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:14 compute-0 kernel: tap7d845a33-50: entered promiscuous mode
Oct 02 12:08:14 compute-0 NetworkManager[51160]: <info>  [1759406894.3422] manager: (tap7d845a33-50): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/67)
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:14.342 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap7d845a33-50, col_values=(('external_ids', {'iface-id': '1c321c19-d630-4a6f-8ba8-7bac90af9bae'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.343 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:14 compute-0 ovn_controller[94336]: 2025-10-02T12:08:14Z|00132|binding|INFO|Releasing lport 1c321c19-d630-4a6f-8ba8-7bac90af9bae from this chassis (sb_readonly=0)
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.358 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:14.359 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/7d845a33-56e0-4850-9f27-8a54095796f2.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/7d845a33-56e0-4850-9f27-8a54095796f2.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:14.360 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b6e977d1-df4e-49fd-aa5f-53105cf033dd]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:14.360 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-7d845a33-56e0-4850-9f27-8a54095796f2
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/7d845a33-56e0-4850-9f27-8a54095796f2.pid.haproxy
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 7d845a33-56e0-4850-9f27-8a54095796f2
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:14.364 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-7d845a33-56e0-4850-9f27-8a54095796f2', 'env', 'PROCESS_TAG=haproxy-7d845a33-56e0-4850-9f27-8a54095796f2', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/7d845a33-56e0-4850-9f27-8a54095796f2.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.518 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.726 2 DEBUG nova.compute.manager [req-710d75ef-fb2e-4aae-af59-d228c8e98238 req-349960f0-33e0-484f-a129-3c4d58a7e92d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Received event network-vif-plugged-d86c2c53-081c-4754-b070-2dd5028a4c08 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.728 2 DEBUG oslo_concurrency.lockutils [req-710d75ef-fb2e-4aae-af59-d228c8e98238 req-349960f0-33e0-484f-a129-3c4d58a7e92d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.728 2 DEBUG oslo_concurrency.lockutils [req-710d75ef-fb2e-4aae-af59-d228c8e98238 req-349960f0-33e0-484f-a129-3c4d58a7e92d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.728 2 DEBUG oslo_concurrency.lockutils [req-710d75ef-fb2e-4aae-af59-d228c8e98238 req-349960f0-33e0-484f-a129-3c4d58a7e92d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.728 2 DEBUG nova.compute.manager [req-710d75ef-fb2e-4aae-af59-d228c8e98238 req-349960f0-33e0-484f-a129-3c4d58a7e92d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Processing event network-vif-plugged-d86c2c53-081c-4754-b070-2dd5028a4c08 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.753 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406894.7532427, d55dd428-ae1c-4c43-8582-3a46d50f4822 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.754 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] VM Started (Lifecycle Event)
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.756 2 DEBUG nova.compute.manager [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.760 2 DEBUG nova.virt.libvirt.driver [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.764 2 INFO nova.virt.libvirt.driver [-] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Instance spawned successfully.
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.765 2 DEBUG nova.virt.libvirt.driver [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.780 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.784 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.809 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:14.809 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=12, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=11) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:08:14 compute-0 podman[225830]: 2025-10-02 12:08:14.76609333 +0000 UTC m=+0.028073649 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:08:14 compute-0 podman[225830]: 2025-10-02 12:08:14.860280564 +0000 UTC m=+0.122260803 container create 1a1b35487be377a2f2711b341228a661c7f1516903526dd87d3a141734b96eee (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-7d845a33-56e0-4850-9f27-8a54095796f2, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:08:14 compute-0 systemd[1]: Started libpod-conmon-1a1b35487be377a2f2711b341228a661c7f1516903526dd87d3a141734b96eee.scope.
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.904 2 DEBUG nova.virt.libvirt.driver [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.904 2 DEBUG nova.virt.libvirt.driver [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.905 2 DEBUG nova.virt.libvirt.driver [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.905 2 DEBUG nova.virt.libvirt.driver [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.906 2 DEBUG nova.virt.libvirt.driver [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.906 2 DEBUG nova.virt.libvirt.driver [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.910 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.910 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406894.75348, d55dd428-ae1c-4c43-8582-3a46d50f4822 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.910 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] VM Paused (Lifecycle Event)
Oct 02 12:08:14 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:08:14 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/a8630d835d1f8b1f99e608ae92e192d8924dde1ca8ae9642c73ee6ef6bf2acf1/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.969 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.972 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406894.7585506, d55dd428-ae1c-4c43-8582-3a46d50f4822 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.973 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] VM Resumed (Lifecycle Event)
Oct 02 12:08:14 compute-0 podman[225830]: 2025-10-02 12:08:14.979551857 +0000 UTC m=+0.241532096 container init 1a1b35487be377a2f2711b341228a661c7f1516903526dd87d3a141734b96eee (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-7d845a33-56e0-4850-9f27-8a54095796f2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001)
Oct 02 12:08:14 compute-0 nova_compute[192079]: 2025-10-02 12:08:14.983 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:14 compute-0 podman[225830]: 2025-10-02 12:08:14.985280982 +0000 UTC m=+0.247261221 container start 1a1b35487be377a2f2711b341228a661c7f1516903526dd87d3a141734b96eee (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-7d845a33-56e0-4850-9f27-8a54095796f2, org.label-schema.license=GPLv2, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.vendor=CentOS)
Oct 02 12:08:15 compute-0 neutron-haproxy-ovnmeta-7d845a33-56e0-4850-9f27-8a54095796f2[225844]: [NOTICE]   (225849) : New worker (225851) forked
Oct 02 12:08:15 compute-0 neutron-haproxy-ovnmeta-7d845a33-56e0-4850-9f27-8a54095796f2[225844]: [NOTICE]   (225849) : Loading success.
Oct 02 12:08:15 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:15.080 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 3 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:08:15 compute-0 nova_compute[192079]: 2025-10-02 12:08:15.092 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:08:15 compute-0 nova_compute[192079]: 2025-10-02 12:08:15.095 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:08:15 compute-0 nova_compute[192079]: 2025-10-02 12:08:15.193 2 INFO nova.compute.manager [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Took 7.38 seconds to spawn the instance on the hypervisor.
Oct 02 12:08:15 compute-0 nova_compute[192079]: 2025-10-02 12:08:15.193 2 DEBUG nova.compute.manager [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:08:15 compute-0 nova_compute[192079]: 2025-10-02 12:08:15.201 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:08:15 compute-0 nova_compute[192079]: 2025-10-02 12:08:15.609 2 INFO nova.compute.manager [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Took 8.21 seconds to build instance.
Oct 02 12:08:15 compute-0 nova_compute[192079]: 2025-10-02 12:08:15.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:08:15 compute-0 nova_compute[192079]: 2025-10-02 12:08:15.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:08:15 compute-0 nova_compute[192079]: 2025-10-02 12:08:15.749 2 DEBUG oslo_concurrency.lockutils [None req-538b7c3f-1cf4-4e0d-8250-b10c4ec90e6b fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Lock "d55dd428-ae1c-4c43-8582-3a46d50f4822" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 8.433s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:15 compute-0 nova_compute[192079]: 2025-10-02 12:08:15.779 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:08:16 compute-0 nova_compute[192079]: 2025-10-02 12:08:16.851 2 DEBUG nova.compute.manager [req-be0c874b-76ef-4a77-b866-93ef1cc7506e req-03f81a77-c241-485a-bf66-ec5afb6aa530 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Received event network-vif-plugged-d86c2c53-081c-4754-b070-2dd5028a4c08 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:08:16 compute-0 nova_compute[192079]: 2025-10-02 12:08:16.851 2 DEBUG oslo_concurrency.lockutils [req-be0c874b-76ef-4a77-b866-93ef1cc7506e req-03f81a77-c241-485a-bf66-ec5afb6aa530 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:16 compute-0 nova_compute[192079]: 2025-10-02 12:08:16.851 2 DEBUG oslo_concurrency.lockutils [req-be0c874b-76ef-4a77-b866-93ef1cc7506e req-03f81a77-c241-485a-bf66-ec5afb6aa530 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:16 compute-0 nova_compute[192079]: 2025-10-02 12:08:16.852 2 DEBUG oslo_concurrency.lockutils [req-be0c874b-76ef-4a77-b866-93ef1cc7506e req-03f81a77-c241-485a-bf66-ec5afb6aa530 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:16 compute-0 nova_compute[192079]: 2025-10-02 12:08:16.852 2 DEBUG nova.compute.manager [req-be0c874b-76ef-4a77-b866-93ef1cc7506e req-03f81a77-c241-485a-bf66-ec5afb6aa530 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] No waiting events found dispatching network-vif-plugged-d86c2c53-081c-4754-b070-2dd5028a4c08 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:08:16 compute-0 nova_compute[192079]: 2025-10-02 12:08:16.852 2 WARNING nova.compute.manager [req-be0c874b-76ef-4a77-b866-93ef1cc7506e req-03f81a77-c241-485a-bf66-ec5afb6aa530 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Received unexpected event network-vif-plugged-d86c2c53-081c-4754-b070-2dd5028a4c08 for instance with vm_state active and task_state None.
Oct 02 12:08:17 compute-0 nova_compute[192079]: 2025-10-02 12:08:17.154 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:17 compute-0 nova_compute[192079]: 2025-10-02 12:08:17.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:08:17 compute-0 nova_compute[192079]: 2025-10-02 12:08:17.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:08:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:18.081 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '12'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:08:18 compute-0 podman[225860]: 2025-10-02 12:08:18.164073976 +0000 UTC m=+0.061110732 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:08:18 compute-0 podman[225861]: 2025-10-02 12:08:18.184510408 +0000 UTC m=+0.075381697 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, config_id=iscsid, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, container_name=iscsid)
Oct 02 12:08:19 compute-0 nova_compute[192079]: 2025-10-02 12:08:19.470 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759406884.4695065, bfc65113-6eeb-464b-bbc8-f22f60c53782 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:08:19 compute-0 nova_compute[192079]: 2025-10-02 12:08:19.471 2 INFO nova.compute.manager [-] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] VM Stopped (Lifecycle Event)
Oct 02 12:08:19 compute-0 nova_compute[192079]: 2025-10-02 12:08:19.493 2 DEBUG nova.compute.manager [None req-294665c6-6c67-4b3e-8718-46edd974a800 - - - - - -] [instance: bfc65113-6eeb-464b-bbc8-f22f60c53782] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:08:19 compute-0 nova_compute[192079]: 2025-10-02 12:08:19.778 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:19 compute-0 NetworkManager[51160]: <info>  [1759406899.7809] manager: (patch-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d-to-br-int): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/68)
Oct 02 12:08:19 compute-0 NetworkManager[51160]: <info>  [1759406899.7822] manager: (patch-br-int-to-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/69)
Oct 02 12:08:19 compute-0 ovn_controller[94336]: 2025-10-02T12:08:19Z|00133|binding|INFO|Releasing lport 1c321c19-d630-4a6f-8ba8-7bac90af9bae from this chassis (sb_readonly=0)
Oct 02 12:08:19 compute-0 nova_compute[192079]: 2025-10-02 12:08:19.823 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:19 compute-0 nova_compute[192079]: 2025-10-02 12:08:19.830 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:19 compute-0 nova_compute[192079]: 2025-10-02 12:08:19.985 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:20 compute-0 nova_compute[192079]: 2025-10-02 12:08:20.334 2 DEBUG nova.compute.manager [req-3db84fea-07c8-494b-80db-f9b4116731ac req-8a24581f-4c7e-4075-b22b-44857bdf7846 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Received event network-changed-d86c2c53-081c-4754-b070-2dd5028a4c08 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:08:20 compute-0 nova_compute[192079]: 2025-10-02 12:08:20.336 2 DEBUG nova.compute.manager [req-3db84fea-07c8-494b-80db-f9b4116731ac req-8a24581f-4c7e-4075-b22b-44857bdf7846 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Refreshing instance network info cache due to event network-changed-d86c2c53-081c-4754-b070-2dd5028a4c08. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:08:20 compute-0 nova_compute[192079]: 2025-10-02 12:08:20.336 2 DEBUG oslo_concurrency.lockutils [req-3db84fea-07c8-494b-80db-f9b4116731ac req-8a24581f-4c7e-4075-b22b-44857bdf7846 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-d55dd428-ae1c-4c43-8582-3a46d50f4822" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:08:20 compute-0 nova_compute[192079]: 2025-10-02 12:08:20.336 2 DEBUG oslo_concurrency.lockutils [req-3db84fea-07c8-494b-80db-f9b4116731ac req-8a24581f-4c7e-4075-b22b-44857bdf7846 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-d55dd428-ae1c-4c43-8582-3a46d50f4822" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:08:20 compute-0 nova_compute[192079]: 2025-10-02 12:08:20.337 2 DEBUG nova.network.neutron [req-3db84fea-07c8-494b-80db-f9b4116731ac req-8a24581f-4c7e-4075-b22b-44857bdf7846 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Refreshing network info cache for port d86c2c53-081c-4754-b070-2dd5028a4c08 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:08:22 compute-0 nova_compute[192079]: 2025-10-02 12:08:22.157 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:22 compute-0 nova_compute[192079]: 2025-10-02 12:08:22.962 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:22 compute-0 nova_compute[192079]: 2025-10-02 12:08:22.965 2 DEBUG nova.network.neutron [req-3db84fea-07c8-494b-80db-f9b4116731ac req-8a24581f-4c7e-4075-b22b-44857bdf7846 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Updated VIF entry in instance network info cache for port d86c2c53-081c-4754-b070-2dd5028a4c08. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:08:22 compute-0 nova_compute[192079]: 2025-10-02 12:08:22.966 2 DEBUG nova.network.neutron [req-3db84fea-07c8-494b-80db-f9b4116731ac req-8a24581f-4c7e-4075-b22b-44857bdf7846 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Updating instance_info_cache with network_info: [{"id": "d86c2c53-081c-4754-b070-2dd5028a4c08", "address": "fa:16:3e:4d:9f:a8", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd86c2c53-08", "ovs_interfaceid": "d86c2c53-081c-4754-b070-2dd5028a4c08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:08:22 compute-0 nova_compute[192079]: 2025-10-02 12:08:22.990 2 DEBUG oslo_concurrency.lockutils [req-3db84fea-07c8-494b-80db-f9b4116731ac req-8a24581f-4c7e-4075-b22b-44857bdf7846 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-d55dd428-ae1c-4c43-8582-3a46d50f4822" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:08:23 compute-0 nova_compute[192079]: 2025-10-02 12:08:23.326 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:24 compute-0 nova_compute[192079]: 2025-10-02 12:08:24.987 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:27 compute-0 nova_compute[192079]: 2025-10-02 12:08:27.159 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:27 compute-0 ovn_controller[94336]: 2025-10-02T12:08:27Z|00012|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:4d:9f:a8 10.100.0.10
Oct 02 12:08:27 compute-0 ovn_controller[94336]: 2025-10-02T12:08:27Z|00013|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:4d:9f:a8 10.100.0.10
Oct 02 12:08:27 compute-0 nova_compute[192079]: 2025-10-02 12:08:27.789 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:28 compute-0 podman[225923]: 2025-10-02 12:08:28.159765962 +0000 UTC m=+0.068689526 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', 
'/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, managed_by=edpm_ansible, tcib_managed=true, container_name=ovn_metadata_agent, org.label-schema.license=GPLv2, config_id=ovn_metadata_agent, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:08:28 compute-0 podman[225928]: 2025-10-02 12:08:28.18859517 +0000 UTC m=+0.075415987 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>)
Oct 02 12:08:28 compute-0 podman[225924]: 2025-10-02 12:08:28.201766746 +0000 UTC m=+0.093397853 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_managed=true, container_name=ovn_controller, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, config_id=ovn_controller, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2)
Oct 02 12:08:28 compute-0 nova_compute[192079]: 2025-10-02 12:08:28.791 2 DEBUG oslo_concurrency.lockutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Acquiring lock "1df89ab6-e68b-4cdb-96ac-80896dce72c0" by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:28 compute-0 nova_compute[192079]: 2025-10-02 12:08:28.791 2 DEBUG oslo_concurrency.lockutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Lock "1df89ab6-e68b-4cdb-96ac-80896dce72c0" acquired by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:28 compute-0 nova_compute[192079]: 2025-10-02 12:08:28.791 2 INFO nova.compute.manager [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Unshelving
Oct 02 12:08:28 compute-0 nova_compute[192079]: 2025-10-02 12:08:28.907 2 DEBUG oslo_concurrency.lockutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:28 compute-0 nova_compute[192079]: 2025-10-02 12:08:28.908 2 DEBUG oslo_concurrency.lockutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:28 compute-0 nova_compute[192079]: 2025-10-02 12:08:28.913 2 DEBUG nova.objects.instance [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Lazy-loading 'pci_requests' on Instance uuid 1df89ab6-e68b-4cdb-96ac-80896dce72c0 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:08:28 compute-0 nova_compute[192079]: 2025-10-02 12:08:28.932 2 DEBUG nova.objects.instance [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Lazy-loading 'numa_topology' on Instance uuid 1df89ab6-e68b-4cdb-96ac-80896dce72c0 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:08:28 compute-0 nova_compute[192079]: 2025-10-02 12:08:28.966 2 DEBUG nova.virt.hardware [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:08:28 compute-0 nova_compute[192079]: 2025-10-02 12:08:28.966 2 INFO nova.compute.claims [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:08:29 compute-0 nova_compute[192079]: 2025-10-02 12:08:29.120 2 DEBUG nova.compute.provider_tree [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:08:29 compute-0 nova_compute[192079]: 2025-10-02 12:08:29.139 2 DEBUG nova.scheduler.client.report [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:08:29 compute-0 nova_compute[192079]: 2025-10-02 12:08:29.163 2 DEBUG oslo_concurrency.lockutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.255s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:29 compute-0 nova_compute[192079]: 2025-10-02 12:08:29.355 2 DEBUG oslo_concurrency.lockutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Acquiring lock "refresh_cache-1df89ab6-e68b-4cdb-96ac-80896dce72c0" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:08:29 compute-0 nova_compute[192079]: 2025-10-02 12:08:29.356 2 DEBUG oslo_concurrency.lockutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Acquired lock "refresh_cache-1df89ab6-e68b-4cdb-96ac-80896dce72c0" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:08:29 compute-0 nova_compute[192079]: 2025-10-02 12:08:29.356 2 DEBUG nova.network.neutron [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:08:29 compute-0 nova_compute[192079]: 2025-10-02 12:08:29.503 2 DEBUG nova.network.neutron [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:08:29 compute-0 nova_compute[192079]: 2025-10-02 12:08:29.782 2 DEBUG nova.network.neutron [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:08:29 compute-0 nova_compute[192079]: 2025-10-02 12:08:29.805 2 DEBUG oslo_concurrency.lockutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Releasing lock "refresh_cache-1df89ab6-e68b-4cdb-96ac-80896dce72c0" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:08:29 compute-0 nova_compute[192079]: 2025-10-02 12:08:29.806 2 DEBUG nova.virt.libvirt.driver [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:08:29 compute-0 nova_compute[192079]: 2025-10-02 12:08:29.806 2 INFO nova.virt.libvirt.driver [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Creating image(s)
Oct 02 12:08:29 compute-0 nova_compute[192079]: 2025-10-02 12:08:29.807 2 DEBUG oslo_concurrency.lockutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Acquiring lock "/var/lib/nova/instances/1df89ab6-e68b-4cdb-96ac-80896dce72c0/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:29 compute-0 nova_compute[192079]: 2025-10-02 12:08:29.807 2 DEBUG oslo_concurrency.lockutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Lock "/var/lib/nova/instances/1df89ab6-e68b-4cdb-96ac-80896dce72c0/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:29 compute-0 nova_compute[192079]: 2025-10-02 12:08:29.808 2 DEBUG oslo_concurrency.lockutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Lock "/var/lib/nova/instances/1df89ab6-e68b-4cdb-96ac-80896dce72c0/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:29 compute-0 nova_compute[192079]: 2025-10-02 12:08:29.808 2 DEBUG nova.objects.instance [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Lazy-loading 'trusted_certs' on Instance uuid 1df89ab6-e68b-4cdb-96ac-80896dce72c0 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:08:29 compute-0 nova_compute[192079]: 2025-10-02 12:08:29.821 2 DEBUG oslo_concurrency.lockutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Acquiring lock "1c97c4192acfe97009a420fda390ab0403d0e46a" by "nova.virt.libvirt.imagebackend.Image.cache.<locals>.fetch_func_sync" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:29 compute-0 nova_compute[192079]: 2025-10-02 12:08:29.822 2 DEBUG oslo_concurrency.lockutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Lock "1c97c4192acfe97009a420fda390ab0403d0e46a" acquired by "nova.virt.libvirt.imagebackend.Image.cache.<locals>.fetch_func_sync" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:29 compute-0 nova_compute[192079]: 2025-10-02 12:08:29.989 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:31 compute-0 nova_compute[192079]: 2025-10-02 12:08:31.612 2 DEBUG oslo_concurrency.processutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/1c97c4192acfe97009a420fda390ab0403d0e46a.part --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:08:31 compute-0 nova_compute[192079]: 2025-10-02 12:08:31.666 2 DEBUG oslo_concurrency.processutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/1c97c4192acfe97009a420fda390ab0403d0e46a.part --force-share --output=json" returned: 0 in 0.053s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:08:31 compute-0 nova_compute[192079]: 2025-10-02 12:08:31.667 2 DEBUG nova.virt.images [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] e2eca2ac-ad19-4b9b-ba60-fa86ad4a9c00 was qcow2, converting to raw fetch_to_raw /usr/lib/python3.9/site-packages/nova/virt/images.py:242
Oct 02 12:08:31 compute-0 nova_compute[192079]: 2025-10-02 12:08:31.669 2 DEBUG nova.privsep.utils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Path '/var/lib/nova/instances' supports direct I/O supports_direct_io /usr/lib/python3.9/site-packages/nova/privsep/utils.py:63
Oct 02 12:08:31 compute-0 nova_compute[192079]: 2025-10-02 12:08:31.670 2 DEBUG oslo_concurrency.processutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Running cmd (subprocess): qemu-img convert -t none -O raw -f qcow2 /var/lib/nova/instances/_base/1c97c4192acfe97009a420fda390ab0403d0e46a.part /var/lib/nova/instances/_base/1c97c4192acfe97009a420fda390ab0403d0e46a.converted execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:08:32 compute-0 nova_compute[192079]: 2025-10-02 12:08:32.007 2 DEBUG oslo_concurrency.processutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] CMD "qemu-img convert -t none -O raw -f qcow2 /var/lib/nova/instances/_base/1c97c4192acfe97009a420fda390ab0403d0e46a.part /var/lib/nova/instances/_base/1c97c4192acfe97009a420fda390ab0403d0e46a.converted" returned: 0 in 0.337s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:08:32 compute-0 nova_compute[192079]: 2025-10-02 12:08:32.017 2 DEBUG oslo_concurrency.processutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/1c97c4192acfe97009a420fda390ab0403d0e46a.converted --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:08:32 compute-0 nova_compute[192079]: 2025-10-02 12:08:32.128 2 DEBUG oslo_concurrency.processutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/1c97c4192acfe97009a420fda390ab0403d0e46a.converted --force-share --output=json" returned: 0 in 0.110s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:08:32 compute-0 nova_compute[192079]: 2025-10-02 12:08:32.129 2 DEBUG oslo_concurrency.lockutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Lock "1c97c4192acfe97009a420fda390ab0403d0e46a" "released" by "nova.virt.libvirt.imagebackend.Image.cache.<locals>.fetch_func_sync" :: held 2.307s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:32 compute-0 nova_compute[192079]: 2025-10-02 12:08:32.142 2 DEBUG oslo_concurrency.processutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/1c97c4192acfe97009a420fda390ab0403d0e46a --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:08:32 compute-0 nova_compute[192079]: 2025-10-02 12:08:32.162 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:32 compute-0 nova_compute[192079]: 2025-10-02 12:08:32.200 2 DEBUG oslo_concurrency.processutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/1c97c4192acfe97009a420fda390ab0403d0e46a --force-share --output=json" returned: 0 in 0.058s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:08:32 compute-0 nova_compute[192079]: 2025-10-02 12:08:32.201 2 DEBUG oslo_concurrency.lockutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Acquiring lock "1c97c4192acfe97009a420fda390ab0403d0e46a" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:32 compute-0 nova_compute[192079]: 2025-10-02 12:08:32.202 2 DEBUG oslo_concurrency.lockutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Lock "1c97c4192acfe97009a420fda390ab0403d0e46a" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:32 compute-0 nova_compute[192079]: 2025-10-02 12:08:32.220 2 DEBUG oslo_concurrency.processutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/1c97c4192acfe97009a420fda390ab0403d0e46a --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:08:32 compute-0 nova_compute[192079]: 2025-10-02 12:08:32.298 2 DEBUG oslo_concurrency.processutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/1c97c4192acfe97009a420fda390ab0403d0e46a --force-share --output=json" returned: 0 in 0.078s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:08:32 compute-0 nova_compute[192079]: 2025-10-02 12:08:32.299 2 DEBUG oslo_concurrency.processutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/1c97c4192acfe97009a420fda390ab0403d0e46a,backing_fmt=raw /var/lib/nova/instances/1df89ab6-e68b-4cdb-96ac-80896dce72c0/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:08:32 compute-0 nova_compute[192079]: 2025-10-02 12:08:32.387 2 DEBUG oslo_concurrency.processutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/1c97c4192acfe97009a420fda390ab0403d0e46a,backing_fmt=raw /var/lib/nova/instances/1df89ab6-e68b-4cdb-96ac-80896dce72c0/disk 1073741824" returned: 0 in 0.088s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:08:32 compute-0 nova_compute[192079]: 2025-10-02 12:08:32.388 2 DEBUG oslo_concurrency.lockutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Lock "1c97c4192acfe97009a420fda390ab0403d0e46a" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.186s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:32 compute-0 nova_compute[192079]: 2025-10-02 12:08:32.388 2 DEBUG oslo_concurrency.processutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/1c97c4192acfe97009a420fda390ab0403d0e46a --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:08:32 compute-0 nova_compute[192079]: 2025-10-02 12:08:32.478 2 DEBUG oslo_concurrency.processutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/1c97c4192acfe97009a420fda390ab0403d0e46a --force-share --output=json" returned: 0 in 0.090s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:08:32 compute-0 nova_compute[192079]: 2025-10-02 12:08:32.480 2 DEBUG nova.objects.instance [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Lazy-loading 'migration_context' on Instance uuid 1df89ab6-e68b-4cdb-96ac-80896dce72c0 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:08:32 compute-0 nova_compute[192079]: 2025-10-02 12:08:32.498 2 INFO nova.virt.libvirt.driver [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Rebasing disk image.
Oct 02 12:08:32 compute-0 nova_compute[192079]: 2025-10-02 12:08:32.499 2 DEBUG oslo_concurrency.processutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:08:32 compute-0 nova_compute[192079]: 2025-10-02 12:08:32.563 2 DEBUG oslo_concurrency.processutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.064s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:08:32 compute-0 nova_compute[192079]: 2025-10-02 12:08:32.565 2 DEBUG oslo_concurrency.processutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Running cmd (subprocess): qemu-img rebase -b /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 -F raw /var/lib/nova/instances/1df89ab6-e68b-4cdb-96ac-80896dce72c0/disk execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.235 2 DEBUG oslo_concurrency.processutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] CMD "qemu-img rebase -b /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 -F raw /var/lib/nova/instances/1df89ab6-e68b-4cdb-96ac-80896dce72c0/disk" returned: 0 in 1.670s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.236 2 DEBUG nova.virt.libvirt.driver [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.237 2 DEBUG nova.virt.libvirt.driver [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Ensure instance console log exists: /var/lib/nova/instances/1df89ab6-e68b-4cdb-96ac-80896dce72c0/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.237 2 DEBUG oslo_concurrency.lockutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.237 2 DEBUG oslo_concurrency.lockutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.238 2 DEBUG oslo_concurrency.lockutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.239 2 DEBUG nova.virt.libvirt.driver [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Start _get_guest_xml network_info=[] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} image_meta=ImageMeta(checksum='365ebf534f5f64e3d89c6406a8cfe9c2',container_format='bare',created_at=2025-10-02T12:08:07Z,direct_url=<?>,disk_format='qcow2',id=e2eca2ac-ad19-4b9b-ba60-fa86ad4a9c00,min_disk=1,min_ram=0,name='tempest-UnshelveToHostMultiNodesTest-server-1188612115-shelved',owner='ef1d6333695d494da23ba067aaed9cfb',properties=ImageMetaProps,protected=<?>,size=52363264,status='active',tags=<?>,updated_at=2025-10-02T12:08:24Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.246 2 WARNING nova.virt.libvirt.driver [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.254 2 DEBUG nova.virt.libvirt.host [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.257 2 DEBUG nova.virt.libvirt.host [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.261 2 DEBUG nova.virt.libvirt.host [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.262 2 DEBUG nova.virt.libvirt.host [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.265 2 DEBUG nova.virt.libvirt.driver [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.265 2 DEBUG nova.virt.hardware [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='365ebf534f5f64e3d89c6406a8cfe9c2',container_format='bare',created_at=2025-10-02T12:08:07Z,direct_url=<?>,disk_format='qcow2',id=e2eca2ac-ad19-4b9b-ba60-fa86ad4a9c00,min_disk=1,min_ram=0,name='tempest-UnshelveToHostMultiNodesTest-server-1188612115-shelved',owner='ef1d6333695d494da23ba067aaed9cfb',properties=ImageMetaProps,protected=<?>,size=52363264,status='active',tags=<?>,updated_at=2025-10-02T12:08:24Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.266 2 DEBUG nova.virt.hardware [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.267 2 DEBUG nova.virt.hardware [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.267 2 DEBUG nova.virt.hardware [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.268 2 DEBUG nova.virt.hardware [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.268 2 DEBUG nova.virt.hardware [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.269 2 DEBUG nova.virt.hardware [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.269 2 DEBUG nova.virt.hardware [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.270 2 DEBUG nova.virt.hardware [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.270 2 DEBUG nova.virt.hardware [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.271 2 DEBUG nova.virt.hardware [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.271 2 DEBUG nova.objects.instance [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Lazy-loading 'vcpu_model' on Instance uuid 1df89ab6-e68b-4cdb-96ac-80896dce72c0 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.291 2 DEBUG nova.objects.instance [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Lazy-loading 'pci_devices' on Instance uuid 1df89ab6-e68b-4cdb-96ac-80896dce72c0 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.323 2 DEBUG nova.virt.libvirt.driver [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:08:34 compute-0 nova_compute[192079]:   <uuid>1df89ab6-e68b-4cdb-96ac-80896dce72c0</uuid>
Oct 02 12:08:34 compute-0 nova_compute[192079]:   <name>instance-0000002a</name>
Oct 02 12:08:34 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:08:34 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:08:34 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:08:34 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:       <nova:name>tempest-UnshelveToHostMultiNodesTest-server-1188612115</nova:name>
Oct 02 12:08:34 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:08:34</nova:creationTime>
Oct 02 12:08:34 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:08:34 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:08:34 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:08:34 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:08:34 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:08:34 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:08:34 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:08:34 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:08:34 compute-0 nova_compute[192079]:         <nova:user uuid="70e85655ffe7475ba88961b19bf4d65a">tempest-UnshelveToHostMultiNodesTest-250675149-project-member</nova:user>
Oct 02 12:08:34 compute-0 nova_compute[192079]:         <nova:project uuid="ef1d6333695d494da23ba067aaed9cfb">tempest-UnshelveToHostMultiNodesTest-250675149</nova:project>
Oct 02 12:08:34 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:08:34 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="e2eca2ac-ad19-4b9b-ba60-fa86ad4a9c00"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:       <nova:ports/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:08:34 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:08:34 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <system>
Oct 02 12:08:34 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:08:34 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:08:34 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:08:34 compute-0 nova_compute[192079]:       <entry name="serial">1df89ab6-e68b-4cdb-96ac-80896dce72c0</entry>
Oct 02 12:08:34 compute-0 nova_compute[192079]:       <entry name="uuid">1df89ab6-e68b-4cdb-96ac-80896dce72c0</entry>
Oct 02 12:08:34 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     </system>
Oct 02 12:08:34 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:08:34 compute-0 nova_compute[192079]:   <os>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:   </os>
Oct 02 12:08:34 compute-0 nova_compute[192079]:   <features>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:   </features>
Oct 02 12:08:34 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:08:34 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:08:34 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:08:34 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/1df89ab6-e68b-4cdb-96ac-80896dce72c0/disk"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:08:34 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/1df89ab6-e68b-4cdb-96ac-80896dce72c0/disk.config"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:08:34 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/1df89ab6-e68b-4cdb-96ac-80896dce72c0/console.log" append="off"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <video>
Oct 02 12:08:34 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     </video>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <input type="keyboard" bus="usb"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:08:34 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:08:34 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:08:34 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:08:34 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:08:34 compute-0 nova_compute[192079]: </domain>
Oct 02 12:08:34 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.374 2 DEBUG nova.virt.libvirt.driver [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.374 2 DEBUG nova.virt.libvirt.driver [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.375 2 INFO nova.virt.libvirt.driver [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Using config drive
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.391 2 DEBUG nova.objects.instance [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Lazy-loading 'ec2_ids' on Instance uuid 1df89ab6-e68b-4cdb-96ac-80896dce72c0 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.438 2 DEBUG nova.objects.instance [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Lazy-loading 'keypairs' on Instance uuid 1df89ab6-e68b-4cdb-96ac-80896dce72c0 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.576 2 INFO nova.virt.libvirt.driver [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Creating config drive at /var/lib/nova/instances/1df89ab6-e68b-4cdb-96ac-80896dce72c0/disk.config
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.581 2 DEBUG oslo_concurrency.processutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/1df89ab6-e68b-4cdb-96ac-80896dce72c0/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp52dv_62v execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:08:34 compute-0 ovn_controller[94336]: 2025-10-02T12:08:34Z|00134|binding|INFO|Releasing lport 1c321c19-d630-4a6f-8ba8-7bac90af9bae from this chassis (sb_readonly=0)
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.651 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:34 compute-0 nova_compute[192079]: 2025-10-02 12:08:34.714 2 DEBUG oslo_concurrency.processutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/1df89ab6-e68b-4cdb-96ac-80896dce72c0/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp52dv_62v" returned: 0 in 0.132s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:08:34 compute-0 systemd-machined[152150]: New machine qemu-24-instance-0000002a.
Oct 02 12:08:34 compute-0 systemd[1]: Started Virtual Machine qemu-24-instance-0000002a.
Oct 02 12:08:35 compute-0 nova_compute[192079]: 2025-10-02 12:08:35.033 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:35 compute-0 nova_compute[192079]: 2025-10-02 12:08:35.498 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406915.4977503, 1df89ab6-e68b-4cdb-96ac-80896dce72c0 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:08:35 compute-0 nova_compute[192079]: 2025-10-02 12:08:35.498 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] VM Resumed (Lifecycle Event)
Oct 02 12:08:35 compute-0 nova_compute[192079]: 2025-10-02 12:08:35.502 2 DEBUG nova.compute.manager [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:08:35 compute-0 nova_compute[192079]: 2025-10-02 12:08:35.503 2 DEBUG nova.virt.libvirt.driver [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:08:35 compute-0 nova_compute[192079]: 2025-10-02 12:08:35.507 2 INFO nova.virt.libvirt.driver [-] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Instance spawned successfully.
Oct 02 12:08:35 compute-0 nova_compute[192079]: 2025-10-02 12:08:35.527 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:08:35 compute-0 nova_compute[192079]: 2025-10-02 12:08:35.531 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: shelved_offloaded, current task_state: spawning, current DB power_state: 4, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:08:35 compute-0 nova_compute[192079]: 2025-10-02 12:08:35.552 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:08:35 compute-0 nova_compute[192079]: 2025-10-02 12:08:35.552 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406915.5025377, 1df89ab6-e68b-4cdb-96ac-80896dce72c0 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:08:35 compute-0 nova_compute[192079]: 2025-10-02 12:08:35.552 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] VM Started (Lifecycle Event)
Oct 02 12:08:35 compute-0 nova_compute[192079]: 2025-10-02 12:08:35.571 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:08:35 compute-0 nova_compute[192079]: 2025-10-02 12:08:35.574 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Synchronizing instance power state after lifecycle event "Started"; current vm_state: shelved_offloaded, current task_state: spawning, current DB power_state: 4, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:08:35 compute-0 nova_compute[192079]: 2025-10-02 12:08:35.600 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:08:36 compute-0 nova_compute[192079]: 2025-10-02 12:08:36.201 2 DEBUG nova.compute.manager [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:08:36 compute-0 nova_compute[192079]: 2025-10-02 12:08:36.298 2 DEBUG oslo_concurrency.lockutils [None req-f8d5a35d-448b-4a05-9d77-6d54ce78697c 7c9f5af6d8f24daf9842b195fa11137e 6e923b73e6774f58bca20f0f5d2962bf - - default default] Lock "1df89ab6-e68b-4cdb-96ac-80896dce72c0" "released" by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: held 7.507s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:37 compute-0 nova_compute[192079]: 2025-10-02 12:08:37.167 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:37 compute-0 podman[226049]: 2025-10-02 12:08:37.189311352 +0000 UTC m=+0.087245067 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, 
org.label-schema.vendor=CentOS, container_name=ceilometer_agent_compute, tcib_managed=true)
Oct 02 12:08:38 compute-0 nova_compute[192079]: 2025-10-02 12:08:38.783 2 DEBUG oslo_concurrency.lockutils [None req-d62a9e3f-106a-4bef-9ae3-af3d963d0f2e 70e85655ffe7475ba88961b19bf4d65a ef1d6333695d494da23ba067aaed9cfb - - default default] Acquiring lock "1df89ab6-e68b-4cdb-96ac-80896dce72c0" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:38 compute-0 nova_compute[192079]: 2025-10-02 12:08:38.783 2 DEBUG oslo_concurrency.lockutils [None req-d62a9e3f-106a-4bef-9ae3-af3d963d0f2e 70e85655ffe7475ba88961b19bf4d65a ef1d6333695d494da23ba067aaed9cfb - - default default] Lock "1df89ab6-e68b-4cdb-96ac-80896dce72c0" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:38 compute-0 nova_compute[192079]: 2025-10-02 12:08:38.784 2 DEBUG oslo_concurrency.lockutils [None req-d62a9e3f-106a-4bef-9ae3-af3d963d0f2e 70e85655ffe7475ba88961b19bf4d65a ef1d6333695d494da23ba067aaed9cfb - - default default] Acquiring lock "1df89ab6-e68b-4cdb-96ac-80896dce72c0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:38 compute-0 nova_compute[192079]: 2025-10-02 12:08:38.784 2 DEBUG oslo_concurrency.lockutils [None req-d62a9e3f-106a-4bef-9ae3-af3d963d0f2e 70e85655ffe7475ba88961b19bf4d65a ef1d6333695d494da23ba067aaed9cfb - - default default] Lock "1df89ab6-e68b-4cdb-96ac-80896dce72c0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:38 compute-0 nova_compute[192079]: 2025-10-02 12:08:38.784 2 DEBUG oslo_concurrency.lockutils [None req-d62a9e3f-106a-4bef-9ae3-af3d963d0f2e 70e85655ffe7475ba88961b19bf4d65a ef1d6333695d494da23ba067aaed9cfb - - default default] Lock "1df89ab6-e68b-4cdb-96ac-80896dce72c0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:38 compute-0 nova_compute[192079]: 2025-10-02 12:08:38.797 2 INFO nova.compute.manager [None req-d62a9e3f-106a-4bef-9ae3-af3d963d0f2e 70e85655ffe7475ba88961b19bf4d65a ef1d6333695d494da23ba067aaed9cfb - - default default] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Terminating instance
Oct 02 12:08:38 compute-0 nova_compute[192079]: 2025-10-02 12:08:38.809 2 DEBUG oslo_concurrency.lockutils [None req-d62a9e3f-106a-4bef-9ae3-af3d963d0f2e 70e85655ffe7475ba88961b19bf4d65a ef1d6333695d494da23ba067aaed9cfb - - default default] Acquiring lock "refresh_cache-1df89ab6-e68b-4cdb-96ac-80896dce72c0" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:08:38 compute-0 nova_compute[192079]: 2025-10-02 12:08:38.809 2 DEBUG oslo_concurrency.lockutils [None req-d62a9e3f-106a-4bef-9ae3-af3d963d0f2e 70e85655ffe7475ba88961b19bf4d65a ef1d6333695d494da23ba067aaed9cfb - - default default] Acquired lock "refresh_cache-1df89ab6-e68b-4cdb-96ac-80896dce72c0" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:08:38 compute-0 nova_compute[192079]: 2025-10-02 12:08:38.810 2 DEBUG nova.network.neutron [None req-d62a9e3f-106a-4bef-9ae3-af3d963d0f2e 70e85655ffe7475ba88961b19bf4d65a ef1d6333695d494da23ba067aaed9cfb - - default default] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:08:39 compute-0 nova_compute[192079]: 2025-10-02 12:08:39.065 2 DEBUG nova.network.neutron [None req-d62a9e3f-106a-4bef-9ae3-af3d963d0f2e 70e85655ffe7475ba88961b19bf4d65a ef1d6333695d494da23ba067aaed9cfb - - default default] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:08:39 compute-0 nova_compute[192079]: 2025-10-02 12:08:39.207 2 DEBUG oslo_concurrency.lockutils [None req-528972f7-1e25-4430-9d9c-7ae4a8117b1e fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Acquiring lock "interface-d55dd428-ae1c-4c43-8582-3a46d50f4822-None" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:39 compute-0 nova_compute[192079]: 2025-10-02 12:08:39.208 2 DEBUG oslo_concurrency.lockutils [None req-528972f7-1e25-4430-9d9c-7ae4a8117b1e fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Lock "interface-d55dd428-ae1c-4c43-8582-3a46d50f4822-None" acquired by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:39 compute-0 nova_compute[192079]: 2025-10-02 12:08:39.208 2 DEBUG nova.objects.instance [None req-528972f7-1e25-4430-9d9c-7ae4a8117b1e fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Lazy-loading 'flavor' on Instance uuid d55dd428-ae1c-4c43-8582-3a46d50f4822 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:08:39 compute-0 nova_compute[192079]: 2025-10-02 12:08:39.396 2 DEBUG nova.network.neutron [None req-d62a9e3f-106a-4bef-9ae3-af3d963d0f2e 70e85655ffe7475ba88961b19bf4d65a ef1d6333695d494da23ba067aaed9cfb - - default default] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:08:39 compute-0 nova_compute[192079]: 2025-10-02 12:08:39.412 2 DEBUG oslo_concurrency.lockutils [None req-d62a9e3f-106a-4bef-9ae3-af3d963d0f2e 70e85655ffe7475ba88961b19bf4d65a ef1d6333695d494da23ba067aaed9cfb - - default default] Releasing lock "refresh_cache-1df89ab6-e68b-4cdb-96ac-80896dce72c0" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:08:39 compute-0 nova_compute[192079]: 2025-10-02 12:08:39.413 2 DEBUG nova.compute.manager [None req-d62a9e3f-106a-4bef-9ae3-af3d963d0f2e 70e85655ffe7475ba88961b19bf4d65a ef1d6333695d494da23ba067aaed9cfb - - default default] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:08:39 compute-0 systemd[1]: machine-qemu\x2d24\x2dinstance\x2d0000002a.scope: Deactivated successfully.
Oct 02 12:08:39 compute-0 systemd[1]: machine-qemu\x2d24\x2dinstance\x2d0000002a.scope: Consumed 4.697s CPU time.
Oct 02 12:08:39 compute-0 systemd-machined[152150]: Machine qemu-24-instance-0000002a terminated.
Oct 02 12:08:39 compute-0 nova_compute[192079]: 2025-10-02 12:08:39.574 2 DEBUG nova.objects.instance [None req-528972f7-1e25-4430-9d9c-7ae4a8117b1e fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Lazy-loading 'pci_requests' on Instance uuid d55dd428-ae1c-4c43-8582-3a46d50f4822 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:08:39 compute-0 nova_compute[192079]: 2025-10-02 12:08:39.588 2 DEBUG nova.network.neutron [None req-528972f7-1e25-4430-9d9c-7ae4a8117b1e fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:08:39 compute-0 nova_compute[192079]: 2025-10-02 12:08:39.667 2 INFO nova.virt.libvirt.driver [-] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Instance destroyed successfully.
Oct 02 12:08:39 compute-0 nova_compute[192079]: 2025-10-02 12:08:39.667 2 DEBUG nova.objects.instance [None req-d62a9e3f-106a-4bef-9ae3-af3d963d0f2e 70e85655ffe7475ba88961b19bf4d65a ef1d6333695d494da23ba067aaed9cfb - - default default] Lazy-loading 'resources' on Instance uuid 1df89ab6-e68b-4cdb-96ac-80896dce72c0 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:08:39 compute-0 nova_compute[192079]: 2025-10-02 12:08:39.682 2 INFO nova.virt.libvirt.driver [None req-d62a9e3f-106a-4bef-9ae3-af3d963d0f2e 70e85655ffe7475ba88961b19bf4d65a ef1d6333695d494da23ba067aaed9cfb - - default default] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Deleting instance files /var/lib/nova/instances/1df89ab6-e68b-4cdb-96ac-80896dce72c0_del
Oct 02 12:08:39 compute-0 nova_compute[192079]: 2025-10-02 12:08:39.688 2 INFO nova.virt.libvirt.driver [None req-d62a9e3f-106a-4bef-9ae3-af3d963d0f2e 70e85655ffe7475ba88961b19bf4d65a ef1d6333695d494da23ba067aaed9cfb - - default default] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Deletion of /var/lib/nova/instances/1df89ab6-e68b-4cdb-96ac-80896dce72c0_del complete
Oct 02 12:08:39 compute-0 nova_compute[192079]: 2025-10-02 12:08:39.769 2 INFO nova.compute.manager [None req-d62a9e3f-106a-4bef-9ae3-af3d963d0f2e 70e85655ffe7475ba88961b19bf4d65a ef1d6333695d494da23ba067aaed9cfb - - default default] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Took 0.35 seconds to destroy the instance on the hypervisor.
Oct 02 12:08:39 compute-0 nova_compute[192079]: 2025-10-02 12:08:39.770 2 DEBUG oslo.service.loopingcall [None req-d62a9e3f-106a-4bef-9ae3-af3d963d0f2e 70e85655ffe7475ba88961b19bf4d65a ef1d6333695d494da23ba067aaed9cfb - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:08:39 compute-0 nova_compute[192079]: 2025-10-02 12:08:39.770 2 DEBUG nova.compute.manager [-] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:08:39 compute-0 nova_compute[192079]: 2025-10-02 12:08:39.770 2 DEBUG nova.network.neutron [-] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:08:39 compute-0 nova_compute[192079]: 2025-10-02 12:08:39.826 2 DEBUG nova.policy [None req-528972f7-1e25-4430-9d9c-7ae4a8117b1e fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fbc7616089cb4f78832692487019c83d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ef4e3be787374d90a6a236c7f76bd940', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:08:40 compute-0 nova_compute[192079]: 2025-10-02 12:08:40.035 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:40 compute-0 nova_compute[192079]: 2025-10-02 12:08:40.486 2 DEBUG nova.network.neutron [-] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:08:40 compute-0 nova_compute[192079]: 2025-10-02 12:08:40.508 2 DEBUG nova.network.neutron [-] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:08:40 compute-0 nova_compute[192079]: 2025-10-02 12:08:40.527 2 INFO nova.compute.manager [-] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Took 0.76 seconds to deallocate network for instance.
Oct 02 12:08:40 compute-0 nova_compute[192079]: 2025-10-02 12:08:40.631 2 DEBUG oslo_concurrency.lockutils [None req-d62a9e3f-106a-4bef-9ae3-af3d963d0f2e 70e85655ffe7475ba88961b19bf4d65a ef1d6333695d494da23ba067aaed9cfb - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:40 compute-0 nova_compute[192079]: 2025-10-02 12:08:40.632 2 DEBUG oslo_concurrency.lockutils [None req-d62a9e3f-106a-4bef-9ae3-af3d963d0f2e 70e85655ffe7475ba88961b19bf4d65a ef1d6333695d494da23ba067aaed9cfb - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:40 compute-0 nova_compute[192079]: 2025-10-02 12:08:40.725 2 DEBUG nova.compute.provider_tree [None req-d62a9e3f-106a-4bef-9ae3-af3d963d0f2e 70e85655ffe7475ba88961b19bf4d65a ef1d6333695d494da23ba067aaed9cfb - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:08:40 compute-0 nova_compute[192079]: 2025-10-02 12:08:40.745 2 DEBUG nova.scheduler.client.report [None req-d62a9e3f-106a-4bef-9ae3-af3d963d0f2e 70e85655ffe7475ba88961b19bf4d65a ef1d6333695d494da23ba067aaed9cfb - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:08:40 compute-0 nova_compute[192079]: 2025-10-02 12:08:40.765 2 DEBUG oslo_concurrency.lockutils [None req-d62a9e3f-106a-4bef-9ae3-af3d963d0f2e 70e85655ffe7475ba88961b19bf4d65a ef1d6333695d494da23ba067aaed9cfb - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.134s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:40 compute-0 nova_compute[192079]: 2025-10-02 12:08:40.794 2 INFO nova.scheduler.client.report [None req-d62a9e3f-106a-4bef-9ae3-af3d963d0f2e 70e85655ffe7475ba88961b19bf4d65a ef1d6333695d494da23ba067aaed9cfb - - default default] Deleted allocations for instance 1df89ab6-e68b-4cdb-96ac-80896dce72c0
Oct 02 12:08:40 compute-0 nova_compute[192079]: 2025-10-02 12:08:40.884 2 DEBUG oslo_concurrency.lockutils [None req-d62a9e3f-106a-4bef-9ae3-af3d963d0f2e 70e85655ffe7475ba88961b19bf4d65a ef1d6333695d494da23ba067aaed9cfb - - default default] Lock "1df89ab6-e68b-4cdb-96ac-80896dce72c0" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 2.101s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:41 compute-0 nova_compute[192079]: 2025-10-02 12:08:41.593 2 DEBUG nova.network.neutron [None req-528972f7-1e25-4430-9d9c-7ae4a8117b1e fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Successfully created port: a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:08:42 compute-0 nova_compute[192079]: 2025-10-02 12:08:42.170 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:42 compute-0 podman[226079]: 2025-10-02 12:08:42.176825133 +0000 UTC m=+0.071752567 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, container_name=multipathd, org.label-schema.build-date=20251001)
Oct 02 12:08:42 compute-0 podman[226078]: 2025-10-02 12:08:42.199163606 +0000 UTC m=+0.091376267 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, com.redhat.component=ubi9-minimal-container, distribution-scope=public, vendor=Red Hat, Inc., io.buildah.version=1.33.7, version=9.6, container_name=openstack_network_exporter, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., architecture=x86_64, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., name=ubi9-minimal, config_id=edpm, url=https://catalog.redhat.com/en/search?searchType=containers, vcs-type=git, io.openshift.expose-services=, managed_by=edpm_ansible, io.openshift.tags=minimal rhel9, maintainer=Red Hat, Inc., build-date=2025-08-20T13:12:41, release=1755695350)
Oct 02 12:08:42 compute-0 nova_compute[192079]: 2025-10-02 12:08:42.789 2 DEBUG nova.network.neutron [None req-528972f7-1e25-4430-9d9c-7ae4a8117b1e fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Successfully updated port: a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:08:42 compute-0 nova_compute[192079]: 2025-10-02 12:08:42.805 2 DEBUG oslo_concurrency.lockutils [None req-528972f7-1e25-4430-9d9c-7ae4a8117b1e fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Acquiring lock "refresh_cache-d55dd428-ae1c-4c43-8582-3a46d50f4822" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:08:42 compute-0 nova_compute[192079]: 2025-10-02 12:08:42.805 2 DEBUG oslo_concurrency.lockutils [None req-528972f7-1e25-4430-9d9c-7ae4a8117b1e fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Acquired lock "refresh_cache-d55dd428-ae1c-4c43-8582-3a46d50f4822" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:08:42 compute-0 nova_compute[192079]: 2025-10-02 12:08:42.806 2 DEBUG nova.network.neutron [None req-528972f7-1e25-4430-9d9c-7ae4a8117b1e fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:08:43 compute-0 nova_compute[192079]: 2025-10-02 12:08:43.504 2 WARNING nova.network.neutron [None req-528972f7-1e25-4430-9d9c-7ae4a8117b1e fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] 7d845a33-56e0-4850-9f27-8a54095796f2 already exists in list: networks containing: ['7d845a33-56e0-4850-9f27-8a54095796f2']. ignoring it
Oct 02 12:08:44 compute-0 nova_compute[192079]: 2025-10-02 12:08:44.177 2 DEBUG nova.compute.manager [req-1ab9d6b5-e9cb-4a2c-b712-d8e81379ad24 req-171c11b7-5fdf-491f-b2fb-b8057af23c71 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Received event network-changed-a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:08:44 compute-0 nova_compute[192079]: 2025-10-02 12:08:44.178 2 DEBUG nova.compute.manager [req-1ab9d6b5-e9cb-4a2c-b712-d8e81379ad24 req-171c11b7-5fdf-491f-b2fb-b8057af23c71 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Refreshing instance network info cache due to event network-changed-a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:08:44 compute-0 nova_compute[192079]: 2025-10-02 12:08:44.179 2 DEBUG oslo_concurrency.lockutils [req-1ab9d6b5-e9cb-4a2c-b712-d8e81379ad24 req-171c11b7-5fdf-491f-b2fb-b8057af23c71 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-d55dd428-ae1c-4c43-8582-3a46d50f4822" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:08:45 compute-0 nova_compute[192079]: 2025-10-02 12:08:45.038 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.552 2 DEBUG nova.network.neutron [None req-528972f7-1e25-4430-9d9c-7ae4a8117b1e fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Updating instance_info_cache with network_info: [{"id": "d86c2c53-081c-4754-b070-2dd5028a4c08", "address": "fa:16:3e:4d:9f:a8", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd86c2c53-08", "ovs_interfaceid": "d86c2c53-081c-4754-b070-2dd5028a4c08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "address": "fa:16:3e:41:fe:e7", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": 
false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa9e42a3e-0f", "ovs_interfaceid": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.573 2 DEBUG oslo_concurrency.lockutils [None req-528972f7-1e25-4430-9d9c-7ae4a8117b1e fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Releasing lock "refresh_cache-d55dd428-ae1c-4c43-8582-3a46d50f4822" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.574 2 DEBUG oslo_concurrency.lockutils [req-1ab9d6b5-e9cb-4a2c-b712-d8e81379ad24 req-171c11b7-5fdf-491f-b2fb-b8057af23c71 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-d55dd428-ae1c-4c43-8582-3a46d50f4822" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.574 2 DEBUG nova.network.neutron [req-1ab9d6b5-e9cb-4a2c-b712-d8e81379ad24 req-171c11b7-5fdf-491f-b2fb-b8057af23c71 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Refreshing network info cache for port a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.578 2 DEBUG nova.virt.libvirt.vif [None req-528972f7-1e25-4430-9d9c-7ae4a8117b1e fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:08:06Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-AttachInterfacesTestJSON-server-1852145717',display_name='tempest-AttachInterfacesTestJSON-server-1852145717',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-attachinterfacestestjson-server-1852145717',id=44,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBOMAiNKx1nMsQWkjyoacitfMpSCECpXaL6jiwNift5lqyR8GB5bAw9OQhuD+NMppggB+YdsyU4EuF27p1sPXC0U7gBRRZIdIzuVGXUDvMEa8cZQfCNptjsHEFbvdeH21PA==',key_name='tempest-keypair-1558941664',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:08:15Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=<?>,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='ef4e3be787374d90a6a236c7f76bd940',ramdisk_id='',reservation_id='r-f02kral6',resources=<?>,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-AttachInterfacesTestJSON-812274278',owner_user_name='tempest-AttachInterfacesTestJSON-812274278-project-member'},tags=<?>,task_state=None,terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:08:15Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='fbc7616089cb4f78832692487019c83d',uuid=d55dd428-ae1c-4c43-8582-3a46d50f4822,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "address": "fa:16:3e:41:fe:e7", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", 
"dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa9e42a3e-0f", "ovs_interfaceid": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.578 2 DEBUG nova.network.os_vif_util [None req-528972f7-1e25-4430-9d9c-7ae4a8117b1e fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Converting VIF {"id": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "address": "fa:16:3e:41:fe:e7", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa9e42a3e-0f", "ovs_interfaceid": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.579 2 DEBUG nova.network.os_vif_util [None req-528972f7-1e25-4430-9d9c-7ae4a8117b1e fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:41:fe:e7,bridge_name='br-int',has_traffic_filtering=True,id=a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3,network=Network(7d845a33-56e0-4850-9f27-8a54095796f2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapa9e42a3e-0f') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.580 2 DEBUG os_vif [None req-528972f7-1e25-4430-9d9c-7ae4a8117b1e fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:41:fe:e7,bridge_name='br-int',has_traffic_filtering=True,id=a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3,network=Network(7d845a33-56e0-4850-9f27-8a54095796f2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapa9e42a3e-0f') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.581 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.581 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.581 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.585 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.585 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapa9e42a3e-0f, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.585 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapa9e42a3e-0f, col_values=(('external_ids', {'iface-id': 'a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:41:fe:e7', 'vm-uuid': 'd55dd428-ae1c-4c43-8582-3a46d50f4822'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.587 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:46 compute-0 NetworkManager[51160]: <info>  [1759406926.5901] manager: (tapa9e42a3e-0f): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/70)
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.590 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.596 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.597 2 INFO os_vif [None req-528972f7-1e25-4430-9d9c-7ae4a8117b1e fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:41:fe:e7,bridge_name='br-int',has_traffic_filtering=True,id=a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3,network=Network(7d845a33-56e0-4850-9f27-8a54095796f2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapa9e42a3e-0f')
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.598 2 DEBUG nova.virt.libvirt.vif [None req-528972f7-1e25-4430-9d9c-7ae4a8117b1e fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:08:06Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-AttachInterfacesTestJSON-server-1852145717',display_name='tempest-AttachInterfacesTestJSON-server-1852145717',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-attachinterfacestestjson-server-1852145717',id=44,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBOMAiNKx1nMsQWkjyoacitfMpSCECpXaL6jiwNift5lqyR8GB5bAw9OQhuD+NMppggB+YdsyU4EuF27p1sPXC0U7gBRRZIdIzuVGXUDvMEa8cZQfCNptjsHEFbvdeH21PA==',key_name='tempest-keypair-1558941664',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:08:15Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=<?>,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='ef4e3be787374d90a6a236c7f76bd940',ramdisk_id='',reservation_id='r-f02kral6',resources=<?>,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-AttachInterfacesTestJSON-812274278',owner_user_name='tempest-AttachInterfacesTestJSON-812274278-project-member'},tags=<?>,task_state=None,terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:08:15Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='fbc7616089cb4f78832692487019c83d',uuid=d55dd428-ae1c-4c43-8582-3a46d50f4822,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "address": "fa:16:3e:41:fe:e7", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", 
"dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa9e42a3e-0f", "ovs_interfaceid": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.599 2 DEBUG nova.network.os_vif_util [None req-528972f7-1e25-4430-9d9c-7ae4a8117b1e fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Converting VIF {"id": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "address": "fa:16:3e:41:fe:e7", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa9e42a3e-0f", "ovs_interfaceid": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.600 2 DEBUG nova.network.os_vif_util [None req-528972f7-1e25-4430-9d9c-7ae4a8117b1e fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:41:fe:e7,bridge_name='br-int',has_traffic_filtering=True,id=a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3,network=Network(7d845a33-56e0-4850-9f27-8a54095796f2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapa9e42a3e-0f') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.606 2 DEBUG nova.virt.libvirt.guest [None req-528972f7-1e25-4430-9d9c-7ae4a8117b1e fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] attach device xml: <interface type="ethernet">
Oct 02 12:08:46 compute-0 nova_compute[192079]:   <mac address="fa:16:3e:41:fe:e7"/>
Oct 02 12:08:46 compute-0 nova_compute[192079]:   <model type="virtio"/>
Oct 02 12:08:46 compute-0 nova_compute[192079]:   <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:08:46 compute-0 nova_compute[192079]:   <mtu size="1442"/>
Oct 02 12:08:46 compute-0 nova_compute[192079]:   <target dev="tapa9e42a3e-0f"/>
Oct 02 12:08:46 compute-0 nova_compute[192079]: </interface>
Oct 02 12:08:46 compute-0 nova_compute[192079]:  attach_device /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:339
Oct 02 12:08:46 compute-0 kernel: tapa9e42a3e-0f: entered promiscuous mode
Oct 02 12:08:46 compute-0 NetworkManager[51160]: <info>  [1759406926.6163] manager: (tapa9e42a3e-0f): new Tun device (/org/freedesktop/NetworkManager/Devices/71)
Oct 02 12:08:46 compute-0 ovn_controller[94336]: 2025-10-02T12:08:46Z|00135|binding|INFO|Claiming lport a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3 for this chassis.
Oct 02 12:08:46 compute-0 ovn_controller[94336]: 2025-10-02T12:08:46Z|00136|binding|INFO|a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3: Claiming fa:16:3e:41:fe:e7 10.100.0.14
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.619 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:46.628 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:41:fe:e7 10.100.0.14'], port_security=['fa:16:3e:41:fe:e7 10.100.0.14'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.14/28', 'neutron:device_id': 'd55dd428-ae1c-4c43-8582-3a46d50f4822', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-7d845a33-56e0-4850-9f27-8a54095796f2', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'ef4e3be787374d90a6a236c7f76bd940', 'neutron:revision_number': '2', 'neutron:security_group_ids': 'e26b972b-3ab5-401c-9d8b-5161665ba680', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=4583e9be-3cfa-4470-9e2e-4e943d469605, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:08:46 compute-0 ovn_controller[94336]: 2025-10-02T12:08:46Z|00137|binding|INFO|Setting lport a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3 ovn-installed in OVS
Oct 02 12:08:46 compute-0 ovn_controller[94336]: 2025-10-02T12:08:46Z|00138|binding|INFO|Setting lport a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3 up in Southbound
Oct 02 12:08:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:46.629 103294 INFO neutron.agent.ovn.metadata.agent [-] Port a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3 in datapath 7d845a33-56e0-4850-9f27-8a54095796f2 bound to our chassis
Oct 02 12:08:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:46.645 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 7d845a33-56e0-4850-9f27-8a54095796f2
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.647 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:46.666 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d24ce673-9fbc-42c8-ab48-01d24518ed9d]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:46 compute-0 systemd-udevd[226126]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:08:46 compute-0 NetworkManager[51160]: <info>  [1759406926.6935] device (tapa9e42a3e-0f): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:08:46 compute-0 NetworkManager[51160]: <info>  [1759406926.6945] device (tapa9e42a3e-0f): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:08:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:46.708 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[3d8d246e-ccb0-4fa2-a843-ce7dba857292]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:46.712 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[a8c2746a-e180-4261-b799-4939ebc08f9e]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.727 2 DEBUG nova.virt.libvirt.driver [None req-528972f7-1e25-4430-9d9c-7ae4a8117b1e fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.728 2 DEBUG nova.virt.libvirt.driver [None req-528972f7-1e25-4430-9d9c-7ae4a8117b1e fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.728 2 DEBUG nova.virt.libvirt.driver [None req-528972f7-1e25-4430-9d9c-7ae4a8117b1e fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] No VIF found with MAC fa:16:3e:4d:9f:a8, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.728 2 DEBUG nova.virt.libvirt.driver [None req-528972f7-1e25-4430-9d9c-7ae4a8117b1e fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] No VIF found with MAC fa:16:3e:41:fe:e7, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:08:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:46.739 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[85166d24-e6a1-49fd-9788-e3491ec30e84]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.754 2 DEBUG nova.virt.libvirt.guest [None req-528972f7-1e25-4430-9d9c-7ae4a8117b1e fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] set metadata xml: <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:08:46 compute-0 nova_compute[192079]:   <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:08:46 compute-0 nova_compute[192079]:   <nova:name>tempest-AttachInterfacesTestJSON-server-1852145717</nova:name>
Oct 02 12:08:46 compute-0 nova_compute[192079]:   <nova:creationTime>2025-10-02 12:08:46</nova:creationTime>
Oct 02 12:08:46 compute-0 nova_compute[192079]:   <nova:flavor name="m1.nano">
Oct 02 12:08:46 compute-0 nova_compute[192079]:     <nova:memory>128</nova:memory>
Oct 02 12:08:46 compute-0 nova_compute[192079]:     <nova:disk>1</nova:disk>
Oct 02 12:08:46 compute-0 nova_compute[192079]:     <nova:swap>0</nova:swap>
Oct 02 12:08:46 compute-0 nova_compute[192079]:     <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:08:46 compute-0 nova_compute[192079]:     <nova:vcpus>1</nova:vcpus>
Oct 02 12:08:46 compute-0 nova_compute[192079]:   </nova:flavor>
Oct 02 12:08:46 compute-0 nova_compute[192079]:   <nova:owner>
Oct 02 12:08:46 compute-0 nova_compute[192079]:     <nova:user uuid="fbc7616089cb4f78832692487019c83d">tempest-AttachInterfacesTestJSON-812274278-project-member</nova:user>
Oct 02 12:08:46 compute-0 nova_compute[192079]:     <nova:project uuid="ef4e3be787374d90a6a236c7f76bd940">tempest-AttachInterfacesTestJSON-812274278</nova:project>
Oct 02 12:08:46 compute-0 nova_compute[192079]:   </nova:owner>
Oct 02 12:08:46 compute-0 nova_compute[192079]:   <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:08:46 compute-0 nova_compute[192079]:   <nova:ports>
Oct 02 12:08:46 compute-0 nova_compute[192079]:     <nova:port uuid="d86c2c53-081c-4754-b070-2dd5028a4c08">
Oct 02 12:08:46 compute-0 nova_compute[192079]:       <nova:ip type="fixed" address="10.100.0.10" ipVersion="4"/>
Oct 02 12:08:46 compute-0 nova_compute[192079]:     </nova:port>
Oct 02 12:08:46 compute-0 nova_compute[192079]:     <nova:port uuid="a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3">
Oct 02 12:08:46 compute-0 nova_compute[192079]:       <nova:ip type="fixed" address="10.100.0.14" ipVersion="4"/>
Oct 02 12:08:46 compute-0 nova_compute[192079]:     </nova:port>
Oct 02 12:08:46 compute-0 nova_compute[192079]:   </nova:ports>
Oct 02 12:08:46 compute-0 nova_compute[192079]: </nova:instance>
Oct 02 12:08:46 compute-0 nova_compute[192079]:  set_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:359
Oct 02 12:08:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:46.754 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8b5f5a83-d58e-4476-a851-756975e3dc26]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap7d845a33-51'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:8f:90:16'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 10, 'tx_packets': 5, 'rx_bytes': 916, 'tx_bytes': 354, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 10, 'tx_packets': 5, 'rx_bytes': 916, 'tx_bytes': 354, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 40], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 489176, 'reachable_time': 15543, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 8, 'inoctets': 720, 'indelivers': 1, 'outforwdatagrams': 0, 'outpkts': 3, 'outoctets': 228, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 8, 'outmcastpkts': 3, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 720, 'outmcastoctets': 228, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 8, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 1, 'inerrors': 0, 'outmsgs': 3, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 226133, 'error': None, 'target': 'ovnmeta-7d845a33-56e0-4850-9f27-8a54095796f2', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:46.768 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1df2b008-c448-4959-9038-b5c766100636]: (4, ({'family': 2, 'prefixlen': 28, 'flags': 128, 'scope': 0, 'index': 2, 'attrs': [['IFA_ADDRESS', '10.100.0.2'], ['IFA_LOCAL', '10.100.0.2'], ['IFA_BROADCAST', '10.100.0.15'], ['IFA_LABEL', 'tap7d845a33-51'], ['IFA_FLAGS', 128], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 489191, 'tstamp': 489191}]], 'header': {'length': 96, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 226134, 'error': None, 'target': 'ovnmeta-7d845a33-56e0-4850-9f27-8a54095796f2', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'}, {'family': 2, 'prefixlen': 32, 'flags': 128, 'scope': 0, 'index': 2, 'attrs': [['IFA_ADDRESS', '169.254.169.254'], ['IFA_LOCAL', '169.254.169.254'], ['IFA_BROADCAST', '169.254.169.254'], ['IFA_LABEL', 'tap7d845a33-51'], ['IFA_FLAGS', 128], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 489195, 'tstamp': 489195}]], 'header': {'length': 96, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 226134, 'error': None, 'target': 'ovnmeta-7d845a33-56e0-4850-9f27-8a54095796f2', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'})) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:46.770 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap7d845a33-50, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.771 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.772 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:46.772 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap7d845a33-50, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:08:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:46.773 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:08:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:46.773 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap7d845a33-50, col_values=(('external_ids', {'iface-id': '1c321c19-d630-4a6f-8ba8-7bac90af9bae'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:08:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:46.774 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.783 2 DEBUG oslo_concurrency.lockutils [None req-528972f7-1e25-4430-9d9c-7ae4a8117b1e fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Lock "interface-d55dd428-ae1c-4c43-8582-3a46d50f4822-None" "released" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: held 7.576s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.938 2 DEBUG nova.compute.manager [req-3701491d-4fbb-4e98-90cb-af749c996bed req-23f6d444-365e-48c4-992a-ec6e06a0ea76 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Received event network-vif-plugged-a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.938 2 DEBUG oslo_concurrency.lockutils [req-3701491d-4fbb-4e98-90cb-af749c996bed req-23f6d444-365e-48c4-992a-ec6e06a0ea76 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.939 2 DEBUG oslo_concurrency.lockutils [req-3701491d-4fbb-4e98-90cb-af749c996bed req-23f6d444-365e-48c4-992a-ec6e06a0ea76 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.939 2 DEBUG oslo_concurrency.lockutils [req-3701491d-4fbb-4e98-90cb-af749c996bed req-23f6d444-365e-48c4-992a-ec6e06a0ea76 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.939 2 DEBUG nova.compute.manager [req-3701491d-4fbb-4e98-90cb-af749c996bed req-23f6d444-365e-48c4-992a-ec6e06a0ea76 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] No waiting events found dispatching network-vif-plugged-a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:08:46 compute-0 nova_compute[192079]: 2025-10-02 12:08:46.939 2 WARNING nova.compute.manager [req-3701491d-4fbb-4e98-90cb-af749c996bed req-23f6d444-365e-48c4-992a-ec6e06a0ea76 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Received unexpected event network-vif-plugged-a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3 for instance with vm_state active and task_state None.
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.015 2 DEBUG nova.network.neutron [req-1ab9d6b5-e9cb-4a2c-b712-d8e81379ad24 req-171c11b7-5fdf-491f-b2fb-b8057af23c71 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Updated VIF entry in instance network info cache for port a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.016 2 DEBUG nova.network.neutron [req-1ab9d6b5-e9cb-4a2c-b712-d8e81379ad24 req-171c11b7-5fdf-491f-b2fb-b8057af23c71 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Updating instance_info_cache with network_info: [{"id": "d86c2c53-081c-4754-b070-2dd5028a4c08", "address": "fa:16:3e:4d:9f:a8", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd86c2c53-08", "ovs_interfaceid": "d86c2c53-081c-4754-b070-2dd5028a4c08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "address": "fa:16:3e:41:fe:e7", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa9e42a3e-0f", "ovs_interfaceid": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.039 2 DEBUG oslo_concurrency.lockutils [req-1ab9d6b5-e9cb-4a2c-b712-d8e81379ad24 req-171c11b7-5fdf-491f-b2fb-b8057af23c71 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-d55dd428-ae1c-4c43-8582-3a46d50f4822" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.360 2 DEBUG oslo_concurrency.lockutils [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Acquiring lock "interface-d55dd428-ae1c-4c43-8582-3a46d50f4822-a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3" by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.361 2 DEBUG oslo_concurrency.lockutils [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Lock "interface-d55dd428-ae1c-4c43-8582-3a46d50f4822-a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3" acquired by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.377 2 DEBUG nova.objects.instance [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Lazy-loading 'flavor' on Instance uuid d55dd428-ae1c-4c43-8582-3a46d50f4822 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.405 2 DEBUG nova.virt.libvirt.vif [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:08:06Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-AttachInterfacesTestJSON-server-1852145717',display_name='tempest-AttachInterfacesTestJSON-server-1852145717',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-attachinterfacestestjson-server-1852145717',id=44,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBOMAiNKx1nMsQWkjyoacitfMpSCECpXaL6jiwNift5lqyR8GB5bAw9OQhuD+NMppggB+YdsyU4EuF27p1sPXC0U7gBRRZIdIzuVGXUDvMEa8cZQfCNptjsHEFbvdeH21PA==',key_name='tempest-keypair-1558941664',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:08:15Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=<?>,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='ef4e3be787374d90a6a236c7f76bd940',ramdisk_id='',reservation_id='r-f02kral6',resources=<?>,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-AttachInterfacesTestJSON-812274278',owner_user_name='tempest-AttachInterfacesTestJSON-812274278-project-member'},tags=<?>,task_state=None,terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:08:15Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='fbc7616089cb4f78832692487019c83d',uuid=d55dd428-ae1c-4c43-8582-3a46d50f4822,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "address": "fa:16:3e:41:fe:e7", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], 
"gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa9e42a3e-0f", "ovs_interfaceid": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.406 2 DEBUG nova.network.os_vif_util [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Converting VIF {"id": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "address": "fa:16:3e:41:fe:e7", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa9e42a3e-0f", "ovs_interfaceid": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.406 2 DEBUG nova.network.os_vif_util [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:41:fe:e7,bridge_name='br-int',has_traffic_filtering=True,id=a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3,network=Network(7d845a33-56e0-4850-9f27-8a54095796f2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapa9e42a3e-0f') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.409 2 DEBUG nova.virt.libvirt.guest [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] looking for interface given config: <interface type="ethernet"><mac address="fa:16:3e:41:fe:e7"/><model type="virtio"/><driver name="vhost" rx_queue_size="512"/><mtu size="1442"/><target dev="tapa9e42a3e-0f"/></interface> get_interface_by_cfg /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:257
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.411 2 DEBUG nova.virt.libvirt.guest [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] looking for interface given config: <interface type="ethernet"><mac address="fa:16:3e:41:fe:e7"/><model type="virtio"/><driver name="vhost" rx_queue_size="512"/><mtu size="1442"/><target dev="tapa9e42a3e-0f"/></interface> get_interface_by_cfg /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:257
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.412 2 DEBUG nova.virt.libvirt.driver [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Attempting to detach device tapa9e42a3e-0f from instance d55dd428-ae1c-4c43-8582-3a46d50f4822 from the persistent domain config. _detach_from_persistent /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:2487
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.413 2 DEBUG nova.virt.libvirt.guest [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] detach device xml: <interface type="ethernet">
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <mac address="fa:16:3e:41:fe:e7"/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <model type="virtio"/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <mtu size="1442"/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <target dev="tapa9e42a3e-0f"/>
Oct 02 12:08:48 compute-0 nova_compute[192079]: </interface>
Oct 02 12:08:48 compute-0 nova_compute[192079]:  detach_device /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:465
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.419 2 DEBUG nova.virt.libvirt.guest [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] looking for interface given config: <interface type="ethernet"><mac address="fa:16:3e:41:fe:e7"/><model type="virtio"/><driver name="vhost" rx_queue_size="512"/><mtu size="1442"/><target dev="tapa9e42a3e-0f"/></interface> get_interface_by_cfg /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:257
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.422 2 DEBUG nova.virt.libvirt.guest [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] interface for config: <interface type="ethernet"><mac address="fa:16:3e:41:fe:e7"/><model type="virtio"/><driver name="vhost" rx_queue_size="512"/><mtu size="1442"/><target dev="tapa9e42a3e-0f"/></interface>not found in domain: <domain type='kvm' id='23'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <name>instance-0000002c</name>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <uuid>d55dd428-ae1c-4c43-8582-3a46d50f4822</uuid>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1" xmlns:instance="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <nova:name>tempest-AttachInterfacesTestJSON-server-1852145717</nova:name>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <nova:creationTime>2025-10-02 12:08:46</nova:creationTime>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <nova:flavor name="m1.nano">
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <nova:memory>128</nova:memory>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <nova:disk>1</nova:disk>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <nova:swap>0</nova:swap>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <nova:vcpus>1</nova:vcpus>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   </nova:flavor>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <nova:owner>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <nova:user uuid="fbc7616089cb4f78832692487019c83d">tempest-AttachInterfacesTestJSON-812274278-project-member</nova:user>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <nova:project uuid="ef4e3be787374d90a6a236c7f76bd940">tempest-AttachInterfacesTestJSON-812274278</nova:project>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   </nova:owner>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <nova:ports>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <nova:port uuid="d86c2c53-081c-4754-b070-2dd5028a4c08">
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <nova:ip type="fixed" address="10.100.0.10" ipVersion="4"/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </nova:port>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <nova:port uuid="a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3">
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <nova:ip type="fixed" address="10.100.0.14" ipVersion="4"/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </nova:port>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   </nova:ports>
Oct 02 12:08:48 compute-0 nova_compute[192079]: </nova:instance>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <memory unit='KiB'>131072</memory>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <currentMemory unit='KiB'>131072</currentMemory>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <vcpu placement='static'>1</vcpu>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <resource>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <partition>/machine</partition>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   </resource>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <sysinfo type='smbios'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <system>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <entry name='manufacturer'>RDO</entry>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <entry name='product'>OpenStack Compute</entry>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <entry name='version'>27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <entry name='serial'>d55dd428-ae1c-4c43-8582-3a46d50f4822</entry>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <entry name='uuid'>d55dd428-ae1c-4c43-8582-3a46d50f4822</entry>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <entry name='family'>Virtual Machine</entry>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </system>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <os>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <type arch='x86_64' machine='pc-q35-rhel9.6.0'>hvm</type>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <boot dev='hd'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <smbios mode='sysinfo'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   </os>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <features>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <vmcoreinfo state='on'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   </features>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <cpu mode='custom' match='exact' check='full'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <model fallback='forbid'>Nehalem</model>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <topology sockets='1' dies='1' clusters='1' cores='1' threads='1'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <feature policy='require' name='x2apic'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <feature policy='require' name='hypervisor'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <feature policy='require' name='vme'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <clock offset='utc'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <timer name='pit' tickpolicy='delay'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <timer name='rtc' tickpolicy='catchup'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <timer name='hpet' present='no'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <on_poweroff>destroy</on_poweroff>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <on_reboot>restart</on_reboot>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <on_crash>destroy</on_crash>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <emulator>/usr/libexec/qemu-kvm</emulator>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <disk type='file' device='disk'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <driver name='qemu' type='qcow2' cache='none'/>
Oct 02 12:08:48 compute-0 rsyslogd[1013]: imjournal: journal files changed, reloading...  [v8.2506.0-2.el9 try https://www.rsyslog.com/e/0 ]
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <source file='/var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822/disk' index='2'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <backingStore type='file' index='3'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:         <format type='raw'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:         <source file='/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:         <backingStore/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       </backingStore>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target dev='vda' bus='virtio'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='virtio-disk0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x03' slot='0x00' function='0x0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <disk type='file' device='cdrom'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <driver name='qemu' type='raw' cache='none'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <source file='/var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822/disk.config' index='1'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <backingStore/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target dev='sda' bus='sata'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <readonly/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='sata0-0-0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='drive' controller='0' bus='0' target='0' unit='0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='0' model='pcie-root'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pcie.0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='1' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='1' port='0x10'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.1'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x02' function='0x0' multifunction='on'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='2' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='2' port='0x11'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.2'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x02' function='0x1'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='3' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='3' port='0x12'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.3'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x02' function='0x2'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='4' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='4' port='0x13'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.4'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x02' function='0x3'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='5' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='5' port='0x14'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.5'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x02' function='0x4'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='6' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='6' port='0x15'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.6'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x02' function='0x5'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='7' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='7' port='0x16'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.7'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x02' function='0x6'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='8' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='8' port='0x17'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.8'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x02' function='0x7'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 rsyslogd[1013]: imjournal: journal files changed, reloading...  [v8.2506.0-2.el9 try https://www.rsyslog.com/e/0 ]
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='9' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='9' port='0x18'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.9'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x03' function='0x0' multifunction='on'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='10' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='10' port='0x19'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.10'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x03' function='0x1'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='11' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='11' port='0x1a'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.11'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x03' function='0x2'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='12' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='12' port='0x1b'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.12'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x03' function='0x3'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='13' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='13' port='0x1c'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.13'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x03' function='0x4'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='14' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='14' port='0x1d'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.14'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x03' function='0x5'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='15' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='15' port='0x1e'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.15'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x03' function='0x6'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='16' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='16' port='0x1f'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.16'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x03' function='0x7'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='17' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='17' port='0x20'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.17'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x04' function='0x0' multifunction='on'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='18' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='18' port='0x21'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.18'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x04' function='0x1'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='19' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='19' port='0x22'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.19'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x04' function='0x2'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='20' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='20' port='0x23'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.20'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x04' function='0x3'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='21' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='21' port='0x24'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.21'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x04' function='0x4'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='22' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='22' port='0x25'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.22'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x04' function='0x5'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='23' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='23' port='0x26'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.23'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x04' function='0x6'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='24' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='24' port='0x27'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.24'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x04' function='0x7'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='25' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='25' port='0x28'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.25'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x05' function='0x0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='26' model='pcie-to-pci-bridge'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-pci-bridge'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.26'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x01' slot='0x00' function='0x0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='usb' index='0' model='piix3-uhci'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='usb'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x1a' slot='0x01' function='0x0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='sata' index='0'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='ide'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x1f' function='0x2'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <interface type='ethernet'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <mac address='fa:16:3e:4d:9f:a8'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target dev='tapd86c2c53-08'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model type='virtio'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <driver name='vhost' rx_queue_size='512'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <mtu size='1442'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='net0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x02' slot='0x00' function='0x0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <interface type='ethernet'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <mac address='fa:16:3e:41:fe:e7'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target dev='tapa9e42a3e-0f'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model type='virtio'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <driver name='vhost' rx_queue_size='512'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <mtu size='1442'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='net1'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x06' slot='0x00' function='0x0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <serial type='pty'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <source path='/dev/pts/0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <log file='/var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822/console.log' append='off'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target type='isa-serial' port='0'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:         <model name='isa-serial'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       </target>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='serial0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <console type='pty' tty='/dev/pts/0'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <source path='/dev/pts/0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <log file='/var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822/console.log' append='off'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target type='serial' port='0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='serial0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </console>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <input type='tablet' bus='usb'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='input0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='usb' bus='0' port='1'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </input>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <input type='mouse' bus='ps2'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='input1'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </input>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <input type='keyboard' bus='ps2'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='input2'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </input>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <graphics type='vnc' port='5900' autoport='yes' listen='::0'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <listen type='address' address='::0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </graphics>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <audio id='1' type='none'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <video>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model type='virtio' heads='1' primary='yes'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='video0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x01' function='0x0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </video>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <watchdog model='itco' action='reset'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='watchdog0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </watchdog>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <memballoon model='virtio'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <stats period='10'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='balloon0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x04' slot='0x00' function='0x0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <rng model='virtio'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <backend model='random'>/dev/urandom</backend>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='rng0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x05' slot='0x00' function='0x0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <seclabel type='dynamic' model='selinux' relabel='yes'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <label>system_u:system_r:svirt_t:s0:c164,c1004</label>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <imagelabel>system_u:object_r:svirt_image_t:s0:c164,c1004</imagelabel>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   </seclabel>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <seclabel type='dynamic' model='dac' relabel='yes'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <label>+107:+107</label>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <imagelabel>+107:+107</imagelabel>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   </seclabel>
Oct 02 12:08:48 compute-0 nova_compute[192079]: </domain>
Oct 02 12:08:48 compute-0 nova_compute[192079]:  get_interface_by_cfg /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:282
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.424 2 INFO nova.virt.libvirt.driver [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Successfully detached device tapa9e42a3e-0f from instance d55dd428-ae1c-4c43-8582-3a46d50f4822 from the persistent domain config.
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.424 2 DEBUG nova.virt.libvirt.driver [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] (1/8): Attempting to detach device tapa9e42a3e-0f with device alias net1 from instance d55dd428-ae1c-4c43-8582-3a46d50f4822 from the live domain config. _detach_from_live_with_retry /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:2523
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.425 2 DEBUG nova.virt.libvirt.guest [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] detach device xml: <interface type="ethernet">
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <mac address="fa:16:3e:41:fe:e7"/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <model type="virtio"/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <mtu size="1442"/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <target dev="tapa9e42a3e-0f"/>
Oct 02 12:08:48 compute-0 nova_compute[192079]: </interface>
Oct 02 12:08:48 compute-0 nova_compute[192079]:  detach_device /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:465
Oct 02 12:08:48 compute-0 kernel: tapa9e42a3e-0f (unregistering): left promiscuous mode
Oct 02 12:08:48 compute-0 NetworkManager[51160]: <info>  [1759406928.4731] device (tapa9e42a3e-0f): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:08:48 compute-0 ovn_controller[94336]: 2025-10-02T12:08:48Z|00139|binding|INFO|Releasing lport a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3 from this chassis (sb_readonly=0)
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.482 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:48 compute-0 ovn_controller[94336]: 2025-10-02T12:08:48Z|00140|binding|INFO|Setting lport a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3 down in Southbound
Oct 02 12:08:48 compute-0 ovn_controller[94336]: 2025-10-02T12:08:48Z|00141|binding|INFO|Removing iface tapa9e42a3e-0f ovn-installed in OVS
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.485 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:48.492 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:41:fe:e7 10.100.0.14'], port_security=['fa:16:3e:41:fe:e7 10.100.0.14'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.14/28', 'neutron:device_id': 'd55dd428-ae1c-4c43-8582-3a46d50f4822', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-7d845a33-56e0-4850-9f27-8a54095796f2', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'ef4e3be787374d90a6a236c7f76bd940', 'neutron:revision_number': '4', 'neutron:security_group_ids': 'e26b972b-3ab5-401c-9d8b-5161665ba680', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=4583e9be-3cfa-4470-9e2e-4e943d469605, chassis=[], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:08:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:48.494 103294 INFO neutron.agent.ovn.metadata.agent [-] Port a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3 in datapath 7d845a33-56e0-4850-9f27-8a54095796f2 unbound from our chassis
Oct 02 12:08:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:48.495 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 7d845a33-56e0-4850-9f27-8a54095796f2
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.500 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.505 2 DEBUG nova.virt.libvirt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Received event <DeviceRemovedEvent: 1759406928.505367, d55dd428-ae1c-4c43-8582-3a46d50f4822 => net1> from libvirt while the driver is waiting for it; dispatched. emit_event /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:2370
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.507 2 DEBUG nova.virt.libvirt.driver [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Start waiting for the detach event from libvirt for device tapa9e42a3e-0f with device alias net1 for instance d55dd428-ae1c-4c43-8582-3a46d50f4822 _detach_from_live_and_wait_for_event /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:2599
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.507 2 DEBUG nova.virt.libvirt.guest [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] looking for interface given config: <interface type="ethernet"><mac address="fa:16:3e:41:fe:e7"/><model type="virtio"/><driver name="vhost" rx_queue_size="512"/><mtu size="1442"/><target dev="tapa9e42a3e-0f"/></interface> get_interface_by_cfg /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:257
Oct 02 12:08:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:48.514 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f933144b-1144-4fcd-9176-9b73011efd02]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.516 2 DEBUG nova.virt.libvirt.guest [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] interface for config: <interface type="ethernet"><mac address="fa:16:3e:41:fe:e7"/><model type="virtio"/><driver name="vhost" rx_queue_size="512"/><mtu size="1442"/><target dev="tapa9e42a3e-0f"/></interface>not found in domain: <domain type='kvm' id='23'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <name>instance-0000002c</name>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <uuid>d55dd428-ae1c-4c43-8582-3a46d50f4822</uuid>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1" xmlns:instance="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <nova:name>tempest-AttachInterfacesTestJSON-server-1852145717</nova:name>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <nova:creationTime>2025-10-02 12:08:46</nova:creationTime>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <nova:flavor name="m1.nano">
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <nova:memory>128</nova:memory>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <nova:disk>1</nova:disk>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <nova:swap>0</nova:swap>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <nova:vcpus>1</nova:vcpus>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   </nova:flavor>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <nova:owner>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <nova:user uuid="fbc7616089cb4f78832692487019c83d">tempest-AttachInterfacesTestJSON-812274278-project-member</nova:user>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <nova:project uuid="ef4e3be787374d90a6a236c7f76bd940">tempest-AttachInterfacesTestJSON-812274278</nova:project>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   </nova:owner>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <nova:ports>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <nova:port uuid="d86c2c53-081c-4754-b070-2dd5028a4c08">
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <nova:ip type="fixed" address="10.100.0.10" ipVersion="4"/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </nova:port>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <nova:port uuid="a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3">
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <nova:ip type="fixed" address="10.100.0.14" ipVersion="4"/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </nova:port>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   </nova:ports>
Oct 02 12:08:48 compute-0 nova_compute[192079]: </nova:instance>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <memory unit='KiB'>131072</memory>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <currentMemory unit='KiB'>131072</currentMemory>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <vcpu placement='static'>1</vcpu>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <resource>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <partition>/machine</partition>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   </resource>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <sysinfo type='smbios'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <system>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <entry name='manufacturer'>RDO</entry>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <entry name='product'>OpenStack Compute</entry>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <entry name='version'>27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <entry name='serial'>d55dd428-ae1c-4c43-8582-3a46d50f4822</entry>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <entry name='uuid'>d55dd428-ae1c-4c43-8582-3a46d50f4822</entry>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <entry name='family'>Virtual Machine</entry>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </system>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <os>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <type arch='x86_64' machine='pc-q35-rhel9.6.0'>hvm</type>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <boot dev='hd'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <smbios mode='sysinfo'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   </os>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <features>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <vmcoreinfo state='on'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   </features>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <cpu mode='custom' match='exact' check='full'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <model fallback='forbid'>Nehalem</model>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <topology sockets='1' dies='1' clusters='1' cores='1' threads='1'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <feature policy='require' name='x2apic'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <feature policy='require' name='hypervisor'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <feature policy='require' name='vme'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <clock offset='utc'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <timer name='pit' tickpolicy='delay'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <timer name='rtc' tickpolicy='catchup'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <timer name='hpet' present='no'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <on_poweroff>destroy</on_poweroff>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <on_reboot>restart</on_reboot>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <on_crash>destroy</on_crash>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <emulator>/usr/libexec/qemu-kvm</emulator>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <disk type='file' device='disk'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <driver name='qemu' type='qcow2' cache='none'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <source file='/var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822/disk' index='2'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <backingStore type='file' index='3'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:         <format type='raw'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:         <source file='/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:         <backingStore/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       </backingStore>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target dev='vda' bus='virtio'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='virtio-disk0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x03' slot='0x00' function='0x0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <disk type='file' device='cdrom'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <driver name='qemu' type='raw' cache='none'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <source file='/var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822/disk.config' index='1'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <backingStore/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target dev='sda' bus='sata'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <readonly/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='sata0-0-0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='drive' controller='0' bus='0' target='0' unit='0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='0' model='pcie-root'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pcie.0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='1' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='1' port='0x10'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.1'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x02' function='0x0' multifunction='on'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='2' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='2' port='0x11'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.2'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x02' function='0x1'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='3' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='3' port='0x12'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.3'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x02' function='0x2'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='4' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='4' port='0x13'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.4'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x02' function='0x3'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='5' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='5' port='0x14'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.5'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x02' function='0x4'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='6' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='6' port='0x15'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.6'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x02' function='0x5'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='7' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='7' port='0x16'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.7'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x02' function='0x6'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='8' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='8' port='0x17'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.8'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x02' function='0x7'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='9' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='9' port='0x18'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.9'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x03' function='0x0' multifunction='on'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='10' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='10' port='0x19'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.10'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x03' function='0x1'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='11' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='11' port='0x1a'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.11'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x03' function='0x2'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='12' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='12' port='0x1b'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.12'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x03' function='0x3'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='13' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='13' port='0x1c'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.13'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x03' function='0x4'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='14' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='14' port='0x1d'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.14'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x03' function='0x5'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='15' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='15' port='0x1e'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.15'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x03' function='0x6'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='16' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='16' port='0x1f'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.16'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x03' function='0x7'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='17' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='17' port='0x20'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.17'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x04' function='0x0' multifunction='on'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='18' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='18' port='0x21'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.18'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x04' function='0x1'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='19' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='19' port='0x22'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.19'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x04' function='0x2'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='20' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='20' port='0x23'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.20'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x04' function='0x3'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='21' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='21' port='0x24'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.21'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x04' function='0x4'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='22' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='22' port='0x25'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.22'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x04' function='0x5'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='23' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='23' port='0x26'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.23'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x04' function='0x6'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='24' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='24' port='0x27'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.24'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x04' function='0x7'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='25' model='pcie-root-port'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target chassis='25' port='0x28'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.25'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x05' function='0x0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='pci' index='26' model='pcie-to-pci-bridge'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model name='pcie-pci-bridge'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='pci.26'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x01' slot='0x00' function='0x0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='usb' index='0' model='piix3-uhci'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='usb'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x1a' slot='0x01' function='0x0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <controller type='sata' index='0'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='ide'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x1f' function='0x2'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <interface type='ethernet'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <mac address='fa:16:3e:4d:9f:a8'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target dev='tapd86c2c53-08'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model type='virtio'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <driver name='vhost' rx_queue_size='512'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <mtu size='1442'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='net0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x02' slot='0x00' function='0x0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <serial type='pty'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <source path='/dev/pts/0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <log file='/var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822/console.log' append='off'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target type='isa-serial' port='0'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:         <model name='isa-serial'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       </target>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='serial0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <console type='pty' tty='/dev/pts/0'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <source path='/dev/pts/0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <log file='/var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822/console.log' append='off'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <target type='serial' port='0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='serial0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </console>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <input type='tablet' bus='usb'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='input0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='usb' bus='0' port='1'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </input>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <input type='mouse' bus='ps2'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='input1'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </input>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <input type='keyboard' bus='ps2'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='input2'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </input>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <graphics type='vnc' port='5900' autoport='yes' listen='::0'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <listen type='address' address='::0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </graphics>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <audio id='1' type='none'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <video>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <model type='virtio' heads='1' primary='yes'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='video0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x01' function='0x0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </video>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <watchdog model='itco' action='reset'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='watchdog0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </watchdog>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <memballoon model='virtio'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <stats period='10'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='balloon0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x04' slot='0x00' function='0x0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <rng model='virtio'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <backend model='random'>/dev/urandom</backend>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <alias name='rng0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x05' slot='0x00' function='0x0'/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <seclabel type='dynamic' model='selinux' relabel='yes'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <label>system_u:system_r:svirt_t:s0:c164,c1004</label>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <imagelabel>system_u:object_r:svirt_image_t:s0:c164,c1004</imagelabel>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   </seclabel>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <seclabel type='dynamic' model='dac' relabel='yes'>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <label>+107:+107</label>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <imagelabel>+107:+107</imagelabel>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   </seclabel>
Oct 02 12:08:48 compute-0 nova_compute[192079]: </domain>
Oct 02 12:08:48 compute-0 nova_compute[192079]:  get_interface_by_cfg /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:282
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.516 2 INFO nova.virt.libvirt.driver [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Successfully detached device tapa9e42a3e-0f from instance d55dd428-ae1c-4c43-8582-3a46d50f4822 from the live domain config.
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.517 2 DEBUG nova.virt.libvirt.vif [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:08:06Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-AttachInterfacesTestJSON-server-1852145717',display_name='tempest-AttachInterfacesTestJSON-server-1852145717',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-attachinterfacestestjson-server-1852145717',id=44,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBOMAiNKx1nMsQWkjyoacitfMpSCECpXaL6jiwNift5lqyR8GB5bAw9OQhuD+NMppggB+YdsyU4EuF27p1sPXC0U7gBRRZIdIzuVGXUDvMEa8cZQfCNptjsHEFbvdeH21PA==',key_name='tempest-keypair-1558941664',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:08:15Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=<?>,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='ef4e3be787374d90a6a236c7f76bd940',ramdisk_id='',reservation_id='r-f02kral6',resources=<?>,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-AttachInterfacesTestJSON-812274278',owner_user_name='tempest-AttachInterfacesTestJSON-812274278-project-member'},tags=<?>,task_state=None,terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:08:15Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='fbc7616089cb4f78832692487019c83d',uuid=d55dd428-ae1c-4c43-8582-3a46d50f4822,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "address": "fa:16:3e:41:fe:e7", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], 
"gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa9e42a3e-0f", "ovs_interfaceid": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.517 2 DEBUG nova.network.os_vif_util [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Converting VIF {"id": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "address": "fa:16:3e:41:fe:e7", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa9e42a3e-0f", "ovs_interfaceid": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.518 2 DEBUG nova.network.os_vif_util [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:41:fe:e7,bridge_name='br-int',has_traffic_filtering=True,id=a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3,network=Network(7d845a33-56e0-4850-9f27-8a54095796f2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapa9e42a3e-0f') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.518 2 DEBUG os_vif [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:41:fe:e7,bridge_name='br-int',has_traffic_filtering=True,id=a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3,network=Network(7d845a33-56e0-4850-9f27-8a54095796f2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapa9e42a3e-0f') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.520 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.520 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapa9e42a3e-0f, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.522 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.524 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.527 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.529 2 INFO os_vif [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:41:fe:e7,bridge_name='br-int',has_traffic_filtering=True,id=a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3,network=Network(7d845a33-56e0-4850-9f27-8a54095796f2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapa9e42a3e-0f')
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.530 2 DEBUG nova.virt.libvirt.guest [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] set metadata xml: <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <nova:name>tempest-AttachInterfacesTestJSON-server-1852145717</nova:name>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <nova:creationTime>2025-10-02 12:08:48</nova:creationTime>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <nova:flavor name="m1.nano">
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <nova:memory>128</nova:memory>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <nova:disk>1</nova:disk>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <nova:swap>0</nova:swap>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <nova:vcpus>1</nova:vcpus>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   </nova:flavor>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <nova:owner>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <nova:user uuid="fbc7616089cb4f78832692487019c83d">tempest-AttachInterfacesTestJSON-812274278-project-member</nova:user>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <nova:project uuid="ef4e3be787374d90a6a236c7f76bd940">tempest-AttachInterfacesTestJSON-812274278</nova:project>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   </nova:owner>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   <nova:ports>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     <nova:port uuid="d86c2c53-081c-4754-b070-2dd5028a4c08">
Oct 02 12:08:48 compute-0 nova_compute[192079]:       <nova:ip type="fixed" address="10.100.0.10" ipVersion="4"/>
Oct 02 12:08:48 compute-0 nova_compute[192079]:     </nova:port>
Oct 02 12:08:48 compute-0 nova_compute[192079]:   </nova:ports>
Oct 02 12:08:48 compute-0 nova_compute[192079]: </nova:instance>
Oct 02 12:08:48 compute-0 nova_compute[192079]:  set_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:359
Oct 02 12:08:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:48.550 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[35333426-dcf6-465a-bb55-82479a4c1e32]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:48.554 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[40569bbc-1a16-4cf2-986a-8b829ab34514]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:48.577 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[f4afc227-0fa3-44ff-bced-0c638cdbe281]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:48 compute-0 podman[226136]: 2025-10-02 12:08:48.579688436 +0000 UTC m=+0.079617150 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']})
Oct 02 12:08:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:48.592 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2d28ca15-b025-4374-b6b4-51c54d5dff4e]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap7d845a33-51'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:8f:90:16'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 10, 'tx_packets': 7, 'rx_bytes': 916, 'tx_bytes': 438, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 10, 'tx_packets': 7, 'rx_bytes': 916, 'tx_bytes': 438, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 40], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 489176, 'reachable_time': 15543, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 8, 'inoctets': 720, 'indelivers': 1, 'outforwdatagrams': 0, 'outpkts': 3, 'outoctets': 228, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 8, 'outmcastpkts': 3, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 720, 'outmcastoctets': 228, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 8, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 1, 'inerrors': 0, 'outmsgs': 3, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 226187, 'error': None, 'target': 'ovnmeta-7d845a33-56e0-4850-9f27-8a54095796f2', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:48 compute-0 podman[226141]: 2025-10-02 12:08:48.598693829 +0000 UTC m=+0.099571049 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, config_id=iscsid, container_name=iscsid, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:08:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:48.605 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[496b5cda-67a7-499c-8bd0-62dc7ba604b0]: (4, ({'family': 2, 'prefixlen': 28, 'flags': 128, 'scope': 0, 'index': 2, 'attrs': [['IFA_ADDRESS', '10.100.0.2'], ['IFA_LOCAL', '10.100.0.2'], ['IFA_BROADCAST', '10.100.0.15'], ['IFA_LABEL', 'tap7d845a33-51'], ['IFA_FLAGS', 128], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 489191, 'tstamp': 489191}]], 'header': {'length': 96, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 226188, 'error': None, 'target': 'ovnmeta-7d845a33-56e0-4850-9f27-8a54095796f2', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'}, {'family': 2, 'prefixlen': 32, 'flags': 128, 'scope': 0, 'index': 2, 'attrs': [['IFA_ADDRESS', '169.254.169.254'], ['IFA_LOCAL', '169.254.169.254'], ['IFA_BROADCAST', '169.254.169.254'], ['IFA_LABEL', 'tap7d845a33-51'], ['IFA_FLAGS', 128], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 489195, 'tstamp': 489195}]], 'header': {'length': 96, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 226188, 'error': None, 'target': 'ovnmeta-7d845a33-56e0-4850-9f27-8a54095796f2', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'})) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:48.606 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap7d845a33-50, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.607 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:48 compute-0 nova_compute[192079]: 2025-10-02 12:08:48.608 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:48.608 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap7d845a33-50, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:08:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:48.609 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:08:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:48.609 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap7d845a33-50, col_values=(('external_ids', {'iface-id': '1c321c19-d630-4a6f-8ba8-7bac90af9bae'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:08:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:48.609 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:08:49 compute-0 nova_compute[192079]: 2025-10-02 12:08:49.114 2 DEBUG nova.compute.manager [req-6afd7dc7-be77-40ea-807f-d770368a3fdd req-df30ec2d-ffc0-47d0-87bf-4e0422c94f43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Received event network-vif-plugged-a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:08:49 compute-0 nova_compute[192079]: 2025-10-02 12:08:49.115 2 DEBUG oslo_concurrency.lockutils [req-6afd7dc7-be77-40ea-807f-d770368a3fdd req-df30ec2d-ffc0-47d0-87bf-4e0422c94f43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:49 compute-0 nova_compute[192079]: 2025-10-02 12:08:49.115 2 DEBUG oslo_concurrency.lockutils [req-6afd7dc7-be77-40ea-807f-d770368a3fdd req-df30ec2d-ffc0-47d0-87bf-4e0422c94f43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:49 compute-0 nova_compute[192079]: 2025-10-02 12:08:49.116 2 DEBUG oslo_concurrency.lockutils [req-6afd7dc7-be77-40ea-807f-d770368a3fdd req-df30ec2d-ffc0-47d0-87bf-4e0422c94f43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:49 compute-0 nova_compute[192079]: 2025-10-02 12:08:49.116 2 DEBUG nova.compute.manager [req-6afd7dc7-be77-40ea-807f-d770368a3fdd req-df30ec2d-ffc0-47d0-87bf-4e0422c94f43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] No waiting events found dispatching network-vif-plugged-a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:08:49 compute-0 nova_compute[192079]: 2025-10-02 12:08:49.117 2 WARNING nova.compute.manager [req-6afd7dc7-be77-40ea-807f-d770368a3fdd req-df30ec2d-ffc0-47d0-87bf-4e0422c94f43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Received unexpected event network-vif-plugged-a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3 for instance with vm_state active and task_state None.
Oct 02 12:08:49 compute-0 nova_compute[192079]: 2025-10-02 12:08:49.117 2 DEBUG nova.compute.manager [req-6afd7dc7-be77-40ea-807f-d770368a3fdd req-df30ec2d-ffc0-47d0-87bf-4e0422c94f43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Received event network-vif-unplugged-a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:08:49 compute-0 nova_compute[192079]: 2025-10-02 12:08:49.117 2 DEBUG oslo_concurrency.lockutils [req-6afd7dc7-be77-40ea-807f-d770368a3fdd req-df30ec2d-ffc0-47d0-87bf-4e0422c94f43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:49 compute-0 nova_compute[192079]: 2025-10-02 12:08:49.118 2 DEBUG oslo_concurrency.lockutils [req-6afd7dc7-be77-40ea-807f-d770368a3fdd req-df30ec2d-ffc0-47d0-87bf-4e0422c94f43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:49 compute-0 nova_compute[192079]: 2025-10-02 12:08:49.118 2 DEBUG oslo_concurrency.lockutils [req-6afd7dc7-be77-40ea-807f-d770368a3fdd req-df30ec2d-ffc0-47d0-87bf-4e0422c94f43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:49 compute-0 nova_compute[192079]: 2025-10-02 12:08:49.119 2 DEBUG nova.compute.manager [req-6afd7dc7-be77-40ea-807f-d770368a3fdd req-df30ec2d-ffc0-47d0-87bf-4e0422c94f43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] No waiting events found dispatching network-vif-unplugged-a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:08:49 compute-0 nova_compute[192079]: 2025-10-02 12:08:49.119 2 WARNING nova.compute.manager [req-6afd7dc7-be77-40ea-807f-d770368a3fdd req-df30ec2d-ffc0-47d0-87bf-4e0422c94f43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Received unexpected event network-vif-unplugged-a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3 for instance with vm_state active and task_state None.
Oct 02 12:08:49 compute-0 nova_compute[192079]: 2025-10-02 12:08:49.119 2 DEBUG nova.compute.manager [req-6afd7dc7-be77-40ea-807f-d770368a3fdd req-df30ec2d-ffc0-47d0-87bf-4e0422c94f43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Received event network-vif-plugged-a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:08:49 compute-0 nova_compute[192079]: 2025-10-02 12:08:49.120 2 DEBUG oslo_concurrency.lockutils [req-6afd7dc7-be77-40ea-807f-d770368a3fdd req-df30ec2d-ffc0-47d0-87bf-4e0422c94f43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:49 compute-0 nova_compute[192079]: 2025-10-02 12:08:49.120 2 DEBUG oslo_concurrency.lockutils [req-6afd7dc7-be77-40ea-807f-d770368a3fdd req-df30ec2d-ffc0-47d0-87bf-4e0422c94f43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:49 compute-0 nova_compute[192079]: 2025-10-02 12:08:49.121 2 DEBUG oslo_concurrency.lockutils [req-6afd7dc7-be77-40ea-807f-d770368a3fdd req-df30ec2d-ffc0-47d0-87bf-4e0422c94f43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:49 compute-0 nova_compute[192079]: 2025-10-02 12:08:49.121 2 DEBUG nova.compute.manager [req-6afd7dc7-be77-40ea-807f-d770368a3fdd req-df30ec2d-ffc0-47d0-87bf-4e0422c94f43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] No waiting events found dispatching network-vif-plugged-a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:08:49 compute-0 nova_compute[192079]: 2025-10-02 12:08:49.122 2 WARNING nova.compute.manager [req-6afd7dc7-be77-40ea-807f-d770368a3fdd req-df30ec2d-ffc0-47d0-87bf-4e0422c94f43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Received unexpected event network-vif-plugged-a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3 for instance with vm_state active and task_state None.
Oct 02 12:08:49 compute-0 nova_compute[192079]: 2025-10-02 12:08:49.141 2 DEBUG oslo_concurrency.lockutils [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Acquiring lock "refresh_cache-d55dd428-ae1c-4c43-8582-3a46d50f4822" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:08:49 compute-0 nova_compute[192079]: 2025-10-02 12:08:49.142 2 DEBUG oslo_concurrency.lockutils [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Acquired lock "refresh_cache-d55dd428-ae1c-4c43-8582-3a46d50f4822" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:08:49 compute-0 nova_compute[192079]: 2025-10-02 12:08:49.142 2 DEBUG nova.network.neutron [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:08:49 compute-0 ovn_controller[94336]: 2025-10-02T12:08:49Z|00142|binding|INFO|Releasing lport 1c321c19-d630-4a6f-8ba8-7bac90af9bae from this chassis (sb_readonly=0)
Oct 02 12:08:49 compute-0 nova_compute[192079]: 2025-10-02 12:08:49.955 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:50 compute-0 nova_compute[192079]: 2025-10-02 12:08:50.076 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:50 compute-0 nova_compute[192079]: 2025-10-02 12:08:50.890 2 DEBUG oslo_concurrency.lockutils [None req-a1e83795-0626-4762-acc7-c51367783523 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Acquiring lock "d55dd428-ae1c-4c43-8582-3a46d50f4822" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:50 compute-0 nova_compute[192079]: 2025-10-02 12:08:50.891 2 DEBUG oslo_concurrency.lockutils [None req-a1e83795-0626-4762-acc7-c51367783523 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Lock "d55dd428-ae1c-4c43-8582-3a46d50f4822" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:50 compute-0 nova_compute[192079]: 2025-10-02 12:08:50.891 2 DEBUG oslo_concurrency.lockutils [None req-a1e83795-0626-4762-acc7-c51367783523 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Acquiring lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:50 compute-0 nova_compute[192079]: 2025-10-02 12:08:50.892 2 DEBUG oslo_concurrency.lockutils [None req-a1e83795-0626-4762-acc7-c51367783523 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:50 compute-0 nova_compute[192079]: 2025-10-02 12:08:50.892 2 DEBUG oslo_concurrency.lockutils [None req-a1e83795-0626-4762-acc7-c51367783523 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:50 compute-0 nova_compute[192079]: 2025-10-02 12:08:50.907 2 INFO nova.compute.manager [None req-a1e83795-0626-4762-acc7-c51367783523 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Terminating instance
Oct 02 12:08:50 compute-0 nova_compute[192079]: 2025-10-02 12:08:50.920 2 DEBUG nova.compute.manager [None req-a1e83795-0626-4762-acc7-c51367783523 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:08:50 compute-0 nova_compute[192079]: 2025-10-02 12:08:50.933 2 DEBUG nova.compute.manager [req-2ead09c3-0436-4a6b-9d14-ce0e62980736 req-138271f6-8d80-47e8-bf8c-30e6e52550b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Received event network-vif-deleted-a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:08:50 compute-0 nova_compute[192079]: 2025-10-02 12:08:50.933 2 INFO nova.compute.manager [req-2ead09c3-0436-4a6b-9d14-ce0e62980736 req-138271f6-8d80-47e8-bf8c-30e6e52550b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Neutron deleted interface a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3; detaching it from the instance and deleting it from the info cache
Oct 02 12:08:50 compute-0 nova_compute[192079]: 2025-10-02 12:08:50.933 2 DEBUG nova.network.neutron [req-2ead09c3-0436-4a6b-9d14-ce0e62980736 req-138271f6-8d80-47e8-bf8c-30e6e52550b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Updating instance_info_cache with network_info: [{"id": "d86c2c53-081c-4754-b070-2dd5028a4c08", "address": "fa:16:3e:4d:9f:a8", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd86c2c53-08", "ovs_interfaceid": "d86c2c53-081c-4754-b070-2dd5028a4c08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:08:50 compute-0 kernel: tapd86c2c53-08 (unregistering): left promiscuous mode
Oct 02 12:08:50 compute-0 NetworkManager[51160]: <info>  [1759406930.9497] device (tapd86c2c53-08): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:08:50 compute-0 ovn_controller[94336]: 2025-10-02T12:08:50Z|00143|binding|INFO|Releasing lport d86c2c53-081c-4754-b070-2dd5028a4c08 from this chassis (sb_readonly=0)
Oct 02 12:08:50 compute-0 ovn_controller[94336]: 2025-10-02T12:08:50Z|00144|binding|INFO|Setting lport d86c2c53-081c-4754-b070-2dd5028a4c08 down in Southbound
Oct 02 12:08:50 compute-0 ovn_controller[94336]: 2025-10-02T12:08:50Z|00145|binding|INFO|Removing iface tapd86c2c53-08 ovn-installed in OVS
Oct 02 12:08:50 compute-0 nova_compute[192079]: 2025-10-02 12:08:50.958 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:50 compute-0 nova_compute[192079]: 2025-10-02 12:08:50.959 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:50.966 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:4d:9f:a8 10.100.0.10'], port_security=['fa:16:3e:4d:9f:a8 10.100.0.10'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.10/28', 'neutron:device_id': 'd55dd428-ae1c-4c43-8582-3a46d50f4822', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-7d845a33-56e0-4850-9f27-8a54095796f2', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'ef4e3be787374d90a6a236c7f76bd940', 'neutron:revision_number': '4', 'neutron:security_group_ids': '97988f28-31b7-47ec-b097-a7d07047d94c', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com', 'neutron:port_fip': '192.168.122.207'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=4583e9be-3cfa-4470-9e2e-4e943d469605, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=d86c2c53-081c-4754-b070-2dd5028a4c08) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:08:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:50.967 103294 INFO neutron.agent.ovn.metadata.agent [-] Port d86c2c53-081c-4754-b070-2dd5028a4c08 in datapath 7d845a33-56e0-4850-9f27-8a54095796f2 unbound from our chassis
Oct 02 12:08:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:50.968 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 7d845a33-56e0-4850-9f27-8a54095796f2, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:08:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:50.969 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[33fac17b-dfcf-4b92-8d6f-009cf1852be3]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:50.969 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-7d845a33-56e0-4850-9f27-8a54095796f2 namespace which is not needed anymore
Oct 02 12:08:50 compute-0 nova_compute[192079]: 2025-10-02 12:08:50.982 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:51 compute-0 systemd[1]: machine-qemu\x2d23\x2dinstance\x2d0000002c.scope: Deactivated successfully.
Oct 02 12:08:51 compute-0 systemd[1]: machine-qemu\x2d23\x2dinstance\x2d0000002c.scope: Consumed 15.074s CPU time.
Oct 02 12:08:51 compute-0 systemd-machined[152150]: Machine qemu-23-instance-0000002c terminated.
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.127 2 DEBUG nova.objects.instance [req-2ead09c3-0436-4a6b-9d14-ce0e62980736 req-138271f6-8d80-47e8-bf8c-30e6e52550b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lazy-loading 'system_metadata' on Instance uuid d55dd428-ae1c-4c43-8582-3a46d50f4822 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:08:51 compute-0 neutron-haproxy-ovnmeta-7d845a33-56e0-4850-9f27-8a54095796f2[225844]: [NOTICE]   (225849) : haproxy version is 2.8.14-c23fe91
Oct 02 12:08:51 compute-0 neutron-haproxy-ovnmeta-7d845a33-56e0-4850-9f27-8a54095796f2[225844]: [NOTICE]   (225849) : path to executable is /usr/sbin/haproxy
Oct 02 12:08:51 compute-0 neutron-haproxy-ovnmeta-7d845a33-56e0-4850-9f27-8a54095796f2[225844]: [WARNING]  (225849) : Exiting Master process...
Oct 02 12:08:51 compute-0 neutron-haproxy-ovnmeta-7d845a33-56e0-4850-9f27-8a54095796f2[225844]: [ALERT]    (225849) : Current worker (225851) exited with code 143 (Terminated)
Oct 02 12:08:51 compute-0 neutron-haproxy-ovnmeta-7d845a33-56e0-4850-9f27-8a54095796f2[225844]: [WARNING]  (225849) : All workers exited. Exiting... (0)
Oct 02 12:08:51 compute-0 systemd[1]: libpod-1a1b35487be377a2f2711b341228a661c7f1516903526dd87d3a141734b96eee.scope: Deactivated successfully.
Oct 02 12:08:51 compute-0 podman[226214]: 2025-10-02 12:08:51.14189092 +0000 UTC m=+0.068374677 container died 1a1b35487be377a2f2711b341228a661c7f1516903526dd87d3a141734b96eee (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-7d845a33-56e0-4850-9f27-8a54095796f2, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0)
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.194 2 DEBUG nova.objects.instance [req-2ead09c3-0436-4a6b-9d14-ce0e62980736 req-138271f6-8d80-47e8-bf8c-30e6e52550b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lazy-loading 'flavor' on Instance uuid d55dd428-ae1c-4c43-8582-3a46d50f4822 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.196 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.208 2 DEBUG nova.compute.manager [req-eecfdd4e-1164-43b1-a475-fcaa118530ec req-59774c36-b09e-4619-95b3-4c9778b59d7f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Received event network-vif-unplugged-d86c2c53-081c-4754-b070-2dd5028a4c08 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.209 2 DEBUG oslo_concurrency.lockutils [req-eecfdd4e-1164-43b1-a475-fcaa118530ec req-59774c36-b09e-4619-95b3-4c9778b59d7f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.209 2 DEBUG oslo_concurrency.lockutils [req-eecfdd4e-1164-43b1-a475-fcaa118530ec req-59774c36-b09e-4619-95b3-4c9778b59d7f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.210 2 DEBUG oslo_concurrency.lockutils [req-eecfdd4e-1164-43b1-a475-fcaa118530ec req-59774c36-b09e-4619-95b3-4c9778b59d7f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.210 2 DEBUG nova.compute.manager [req-eecfdd4e-1164-43b1-a475-fcaa118530ec req-59774c36-b09e-4619-95b3-4c9778b59d7f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] No waiting events found dispatching network-vif-unplugged-d86c2c53-081c-4754-b070-2dd5028a4c08 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.210 2 DEBUG nova.compute.manager [req-eecfdd4e-1164-43b1-a475-fcaa118530ec req-59774c36-b09e-4619-95b3-4c9778b59d7f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Received event network-vif-unplugged-d86c2c53-081c-4754-b070-2dd5028a4c08 for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:08:51 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-1a1b35487be377a2f2711b341228a661c7f1516903526dd87d3a141734b96eee-userdata-shm.mount: Deactivated successfully.
Oct 02 12:08:51 compute-0 systemd[1]: var-lib-containers-storage-overlay-a8630d835d1f8b1f99e608ae92e192d8924dde1ca8ae9642c73ee6ef6bf2acf1-merged.mount: Deactivated successfully.
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.235 2 INFO nova.virt.libvirt.driver [-] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Instance destroyed successfully.
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.236 2 DEBUG nova.objects.instance [None req-a1e83795-0626-4762-acc7-c51367783523 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Lazy-loading 'resources' on Instance uuid d55dd428-ae1c-4c43-8582-3a46d50f4822 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.239 2 INFO nova.network.neutron [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Port a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache.
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.240 2 DEBUG nova.network.neutron [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Updating instance_info_cache with network_info: [{"id": "d86c2c53-081c-4754-b070-2dd5028a4c08", "address": "fa:16:3e:4d:9f:a8", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd86c2c53-08", "ovs_interfaceid": "d86c2c53-081c-4754-b070-2dd5028a4c08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:08:51 compute-0 podman[226214]: 2025-10-02 12:08:51.241242762 +0000 UTC m=+0.167726479 container cleanup 1a1b35487be377a2f2711b341228a661c7f1516903526dd87d3a141734b96eee (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-7d845a33-56e0-4850-9f27-8a54095796f2, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.245 2 DEBUG nova.virt.libvirt.vif [req-2ead09c3-0436-4a6b-9d14-ce0e62980736 req-138271f6-8d80-47e8-bf8c-30e6e52550b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:08:06Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-AttachInterfacesTestJSON-server-1852145717',display_name='tempest-AttachInterfacesTestJSON-server-1852145717',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-attachinterfacestestjson-server-1852145717',id=44,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBOMAiNKx1nMsQWkjyoacitfMpSCECpXaL6jiwNift5lqyR8GB5bAw9OQhuD+NMppggB+YdsyU4EuF27p1sPXC0U7gBRRZIdIzuVGXUDvMEa8cZQfCNptjsHEFbvdeH21PA==',key_name='tempest-keypair-1558941664',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:08:15Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata=<?>,migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=<?>,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='ef4e3be787374d90a6a236c7f76bd940',ramdisk_id='',reservation_id='r-f02kral6',resources=<?>,root_device_name='/dev/vda',root_gb=1,security_groups=<?>,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-AttachInterfacesTestJSON-812274278',owner_user_name='tempest-AttachInterfacesTestJSON-812274278-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:08:50Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='fbc7616089cb4f78832692487019c83d',uuid=d55dd428-ae1c-4c43-8582-3a46d50f4822,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "address": "fa:16:3e:41:fe:e7", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": 
{"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa9e42a3e-0f", "ovs_interfaceid": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.246 2 DEBUG nova.network.os_vif_util [req-2ead09c3-0436-4a6b-9d14-ce0e62980736 req-138271f6-8d80-47e8-bf8c-30e6e52550b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Converting VIF {"id": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "address": "fa:16:3e:41:fe:e7", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa9e42a3e-0f", "ovs_interfaceid": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.247 2 DEBUG nova.network.os_vif_util [req-2ead09c3-0436-4a6b-9d14-ce0e62980736 req-138271f6-8d80-47e8-bf8c-30e6e52550b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:41:fe:e7,bridge_name='br-int',has_traffic_filtering=True,id=a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3,network=Network(7d845a33-56e0-4850-9f27-8a54095796f2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapa9e42a3e-0f') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:08:51 compute-0 systemd[1]: libpod-conmon-1a1b35487be377a2f2711b341228a661c7f1516903526dd87d3a141734b96eee.scope: Deactivated successfully.
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.251 2 DEBUG nova.virt.libvirt.vif [None req-a1e83795-0626-4762-acc7-c51367783523 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:08:06Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-AttachInterfacesTestJSON-server-1852145717',display_name='tempest-AttachInterfacesTestJSON-server-1852145717',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-attachinterfacestestjson-server-1852145717',id=44,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBOMAiNKx1nMsQWkjyoacitfMpSCECpXaL6jiwNift5lqyR8GB5bAw9OQhuD+NMppggB+YdsyU4EuF27p1sPXC0U7gBRRZIdIzuVGXUDvMEa8cZQfCNptjsHEFbvdeH21PA==',key_name='tempest-keypair-1558941664',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:08:15Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='ef4e3be787374d90a6a236c7f76bd940',ramdisk_id='',reservation_id='r-f02kral6',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-AttachInterfacesTestJSON-812274278',owner_user_name='tempest-AttachInterfacesTestJSON-812274278-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:08:15Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='fbc7616089cb4f78832692487019c83d',uuid=d55dd428-ae1c-4c43-8582-3a46d50f4822,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "d86c2c53-081c-4754-b070-2dd5028a4c08", "address": "fa:16:3e:4d:9f:a8", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], 
"gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd86c2c53-08", "ovs_interfaceid": "d86c2c53-081c-4754-b070-2dd5028a4c08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.252 2 DEBUG nova.network.os_vif_util [None req-a1e83795-0626-4762-acc7-c51367783523 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Converting VIF {"id": "d86c2c53-081c-4754-b070-2dd5028a4c08", "address": "fa:16:3e:4d:9f:a8", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd86c2c53-08", "ovs_interfaceid": "d86c2c53-081c-4754-b070-2dd5028a4c08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.253 2 DEBUG nova.network.os_vif_util [None req-a1e83795-0626-4762-acc7-c51367783523 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:4d:9f:a8,bridge_name='br-int',has_traffic_filtering=True,id=d86c2c53-081c-4754-b070-2dd5028a4c08,network=Network(7d845a33-56e0-4850-9f27-8a54095796f2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd86c2c53-08') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.253 2 DEBUG os_vif [None req-a1e83795-0626-4762-acc7-c51367783523 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:4d:9f:a8,bridge_name='br-int',has_traffic_filtering=True,id=d86c2c53-081c-4754-b070-2dd5028a4c08,network=Network(7d845a33-56e0-4850-9f27-8a54095796f2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd86c2c53-08') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.257 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.257 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapd86c2c53-08, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.260 2 DEBUG oslo_concurrency.lockutils [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Releasing lock "refresh_cache-d55dd428-ae1c-4c43-8582-3a46d50f4822" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.264 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.266 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.268 2 DEBUG nova.virt.libvirt.guest [req-2ead09c3-0436-4a6b-9d14-ce0e62980736 req-138271f6-8d80-47e8-bf8c-30e6e52550b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] looking for interface given config: <interface type="ethernet"><mac address="fa:16:3e:41:fe:e7"/><model type="virtio"/><driver name="vhost" rx_queue_size="512"/><mtu size="1442"/><target dev="tapa9e42a3e-0f"/></interface> get_interface_by_cfg /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:257
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.270 2 INFO os_vif [None req-a1e83795-0626-4762-acc7-c51367783523 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:4d:9f:a8,bridge_name='br-int',has_traffic_filtering=True,id=d86c2c53-081c-4754-b070-2dd5028a4c08,network=Network(7d845a33-56e0-4850-9f27-8a54095796f2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd86c2c53-08')
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.271 2 DEBUG nova.virt.libvirt.vif [None req-a1e83795-0626-4762-acc7-c51367783523 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:08:06Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-AttachInterfacesTestJSON-server-1852145717',display_name='tempest-AttachInterfacesTestJSON-server-1852145717',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-attachinterfacestestjson-server-1852145717',id=44,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBOMAiNKx1nMsQWkjyoacitfMpSCECpXaL6jiwNift5lqyR8GB5bAw9OQhuD+NMppggB+YdsyU4EuF27p1sPXC0U7gBRRZIdIzuVGXUDvMEa8cZQfCNptjsHEFbvdeH21PA==',key_name='tempest-keypair-1558941664',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:08:15Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='ef4e3be787374d90a6a236c7f76bd940',ramdisk_id='',reservation_id='r-f02kral6',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-AttachInterfacesTestJSON-812274278',owner_user_name='tempest-AttachInterfacesTestJSON-812274278-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:08:15Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='fbc7616089cb4f78832692487019c83d',uuid=d55dd428-ae1c-4c43-8582-3a46d50f4822,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "address": "fa:16:3e:41:fe:e7", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], 
"gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa9e42a3e-0f", "ovs_interfaceid": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.271 2 DEBUG nova.network.os_vif_util [None req-a1e83795-0626-4762-acc7-c51367783523 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Converting VIF {"id": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "address": "fa:16:3e:41:fe:e7", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa9e42a3e-0f", "ovs_interfaceid": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.272 2 DEBUG nova.network.os_vif_util [None req-a1e83795-0626-4762-acc7-c51367783523 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:41:fe:e7,bridge_name='br-int',has_traffic_filtering=True,id=a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3,network=Network(7d845a33-56e0-4850-9f27-8a54095796f2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapa9e42a3e-0f') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.272 2 DEBUG os_vif [None req-a1e83795-0626-4762-acc7-c51367783523 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:41:fe:e7,bridge_name='br-int',has_traffic_filtering=True,id=a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3,network=Network(7d845a33-56e0-4850-9f27-8a54095796f2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapa9e42a3e-0f') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.273 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.274 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapa9e42a3e-0f, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.274 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.276 2 INFO os_vif [None req-a1e83795-0626-4762-acc7-c51367783523 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:41:fe:e7,bridge_name='br-int',has_traffic_filtering=True,id=a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3,network=Network(7d845a33-56e0-4850-9f27-8a54095796f2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapa9e42a3e-0f')
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.277 2 INFO nova.virt.libvirt.driver [None req-a1e83795-0626-4762-acc7-c51367783523 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Deleting instance files /var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822_del
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.278 2 INFO nova.virt.libvirt.driver [None req-a1e83795-0626-4762-acc7-c51367783523 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Deletion of /var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822_del complete
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.284 2 DEBUG nova.virt.libvirt.guest [req-2ead09c3-0436-4a6b-9d14-ce0e62980736 req-138271f6-8d80-47e8-bf8c-30e6e52550b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] interface for config: <interface type="ethernet"><mac address="fa:16:3e:41:fe:e7"/><model type="virtio"/><driver name="vhost" rx_queue_size="512"/><mtu size="1442"/><target dev="tapa9e42a3e-0f"/></interface>not found in domain: <domain type='kvm'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   <name>instance-0000002c</name>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   <uuid>d55dd428-ae1c-4c43-8582-3a46d50f4822</uuid>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <nova:name>tempest-AttachInterfacesTestJSON-server-1852145717</nova:name>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:08:12</nova:creationTime>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:08:51 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:08:51 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:08:51 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:08:51 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:08:51 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:08:51 compute-0 nova_compute[192079]:         <nova:user uuid="fbc7616089cb4f78832692487019c83d">tempest-AttachInterfacesTestJSON-812274278-project-member</nova:user>
Oct 02 12:08:51 compute-0 nova_compute[192079]:         <nova:project uuid="ef4e3be787374d90a6a236c7f76bd940">tempest-AttachInterfacesTestJSON-812274278</nova:project>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:08:51 compute-0 nova_compute[192079]:         <nova:port uuid="d86c2c53-081c-4754-b070-2dd5028a4c08">
Oct 02 12:08:51 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.10" ipVersion="4"/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   <memory unit='KiB'>131072</memory>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   <currentMemory unit='KiB'>131072</currentMemory>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   <vcpu placement='static'>1</vcpu>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   <sysinfo type='smbios'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <system>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <entry name='manufacturer'>RDO</entry>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <entry name='product'>OpenStack Compute</entry>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <entry name='version'>27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <entry name='serial'>d55dd428-ae1c-4c43-8582-3a46d50f4822</entry>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <entry name='uuid'>d55dd428-ae1c-4c43-8582-3a46d50f4822</entry>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <entry name='family'>Virtual Machine</entry>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </system>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   <os>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <type arch='x86_64' machine='pc-q35-rhel9.6.0'>hvm</type>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <boot dev='hd'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <smbios mode='sysinfo'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   </os>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   <features>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <vmcoreinfo state='on'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   </features>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   <cpu mode='custom' match='exact' check='partial'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <model fallback='allow'>Nehalem</model>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <topology sockets='1' dies='1' clusters='1' cores='1' threads='1'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   <clock offset='utc'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <timer name='pit' tickpolicy='delay'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <timer name='rtc' tickpolicy='catchup'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <timer name='hpet' present='no'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   <on_poweroff>destroy</on_poweroff>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   <on_reboot>restart</on_reboot>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   <on_crash>destroy</on_crash>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <emulator>/usr/libexec/qemu-kvm</emulator>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <disk type='file' device='disk'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <driver name='qemu' type='qcow2' cache='none'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <source file='/var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822/disk'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target dev='vda' bus='virtio'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x03' slot='0x00' function='0x0'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <disk type='file' device='cdrom'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <driver name='qemu' type='raw' cache='none'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <source file='/var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822/disk.config'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target dev='sda' bus='sata'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <readonly/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='drive' controller='0' bus='0' target='0' unit='0'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <controller type='pci' index='0' model='pcie-root'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <controller type='pci' index='1' model='pcie-root-port'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target chassis='1' port='0x10'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x02' function='0x0' multifunction='on'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <controller type='pci' index='2' model='pcie-root-port'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target chassis='2' port='0x11'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x02' function='0x1'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <controller type='pci' index='3' model='pcie-root-port'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target chassis='3' port='0x12'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x02' function='0x2'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <controller type='pci' index='4' model='pcie-root-port'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target chassis='4' port='0x13'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x02' function='0x3'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <controller type='pci' index='5' model='pcie-root-port'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target chassis='5' port='0x14'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x02' function='0x4'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <controller type='pci' index='6' model='pcie-root-port'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target chassis='6' port='0x15'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x02' function='0x5'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <controller type='pci' index='7' model='pcie-root-port'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target chassis='7' port='0x16'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x02' function='0x6'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <controller type='pci' index='8' model='pcie-root-port'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target chassis='8' port='0x17'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x02' function='0x7'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <controller type='pci' index='9' model='pcie-root-port'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target chassis='9' port='0x18'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x03' function='0x0' multifunction='on'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <controller type='pci' index='10' model='pcie-root-port'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target chassis='10' port='0x19'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x03' function='0x1'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <controller type='pci' index='11' model='pcie-root-port'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target chassis='11' port='0x1a'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x03' function='0x2'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <controller type='pci' index='12' model='pcie-root-port'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target chassis='12' port='0x1b'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x03' function='0x3'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <controller type='pci' index='13' model='pcie-root-port'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target chassis='13' port='0x1c'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x03' function='0x4'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <controller type='pci' index='14' model='pcie-root-port'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target chassis='14' port='0x1d'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x03' function='0x5'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <controller type='pci' index='15' model='pcie-root-port'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target chassis='15' port='0x1e'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x03' function='0x6'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <controller type='pci' index='16' model='pcie-root-port'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target chassis='16' port='0x1f'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x03' function='0x7'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <controller type='pci' index='17' model='pcie-root-port'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target chassis='17' port='0x20'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x04' function='0x0' multifunction='on'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <controller type='pci' index='18' model='pcie-root-port'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target chassis='18' port='0x21'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x04' function='0x1'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <controller type='pci' index='19' model='pcie-root-port'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target chassis='19' port='0x22'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x04' function='0x2'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <controller type='pci' index='20' model='pcie-root-port'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target chassis='20' port='0x23'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x04' function='0x3'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <controller type='pci' index='21' model='pcie-root-port'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target chassis='21' port='0x24'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x04' function='0x4'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <controller type='pci' index='22' model='pcie-root-port'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target chassis='22' port='0x25'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x04' function='0x5'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <controller type='pci' index='23' model='pcie-root-port'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target chassis='23' port='0x26'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x04' function='0x6'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <controller type='pci' index='24' model='pcie-root-port'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target chassis='24' port='0x27'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x04' function='0x7'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <controller type='pci' index='25' model='pcie-root-port'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <model name='pcie-root-port'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target chassis='25' port='0x28'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x05' function='0x0'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <controller type='pci' index='26' model='pcie-to-pci-bridge'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <model name='pcie-pci-bridge'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x01' slot='0x00' function='0x0'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <controller type='usb' index='0' model='piix3-uhci'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x1a' slot='0x01' function='0x0'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <controller type='sata' index='0'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x1f' function='0x2'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </controller>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <interface type='ethernet'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <mac address='fa:16:3e:4d:9f:a8'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target dev='tapd86c2c53-08'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <model type='virtio'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <driver name='vhost' rx_queue_size='512'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <mtu size='1442'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x02' slot='0x00' function='0x0'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <serial type='pty'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <log file='/var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822/console.log' append='off'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target type='isa-serial' port='0'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:         <model name='isa-serial'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       </target>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <console type='pty'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <log file='/var/lib/nova/instances/d55dd428-ae1c-4c43-8582-3a46d50f4822/console.log' append='off'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <target type='serial' port='0'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </console>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <input type='tablet' bus='usb'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='usb' bus='0' port='1'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </input>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <input type='mouse' bus='ps2'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <input type='keyboard' bus='ps2'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <graphics type='vnc' port='-1' autoport='yes' listen='::0'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <listen type='address' address='::0'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </graphics>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <audio id='1' type='none'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <video>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <model type='virtio' heads='1' primary='yes'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x00' slot='0x01' function='0x0'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </video>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <watchdog model='itco' action='reset'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <memballoon model='virtio'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <stats period='10'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x04' slot='0x00' function='0x0'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <rng model='virtio'>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <backend model='random'>/dev/urandom</backend>
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <address type='pci' domain='0x0000' bus='0x05' slot='0x00' function='0x0'/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:08:51 compute-0 nova_compute[192079]: </domain>
Oct 02 12:08:51 compute-0 nova_compute[192079]:  get_interface_by_cfg /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:282
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.287 2 WARNING nova.virt.libvirt.driver [req-2ead09c3-0436-4a6b-9d14-ce0e62980736 req-138271f6-8d80-47e8-bf8c-30e6e52550b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Detaching interface fa:16:3e:41:fe:e7 failed because the device is no longer found on the guest.: nova.exception.DeviceNotFound: Device 'tapa9e42a3e-0f' not found.
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.288 2 DEBUG nova.virt.libvirt.vif [req-2ead09c3-0436-4a6b-9d14-ce0e62980736 req-138271f6-8d80-47e8-bf8c-30e6e52550b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:08:06Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-AttachInterfacesTestJSON-server-1852145717',display_name='tempest-AttachInterfacesTestJSON-server-1852145717',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-attachinterfacestestjson-server-1852145717',id=44,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBOMAiNKx1nMsQWkjyoacitfMpSCECpXaL6jiwNift5lqyR8GB5bAw9OQhuD+NMppggB+YdsyU4EuF27p1sPXC0U7gBRRZIdIzuVGXUDvMEa8cZQfCNptjsHEFbvdeH21PA==',key_name='tempest-keypair-1558941664',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:08:15Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata=<?>,migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=<?>,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='ef4e3be787374d90a6a236c7f76bd940',ramdisk_id='',reservation_id='r-f02kral6',resources=<?>,root_device_name='/dev/vda',root_gb=1,security_groups=<?>,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-AttachInterfacesTestJSON-812274278',owner_user_name='tempest-AttachInterfacesTestJSON-812274278-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:08:50Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='fbc7616089cb4f78832692487019c83d',uuid=d55dd428-ae1c-4c43-8582-3a46d50f4822,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "address": "fa:16:3e:41:fe:e7", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": 
{"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa9e42a3e-0f", "ovs_interfaceid": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.289 2 DEBUG nova.network.os_vif_util [req-2ead09c3-0436-4a6b-9d14-ce0e62980736 req-138271f6-8d80-47e8-bf8c-30e6e52550b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Converting VIF {"id": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "address": "fa:16:3e:41:fe:e7", "network": {"id": "7d845a33-56e0-4850-9f27-8a54095796f2", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581762823-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ef4e3be787374d90a6a236c7f76bd940", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa9e42a3e-0f", "ovs_interfaceid": "a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.289 2 DEBUG nova.network.os_vif_util [req-2ead09c3-0436-4a6b-9d14-ce0e62980736 req-138271f6-8d80-47e8-bf8c-30e6e52550b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:41:fe:e7,bridge_name='br-int',has_traffic_filtering=True,id=a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3,network=Network(7d845a33-56e0-4850-9f27-8a54095796f2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapa9e42a3e-0f') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.290 2 DEBUG os_vif [req-2ead09c3-0436-4a6b-9d14-ce0e62980736 req-138271f6-8d80-47e8-bf8c-30e6e52550b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:41:fe:e7,bridge_name='br-int',has_traffic_filtering=True,id=a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3,network=Network(7d845a33-56e0-4850-9f27-8a54095796f2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapa9e42a3e-0f') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.295 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.296 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapa9e42a3e-0f, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.296 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.298 2 INFO os_vif [req-2ead09c3-0436-4a6b-9d14-ce0e62980736 req-138271f6-8d80-47e8-bf8c-30e6e52550b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:41:fe:e7,bridge_name='br-int',has_traffic_filtering=True,id=a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3,network=Network(7d845a33-56e0-4850-9f27-8a54095796f2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapa9e42a3e-0f')
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.299 2 DEBUG nova.virt.libvirt.guest [req-2ead09c3-0436-4a6b-9d14-ce0e62980736 req-138271f6-8d80-47e8-bf8c-30e6e52550b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] set metadata xml: <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:08:51 compute-0 nova_compute[192079]:   <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   <nova:name>tempest-AttachInterfacesTestJSON-server-1852145717</nova:name>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   <nova:creationTime>2025-10-02 12:08:51</nova:creationTime>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   <nova:flavor name="m1.nano">
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <nova:memory>128</nova:memory>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <nova:disk>1</nova:disk>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <nova:swap>0</nova:swap>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <nova:vcpus>1</nova:vcpus>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   </nova:flavor>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   <nova:owner>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <nova:user uuid="fbc7616089cb4f78832692487019c83d">tempest-AttachInterfacesTestJSON-812274278-project-member</nova:user>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <nova:project uuid="ef4e3be787374d90a6a236c7f76bd940">tempest-AttachInterfacesTestJSON-812274278</nova:project>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   </nova:owner>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   <nova:ports>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     <nova:port uuid="d86c2c53-081c-4754-b070-2dd5028a4c08">
Oct 02 12:08:51 compute-0 nova_compute[192079]:       <nova:ip type="fixed" address="10.100.0.10" ipVersion="4"/>
Oct 02 12:08:51 compute-0 nova_compute[192079]:     </nova:port>
Oct 02 12:08:51 compute-0 nova_compute[192079]:   </nova:ports>
Oct 02 12:08:51 compute-0 nova_compute[192079]: </nova:instance>
Oct 02 12:08:51 compute-0 nova_compute[192079]:  set_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:359
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.300 2 DEBUG oslo_concurrency.lockutils [None req-928dc96e-1e57-408e-b8b7-bb0d2144d522 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Lock "interface-d55dd428-ae1c-4c43-8582-3a46d50f4822-a9e42a3e-0f7f-4f1b-9ec9-0f65db6b27a3" "released" by "nova.compute.manager.ComputeManager.detach_interface.<locals>.do_detach_interface" :: held 2.939s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:51 compute-0 podman[226262]: 2025-10-02 12:08:51.308120048 +0000 UTC m=+0.043733442 container remove 1a1b35487be377a2f2711b341228a661c7f1516903526dd87d3a141734b96eee (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-7d845a33-56e0-4850-9f27-8a54095796f2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:08:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:51.313 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4d821f87-0ef1-41ee-ae85-b2730fd424a7]: (4, ('Thu Oct  2 12:08:51 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-7d845a33-56e0-4850-9f27-8a54095796f2 (1a1b35487be377a2f2711b341228a661c7f1516903526dd87d3a141734b96eee)\n1a1b35487be377a2f2711b341228a661c7f1516903526dd87d3a141734b96eee\nThu Oct  2 12:08:51 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-7d845a33-56e0-4850-9f27-8a54095796f2 (1a1b35487be377a2f2711b341228a661c7f1516903526dd87d3a141734b96eee)\n1a1b35487be377a2f2711b341228a661c7f1516903526dd87d3a141734b96eee\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:51.315 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[05cf907e-1a69-4f9f-96dd-56f0a328353a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:51.316 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap7d845a33-50, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.317 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:51 compute-0 kernel: tap7d845a33-50: left promiscuous mode
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.332 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:51.336 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[679ce304-da00-4ab4-8ce2-324c2d10cdc5]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.357 2 INFO nova.compute.manager [None req-a1e83795-0626-4762-acc7-c51367783523 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Took 0.44 seconds to destroy the instance on the hypervisor.
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.357 2 DEBUG oslo.service.loopingcall [None req-a1e83795-0626-4762-acc7-c51367783523 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.358 2 DEBUG nova.compute.manager [-] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:08:51 compute-0 nova_compute[192079]: 2025-10-02 12:08:51.358 2 DEBUG nova.network.neutron [-] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:08:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:51.362 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[050c54b6-7f00-46d3-8add-5425d32b1265]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:51.363 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8b8fef5e-78c3-4465-a600-596ed2553f4f]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:51.379 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c7a6a312-63bd-4367-94f3-43bb496abf47]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 489169, 'reachable_time': 19056, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 226275, 'error': None, 'target': 'ovnmeta-7d845a33-56e0-4850-9f27-8a54095796f2', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:51 compute-0 systemd[1]: run-netns-ovnmeta\x2d7d845a33\x2d56e0\x2d4850\x2d9f27\x2d8a54095796f2.mount: Deactivated successfully.
Oct 02 12:08:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:51.384 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-7d845a33-56e0-4850-9f27-8a54095796f2 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:08:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:08:51.384 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[6e5f2b73-3cdc-4e7e-a011-d5ef1bcdbd2d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:08:52 compute-0 nova_compute[192079]: 2025-10-02 12:08:52.475 2 DEBUG nova.network.neutron [-] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:08:52 compute-0 nova_compute[192079]: 2025-10-02 12:08:52.508 2 INFO nova.compute.manager [-] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Took 1.15 seconds to deallocate network for instance.
Oct 02 12:08:52 compute-0 nova_compute[192079]: 2025-10-02 12:08:52.575 2 DEBUG oslo_concurrency.lockutils [None req-a1e83795-0626-4762-acc7-c51367783523 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:52 compute-0 nova_compute[192079]: 2025-10-02 12:08:52.576 2 DEBUG oslo_concurrency.lockutils [None req-a1e83795-0626-4762-acc7-c51367783523 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:52 compute-0 nova_compute[192079]: 2025-10-02 12:08:52.649 2 DEBUG nova.compute.provider_tree [None req-a1e83795-0626-4762-acc7-c51367783523 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:08:52 compute-0 nova_compute[192079]: 2025-10-02 12:08:52.663 2 DEBUG nova.scheduler.client.report [None req-a1e83795-0626-4762-acc7-c51367783523 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:08:52 compute-0 nova_compute[192079]: 2025-10-02 12:08:52.682 2 DEBUG oslo_concurrency.lockutils [None req-a1e83795-0626-4762-acc7-c51367783523 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.106s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:52 compute-0 nova_compute[192079]: 2025-10-02 12:08:52.720 2 INFO nova.scheduler.client.report [None req-a1e83795-0626-4762-acc7-c51367783523 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Deleted allocations for instance d55dd428-ae1c-4c43-8582-3a46d50f4822
Oct 02 12:08:52 compute-0 nova_compute[192079]: 2025-10-02 12:08:52.820 2 DEBUG oslo_concurrency.lockutils [None req-a1e83795-0626-4762-acc7-c51367783523 fbc7616089cb4f78832692487019c83d ef4e3be787374d90a6a236c7f76bd940 - - default default] Lock "d55dd428-ae1c-4c43-8582-3a46d50f4822" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.930s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:53 compute-0 nova_compute[192079]: 2025-10-02 12:08:53.040 2 DEBUG nova.compute.manager [req-e7432b0b-6264-4b96-a6ef-2ea334fd5928 req-9fc1bd0a-e0a5-448d-83d4-21ba5899101d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Received event network-vif-deleted-d86c2c53-081c-4754-b070-2dd5028a4c08 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:08:53 compute-0 nova_compute[192079]: 2025-10-02 12:08:53.322 2 DEBUG nova.compute.manager [req-7b642275-c818-423c-b1e1-eb1abac1504c req-3d1304e0-2a43-4872-9e2e-e53f34eaf36e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Received event network-vif-plugged-d86c2c53-081c-4754-b070-2dd5028a4c08 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:08:53 compute-0 nova_compute[192079]: 2025-10-02 12:08:53.322 2 DEBUG oslo_concurrency.lockutils [req-7b642275-c818-423c-b1e1-eb1abac1504c req-3d1304e0-2a43-4872-9e2e-e53f34eaf36e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:08:53 compute-0 nova_compute[192079]: 2025-10-02 12:08:53.323 2 DEBUG oslo_concurrency.lockutils [req-7b642275-c818-423c-b1e1-eb1abac1504c req-3d1304e0-2a43-4872-9e2e-e53f34eaf36e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:08:53 compute-0 nova_compute[192079]: 2025-10-02 12:08:53.323 2 DEBUG oslo_concurrency.lockutils [req-7b642275-c818-423c-b1e1-eb1abac1504c req-3d1304e0-2a43-4872-9e2e-e53f34eaf36e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d55dd428-ae1c-4c43-8582-3a46d50f4822-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:08:53 compute-0 nova_compute[192079]: 2025-10-02 12:08:53.323 2 DEBUG nova.compute.manager [req-7b642275-c818-423c-b1e1-eb1abac1504c req-3d1304e0-2a43-4872-9e2e-e53f34eaf36e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] No waiting events found dispatching network-vif-plugged-d86c2c53-081c-4754-b070-2dd5028a4c08 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:08:53 compute-0 nova_compute[192079]: 2025-10-02 12:08:53.323 2 WARNING nova.compute.manager [req-7b642275-c818-423c-b1e1-eb1abac1504c req-3d1304e0-2a43-4872-9e2e-e53f34eaf36e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Received unexpected event network-vif-plugged-d86c2c53-081c-4754-b070-2dd5028a4c08 for instance with vm_state deleted and task_state None.
Oct 02 12:08:54 compute-0 nova_compute[192079]: 2025-10-02 12:08:54.666 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759406919.6647608, 1df89ab6-e68b-4cdb-96ac-80896dce72c0 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:08:54 compute-0 nova_compute[192079]: 2025-10-02 12:08:54.666 2 INFO nova.compute.manager [-] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] VM Stopped (Lifecycle Event)
Oct 02 12:08:54 compute-0 nova_compute[192079]: 2025-10-02 12:08:54.692 2 DEBUG nova.compute.manager [None req-20f0a44c-cdeb-42dc-84ce-f8223c3da840 - - - - - -] [instance: 1df89ab6-e68b-4cdb-96ac-80896dce72c0] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:08:55 compute-0 nova_compute[192079]: 2025-10-02 12:08:55.116 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:56 compute-0 nova_compute[192079]: 2025-10-02 12:08:56.308 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:57 compute-0 nova_compute[192079]: 2025-10-02 12:08:57.420 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:08:59 compute-0 podman[226277]: 2025-10-02 12:08:59.144018568 +0000 UTC m=+0.056965289 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, tcib_managed=true, config_id=ovn_metadata_agent, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, container_name=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:08:59 compute-0 podman[226278]: 2025-10-02 12:08:59.176932606 +0000 UTC m=+0.084435610 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_controller, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, config_id=ovn_controller, org.label-schema.license=GPLv2, tcib_managed=true)
Oct 02 12:08:59 compute-0 podman[226279]: 2025-10-02 12:08:59.180159123 +0000 UTC m=+0.079227079 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:09:00 compute-0 nova_compute[192079]: 2025-10-02 12:09:00.116 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:00 compute-0 nova_compute[192079]: 2025-10-02 12:09:00.362 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:01 compute-0 nova_compute[192079]: 2025-10-02 12:09:01.312 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:02.210 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:09:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:02.211 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:09:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:02.211 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:09:03 compute-0 nova_compute[192079]: 2025-10-02 12:09:03.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_incomplete_migrations run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:09:03 compute-0 nova_compute[192079]: 2025-10-02 12:09:03.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Cleaning up deleted instances with incomplete migration  _cleanup_incomplete_migrations /usr/lib/python3.9/site-packages/nova/compute/manager.py:11183
Oct 02 12:09:05 compute-0 nova_compute[192079]: 2025-10-02 12:09:05.164 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:06 compute-0 nova_compute[192079]: 2025-10-02 12:09:06.233 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759406931.2320392, d55dd428-ae1c-4c43-8582-3a46d50f4822 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:09:06 compute-0 nova_compute[192079]: 2025-10-02 12:09:06.233 2 INFO nova.compute.manager [-] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] VM Stopped (Lifecycle Event)
Oct 02 12:09:06 compute-0 nova_compute[192079]: 2025-10-02 12:09:06.261 2 DEBUG nova.compute.manager [None req-35334348-cb99-40f4-adba-7014d5a4db37 - - - - - -] [instance: d55dd428-ae1c-4c43-8582-3a46d50f4822] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:09:06 compute-0 nova_compute[192079]: 2025-10-02 12:09:06.358 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:06 compute-0 nova_compute[192079]: 2025-10-02 12:09:06.405 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:06 compute-0 nova_compute[192079]: 2025-10-02 12:09:06.630 2 DEBUG oslo_concurrency.lockutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Acquiring lock "183a1b6e-784e-41d6-8632-851d606f23dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:09:06 compute-0 nova_compute[192079]: 2025-10-02 12:09:06.630 2 DEBUG oslo_concurrency.lockutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Lock "183a1b6e-784e-41d6-8632-851d606f23dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:09:06 compute-0 nova_compute[192079]: 2025-10-02 12:09:06.655 2 DEBUG nova.compute.manager [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:09:06 compute-0 nova_compute[192079]: 2025-10-02 12:09:06.745 2 DEBUG oslo_concurrency.lockutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:09:06 compute-0 nova_compute[192079]: 2025-10-02 12:09:06.745 2 DEBUG oslo_concurrency.lockutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:09:06 compute-0 nova_compute[192079]: 2025-10-02 12:09:06.751 2 DEBUG nova.virt.hardware [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:09:06 compute-0 nova_compute[192079]: 2025-10-02 12:09:06.751 2 INFO nova.compute.claims [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:09:06 compute-0 nova_compute[192079]: 2025-10-02 12:09:06.864 2 DEBUG nova.compute.provider_tree [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:09:06 compute-0 nova_compute[192079]: 2025-10-02 12:09:06.884 2 DEBUG nova.scheduler.client.report [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:09:06 compute-0 nova_compute[192079]: 2025-10-02 12:09:06.904 2 DEBUG oslo_concurrency.lockutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.159s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:09:06 compute-0 nova_compute[192079]: 2025-10-02 12:09:06.905 2 DEBUG nova.compute.manager [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:09:06 compute-0 nova_compute[192079]: 2025-10-02 12:09:06.965 2 DEBUG nova.compute.manager [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:09:06 compute-0 nova_compute[192079]: 2025-10-02 12:09:06.966 2 DEBUG nova.network.neutron [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:09:06 compute-0 nova_compute[192079]: 2025-10-02 12:09:06.986 2 INFO nova.virt.libvirt.driver [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:09:07 compute-0 nova_compute[192079]: 2025-10-02 12:09:07.003 2 DEBUG nova.compute.manager [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:09:07 compute-0 nova_compute[192079]: 2025-10-02 12:09:07.120 2 DEBUG nova.compute.manager [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:09:07 compute-0 nova_compute[192079]: 2025-10-02 12:09:07.121 2 DEBUG nova.virt.libvirt.driver [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:09:07 compute-0 nova_compute[192079]: 2025-10-02 12:09:07.122 2 INFO nova.virt.libvirt.driver [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Creating image(s)
Oct 02 12:09:07 compute-0 nova_compute[192079]: 2025-10-02 12:09:07.122 2 DEBUG oslo_concurrency.lockutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Acquiring lock "/var/lib/nova/instances/183a1b6e-784e-41d6-8632-851d606f23dc/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:09:07 compute-0 nova_compute[192079]: 2025-10-02 12:09:07.123 2 DEBUG oslo_concurrency.lockutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Lock "/var/lib/nova/instances/183a1b6e-784e-41d6-8632-851d606f23dc/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:09:07 compute-0 nova_compute[192079]: 2025-10-02 12:09:07.123 2 DEBUG oslo_concurrency.lockutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Lock "/var/lib/nova/instances/183a1b6e-784e-41d6-8632-851d606f23dc/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:09:07 compute-0 nova_compute[192079]: 2025-10-02 12:09:07.135 2 DEBUG oslo_concurrency.processutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:09:07 compute-0 nova_compute[192079]: 2025-10-02 12:09:07.191 2 DEBUG oslo_concurrency.processutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:09:07 compute-0 nova_compute[192079]: 2025-10-02 12:09:07.192 2 DEBUG oslo_concurrency.lockutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:09:07 compute-0 nova_compute[192079]: 2025-10-02 12:09:07.193 2 DEBUG oslo_concurrency.lockutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:09:07 compute-0 nova_compute[192079]: 2025-10-02 12:09:07.204 2 DEBUG oslo_concurrency.processutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:09:07 compute-0 nova_compute[192079]: 2025-10-02 12:09:07.257 2 DEBUG oslo_concurrency.processutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.053s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:09:07 compute-0 nova_compute[192079]: 2025-10-02 12:09:07.258 2 DEBUG oslo_concurrency.processutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/183a1b6e-784e-41d6-8632-851d606f23dc/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:09:07 compute-0 nova_compute[192079]: 2025-10-02 12:09:07.336 2 DEBUG oslo_concurrency.processutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/183a1b6e-784e-41d6-8632-851d606f23dc/disk 1073741824" returned: 0 in 0.078s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:09:07 compute-0 nova_compute[192079]: 2025-10-02 12:09:07.337 2 DEBUG oslo_concurrency.lockutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.145s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:09:07 compute-0 nova_compute[192079]: 2025-10-02 12:09:07.338 2 DEBUG oslo_concurrency.processutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:09:07 compute-0 nova_compute[192079]: 2025-10-02 12:09:07.389 2 DEBUG oslo_concurrency.processutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.051s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:09:07 compute-0 nova_compute[192079]: 2025-10-02 12:09:07.390 2 DEBUG nova.virt.disk.api [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Checking if we can resize image /var/lib/nova/instances/183a1b6e-784e-41d6-8632-851d606f23dc/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:09:07 compute-0 nova_compute[192079]: 2025-10-02 12:09:07.390 2 DEBUG oslo_concurrency.processutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/183a1b6e-784e-41d6-8632-851d606f23dc/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:09:07 compute-0 nova_compute[192079]: 2025-10-02 12:09:07.445 2 DEBUG oslo_concurrency.processutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/183a1b6e-784e-41d6-8632-851d606f23dc/disk --force-share --output=json" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:09:07 compute-0 nova_compute[192079]: 2025-10-02 12:09:07.446 2 DEBUG nova.virt.disk.api [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Cannot resize image /var/lib/nova/instances/183a1b6e-784e-41d6-8632-851d606f23dc/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:09:07 compute-0 nova_compute[192079]: 2025-10-02 12:09:07.446 2 DEBUG nova.objects.instance [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Lazy-loading 'migration_context' on Instance uuid 183a1b6e-784e-41d6-8632-851d606f23dc obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:09:07 compute-0 nova_compute[192079]: 2025-10-02 12:09:07.461 2 DEBUG nova.virt.libvirt.driver [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:09:07 compute-0 nova_compute[192079]: 2025-10-02 12:09:07.462 2 DEBUG nova.virt.libvirt.driver [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Ensure instance console log exists: /var/lib/nova/instances/183a1b6e-784e-41d6-8632-851d606f23dc/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:09:07 compute-0 nova_compute[192079]: 2025-10-02 12:09:07.462 2 DEBUG oslo_concurrency.lockutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:09:07 compute-0 nova_compute[192079]: 2025-10-02 12:09:07.463 2 DEBUG oslo_concurrency.lockutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:09:07 compute-0 nova_compute[192079]: 2025-10-02 12:09:07.463 2 DEBUG oslo_concurrency.lockutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:09:07 compute-0 nova_compute[192079]: 2025-10-02 12:09:07.617 2 DEBUG nova.policy [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:09:08 compute-0 podman[226361]: 2025-10-02 12:09:08.135840447 +0000 UTC m=+0.051362577 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, container_name=ceilometer_agent_compute, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_managed=true, config_id=edpm, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', 
'/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']})
Oct 02 12:09:09 compute-0 nova_compute[192079]: 2025-10-02 12:09:09.590 2 DEBUG nova.network.neutron [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Successfully created port: cf9649ce-4816-43e7-96a6-7a0e08d84e61 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:09:10 compute-0 nova_compute[192079]: 2025-10-02 12:09:10.167 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:10 compute-0 nova_compute[192079]: 2025-10-02 12:09:10.852 2 DEBUG nova.network.neutron [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Successfully updated port: cf9649ce-4816-43e7-96a6-7a0e08d84e61 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:09:11 compute-0 nova_compute[192079]: 2025-10-02 12:09:11.049 2 DEBUG nova.compute.manager [req-e1a26602-e0f9-44d3-919c-e421af1bde48 req-800af574-ebc5-41ea-b3ab-a76ca04c4771 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Received event network-changed-cf9649ce-4816-43e7-96a6-7a0e08d84e61 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:09:11 compute-0 nova_compute[192079]: 2025-10-02 12:09:11.049 2 DEBUG nova.compute.manager [req-e1a26602-e0f9-44d3-919c-e421af1bde48 req-800af574-ebc5-41ea-b3ab-a76ca04c4771 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Refreshing instance network info cache due to event network-changed-cf9649ce-4816-43e7-96a6-7a0e08d84e61. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:09:11 compute-0 nova_compute[192079]: 2025-10-02 12:09:11.050 2 DEBUG oslo_concurrency.lockutils [req-e1a26602-e0f9-44d3-919c-e421af1bde48 req-800af574-ebc5-41ea-b3ab-a76ca04c4771 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-183a1b6e-784e-41d6-8632-851d606f23dc" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:09:11 compute-0 nova_compute[192079]: 2025-10-02 12:09:11.050 2 DEBUG oslo_concurrency.lockutils [req-e1a26602-e0f9-44d3-919c-e421af1bde48 req-800af574-ebc5-41ea-b3ab-a76ca04c4771 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-183a1b6e-784e-41d6-8632-851d606f23dc" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:09:11 compute-0 nova_compute[192079]: 2025-10-02 12:09:11.050 2 DEBUG nova.network.neutron [req-e1a26602-e0f9-44d3-919c-e421af1bde48 req-800af574-ebc5-41ea-b3ab-a76ca04c4771 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Refreshing network info cache for port cf9649ce-4816-43e7-96a6-7a0e08d84e61 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:09:11 compute-0 nova_compute[192079]: 2025-10-02 12:09:11.052 2 DEBUG oslo_concurrency.lockutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Acquiring lock "refresh_cache-183a1b6e-784e-41d6-8632-851d606f23dc" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:09:11 compute-0 nova_compute[192079]: 2025-10-02 12:09:11.085 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:11 compute-0 nova_compute[192079]: 2025-10-02 12:09:11.266 2 DEBUG nova.network.neutron [req-e1a26602-e0f9-44d3-919c-e421af1bde48 req-800af574-ebc5-41ea-b3ab-a76ca04c4771 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:09:11 compute-0 nova_compute[192079]: 2025-10-02 12:09:11.359 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:11 compute-0 nova_compute[192079]: 2025-10-02 12:09:11.650 2 DEBUG nova.network.neutron [req-e1a26602-e0f9-44d3-919c-e421af1bde48 req-800af574-ebc5-41ea-b3ab-a76ca04c4771 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:09:11 compute-0 nova_compute[192079]: 2025-10-02 12:09:11.668 2 DEBUG oslo_concurrency.lockutils [req-e1a26602-e0f9-44d3-919c-e421af1bde48 req-800af574-ebc5-41ea-b3ab-a76ca04c4771 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-183a1b6e-784e-41d6-8632-851d606f23dc" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:09:11 compute-0 nova_compute[192079]: 2025-10-02 12:09:11.668 2 DEBUG oslo_concurrency.lockutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Acquired lock "refresh_cache-183a1b6e-784e-41d6-8632-851d606f23dc" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:09:11 compute-0 nova_compute[192079]: 2025-10-02 12:09:11.669 2 DEBUG nova.network.neutron [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:09:11 compute-0 nova_compute[192079]: 2025-10-02 12:09:11.676 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:09:11 compute-0 nova_compute[192079]: 2025-10-02 12:09:11.677 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:09:11 compute-0 nova_compute[192079]: 2025-10-02 12:09:11.704 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:09:11 compute-0 nova_compute[192079]: 2025-10-02 12:09:11.704 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:09:11 compute-0 nova_compute[192079]: 2025-10-02 12:09:11.704 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:09:11 compute-0 nova_compute[192079]: 2025-10-02 12:09:11.705 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:09:11 compute-0 nova_compute[192079]: 2025-10-02 12:09:11.869 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:09:11 compute-0 nova_compute[192079]: 2025-10-02 12:09:11.870 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5750MB free_disk=73.35729598999023GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:09:11 compute-0 nova_compute[192079]: 2025-10-02 12:09:11.870 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:09:11 compute-0 nova_compute[192079]: 2025-10-02 12:09:11.870 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:09:11 compute-0 nova_compute[192079]: 2025-10-02 12:09:11.955 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance 183a1b6e-784e-41d6-8632-851d606f23dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:09:11 compute-0 nova_compute[192079]: 2025-10-02 12:09:11.955 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 1 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:09:11 compute-0 nova_compute[192079]: 2025-10-02 12:09:11.956 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=640MB phys_disk=79GB used_disk=1GB total_vcpus=8 used_vcpus=1 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.002 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.022 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.036 2 DEBUG nova.network.neutron [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.045 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.046 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.176s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.858 2 DEBUG nova.network.neutron [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Updating instance_info_cache with network_info: [{"id": "cf9649ce-4816-43e7-96a6-7a0e08d84e61", "address": "fa:16:3e:eb:81:4a", "network": {"id": "beab6431-897b-46cc-9079-f58f012784e7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1799667945-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "887b026ea22942f7b709489ddec04ffc", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapcf9649ce-48", "ovs_interfaceid": "cf9649ce-4816-43e7-96a6-7a0e08d84e61", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.885 2 DEBUG oslo_concurrency.lockutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Releasing lock "refresh_cache-183a1b6e-784e-41d6-8632-851d606f23dc" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.886 2 DEBUG nova.compute.manager [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Instance network_info: |[{"id": "cf9649ce-4816-43e7-96a6-7a0e08d84e61", "address": "fa:16:3e:eb:81:4a", "network": {"id": "beab6431-897b-46cc-9079-f58f012784e7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1799667945-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "887b026ea22942f7b709489ddec04ffc", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapcf9649ce-48", "ovs_interfaceid": "cf9649ce-4816-43e7-96a6-7a0e08d84e61", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.888 2 DEBUG nova.virt.libvirt.driver [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Start _get_guest_xml network_info=[{"id": "cf9649ce-4816-43e7-96a6-7a0e08d84e61", "address": "fa:16:3e:eb:81:4a", "network": {"id": "beab6431-897b-46cc-9079-f58f012784e7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1799667945-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "887b026ea22942f7b709489ddec04ffc", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapcf9649ce-48", "ovs_interfaceid": "cf9649ce-4816-43e7-96a6-7a0e08d84e61", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.891 2 WARNING nova.virt.libvirt.driver [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.894 2 DEBUG nova.virt.libvirt.host [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.895 2 DEBUG nova.virt.libvirt.host [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.898 2 DEBUG nova.virt.libvirt.host [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.899 2 DEBUG nova.virt.libvirt.host [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.900 2 DEBUG nova.virt.libvirt.driver [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.900 2 DEBUG nova.virt.hardware [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.901 2 DEBUG nova.virt.hardware [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.901 2 DEBUG nova.virt.hardware [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.902 2 DEBUG nova.virt.hardware [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.902 2 DEBUG nova.virt.hardware [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.902 2 DEBUG nova.virt.hardware [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.902 2 DEBUG nova.virt.hardware [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.903 2 DEBUG nova.virt.hardware [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.903 2 DEBUG nova.virt.hardware [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.903 2 DEBUG nova.virt.hardware [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.903 2 DEBUG nova.virt.hardware [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.908 2 DEBUG nova.virt.libvirt.vif [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] vif_type=ovs instance=Instance(access_ip_v4=1.1.1.1,access_ip_v6=::babe:dc0c:1602,architecture=None,auto_disk_config=True,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:09:05Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ServersTestJSON-server-42881034',display_name='tempest-ServersTestJSON-server-42881034',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverstestjson-server-42881034',id=47,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBJ2ECj5rq6JEj2kFB2Dzpg61lYERPg5QuDNoG+3Ns56Ne8gmdcg9RlkerI4nyg4Jm5/KR5qb0w+Hb905h4pIodJPNYlpnVwDTZedUvrdpuRanm+Z5YZ4kjefqtX2UfG2BA==',key_name='tempest-keypair-1649035751',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={hello='world'},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='887b026ea22942f7b709489ddec04ffc',ramdisk_id='',reservation_id='r-fno0nr06',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServersTestJSON-801569',owner_user_name='tempest-ServersTestJSON-801569-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:09:07Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='202970a145ff4e8aa3dc22131cc9240d',uuid=183a1b6e-784e-41d6-8632-851d606f23dc,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "cf9649ce-4816-43e7-96a6-7a0e08d84e61", "address": "fa:16:3e:eb:81:4a", "network": {"id": "beab6431-897b-46cc-9079-f58f012784e7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1799667945-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", 
"version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "887b026ea22942f7b709489ddec04ffc", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapcf9649ce-48", "ovs_interfaceid": "cf9649ce-4816-43e7-96a6-7a0e08d84e61", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.908 2 DEBUG nova.network.os_vif_util [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Converting VIF {"id": "cf9649ce-4816-43e7-96a6-7a0e08d84e61", "address": "fa:16:3e:eb:81:4a", "network": {"id": "beab6431-897b-46cc-9079-f58f012784e7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1799667945-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "887b026ea22942f7b709489ddec04ffc", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapcf9649ce-48", "ovs_interfaceid": "cf9649ce-4816-43e7-96a6-7a0e08d84e61", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.909 2 DEBUG nova.network.os_vif_util [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:eb:81:4a,bridge_name='br-int',has_traffic_filtering=True,id=cf9649ce-4816-43e7-96a6-7a0e08d84e61,network=Network(beab6431-897b-46cc-9079-f58f012784e7),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapcf9649ce-48') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.910 2 DEBUG nova.objects.instance [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Lazy-loading 'pci_devices' on Instance uuid 183a1b6e-784e-41d6-8632-851d606f23dc obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.925 2 DEBUG nova.virt.libvirt.driver [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:09:12 compute-0 nova_compute[192079]:   <uuid>183a1b6e-784e-41d6-8632-851d606f23dc</uuid>
Oct 02 12:09:12 compute-0 nova_compute[192079]:   <name>instance-0000002f</name>
Oct 02 12:09:12 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:09:12 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:09:12 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:09:12 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:       <nova:name>tempest-ServersTestJSON-server-42881034</nova:name>
Oct 02 12:09:12 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:09:12</nova:creationTime>
Oct 02 12:09:12 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:09:12 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:09:12 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:09:12 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:09:12 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:09:12 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:09:12 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:09:12 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:09:12 compute-0 nova_compute[192079]:         <nova:user uuid="202970a145ff4e8aa3dc22131cc9240d">tempest-ServersTestJSON-801569-project-member</nova:user>
Oct 02 12:09:12 compute-0 nova_compute[192079]:         <nova:project uuid="887b026ea22942f7b709489ddec04ffc">tempest-ServersTestJSON-801569</nova:project>
Oct 02 12:09:12 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:09:12 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:09:12 compute-0 nova_compute[192079]:         <nova:port uuid="cf9649ce-4816-43e7-96a6-7a0e08d84e61">
Oct 02 12:09:12 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.10" ipVersion="4"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:09:12 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:09:12 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:09:12 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <system>
Oct 02 12:09:12 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:09:12 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:09:12 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:09:12 compute-0 nova_compute[192079]:       <entry name="serial">183a1b6e-784e-41d6-8632-851d606f23dc</entry>
Oct 02 12:09:12 compute-0 nova_compute[192079]:       <entry name="uuid">183a1b6e-784e-41d6-8632-851d606f23dc</entry>
Oct 02 12:09:12 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     </system>
Oct 02 12:09:12 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:09:12 compute-0 nova_compute[192079]:   <os>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:   </os>
Oct 02 12:09:12 compute-0 nova_compute[192079]:   <features>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:   </features>
Oct 02 12:09:12 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:09:12 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:09:12 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:09:12 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/183a1b6e-784e-41d6-8632-851d606f23dc/disk"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:09:12 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/183a1b6e-784e-41d6-8632-851d606f23dc/disk.config"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:09:12 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:eb:81:4a"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:       <target dev="tapcf9649ce-48"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:09:12 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/183a1b6e-784e-41d6-8632-851d606f23dc/console.log" append="off"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <video>
Oct 02 12:09:12 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     </video>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:09:12 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:09:12 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:09:12 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:09:12 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:09:12 compute-0 nova_compute[192079]: </domain>
Oct 02 12:09:12 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.926 2 DEBUG nova.compute.manager [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Preparing to wait for external event network-vif-plugged-cf9649ce-4816-43e7-96a6-7a0e08d84e61 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.926 2 DEBUG oslo_concurrency.lockutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Acquiring lock "183a1b6e-784e-41d6-8632-851d606f23dc-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.927 2 DEBUG oslo_concurrency.lockutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Lock "183a1b6e-784e-41d6-8632-851d606f23dc-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.927 2 DEBUG oslo_concurrency.lockutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Lock "183a1b6e-784e-41d6-8632-851d606f23dc-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.927 2 DEBUG nova.virt.libvirt.vif [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] vif_type=ovs instance=Instance(access_ip_v4=1.1.1.1,access_ip_v6=::babe:dc0c:1602,architecture=None,auto_disk_config=True,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:09:05Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ServersTestJSON-server-42881034',display_name='tempest-ServersTestJSON-server-42881034',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverstestjson-server-42881034',id=47,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBJ2ECj5rq6JEj2kFB2Dzpg61lYERPg5QuDNoG+3Ns56Ne8gmdcg9RlkerI4nyg4Jm5/KR5qb0w+Hb905h4pIodJPNYlpnVwDTZedUvrdpuRanm+Z5YZ4kjefqtX2UfG2BA==',key_name='tempest-keypair-1649035751',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={hello='world'},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='887b026ea22942f7b709489ddec04ffc',ramdisk_id='',reservation_id='r-fno0nr06',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServersTestJSON-801569',owner_user_name='tempest-ServersTestJSON-801569-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:09:07Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='202970a145ff4e8aa3dc22131cc9240d',uuid=183a1b6e-784e-41d6-8632-851d606f23dc,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "cf9649ce-4816-43e7-96a6-7a0e08d84e61", "address": "fa:16:3e:eb:81:4a", "network": {"id": "beab6431-897b-46cc-9079-f58f012784e7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1799667945-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": 
"fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "887b026ea22942f7b709489ddec04ffc", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapcf9649ce-48", "ovs_interfaceid": "cf9649ce-4816-43e7-96a6-7a0e08d84e61", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.928 2 DEBUG nova.network.os_vif_util [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Converting VIF {"id": "cf9649ce-4816-43e7-96a6-7a0e08d84e61", "address": "fa:16:3e:eb:81:4a", "network": {"id": "beab6431-897b-46cc-9079-f58f012784e7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1799667945-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "887b026ea22942f7b709489ddec04ffc", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapcf9649ce-48", "ovs_interfaceid": "cf9649ce-4816-43e7-96a6-7a0e08d84e61", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.928 2 DEBUG nova.network.os_vif_util [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:eb:81:4a,bridge_name='br-int',has_traffic_filtering=True,id=cf9649ce-4816-43e7-96a6-7a0e08d84e61,network=Network(beab6431-897b-46cc-9079-f58f012784e7),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapcf9649ce-48') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.928 2 DEBUG os_vif [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:eb:81:4a,bridge_name='br-int',has_traffic_filtering=True,id=cf9649ce-4816-43e7-96a6-7a0e08d84e61,network=Network(beab6431-897b-46cc-9079-f58f012784e7),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapcf9649ce-48') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.929 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.929 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.929 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.931 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.931 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapcf9649ce-48, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.932 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapcf9649ce-48, col_values=(('external_ids', {'iface-id': 'cf9649ce-4816-43e7-96a6-7a0e08d84e61', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:eb:81:4a', 'vm-uuid': '183a1b6e-784e-41d6-8632-851d606f23dc'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.933 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:12 compute-0 NetworkManager[51160]: <info>  [1759406952.9338] manager: (tapcf9649ce-48): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/72)
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.935 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.938 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.939 2 INFO os_vif [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:eb:81:4a,bridge_name='br-int',has_traffic_filtering=True,id=cf9649ce-4816-43e7-96a6-7a0e08d84e61,network=Network(beab6431-897b-46cc-9079-f58f012784e7),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapcf9649ce-48')
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.996 2 DEBUG nova.virt.libvirt.driver [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.996 2 DEBUG nova.virt.libvirt.driver [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.996 2 DEBUG nova.virt.libvirt.driver [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] No VIF found with MAC fa:16:3e:eb:81:4a, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:09:12 compute-0 nova_compute[192079]: 2025-10-02 12:09:12.997 2 INFO nova.virt.libvirt.driver [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Using config drive
Oct 02 12:09:13 compute-0 podman[226386]: 2025-10-02 12:09:13.143495062 +0000 UTC m=+0.055786606 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, container_name=multipathd, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, tcib_managed=true, config_id=multipathd, io.buildah.version=1.41.3)
Oct 02 12:09:13 compute-0 podman[226385]: 2025-10-02 12:09:13.162197888 +0000 UTC m=+0.077112453 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, com.redhat.component=ubi9-minimal-container, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.expose-services=, io.openshift.tags=minimal rhel9, container_name=openstack_network_exporter, name=ubi9-minimal, url=https://catalog.redhat.com/en/search?searchType=containers, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, distribution-scope=public, version=9.6, config_id=edpm, summary=Provides the latest release of the 
minimal Red Hat Universal Base Image 9., description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.buildah.version=1.33.7, build-date=2025-08-20T13:12:41, maintainer=Red Hat, Inc., managed_by=edpm_ansible, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, vcs-type=git, release=1755695350, architecture=x86_64, vendor=Red Hat, Inc.)
Oct 02 12:09:13 compute-0 nova_compute[192079]: 2025-10-02 12:09:13.302 2 INFO nova.virt.libvirt.driver [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Creating config drive at /var/lib/nova/instances/183a1b6e-784e-41d6-8632-851d606f23dc/disk.config
Oct 02 12:09:13 compute-0 nova_compute[192079]: 2025-10-02 12:09:13.308 2 DEBUG oslo_concurrency.processutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/183a1b6e-784e-41d6-8632-851d606f23dc/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpwavuosdz execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:09:13 compute-0 nova_compute[192079]: 2025-10-02 12:09:13.430 2 DEBUG oslo_concurrency.processutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/183a1b6e-784e-41d6-8632-851d606f23dc/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpwavuosdz" returned: 0 in 0.123s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:09:13 compute-0 kernel: tapcf9649ce-48: entered promiscuous mode
Oct 02 12:09:13 compute-0 NetworkManager[51160]: <info>  [1759406953.4834] manager: (tapcf9649ce-48): new Tun device (/org/freedesktop/NetworkManager/Devices/73)
Oct 02 12:09:13 compute-0 ovn_controller[94336]: 2025-10-02T12:09:13Z|00146|binding|INFO|Claiming lport cf9649ce-4816-43e7-96a6-7a0e08d84e61 for this chassis.
Oct 02 12:09:13 compute-0 ovn_controller[94336]: 2025-10-02T12:09:13Z|00147|binding|INFO|cf9649ce-4816-43e7-96a6-7a0e08d84e61: Claiming fa:16:3e:eb:81:4a 10.100.0.10
Oct 02 12:09:13 compute-0 nova_compute[192079]: 2025-10-02 12:09:13.484 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:13.491 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:eb:81:4a 10.100.0.10'], port_security=['fa:16:3e:eb:81:4a 10.100.0.10'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.10/28', 'neutron:device_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-beab6431-897b-46cc-9079-f58f012784e7', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '887b026ea22942f7b709489ddec04ffc', 'neutron:revision_number': '2', 'neutron:security_group_ids': '5dea2340-ed18-46b8-bf33-e1eec0ef4b44', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=b7134cc5-0916-4a45-9ec8-c36785c44299, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=cf9649ce-4816-43e7-96a6-7a0e08d84e61) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:13.492 103294 INFO neutron.agent.ovn.metadata.agent [-] Port cf9649ce-4816-43e7-96a6-7a0e08d84e61 in datapath beab6431-897b-46cc-9079-f58f012784e7 bound to our chassis
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:13.493 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network beab6431-897b-46cc-9079-f58f012784e7
Oct 02 12:09:13 compute-0 ovn_controller[94336]: 2025-10-02T12:09:13Z|00148|binding|INFO|Setting lport cf9649ce-4816-43e7-96a6-7a0e08d84e61 ovn-installed in OVS
Oct 02 12:09:13 compute-0 ovn_controller[94336]: 2025-10-02T12:09:13Z|00149|binding|INFO|Setting lport cf9649ce-4816-43e7-96a6-7a0e08d84e61 up in Southbound
Oct 02 12:09:13 compute-0 nova_compute[192079]: 2025-10-02 12:09:13.498 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:13 compute-0 nova_compute[192079]: 2025-10-02 12:09:13.499 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:13 compute-0 nova_compute[192079]: 2025-10-02 12:09:13.503 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:13.503 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7cb96a30-dac3-4866-b495-5542ada86770]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:13.504 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tapbeab6431-81 in ovnmeta-beab6431-897b-46cc-9079-f58f012784e7 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:13.506 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tapbeab6431-80 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:13.506 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[58fb1848-9f5e-489d-96d7-bf15a0c73ba9]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:13.506 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2f34b73a-0646-49e1-8b06-52c8b07cc58b]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:09:13 compute-0 systemd-udevd[226443]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:13.516 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[a7dd06d5-d801-4c8d-afcb-4f17441baf03]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:09:13 compute-0 systemd-machined[152150]: New machine qemu-25-instance-0000002f.
Oct 02 12:09:13 compute-0 NetworkManager[51160]: <info>  [1759406953.5245] device (tapcf9649ce-48): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:09:13 compute-0 NetworkManager[51160]: <info>  [1759406953.5257] device (tapcf9649ce-48): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:09:13 compute-0 systemd[1]: Started Virtual Machine qemu-25-instance-0000002f.
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:13.539 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d2a0905e-a8d0-4a48-88cf-a257fb1c3032]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:13.563 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[8ad120da-7918-45e9-ba7a-af0802219332]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:13.568 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[aa686b19-d537-4aa4-adae-1d002d9f6d5e]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:09:13 compute-0 NetworkManager[51160]: <info>  [1759406953.5694] manager: (tapbeab6431-80): new Veth device (/org/freedesktop/NetworkManager/Devices/74)
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:13.594 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[523cc32e-f834-4338-90a3-de9612f85e34]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:13.597 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[f5ff7b43-8caf-44f4-b1ee-6cc5d1d247ba]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:09:13 compute-0 NetworkManager[51160]: <info>  [1759406953.6159] device (tapbeab6431-80): carrier: link connected
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:13.619 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[06b495f9-1553-492c-a89f-c82a83617e7d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:13.633 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4b5f1895-0f8d-4e4b-aad5-eae47f0c13de]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapbeab6431-81'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:e7:d8:da'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 44], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 495124, 'reachable_time': 29120, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 226475, 'error': None, 'target': 'ovnmeta-beab6431-897b-46cc-9079-f58f012784e7', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:13.647 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[60c23340-a926-4849-9451-2883c9f98bf5]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fee7:d8da'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 495124, 'tstamp': 495124}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 226476, 'error': None, 'target': 'ovnmeta-beab6431-897b-46cc-9079-f58f012784e7', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:13.661 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1689b095-fbba-4ef6-903a-742eb5181ab6]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapbeab6431-81'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:e7:d8:da'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 44], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 495124, 'reachable_time': 29120, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 226477, 'error': None, 'target': 'ovnmeta-beab6431-897b-46cc-9079-f58f012784e7', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:13.692 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[04f45f8b-3400-4939-a612-fd9ddf814861]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:13.740 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[fb177ab4-3fde-491d-9dd1-e03eb982d3d0]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:13.741 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapbeab6431-80, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:13.741 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:13.741 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapbeab6431-80, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:09:13 compute-0 nova_compute[192079]: 2025-10-02 12:09:13.743 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:13 compute-0 NetworkManager[51160]: <info>  [1759406953.7440] manager: (tapbeab6431-80): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/75)
Oct 02 12:09:13 compute-0 kernel: tapbeab6431-80: entered promiscuous mode
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:13.746 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tapbeab6431-80, col_values=(('external_ids', {'iface-id': 'ec2eb3d6-13ee-4c10-a761-838f61241c4f'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:09:13 compute-0 nova_compute[192079]: 2025-10-02 12:09:13.748 2 DEBUG nova.compute.manager [req-20e82a73-cf03-4f25-8f13-249d50a7dd5c req-15669b8e-3241-47c5-8311-41e3bea14b34 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Received event network-vif-plugged-cf9649ce-4816-43e7-96a6-7a0e08d84e61 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:09:13 compute-0 ovn_controller[94336]: 2025-10-02T12:09:13Z|00150|binding|INFO|Releasing lport ec2eb3d6-13ee-4c10-a761-838f61241c4f from this chassis (sb_readonly=0)
Oct 02 12:09:13 compute-0 nova_compute[192079]: 2025-10-02 12:09:13.748 2 DEBUG oslo_concurrency.lockutils [req-20e82a73-cf03-4f25-8f13-249d50a7dd5c req-15669b8e-3241-47c5-8311-41e3bea14b34 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "183a1b6e-784e-41d6-8632-851d606f23dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:09:13 compute-0 nova_compute[192079]: 2025-10-02 12:09:13.749 2 DEBUG oslo_concurrency.lockutils [req-20e82a73-cf03-4f25-8f13-249d50a7dd5c req-15669b8e-3241-47c5-8311-41e3bea14b34 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "183a1b6e-784e-41d6-8632-851d606f23dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:09:13 compute-0 nova_compute[192079]: 2025-10-02 12:09:13.749 2 DEBUG oslo_concurrency.lockutils [req-20e82a73-cf03-4f25-8f13-249d50a7dd5c req-15669b8e-3241-47c5-8311-41e3bea14b34 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "183a1b6e-784e-41d6-8632-851d606f23dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:09:13 compute-0 nova_compute[192079]: 2025-10-02 12:09:13.749 2 DEBUG nova.compute.manager [req-20e82a73-cf03-4f25-8f13-249d50a7dd5c req-15669b8e-3241-47c5-8311-41e3bea14b34 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Processing event network-vif-plugged-cf9649ce-4816-43e7-96a6-7a0e08d84e61 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:09:13 compute-0 nova_compute[192079]: 2025-10-02 12:09:13.749 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:13 compute-0 nova_compute[192079]: 2025-10-02 12:09:13.772 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:13.773 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/beab6431-897b-46cc-9079-f58f012784e7.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/beab6431-897b-46cc-9079-f58f012784e7.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:13.774 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[815dc65d-1b8f-40d9-b928-b4696c0ef34b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:13.775 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-beab6431-897b-46cc-9079-f58f012784e7
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/beab6431-897b-46cc-9079-f58f012784e7.pid.haproxy
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID beab6431-897b-46cc-9079-f58f012784e7
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:09:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:13.775 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-beab6431-897b-46cc-9079-f58f012784e7', 'env', 'PROCESS_TAG=haproxy-beab6431-897b-46cc-9079-f58f012784e7', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/beab6431-897b-46cc-9079-f58f012784e7.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:09:14 compute-0 nova_compute[192079]: 2025-10-02 12:09:14.028 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:09:14 compute-0 nova_compute[192079]: 2025-10-02 12:09:14.029 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:09:14 compute-0 podman[226516]: 2025-10-02 12:09:14.145226978 +0000 UTC m=+0.046452406 container create c7b3bca249080ab8dea0df347888108ff06b7e3577831f26c3ea9591c4525526 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-beab6431-897b-46cc-9079-f58f012784e7, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_managed=true, org.label-schema.license=GPLv2)
Oct 02 12:09:14 compute-0 systemd[1]: Started libpod-conmon-c7b3bca249080ab8dea0df347888108ff06b7e3577831f26c3ea9591c4525526.scope.
Oct 02 12:09:14 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:09:14 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/a0515c3d94a844d0631ea3e746ca1ff2cd0eec4efdc0378285d5a232dcee47fc/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:09:14 compute-0 podman[226516]: 2025-10-02 12:09:14.212324629 +0000 UTC m=+0.113550077 container init c7b3bca249080ab8dea0df347888108ff06b7e3577831f26c3ea9591c4525526 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-beab6431-897b-46cc-9079-f58f012784e7, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:09:14 compute-0 podman[226516]: 2025-10-02 12:09:14.117632712 +0000 UTC m=+0.018858160 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:09:14 compute-0 podman[226516]: 2025-10-02 12:09:14.216862722 +0000 UTC m=+0.118088150 container start c7b3bca249080ab8dea0df347888108ff06b7e3577831f26c3ea9591c4525526 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-beab6431-897b-46cc-9079-f58f012784e7, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2)
Oct 02 12:09:14 compute-0 neutron-haproxy-ovnmeta-beab6431-897b-46cc-9079-f58f012784e7[226531]: [NOTICE]   (226535) : New worker (226537) forked
Oct 02 12:09:14 compute-0 neutron-haproxy-ovnmeta-beab6431-897b-46cc-9079-f58f012784e7[226531]: [NOTICE]   (226535) : Loading success.
Oct 02 12:09:14 compute-0 nova_compute[192079]: 2025-10-02 12:09:14.337 2 DEBUG nova.compute.manager [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:09:14 compute-0 nova_compute[192079]: 2025-10-02 12:09:14.338 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406954.3365245, 183a1b6e-784e-41d6-8632-851d606f23dc => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:09:14 compute-0 nova_compute[192079]: 2025-10-02 12:09:14.339 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] VM Started (Lifecycle Event)
Oct 02 12:09:14 compute-0 nova_compute[192079]: 2025-10-02 12:09:14.341 2 DEBUG nova.virt.libvirt.driver [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:09:14 compute-0 nova_compute[192079]: 2025-10-02 12:09:14.344 2 INFO nova.virt.libvirt.driver [-] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Instance spawned successfully.
Oct 02 12:09:14 compute-0 nova_compute[192079]: 2025-10-02 12:09:14.345 2 DEBUG nova.virt.libvirt.driver [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:09:14 compute-0 nova_compute[192079]: 2025-10-02 12:09:14.365 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:09:14 compute-0 nova_compute[192079]: 2025-10-02 12:09:14.368 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:09:14 compute-0 nova_compute[192079]: 2025-10-02 12:09:14.376 2 DEBUG nova.virt.libvirt.driver [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:09:14 compute-0 nova_compute[192079]: 2025-10-02 12:09:14.377 2 DEBUG nova.virt.libvirt.driver [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:09:14 compute-0 nova_compute[192079]: 2025-10-02 12:09:14.377 2 DEBUG nova.virt.libvirt.driver [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:09:14 compute-0 nova_compute[192079]: 2025-10-02 12:09:14.378 2 DEBUG nova.virt.libvirt.driver [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:09:14 compute-0 nova_compute[192079]: 2025-10-02 12:09:14.378 2 DEBUG nova.virt.libvirt.driver [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:09:14 compute-0 nova_compute[192079]: 2025-10-02 12:09:14.379 2 DEBUG nova.virt.libvirt.driver [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:09:14 compute-0 nova_compute[192079]: 2025-10-02 12:09:14.388 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:09:14 compute-0 nova_compute[192079]: 2025-10-02 12:09:14.389 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406954.3374271, 183a1b6e-784e-41d6-8632-851d606f23dc => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:09:14 compute-0 nova_compute[192079]: 2025-10-02 12:09:14.389 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] VM Paused (Lifecycle Event)
Oct 02 12:09:14 compute-0 nova_compute[192079]: 2025-10-02 12:09:14.416 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:09:14 compute-0 nova_compute[192079]: 2025-10-02 12:09:14.420 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759406954.3412297, 183a1b6e-784e-41d6-8632-851d606f23dc => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:09:14 compute-0 nova_compute[192079]: 2025-10-02 12:09:14.420 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] VM Resumed (Lifecycle Event)
Oct 02 12:09:14 compute-0 nova_compute[192079]: 2025-10-02 12:09:14.448 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:09:14 compute-0 nova_compute[192079]: 2025-10-02 12:09:14.451 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:09:14 compute-0 nova_compute[192079]: 2025-10-02 12:09:14.485 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:09:14 compute-0 nova_compute[192079]: 2025-10-02 12:09:14.499 2 INFO nova.compute.manager [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Took 7.38 seconds to spawn the instance on the hypervisor.
Oct 02 12:09:14 compute-0 nova_compute[192079]: 2025-10-02 12:09:14.499 2 DEBUG nova.compute.manager [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:09:14 compute-0 nova_compute[192079]: 2025-10-02 12:09:14.609 2 INFO nova.compute.manager [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Took 7.90 seconds to build instance.
Oct 02 12:09:14 compute-0 nova_compute[192079]: 2025-10-02 12:09:14.647 2 DEBUG oslo_concurrency.lockutils [None req-91837109-56ff-43ff-bc6b-745ff58593d9 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Lock "183a1b6e-784e-41d6-8632-851d606f23dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 8.017s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:09:15 compute-0 nova_compute[192079]: 2025-10-02 12:09:15.169 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:15 compute-0 nova_compute[192079]: 2025-10-02 12:09:15.836 2 DEBUG nova.compute.manager [req-61ea9fd1-dde3-411c-a6fb-c804ca8fac46 req-dadae798-0a36-4e71-8519-079185dcfda6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Received event network-vif-plugged-cf9649ce-4816-43e7-96a6-7a0e08d84e61 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:09:15 compute-0 nova_compute[192079]: 2025-10-02 12:09:15.837 2 DEBUG oslo_concurrency.lockutils [req-61ea9fd1-dde3-411c-a6fb-c804ca8fac46 req-dadae798-0a36-4e71-8519-079185dcfda6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "183a1b6e-784e-41d6-8632-851d606f23dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:09:15 compute-0 nova_compute[192079]: 2025-10-02 12:09:15.837 2 DEBUG oslo_concurrency.lockutils [req-61ea9fd1-dde3-411c-a6fb-c804ca8fac46 req-dadae798-0a36-4e71-8519-079185dcfda6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "183a1b6e-784e-41d6-8632-851d606f23dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:09:15 compute-0 nova_compute[192079]: 2025-10-02 12:09:15.837 2 DEBUG oslo_concurrency.lockutils [req-61ea9fd1-dde3-411c-a6fb-c804ca8fac46 req-dadae798-0a36-4e71-8519-079185dcfda6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "183a1b6e-784e-41d6-8632-851d606f23dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:09:15 compute-0 nova_compute[192079]: 2025-10-02 12:09:15.838 2 DEBUG nova.compute.manager [req-61ea9fd1-dde3-411c-a6fb-c804ca8fac46 req-dadae798-0a36-4e71-8519-079185dcfda6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] No waiting events found dispatching network-vif-plugged-cf9649ce-4816-43e7-96a6-7a0e08d84e61 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:09:15 compute-0 nova_compute[192079]: 2025-10-02 12:09:15.838 2 WARNING nova.compute.manager [req-61ea9fd1-dde3-411c-a6fb-c804ca8fac46 req-dadae798-0a36-4e71-8519-079185dcfda6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Received unexpected event network-vif-plugged-cf9649ce-4816-43e7-96a6-7a0e08d84e61 for instance with vm_state active and task_state None.
Oct 02 12:09:16 compute-0 nova_compute[192079]: 2025-10-02 12:09:16.035 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:16.035 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=13, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=12) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:09:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:16.036 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 8 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:09:16 compute-0 nova_compute[192079]: 2025-10-02 12:09:16.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:09:16 compute-0 nova_compute[192079]: 2025-10-02 12:09:16.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:09:16 compute-0 nova_compute[192079]: 2025-10-02 12:09:16.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:09:16 compute-0 nova_compute[192079]: 2025-10-02 12:09:16.837 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "refresh_cache-183a1b6e-784e-41d6-8632-851d606f23dc" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:09:16 compute-0 nova_compute[192079]: 2025-10-02 12:09:16.838 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquired lock "refresh_cache-183a1b6e-784e-41d6-8632-851d606f23dc" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:09:16 compute-0 nova_compute[192079]: 2025-10-02 12:09:16.838 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Forcefully refreshing network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2004
Oct 02 12:09:16 compute-0 nova_compute[192079]: 2025-10-02 12:09:16.838 2 DEBUG nova.objects.instance [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lazy-loading 'info_cache' on Instance uuid 183a1b6e-784e-41d6-8632-851d606f23dc obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.101 12 DEBUG ceilometer.compute.discovery [-] instance data: {'id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'name': 'tempest-ServersTestJSON-server-42881034', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'os_type': 'hvm', 'architecture': 'x86_64', 'OS-EXT-SRV-ATTR:instance_name': 'instance-0000002f', 'OS-EXT-SRV-ATTR:host': 'compute-0.ctlplane.example.com', 'OS-EXT-STS:vm_state': 'running', 'tenant_id': '887b026ea22942f7b709489ddec04ffc', 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'hostId': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'status': 'active', 'metadata': {}} discover_libvirt_polling /usr/lib/python3.9/site-packages/ceilometer/compute/discovery.py:228
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.102 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.latency in the context of pollsters
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.102 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for PerDeviceDiskLatencyPollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.102 12 ERROR ceilometer.polling.manager [-] Prevent pollster disk.device.latency from polling [<NovaLikeServer: tempest-ServersTestJSON-server-42881034>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-ServersTestJSON-server-42881034>]
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.103 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.requests in the context of pollsters
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.119 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/disk.device.read.requests volume: 760 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.120 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/disk.device.read.requests volume: 1 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'cc0234e3-31dd-4bf5-aa29-df23ff1e74c1', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 760, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_name': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_name': None, 'resource_id': '183a1b6e-784e-41d6-8632-851d606f23dc-vda', 'timestamp': '2025-10-02T12:09:17.103275', 'resource_metadata': {'display_name': 'tempest-ServersTestJSON-server-42881034', 'name': 'instance-0000002f', 'instance_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'instance_type': 'm1.nano', 'host': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '9e73301c-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4954.790325845, 'message_signature': '6a24dbc7fca1b2ee94a46cf871ecb5e67292bd8387648dd045551bab05b5c920'}, {'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 1, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_name': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_name': None, 
'resource_id': '183a1b6e-784e-41d6-8632-851d606f23dc-sda', 'timestamp': '2025-10-02T12:09:17.103275', 'resource_metadata': {'display_name': 'tempest-ServersTestJSON-server-42881034', 'name': 'instance-0000002f', 'instance_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'instance_type': 'm1.nano', 'host': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '9e733c6a-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4954.790325845, 'message_signature': 'd38e73b7a8769fd37082e706db8dfbee710cf26a66b8acfbe6a85583cd47ddd2'}]}, 'timestamp': '2025-10-02 12:09:17.120739', '_unique_id': 'a68074d7cda44048833f0080cdf167fb'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.121 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.123 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.iops in the context of pollsters
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.123 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for PerDeviceDiskIOPSPollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.123 12 ERROR ceilometer.polling.manager [-] Prevent pollster disk.device.iops from polling [<NovaLikeServer: tempest-ServersTestJSON-server-42881034>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-ServersTestJSON-server-42881034>]
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.123 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.latency in the context of pollsters
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.123 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/disk.device.write.latency volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.123 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/disk.device.write.latency volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'acfcd1c2-9ffe-46c8-afc0-8d5e6a4f1cb9', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 0, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_name': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_name': None, 'resource_id': '183a1b6e-784e-41d6-8632-851d606f23dc-vda', 'timestamp': '2025-10-02T12:09:17.123594', 'resource_metadata': {'display_name': 'tempest-ServersTestJSON-server-42881034', 'name': 'instance-0000002f', 'instance_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'instance_type': 'm1.nano', 'host': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '9e73b6d6-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4954.790325845, 'message_signature': '11363887bb003f2cc97a146fc1b928ad621d15d58527ae05c68fa1bc23ca0432'}, {'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 0, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_name': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_name': None, 'resource_id': 
'183a1b6e-784e-41d6-8632-851d606f23dc-sda', 'timestamp': '2025-10-02T12:09:17.123594', 'resource_metadata': {'display_name': 'tempest-ServersTestJSON-server-42881034', 'name': 'instance-0000002f', 'instance_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'instance_type': 'm1.nano', 'host': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '9e73c324-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4954.790325845, 'message_signature': '8185c2ad34529e6a9e37f507426ee73fcd8c8f1b416343c658887ca3b7886c62'}]}, 'timestamp': '2025-10-02 12:09:17.124186', '_unique_id': 'bc5c4409e5654c4fbcd3b00769a6aa2f'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.124 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.125 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.latency in the context of pollsters
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.125 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/disk.device.read.latency volume: 692489159 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.126 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/disk.device.read.latency volume: 1841080 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '63636fda-195f-4ae7-8c67-cab4ede5d69e', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 692489159, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_name': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_name': None, 'resource_id': '183a1b6e-784e-41d6-8632-851d606f23dc-vda', 'timestamp': '2025-10-02T12:09:17.125791', 'resource_metadata': {'display_name': 'tempest-ServersTestJSON-server-42881034', 'name': 'instance-0000002f', 'instance_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'instance_type': 'm1.nano', 'host': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '9e7412e8-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4954.790325845, 'message_signature': '7b759d8b9855d0a2d69d784c63e5f1c6772df840ee737b0ba22153b674bf1a19'}, {'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 1841080, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_name': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_name': None, 
'resource_id': '183a1b6e-784e-41d6-8632-851d606f23dc-sda', 'timestamp': '2025-10-02T12:09:17.125791', 'resource_metadata': {'display_name': 'tempest-ServersTestJSON-server-42881034', 'name': 'instance-0000002f', 'instance_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'instance_type': 'm1.nano', 'host': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '9e741dd8-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4954.790325845, 'message_signature': 'bd97686c21f70165e86bcc101278a3de7b98c8ff6456920926fd7db93411daff'}]}, 'timestamp': '2025-10-02 12:09:17.126497', '_unique_id': 'e626a9f2d4da4bb8959c5814267b58b0'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.127 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.bytes in the context of pollsters
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.128 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/disk.device.read.bytes volume: 23775232 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.128 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/disk.device.read.bytes volume: 2048 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '8e58e678-6734-45a2-9ca9-5c85d3da3d20', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 23775232, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_name': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_name': None, 'resource_id': '183a1b6e-784e-41d6-8632-851d606f23dc-vda', 'timestamp': '2025-10-02T12:09:17.128063', 'resource_metadata': {'display_name': 'tempest-ServersTestJSON-server-42881034', 'name': 'instance-0000002f', 'instance_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'instance_type': 'm1.nano', 'host': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '9e7465ae-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4954.790325845, 'message_signature': '4328132e6e8516c7d94b8691e1dfbcf764af8b14a74fa96e867f62712b4b45d5'}, {'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 2048, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_name': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_name': None, 'resource_id': 
'183a1b6e-784e-41d6-8632-851d606f23dc-sda', 'timestamp': '2025-10-02T12:09:17.128063', 'resource_metadata': {'display_name': 'tempest-ServersTestJSON-server-42881034', 'name': 'instance-0000002f', 'instance_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'instance_type': 'm1.nano', 'host': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '9e746fa4-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4954.790325845, 'message_signature': '9ee3359634172033f8b381b1c32e044fab14d1bfc42b67d739d755f3da60a882'}]}, 'timestamp': '2025-10-02 12:09:17.128592', '_unique_id': 'b0a757d84df247b1a8161cb7c18f73e9'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.129 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.130 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets in the context of pollsters
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.132 12 DEBUG ceilometer.compute.virt.libvirt.inspector [-] No delta meter predecessor for 183a1b6e-784e-41d6-8632-851d606f23dc / tapcf9649ce-48 inspect_vnics /usr/lib/python3.9/site-packages/ceilometer/compute/virt/libvirt/inspector.py:136
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.133 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/network.incoming.packets volume: 1 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '5bb36059-4f89-4b7d-88a8-4e6a5770223c', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 1, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_name': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_name': None, 'resource_id': 'instance-0000002f-183a1b6e-784e-41d6-8632-851d606f23dc-tapcf9649ce-48', 'timestamp': '2025-10-02T12:09:17.130186', 'resource_metadata': {'display_name': 'tempest-ServersTestJSON-server-42881034', 'name': 'tapcf9649ce-48', 'instance_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'instance_type': 'm1.nano', 'host': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:eb:81:4a', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapcf9649ce-48'}, 'message_id': '9e752e4e-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4954.817259323, 'message_signature': '1ae0589a53825432feadee3e709c0624559c1d6e858b196d5938ecbe2eac4b9d'}]}, 'timestamp': '2025-10-02 12:09:17.133561', '_unique_id': '7b83dd4367fc4e3c90f6a97b4874a74d'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.134 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.135 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets.drop in the context of pollsters
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.135 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/network.incoming.packets.drop volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '040a0672-9860-4c15-92e7-2c085bdad3a9', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets.drop', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_name': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_name': None, 'resource_id': 'instance-0000002f-183a1b6e-784e-41d6-8632-851d606f23dc-tapcf9649ce-48', 'timestamp': '2025-10-02T12:09:17.135550', 'resource_metadata': {'display_name': 'tempest-ServersTestJSON-server-42881034', 'name': 'tapcf9649ce-48', 'instance_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'instance_type': 'm1.nano', 'host': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:eb:81:4a', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapcf9649ce-48'}, 'message_id': '9e758b32-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4954.817259323, 'message_signature': '23aaf23d5da95c5b944297ddb60ecb5ede8dd98bba08db258ca72ff78cbcf85f'}]}, 'timestamp': '2025-10-02 12:09:17.135896', '_unique_id': '7db9ef06f04941029eecff625e6a423b'}: kombu.exceptions.OperationalError: [Errno 111] Connection 
refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.136 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.137 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes.rate in the context of pollsters
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.137 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for OutgoingBytesRatePollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.137 12 ERROR ceilometer.polling.manager [-] Prevent pollster network.outgoing.bytes.rate from polling [<NovaLikeServer: tempest-ServersTestJSON-server-42881034>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-ServersTestJSON-server-42881034>]
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.137 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes.delta in the context of pollsters
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.138 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/network.outgoing.bytes.delta volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '3eb719b1-4de5-4b6d-9df7-9ac9ca42339e', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.bytes.delta', 'counter_type': 'delta', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_name': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_name': None, 'resource_id': 'instance-0000002f-183a1b6e-784e-41d6-8632-851d606f23dc-tapcf9649ce-48', 'timestamp': '2025-10-02T12:09:17.138120', 'resource_metadata': {'display_name': 'tempest-ServersTestJSON-server-42881034', 'name': 'tapcf9649ce-48', 'instance_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'instance_type': 'm1.nano', 'host': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:eb:81:4a', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapcf9649ce-48'}, 'message_id': '9e75ef8c-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4954.817259323, 'message_signature': 'db761ba23bbab7ff0a10812e7a3d355e0e82ad4427b1e48a5cd52afd0bdbfd94'}]}, 'timestamp': '2025-10-02 12:09:17.138475', '_unique_id': '78f8679e5d8946c7b592914e7ca900ea'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.139 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.140 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes in the context of pollsters
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.140 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/network.incoming.bytes volume: 110 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'dba3c154-936c-4c9b-8897-950d70edb8ef', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 110, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_name': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_name': None, 'resource_id': 'instance-0000002f-183a1b6e-784e-41d6-8632-851d606f23dc-tapcf9649ce-48', 'timestamp': '2025-10-02T12:09:17.140363', 'resource_metadata': {'display_name': 'tempest-ServersTestJSON-server-42881034', 'name': 'tapcf9649ce-48', 'instance_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'instance_type': 'm1.nano', 'host': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:eb:81:4a', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapcf9649ce-48'}, 'message_id': '9e7645c2-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4954.817259323, 'message_signature': '432d6845fd8a5ddd8c8c61c83922256a06d3f75f6c6759c44394de8689926a54'}]}, 'timestamp': '2025-10-02 12:09:17.140642', '_unique_id': '124fef0ddb86499bbf180465f185d6ec'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.141 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.142 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes.rate in the context of pollsters
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.142 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for IncomingBytesRatePollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.142 12 ERROR ceilometer.polling.manager [-] Prevent pollster network.incoming.bytes.rate from polling [<NovaLikeServer: tempest-ServersTestJSON-server-42881034>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-ServersTestJSON-server-42881034>]
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.142 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.bytes in the context of pollsters
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.142 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/disk.device.write.bytes volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/disk.device.write.bytes volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'ad736e55-e36f-4b91-93dd-c7c76c8ae26c', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_name': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_name': None, 'resource_id': '183a1b6e-784e-41d6-8632-851d606f23dc-vda', 'timestamp': '2025-10-02T12:09:17.142734', 'resource_metadata': {'display_name': 'tempest-ServersTestJSON-server-42881034', 'name': 'instance-0000002f', 'instance_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'instance_type': 'm1.nano', 'host': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '9e76a3dc-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4954.790325845, 'message_signature': '96ab4ac637169be191fe194e212083e7d8f2ea2ba75b9cf3c1899acf8915d340'}, {'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_name': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_name': None, 'resource_id': 
'183a1b6e-784e-41d6-8632-851d606f23dc-sda', 'timestamp': '2025-10-02T12:09:17.142734', 'resource_metadata': {'display_name': 'tempest-ServersTestJSON-server-42881034', 'name': 'instance-0000002f', 'instance_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'instance_type': 'm1.nano', 'host': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '9e76aeea-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4954.790325845, 'message_signature': '8228d470b94121e3a66563460390be8621fd11e74bba28b9b2e7c2fea53ee352'}]}, 'timestamp': '2025-10-02 12:09:17.143316', '_unique_id': '36eb550b3490438289c915d981ee55ca'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.143 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.144 12 INFO ceilometer.polling.manager [-] Polling pollster cpu in the context of pollsters
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.159 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/cpu volume: 2740000000 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'f61bbc58-1d39-4586-be8c-0b805c73f192', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'cpu', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 2740000000, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_name': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_name': None, 'resource_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'timestamp': '2025-10-02T12:09:17.145101', 'resource_metadata': {'display_name': 'tempest-ServersTestJSON-server-42881034', 'name': 'instance-0000002f', 'instance_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'instance_type': 'm1.nano', 'host': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'cpu_number': 1}, 'message_id': '9e7924f4-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4954.845990398, 'message_signature': '02b8d80d6262f95936fd0fb2e8c82d02c4a190335d97eac3b590d8bb76edd6f9'}]}, 'timestamp': '2025-10-02 12:09:17.159498', '_unique_id': '50bb881b5e8740f097cd25e4f9f131e1'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.160 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.161 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets in the context of pollsters
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.161 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/network.outgoing.packets volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'a1cfaebe-fd14-4d8d-9028-2d05da47c3c3', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_name': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_name': None, 'resource_id': 'instance-0000002f-183a1b6e-784e-41d6-8632-851d606f23dc-tapcf9649ce-48', 'timestamp': '2025-10-02T12:09:17.161276', 'resource_metadata': {'display_name': 'tempest-ServersTestJSON-server-42881034', 'name': 'tapcf9649ce-48', 'instance_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'instance_type': 'm1.nano', 'host': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:eb:81:4a', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapcf9649ce-48'}, 'message_id': '9e7976b6-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4954.817259323, 'message_signature': '0c10e8a857865062725932ca2a335d475ee676335c2895c12d4df47cd239e7b0'}]}, 'timestamp': '2025-10-02 12:09:17.161549', '_unique_id': 'ec544d267603433fb7653b67be8b49c9'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.162 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets.drop in the context of pollsters
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/network.outgoing.packets.drop volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '23205e86-9d09-4e6c-b3aa-8ad303d923d9', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets.drop', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_name': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_name': None, 'resource_id': 'instance-0000002f-183a1b6e-784e-41d6-8632-851d606f23dc-tapcf9649ce-48', 'timestamp': '2025-10-02T12:09:17.163187', 'resource_metadata': {'display_name': 'tempest-ServersTestJSON-server-42881034', 'name': 'tapcf9649ce-48', 'instance_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'instance_type': 'm1.nano', 'host': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:eb:81:4a', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapcf9649ce-48'}, 'message_id': '9e79c18e-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4954.817259323, 'message_signature': '5d46209cfc339b76f9b6479fc590eee4a82172ac662c822b1ae1ee7126a68753'}]}, 'timestamp': '2025-10-02 12:09:17.163466', '_unique_id': '45221951fea24108ab2a6d4c66d362d0'}: kombu.exceptions.OperationalError: [Errno 111] Connection 
refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.163 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.164 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets.error in the context of pollsters
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.164 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/network.outgoing.packets.error volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '9a314634-1aff-4294-810d-d9de032516d6', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets.error', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_name': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_name': None, 'resource_id': 'instance-0000002f-183a1b6e-784e-41d6-8632-851d606f23dc-tapcf9649ce-48', 'timestamp': '2025-10-02T12:09:17.164882', 'resource_metadata': {'display_name': 'tempest-ServersTestJSON-server-42881034', 'name': 'tapcf9649ce-48', 'instance_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'instance_type': 'm1.nano', 'host': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:eb:81:4a', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapcf9649ce-48'}, 'message_id': '9e7a0716-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4954.817259323, 'message_signature': 'da9363a797944f7b0ebc532b2d60dd255d8338322ce69a66694fa4bb2b39d6ef'}]}, 'timestamp': '2025-10-02 12:09:17.165247', '_unique_id': 'd222ae86bd2e4dd6bf1174e96397391f'}: kombu.exceptions.OperationalError: [Errno 111] Connection 
refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.165 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.166 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.capacity in the context of pollsters
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.176 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/disk.device.capacity volume: 1073741824 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.176 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/disk.device.capacity volume: 509952 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'a06995cb-dbd2-4912-b1a3-54331954646f', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 1073741824, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_name': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_name': None, 'resource_id': '183a1b6e-784e-41d6-8632-851d606f23dc-vda', 'timestamp': '2025-10-02T12:09:17.166764', 'resource_metadata': {'display_name': 'tempest-ServersTestJSON-server-42881034', 'name': 'instance-0000002f', 'instance_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'instance_type': 'm1.nano', 'host': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '9e7bbd5e-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4954.85383764, 'message_signature': '059b4cfb53383d9a627728cb21e0db7473fdfd24f67de99af572e866cf8bc38c'}, {'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 509952, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_name': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_name': None, 'resource_id': 
'183a1b6e-784e-41d6-8632-851d606f23dc-sda', 'timestamp': '2025-10-02T12:09:17.166764', 'resource_metadata': {'display_name': 'tempest-ServersTestJSON-server-42881034', 'name': 'instance-0000002f', 'instance_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'instance_type': 'm1.nano', 'host': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '9e7bc862-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4954.85383764, 'message_signature': '347fd6c85180dbf197dd0e5b5d7c0a6c660ed0c1b6a92c3adbbc75fa75760d69'}]}, 'timestamp': '2025-10-02 12:09:17.176743', '_unique_id': 'db900165bb064ccc9552a0b7e2091cc6'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.177 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.178 12 INFO ceilometer.polling.manager [-] Polling pollster memory.usage in the context of pollsters
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.178 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/memory.usage volume: Unavailable _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.178 12 WARNING ceilometer.compute.pollsters [-] memory.usage statistic in not available for instance 183a1b6e-784e-41d6-8632-851d606f23dc: ceilometer.compute.pollsters.NoVolumeException
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.178 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes in the context of pollsters
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.178 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/network.outgoing.bytes volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'a68f667b-9c70-42b9-82e4-1fb6f0e387ad', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_name': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_name': None, 'resource_id': 'instance-0000002f-183a1b6e-784e-41d6-8632-851d606f23dc-tapcf9649ce-48', 'timestamp': '2025-10-02T12:09:17.178936', 'resource_metadata': {'display_name': 'tempest-ServersTestJSON-server-42881034', 'name': 'tapcf9649ce-48', 'instance_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'instance_type': 'm1.nano', 'host': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:eb:81:4a', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapcf9649ce-48'}, 'message_id': '9e7c294c-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4954.817259323, 'message_signature': 'f28089759f066bfdab5db9b4e676f359e220b9c1fe5a895c8f81509fc8106990'}]}, 'timestamp': '2025-10-02 12:09:17.179231', '_unique_id': '37d9990fff6a4c10ae53679724e8bf12'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.179 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.180 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.usage in the context of pollsters
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/disk.device.usage volume: 196624 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/disk.device.usage volume: 509952 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '6dd69a5a-5b98-433a-a384-1130dd8209a4', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 196624, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_name': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_name': None, 'resource_id': '183a1b6e-784e-41d6-8632-851d606f23dc-vda', 'timestamp': '2025-10-02T12:09:17.180967', 'resource_metadata': {'display_name': 'tempest-ServersTestJSON-server-42881034', 'name': 'instance-0000002f', 'instance_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'instance_type': 'm1.nano', 'host': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '9e7c78c0-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4954.85383764, 'message_signature': '32b8e2f4c18d9285a6f469bca3989f134502c5b7a812dcd903754b8debefb54c'}, {'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 509952, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_name': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_name': None, 'resource_id': 
'183a1b6e-784e-41d6-8632-851d606f23dc-sda', 'timestamp': '2025-10-02T12:09:17.180967', 'resource_metadata': {'display_name': 'tempest-ServersTestJSON-server-42881034', 'name': 'instance-0000002f', 'instance_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'instance_type': 'm1.nano', 'host': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '9e7c8252-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4954.85383764, 'message_signature': '7d1d90934e657b309a038179e58b9b69b8603f9203d9c2989de4429ea70e9dd5'}]}, 'timestamp': '2025-10-02 12:09:17.181494', '_unique_id': 'd2b0ede859344831aeb13b4fe9d862a9'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.181 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.182 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.requests in the context of pollsters
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.183 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/disk.device.write.requests volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.183 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/disk.device.write.requests volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '6566ab10-43d2-4af3-8dc5-1c72eaff6adc', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 0, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_name': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_name': None, 'resource_id': '183a1b6e-784e-41d6-8632-851d606f23dc-vda', 'timestamp': '2025-10-02T12:09:17.182969', 'resource_metadata': {'display_name': 'tempest-ServersTestJSON-server-42881034', 'name': 'instance-0000002f', 'instance_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'instance_type': 'm1.nano', 'host': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '9e7cc762-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4954.790325845, 'message_signature': '828b83e0529011d8a44d73fb57e1b7acf749dfea355979d103f3f0d9414c2eae'}, {'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 0, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_name': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_name': None, 
'resource_id': '183a1b6e-784e-41d6-8632-851d606f23dc-sda', 'timestamp': '2025-10-02T12:09:17.182969', 'resource_metadata': {'display_name': 'tempest-ServersTestJSON-server-42881034', 'name': 'instance-0000002f', 'instance_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'instance_type': 'm1.nano', 'host': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '9e7cd39c-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4954.790325845, 'message_signature': '7e1c9bded7fe429f9eaf30383bd9449e8636d9d93845597d1975ad0ceb427159'}]}, 'timestamp': '2025-10-02 12:09:17.183579', '_unique_id': 'c48b636f814b42949405469dfd5d0c5c'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.184 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.185 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets.error in the context of pollsters
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.185 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/network.incoming.packets.error volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '48b7b8f1-0061-443a-85a0-4ea8e02b7169', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets.error', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_name': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_name': None, 'resource_id': 'instance-0000002f-183a1b6e-784e-41d6-8632-851d606f23dc-tapcf9649ce-48', 'timestamp': '2025-10-02T12:09:17.185507', 'resource_metadata': {'display_name': 'tempest-ServersTestJSON-server-42881034', 'name': 'tapcf9649ce-48', 'instance_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'instance_type': 'm1.nano', 'host': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:eb:81:4a', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapcf9649ce-48'}, 'message_id': '9e7d2964-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4954.817259323, 'message_signature': '423e6fe304818fd150fb9d8427d1dd014b16ee66530dd6e0917e0dca7a7c4839'}]}, 'timestamp': '2025-10-02 12:09:17.185789', '_unique_id': 'dbbbbd775358417b89dd4328f0581721'}: kombu.exceptions.OperationalError: [Errno 111] Connection 
refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.186 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.187 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes.delta in the context of pollsters
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.187 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/network.incoming.bytes.delta volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '81264ff5-792c-4023-93cb-c86ebf14b417', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.bytes.delta', 'counter_type': 'delta', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_name': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_name': None, 'resource_id': 'instance-0000002f-183a1b6e-784e-41d6-8632-851d606f23dc-tapcf9649ce-48', 'timestamp': '2025-10-02T12:09:17.187306', 'resource_metadata': {'display_name': 'tempest-ServersTestJSON-server-42881034', 'name': 'tapcf9649ce-48', 'instance_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'instance_type': 'm1.nano', 'host': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:eb:81:4a', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapcf9649ce-48'}, 'message_id': '9e7d6f78-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4954.817259323, 'message_signature': 'da476d63bd89462345bee8dca5f50ef7e79ca61b38c17993f2f622aae360d788'}]}, 'timestamp': '2025-10-02 12:09:17.187578', '_unique_id': 'f9e68b0554824a7c8ae4155e4908e0c2'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.188 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.allocation in the context of pollsters
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.189 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/disk.device.allocation volume: 204800 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.189 12 DEBUG ceilometer.compute.pollsters [-] 183a1b6e-784e-41d6-8632-851d606f23dc/disk.device.allocation volume: 512000 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '6d9d7a53-adb8-4c55-ba80-227b2a547dab', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 204800, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_name': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_name': None, 'resource_id': '183a1b6e-784e-41d6-8632-851d606f23dc-vda', 'timestamp': '2025-10-02T12:09:17.188980', 'resource_metadata': {'display_name': 'tempest-ServersTestJSON-server-42881034', 'name': 'instance-0000002f', 'instance_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'instance_type': 'm1.nano', 'host': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '9e7db532-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4954.85383764, 'message_signature': '4f0745657b7aa2f5cffadec4feb63c20cb7df947ac92287a464879425de75efe'}, {'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 512000, 'user_id': '202970a145ff4e8aa3dc22131cc9240d', 'user_name': None, 'project_id': '887b026ea22942f7b709489ddec04ffc', 'project_name': None, 'resource_id': 
'183a1b6e-784e-41d6-8632-851d606f23dc-sda', 'timestamp': '2025-10-02T12:09:17.188980', 'resource_metadata': {'display_name': 'tempest-ServersTestJSON-server-42881034', 'name': 'instance-0000002f', 'instance_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'instance_type': 'm1.nano', 'host': 'f12ec06de91d90615502602fd109e53b8de8019376b490f65721bb49', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '9e7dc054-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 4954.85383764, 'message_signature': 'a28de3e03a828a859d7508bdf0700e9ccbe0856a6998fb8dc6b186ebf177153a'}]}, 'timestamp': '2025-10-02 12:09:17.189639', '_unique_id': 'dee7a05cc325419e9f693f51a51091fe'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:09:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:09:17.190 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:09:17 compute-0 nova_compute[192079]: 2025-10-02 12:09:17.933 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:18 compute-0 nova_compute[192079]: 2025-10-02 12:09:18.338 2 DEBUG nova.compute.manager [req-c670ff7c-a1d0-4107-89fa-e23aacbf4284 req-836b985d-509d-47d3-9bae-b52074379f65 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Received event network-changed-cf9649ce-4816-43e7-96a6-7a0e08d84e61 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:09:18 compute-0 nova_compute[192079]: 2025-10-02 12:09:18.339 2 DEBUG nova.compute.manager [req-c670ff7c-a1d0-4107-89fa-e23aacbf4284 req-836b985d-509d-47d3-9bae-b52074379f65 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Refreshing instance network info cache due to event network-changed-cf9649ce-4816-43e7-96a6-7a0e08d84e61. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:09:18 compute-0 nova_compute[192079]: 2025-10-02 12:09:18.339 2 DEBUG oslo_concurrency.lockutils [req-c670ff7c-a1d0-4107-89fa-e23aacbf4284 req-836b985d-509d-47d3-9bae-b52074379f65 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-183a1b6e-784e-41d6-8632-851d606f23dc" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:09:19 compute-0 nova_compute[192079]: 2025-10-02 12:09:19.024 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Updating instance_info_cache with network_info: [{"id": "cf9649ce-4816-43e7-96a6-7a0e08d84e61", "address": "fa:16:3e:eb:81:4a", "network": {"id": "beab6431-897b-46cc-9079-f58f012784e7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1799667945-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "887b026ea22942f7b709489ddec04ffc", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapcf9649ce-48", "ovs_interfaceid": "cf9649ce-4816-43e7-96a6-7a0e08d84e61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:09:19 compute-0 nova_compute[192079]: 2025-10-02 12:09:19.043 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Releasing lock "refresh_cache-183a1b6e-784e-41d6-8632-851d606f23dc" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:09:19 compute-0 nova_compute[192079]: 2025-10-02 12:09:19.043 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Updated the network info_cache for instance _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9929
Oct 02 12:09:19 compute-0 nova_compute[192079]: 2025-10-02 12:09:19.043 2 DEBUG oslo_concurrency.lockutils [req-c670ff7c-a1d0-4107-89fa-e23aacbf4284 req-836b985d-509d-47d3-9bae-b52074379f65 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-183a1b6e-784e-41d6-8632-851d606f23dc" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:09:19 compute-0 nova_compute[192079]: 2025-10-02 12:09:19.044 2 DEBUG nova.network.neutron [req-c670ff7c-a1d0-4107-89fa-e23aacbf4284 req-836b985d-509d-47d3-9bae-b52074379f65 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Refreshing network info cache for port cf9649ce-4816-43e7-96a6-7a0e08d84e61 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:09:19 compute-0 nova_compute[192079]: 2025-10-02 12:09:19.045 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:09:19 compute-0 nova_compute[192079]: 2025-10-02 12:09:19.045 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:09:19 compute-0 nova_compute[192079]: 2025-10-02 12:09:19.045 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:09:19 compute-0 nova_compute[192079]: 2025-10-02 12:09:19.046 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:09:19 compute-0 podman[226546]: 2025-10-02 12:09:19.147355264 +0000 UTC m=+0.062611362 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:09:19 compute-0 podman[226547]: 2025-10-02 12:09:19.164261101 +0000 UTC m=+0.068879371 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=iscsid, container_name=iscsid, io.buildah.version=1.41.3)
Oct 02 12:09:19 compute-0 nova_compute[192079]: 2025-10-02 12:09:19.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:09:19 compute-0 nova_compute[192079]: 2025-10-02 12:09:19.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:09:20 compute-0 nova_compute[192079]: 2025-10-02 12:09:20.171 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:21 compute-0 nova_compute[192079]: 2025-10-02 12:09:21.161 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:21 compute-0 nova_compute[192079]: 2025-10-02 12:09:21.779 2 DEBUG nova.network.neutron [req-c670ff7c-a1d0-4107-89fa-e23aacbf4284 req-836b985d-509d-47d3-9bae-b52074379f65 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Updated VIF entry in instance network info cache for port cf9649ce-4816-43e7-96a6-7a0e08d84e61. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:09:21 compute-0 nova_compute[192079]: 2025-10-02 12:09:21.780 2 DEBUG nova.network.neutron [req-c670ff7c-a1d0-4107-89fa-e23aacbf4284 req-836b985d-509d-47d3-9bae-b52074379f65 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Updating instance_info_cache with network_info: [{"id": "cf9649ce-4816-43e7-96a6-7a0e08d84e61", "address": "fa:16:3e:eb:81:4a", "network": {"id": "beab6431-897b-46cc-9079-f58f012784e7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1799667945-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "887b026ea22942f7b709489ddec04ffc", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapcf9649ce-48", "ovs_interfaceid": "cf9649ce-4816-43e7-96a6-7a0e08d84e61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:09:21 compute-0 nova_compute[192079]: 2025-10-02 12:09:21.795 2 DEBUG oslo_concurrency.lockutils [req-c670ff7c-a1d0-4107-89fa-e23aacbf4284 req-836b985d-509d-47d3-9bae-b52074379f65 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-183a1b6e-784e-41d6-8632-851d606f23dc" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:09:22 compute-0 nova_compute[192079]: 2025-10-02 12:09:22.935 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:23 compute-0 nova_compute[192079]: 2025-10-02 12:09:23.678 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._run_pending_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:09:23 compute-0 nova_compute[192079]: 2025-10-02 12:09:23.679 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Cleaning up deleted instances _run_pending_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:11145
Oct 02 12:09:23 compute-0 nova_compute[192079]: 2025-10-02 12:09:23.693 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] There are 0 instances to clean _run_pending_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:11154
Oct 02 12:09:24 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:24.038 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '13'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:09:25 compute-0 nova_compute[192079]: 2025-10-02 12:09:25.172 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:26 compute-0 nova_compute[192079]: 2025-10-02 12:09:26.061 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:27 compute-0 ovn_controller[94336]: 2025-10-02T12:09:27Z|00014|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:eb:81:4a 10.100.0.10
Oct 02 12:09:27 compute-0 ovn_controller[94336]: 2025-10-02T12:09:27Z|00015|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:eb:81:4a 10.100.0.10
Oct 02 12:09:27 compute-0 nova_compute[192079]: 2025-10-02 12:09:27.937 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:30 compute-0 podman[226611]: 2025-10-02 12:09:30.143245809 +0000 UTC m=+0.052759526 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:09:30 compute-0 podman[226609]: 2025-10-02 12:09:30.157183285 +0000 UTC m=+0.073370771 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_id=ovn_metadata_agent, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', 
'/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, container_name=ovn_metadata_agent)
Oct 02 12:09:30 compute-0 podman[226610]: 2025-10-02 12:09:30.167056611 +0000 UTC m=+0.082233160 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, config_id=ovn_controller, container_name=ovn_controller, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:09:30 compute-0 nova_compute[192079]: 2025-10-02 12:09:30.174 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:32 compute-0 nova_compute[192079]: 2025-10-02 12:09:32.939 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:34 compute-0 nova_compute[192079]: 2025-10-02 12:09:34.979 2 DEBUG oslo_concurrency.lockutils [None req-8036ec84-0fda-4aba-b237-153e79aa543d 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Acquiring lock "183a1b6e-784e-41d6-8632-851d606f23dc" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:09:34 compute-0 nova_compute[192079]: 2025-10-02 12:09:34.980 2 DEBUG oslo_concurrency.lockutils [None req-8036ec84-0fda-4aba-b237-153e79aa543d 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Lock "183a1b6e-784e-41d6-8632-851d606f23dc" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.002s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:09:34 compute-0 nova_compute[192079]: 2025-10-02 12:09:34.981 2 DEBUG oslo_concurrency.lockutils [None req-8036ec84-0fda-4aba-b237-153e79aa543d 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Acquiring lock "183a1b6e-784e-41d6-8632-851d606f23dc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:09:34 compute-0 nova_compute[192079]: 2025-10-02 12:09:34.981 2 DEBUG oslo_concurrency.lockutils [None req-8036ec84-0fda-4aba-b237-153e79aa543d 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Lock "183a1b6e-784e-41d6-8632-851d606f23dc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:09:34 compute-0 nova_compute[192079]: 2025-10-02 12:09:34.981 2 DEBUG oslo_concurrency.lockutils [None req-8036ec84-0fda-4aba-b237-153e79aa543d 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Lock "183a1b6e-784e-41d6-8632-851d606f23dc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:09:34 compute-0 nova_compute[192079]: 2025-10-02 12:09:34.991 2 INFO nova.compute.manager [None req-8036ec84-0fda-4aba-b237-153e79aa543d 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Terminating instance
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.006 2 DEBUG nova.compute.manager [None req-8036ec84-0fda-4aba-b237-153e79aa543d 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:09:35 compute-0 kernel: tapcf9649ce-48 (unregistering): left promiscuous mode
Oct 02 12:09:35 compute-0 NetworkManager[51160]: <info>  [1759406975.0262] device (tapcf9649ce-48): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.038 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:35 compute-0 ovn_controller[94336]: 2025-10-02T12:09:35Z|00151|binding|INFO|Releasing lport cf9649ce-4816-43e7-96a6-7a0e08d84e61 from this chassis (sb_readonly=0)
Oct 02 12:09:35 compute-0 ovn_controller[94336]: 2025-10-02T12:09:35Z|00152|binding|INFO|Setting lport cf9649ce-4816-43e7-96a6-7a0e08d84e61 down in Southbound
Oct 02 12:09:35 compute-0 ovn_controller[94336]: 2025-10-02T12:09:35Z|00153|binding|INFO|Removing iface tapcf9649ce-48 ovn-installed in OVS
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.040 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:35 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:35.046 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:eb:81:4a 10.100.0.10'], port_security=['fa:16:3e:eb:81:4a 10.100.0.10'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.10/28', 'neutron:device_id': '183a1b6e-784e-41d6-8632-851d606f23dc', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-beab6431-897b-46cc-9079-f58f012784e7', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '887b026ea22942f7b709489ddec04ffc', 'neutron:revision_number': '4', 'neutron:security_group_ids': '5dea2340-ed18-46b8-bf33-e1eec0ef4b44', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com', 'neutron:port_fip': '192.168.122.179'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=b7134cc5-0916-4a45-9ec8-c36785c44299, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=cf9649ce-4816-43e7-96a6-7a0e08d84e61) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:09:35 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:35.047 103294 INFO neutron.agent.ovn.metadata.agent [-] Port cf9649ce-4816-43e7-96a6-7a0e08d84e61 in datapath beab6431-897b-46cc-9079-f58f012784e7 unbound from our chassis
Oct 02 12:09:35 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:35.049 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network beab6431-897b-46cc-9079-f58f012784e7, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:09:35 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:35.052 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b7eef949-34ec-4bc5-83f1-e87f1ccdb3a6]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:09:35 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:35.053 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-beab6431-897b-46cc-9079-f58f012784e7 namespace which is not needed anymore
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.060 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:35 compute-0 systemd[1]: machine-qemu\x2d25\x2dinstance\x2d0000002f.scope: Deactivated successfully.
Oct 02 12:09:35 compute-0 systemd[1]: machine-qemu\x2d25\x2dinstance\x2d0000002f.scope: Consumed 13.492s CPU time.
Oct 02 12:09:35 compute-0 systemd-machined[152150]: Machine qemu-25-instance-0000002f terminated.
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.175 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:35 compute-0 neutron-haproxy-ovnmeta-beab6431-897b-46cc-9079-f58f012784e7[226531]: [NOTICE]   (226535) : haproxy version is 2.8.14-c23fe91
Oct 02 12:09:35 compute-0 neutron-haproxy-ovnmeta-beab6431-897b-46cc-9079-f58f012784e7[226531]: [NOTICE]   (226535) : path to executable is /usr/sbin/haproxy
Oct 02 12:09:35 compute-0 neutron-haproxy-ovnmeta-beab6431-897b-46cc-9079-f58f012784e7[226531]: [WARNING]  (226535) : Exiting Master process...
Oct 02 12:09:35 compute-0 neutron-haproxy-ovnmeta-beab6431-897b-46cc-9079-f58f012784e7[226531]: [ALERT]    (226535) : Current worker (226537) exited with code 143 (Terminated)
Oct 02 12:09:35 compute-0 neutron-haproxy-ovnmeta-beab6431-897b-46cc-9079-f58f012784e7[226531]: [WARNING]  (226535) : All workers exited. Exiting... (0)
Oct 02 12:09:35 compute-0 systemd[1]: libpod-c7b3bca249080ab8dea0df347888108ff06b7e3577831f26c3ea9591c4525526.scope: Deactivated successfully.
Oct 02 12:09:35 compute-0 podman[226701]: 2025-10-02 12:09:35.20411076 +0000 UTC m=+0.065833268 container died c7b3bca249080ab8dea0df347888108ff06b7e3577831f26c3ea9591c4525526 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-beab6431-897b-46cc-9079-f58f012784e7, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.228 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.234 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.278 2 INFO nova.virt.libvirt.driver [-] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Instance destroyed successfully.
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.278 2 DEBUG nova.objects.instance [None req-8036ec84-0fda-4aba-b237-153e79aa543d 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Lazy-loading 'resources' on Instance uuid 183a1b6e-784e-41d6-8632-851d606f23dc obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.306 2 DEBUG nova.virt.libvirt.vif [None req-8036ec84-0fda-4aba-b237-153e79aa543d 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] vif_type=ovs instance=Instance(access_ip_v4=1.1.1.1,access_ip_v6=::babe:dc0c:1602,architecture=None,auto_disk_config=True,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:09:05Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServersTestJSON-server-42881034',display_name='tempest-ServersTestJSON-server-42881034',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverstestjson-server-42881034',id=47,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBJ2ECj5rq6JEj2kFB2Dzpg61lYERPg5QuDNoG+3Ns56Ne8gmdcg9RlkerI4nyg4Jm5/KR5qb0w+Hb905h4pIodJPNYlpnVwDTZedUvrdpuRanm+Z5YZ4kjefqtX2UfG2BA==',key_name='tempest-keypair-1649035751',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:09:14Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={hello='world'},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='887b026ea22942f7b709489ddec04ffc',ramdisk_id='',reservation_id='r-fno0nr06',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-ServersTestJSON-801569',owner_user_name='tempest-ServersTestJSON-801569-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:09:14Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='202970a145ff4e8aa3dc22131cc9240d',uuid=183a1b6e-784e-41d6-8632-851d606f23dc,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "cf9649ce-4816-43e7-96a6-7a0e08d84e61", "address": "fa:16:3e:eb:81:4a", "network": {"id": "beab6431-897b-46cc-9079-f58f012784e7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1799667945-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": 
{"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "887b026ea22942f7b709489ddec04ffc", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapcf9649ce-48", "ovs_interfaceid": "cf9649ce-4816-43e7-96a6-7a0e08d84e61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.307 2 DEBUG nova.network.os_vif_util [None req-8036ec84-0fda-4aba-b237-153e79aa543d 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Converting VIF {"id": "cf9649ce-4816-43e7-96a6-7a0e08d84e61", "address": "fa:16:3e:eb:81:4a", "network": {"id": "beab6431-897b-46cc-9079-f58f012784e7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1799667945-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "887b026ea22942f7b709489ddec04ffc", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapcf9649ce-48", "ovs_interfaceid": "cf9649ce-4816-43e7-96a6-7a0e08d84e61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.309 2 DEBUG nova.network.os_vif_util [None req-8036ec84-0fda-4aba-b237-153e79aa543d 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:eb:81:4a,bridge_name='br-int',has_traffic_filtering=True,id=cf9649ce-4816-43e7-96a6-7a0e08d84e61,network=Network(beab6431-897b-46cc-9079-f58f012784e7),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapcf9649ce-48') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.310 2 DEBUG os_vif [None req-8036ec84-0fda-4aba-b237-153e79aa543d 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:eb:81:4a,bridge_name='br-int',has_traffic_filtering=True,id=cf9649ce-4816-43e7-96a6-7a0e08d84e61,network=Network(beab6431-897b-46cc-9079-f58f012784e7),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapcf9649ce-48') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.314 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.315 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapcf9649ce-48, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.318 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.320 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.323 2 INFO os_vif [None req-8036ec84-0fda-4aba-b237-153e79aa543d 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:eb:81:4a,bridge_name='br-int',has_traffic_filtering=True,id=cf9649ce-4816-43e7-96a6-7a0e08d84e61,network=Network(beab6431-897b-46cc-9079-f58f012784e7),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapcf9649ce-48')
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.324 2 INFO nova.virt.libvirt.driver [None req-8036ec84-0fda-4aba-b237-153e79aa543d 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Deleting instance files /var/lib/nova/instances/183a1b6e-784e-41d6-8632-851d606f23dc_del
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.325 2 INFO nova.virt.libvirt.driver [None req-8036ec84-0fda-4aba-b237-153e79aa543d 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Deletion of /var/lib/nova/instances/183a1b6e-784e-41d6-8632-851d606f23dc_del complete
Oct 02 12:09:35 compute-0 systemd[1]: var-lib-containers-storage-overlay-a0515c3d94a844d0631ea3e746ca1ff2cd0eec4efdc0378285d5a232dcee47fc-merged.mount: Deactivated successfully.
Oct 02 12:09:35 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-c7b3bca249080ab8dea0df347888108ff06b7e3577831f26c3ea9591c4525526-userdata-shm.mount: Deactivated successfully.
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.411 2 INFO nova.compute.manager [None req-8036ec84-0fda-4aba-b237-153e79aa543d 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Took 0.40 seconds to destroy the instance on the hypervisor.
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.412 2 DEBUG oslo.service.loopingcall [None req-8036ec84-0fda-4aba-b237-153e79aa543d 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.412 2 DEBUG nova.compute.manager [-] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.412 2 DEBUG nova.network.neutron [-] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:09:35 compute-0 podman[226701]: 2025-10-02 12:09:35.478301893 +0000 UTC m=+0.340024401 container cleanup c7b3bca249080ab8dea0df347888108ff06b7e3577831f26c3ea9591c4525526 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-beab6431-897b-46cc-9079-f58f012784e7, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:09:35 compute-0 systemd[1]: libpod-conmon-c7b3bca249080ab8dea0df347888108ff06b7e3577831f26c3ea9591c4525526.scope: Deactivated successfully.
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.540 2 DEBUG nova.compute.manager [req-cd83e214-5466-4a96-8a7c-50697cccfd2d req-e006ac93-8be8-414e-921f-b57c6f2630db 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Received event network-vif-unplugged-cf9649ce-4816-43e7-96a6-7a0e08d84e61 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.541 2 DEBUG oslo_concurrency.lockutils [req-cd83e214-5466-4a96-8a7c-50697cccfd2d req-e006ac93-8be8-414e-921f-b57c6f2630db 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "183a1b6e-784e-41d6-8632-851d606f23dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.541 2 DEBUG oslo_concurrency.lockutils [req-cd83e214-5466-4a96-8a7c-50697cccfd2d req-e006ac93-8be8-414e-921f-b57c6f2630db 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "183a1b6e-784e-41d6-8632-851d606f23dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.542 2 DEBUG oslo_concurrency.lockutils [req-cd83e214-5466-4a96-8a7c-50697cccfd2d req-e006ac93-8be8-414e-921f-b57c6f2630db 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "183a1b6e-784e-41d6-8632-851d606f23dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.542 2 DEBUG nova.compute.manager [req-cd83e214-5466-4a96-8a7c-50697cccfd2d req-e006ac93-8be8-414e-921f-b57c6f2630db 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] No waiting events found dispatching network-vif-unplugged-cf9649ce-4816-43e7-96a6-7a0e08d84e61 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.542 2 DEBUG nova.compute.manager [req-cd83e214-5466-4a96-8a7c-50697cccfd2d req-e006ac93-8be8-414e-921f-b57c6f2630db 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Received event network-vif-unplugged-cf9649ce-4816-43e7-96a6-7a0e08d84e61 for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:09:35 compute-0 podman[226747]: 2025-10-02 12:09:35.794745607 +0000 UTC m=+0.296471346 container remove c7b3bca249080ab8dea0df347888108ff06b7e3577831f26c3ea9591c4525526 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-beab6431-897b-46cc-9079-f58f012784e7, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:09:35 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:35.799 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7b3421b2-cf9b-454c-a475-dad63f6114f6]: (4, ('Thu Oct  2 12:09:35 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-beab6431-897b-46cc-9079-f58f012784e7 (c7b3bca249080ab8dea0df347888108ff06b7e3577831f26c3ea9591c4525526)\nc7b3bca249080ab8dea0df347888108ff06b7e3577831f26c3ea9591c4525526\nThu Oct  2 12:09:35 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-beab6431-897b-46cc-9079-f58f012784e7 (c7b3bca249080ab8dea0df347888108ff06b7e3577831f26c3ea9591c4525526)\nc7b3bca249080ab8dea0df347888108ff06b7e3577831f26c3ea9591c4525526\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:09:35 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:35.801 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[49f167c9-46de-4a58-b3e5-62d64644babd]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:09:35 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:35.801 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapbeab6431-80, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.839 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:35 compute-0 kernel: tapbeab6431-80: left promiscuous mode
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.850 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:35 compute-0 nova_compute[192079]: 2025-10-02 12:09:35.851 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:35 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:35.852 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f731f502-5d1b-4ce8-955f-088c052fb9ce]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:09:35 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:35.883 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[38c6011d-eb78-495f-83e8-50d137a259f7]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:09:35 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:35.884 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[13796449-88ec-4420-a16b-8e5b8177e1cb]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:09:35 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:35.897 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[66f7af0b-fc6f-4e75-97b8-1b3ce0f2ba37]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 495118, 'reachable_time': 15086, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 226762, 'error': None, 'target': 'ovnmeta-beab6431-897b-46cc-9079-f58f012784e7', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:09:35 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:35.899 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-beab6431-897b-46cc-9079-f58f012784e7 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:09:35 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:09:35.900 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[301fcc6a-5c56-49da-8094-a65e1dc60843]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:09:35 compute-0 systemd[1]: run-netns-ovnmeta\x2dbeab6431\x2d897b\x2d46cc\x2d9079\x2df58f012784e7.mount: Deactivated successfully.
Oct 02 12:09:37 compute-0 nova_compute[192079]: 2025-10-02 12:09:37.229 2 DEBUG nova.network.neutron [-] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:09:37 compute-0 nova_compute[192079]: 2025-10-02 12:09:37.251 2 INFO nova.compute.manager [-] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Took 1.84 seconds to deallocate network for instance.
Oct 02 12:09:37 compute-0 nova_compute[192079]: 2025-10-02 12:09:37.319 2 DEBUG oslo_concurrency.lockutils [None req-8036ec84-0fda-4aba-b237-153e79aa543d 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:09:37 compute-0 nova_compute[192079]: 2025-10-02 12:09:37.320 2 DEBUG oslo_concurrency.lockutils [None req-8036ec84-0fda-4aba-b237-153e79aa543d 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:09:37 compute-0 nova_compute[192079]: 2025-10-02 12:09:37.377 2 DEBUG nova.compute.provider_tree [None req-8036ec84-0fda-4aba-b237-153e79aa543d 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:09:37 compute-0 nova_compute[192079]: 2025-10-02 12:09:37.410 2 DEBUG nova.scheduler.client.report [None req-8036ec84-0fda-4aba-b237-153e79aa543d 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:09:37 compute-0 nova_compute[192079]: 2025-10-02 12:09:37.439 2 DEBUG oslo_concurrency.lockutils [None req-8036ec84-0fda-4aba-b237-153e79aa543d 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.119s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:09:37 compute-0 nova_compute[192079]: 2025-10-02 12:09:37.468 2 INFO nova.scheduler.client.report [None req-8036ec84-0fda-4aba-b237-153e79aa543d 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Deleted allocations for instance 183a1b6e-784e-41d6-8632-851d606f23dc
Oct 02 12:09:37 compute-0 nova_compute[192079]: 2025-10-02 12:09:37.583 2 DEBUG oslo_concurrency.lockutils [None req-8036ec84-0fda-4aba-b237-153e79aa543d 202970a145ff4e8aa3dc22131cc9240d 887b026ea22942f7b709489ddec04ffc - - default default] Lock "183a1b6e-784e-41d6-8632-851d606f23dc" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 2.603s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:09:37 compute-0 nova_compute[192079]: 2025-10-02 12:09:37.757 2 DEBUG nova.compute.manager [req-f3e12486-3cad-490c-8fd7-9f68427c3b2a req-25a51a09-bd54-4057-b78a-6d0c3723531e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Received event network-vif-deleted-cf9649ce-4816-43e7-96a6-7a0e08d84e61 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:09:37 compute-0 nova_compute[192079]: 2025-10-02 12:09:37.873 2 DEBUG nova.compute.manager [req-ea8f0b53-21ab-48b6-8ba9-13ce7d631047 req-946527ab-86a8-414e-9cb9-5c90d12f2c7e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Received event network-vif-plugged-cf9649ce-4816-43e7-96a6-7a0e08d84e61 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:09:37 compute-0 nova_compute[192079]: 2025-10-02 12:09:37.873 2 DEBUG oslo_concurrency.lockutils [req-ea8f0b53-21ab-48b6-8ba9-13ce7d631047 req-946527ab-86a8-414e-9cb9-5c90d12f2c7e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "183a1b6e-784e-41d6-8632-851d606f23dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:09:37 compute-0 nova_compute[192079]: 2025-10-02 12:09:37.874 2 DEBUG oslo_concurrency.lockutils [req-ea8f0b53-21ab-48b6-8ba9-13ce7d631047 req-946527ab-86a8-414e-9cb9-5c90d12f2c7e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "183a1b6e-784e-41d6-8632-851d606f23dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:09:37 compute-0 nova_compute[192079]: 2025-10-02 12:09:37.874 2 DEBUG oslo_concurrency.lockutils [req-ea8f0b53-21ab-48b6-8ba9-13ce7d631047 req-946527ab-86a8-414e-9cb9-5c90d12f2c7e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "183a1b6e-784e-41d6-8632-851d606f23dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:09:37 compute-0 nova_compute[192079]: 2025-10-02 12:09:37.874 2 DEBUG nova.compute.manager [req-ea8f0b53-21ab-48b6-8ba9-13ce7d631047 req-946527ab-86a8-414e-9cb9-5c90d12f2c7e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] No waiting events found dispatching network-vif-plugged-cf9649ce-4816-43e7-96a6-7a0e08d84e61 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:09:37 compute-0 nova_compute[192079]: 2025-10-02 12:09:37.875 2 WARNING nova.compute.manager [req-ea8f0b53-21ab-48b6-8ba9-13ce7d631047 req-946527ab-86a8-414e-9cb9-5c90d12f2c7e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Received unexpected event network-vif-plugged-cf9649ce-4816-43e7-96a6-7a0e08d84e61 for instance with vm_state deleted and task_state None.
Oct 02 12:09:39 compute-0 podman[226763]: 2025-10-02 12:09:39.173132686 +0000 UTC m=+0.075833618 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, org.label-schema.name=CentOS Stream 9 Base Image, 
tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=edpm, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:09:40 compute-0 nova_compute[192079]: 2025-10-02 12:09:40.178 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:40 compute-0 nova_compute[192079]: 2025-10-02 12:09:40.318 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:43 compute-0 nova_compute[192079]: 2025-10-02 12:09:43.220 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:43 compute-0 nova_compute[192079]: 2025-10-02 12:09:43.434 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:44 compute-0 podman[226787]: 2025-10-02 12:09:44.153812354 +0000 UTC m=+0.060098983 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, managed_by=edpm_ansible, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=multipathd, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0)
Oct 02 12:09:44 compute-0 podman[226786]: 2025-10-02 12:09:44.158740627 +0000 UTC m=+0.069167039 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.openshift.tags=minimal rhel9, vcs-type=git, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, architecture=x86_64, io.openshift.expose-services=, release=1755695350, managed_by=edpm_ansible, name=ubi9-minimal, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., com.redhat.component=ubi9-minimal-container, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., url=https://catalog.redhat.com/en/search?searchType=containers, io.buildah.version=1.33.7, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, version=9.6, build-date=2025-08-20T13:12:41, config_id=edpm, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, vendor=Red Hat, Inc., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., distribution-scope=public, maintainer=Red Hat, Inc., container_name=openstack_network_exporter)
Oct 02 12:09:45 compute-0 nova_compute[192079]: 2025-10-02 12:09:45.180 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:45 compute-0 nova_compute[192079]: 2025-10-02 12:09:45.320 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:50 compute-0 podman[226829]: 2025-10-02 12:09:50.13569704 +0000 UTC m=+0.050173535 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']})
Oct 02 12:09:50 compute-0 podman[226830]: 2025-10-02 12:09:50.141876048 +0000 UTC m=+0.052606102 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, config_id=iscsid, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, io.buildah.version=1.41.3, managed_by=edpm_ansible)
Oct 02 12:09:50 compute-0 nova_compute[192079]: 2025-10-02 12:09:50.181 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:50 compute-0 nova_compute[192079]: 2025-10-02 12:09:50.276 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759406975.274635, 183a1b6e-784e-41d6-8632-851d606f23dc => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:09:50 compute-0 nova_compute[192079]: 2025-10-02 12:09:50.276 2 INFO nova.compute.manager [-] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] VM Stopped (Lifecycle Event)
Oct 02 12:09:50 compute-0 nova_compute[192079]: 2025-10-02 12:09:50.301 2 DEBUG nova.compute.manager [None req-155578d5-50aa-424a-a8df-03a7b999b7d0 - - - - - -] [instance: 183a1b6e-784e-41d6-8632-851d606f23dc] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:09:50 compute-0 nova_compute[192079]: 2025-10-02 12:09:50.348 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:55 compute-0 nova_compute[192079]: 2025-10-02 12:09:55.185 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:09:55 compute-0 nova_compute[192079]: 2025-10-02 12:09:55.348 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:00 compute-0 nova_compute[192079]: 2025-10-02 12:10:00.186 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:00 compute-0 nova_compute[192079]: 2025-10-02 12:10:00.350 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:01 compute-0 podman[226873]: 2025-10-02 12:10:01.132751907 +0000 UTC m=+0.046614840 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_metadata_agent, 
container_name=ovn_metadata_agent, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 12:10:01 compute-0 podman[226875]: 2025-10-02 12:10:01.161657037 +0000 UTC m=+0.061483171 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:10:01 compute-0 podman[226874]: 2025-10-02 12:10:01.163369203 +0000 UTC m=+0.075303434 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, config_id=ovn_controller, container_name=ovn_controller, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0)
Oct 02 12:10:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:02.211 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:10:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:02.211 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:10:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:02.212 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:10:04 compute-0 nova_compute[192079]: 2025-10-02 12:10:04.786 2 DEBUG oslo_concurrency.lockutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Acquiring lock "8ac83a4c-49bf-4b6d-94cb-08e337258894" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:10:04 compute-0 nova_compute[192079]: 2025-10-02 12:10:04.787 2 DEBUG oslo_concurrency.lockutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Lock "8ac83a4c-49bf-4b6d-94cb-08e337258894" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:10:04 compute-0 nova_compute[192079]: 2025-10-02 12:10:04.854 2 DEBUG nova.compute.manager [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:10:05 compute-0 nova_compute[192079]: 2025-10-02 12:10:05.176 2 DEBUG oslo_concurrency.lockutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:10:05 compute-0 nova_compute[192079]: 2025-10-02 12:10:05.177 2 DEBUG oslo_concurrency.lockutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:10:05 compute-0 nova_compute[192079]: 2025-10-02 12:10:05.182 2 DEBUG nova.virt.hardware [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:10:05 compute-0 nova_compute[192079]: 2025-10-02 12:10:05.183 2 INFO nova.compute.claims [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:10:05 compute-0 nova_compute[192079]: 2025-10-02 12:10:05.189 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:05 compute-0 nova_compute[192079]: 2025-10-02 12:10:05.350 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:05 compute-0 nova_compute[192079]: 2025-10-02 12:10:05.634 2 DEBUG nova.compute.provider_tree [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:10:05 compute-0 nova_compute[192079]: 2025-10-02 12:10:05.821 2 DEBUG nova.scheduler.client.report [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:10:05 compute-0 nova_compute[192079]: 2025-10-02 12:10:05.926 2 DEBUG oslo_concurrency.lockutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.749s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:10:05 compute-0 nova_compute[192079]: 2025-10-02 12:10:05.926 2 DEBUG nova.compute.manager [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:10:05 compute-0 nova_compute[192079]: 2025-10-02 12:10:05.995 2 DEBUG nova.compute.manager [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:10:05 compute-0 nova_compute[192079]: 2025-10-02 12:10:05.995 2 DEBUG nova.network.neutron [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.012 2 INFO nova.virt.libvirt.driver [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.027 2 DEBUG nova.compute.manager [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.133 2 DEBUG nova.compute.manager [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.135 2 DEBUG nova.virt.libvirt.driver [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.136 2 INFO nova.virt.libvirt.driver [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Creating image(s)
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.137 2 DEBUG oslo_concurrency.lockutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Acquiring lock "/var/lib/nova/instances/8ac83a4c-49bf-4b6d-94cb-08e337258894/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.137 2 DEBUG oslo_concurrency.lockutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Lock "/var/lib/nova/instances/8ac83a4c-49bf-4b6d-94cb-08e337258894/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.139 2 DEBUG oslo_concurrency.lockutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Lock "/var/lib/nova/instances/8ac83a4c-49bf-4b6d-94cb-08e337258894/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.165 2 DEBUG nova.policy [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5230b7fc4a3b4c28ac245755d300b23a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cacc76a138fb435ea3d69dd8c9682958', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.170 2 DEBUG oslo_concurrency.processutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.226 2 DEBUG oslo_concurrency.processutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.227 2 DEBUG oslo_concurrency.lockutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.227 2 DEBUG oslo_concurrency.lockutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.238 2 DEBUG oslo_concurrency.processutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.298 2 DEBUG oslo_concurrency.processutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.060s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.299 2 DEBUG oslo_concurrency.processutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/8ac83a4c-49bf-4b6d-94cb-08e337258894/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.337 2 DEBUG oslo_concurrency.processutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/8ac83a4c-49bf-4b6d-94cb-08e337258894/disk 1073741824" returned: 0 in 0.039s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.338 2 DEBUG oslo_concurrency.lockutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.111s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.339 2 DEBUG oslo_concurrency.processutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.391 2 DEBUG oslo_concurrency.processutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.052s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.392 2 DEBUG nova.virt.disk.api [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Checking if we can resize image /var/lib/nova/instances/8ac83a4c-49bf-4b6d-94cb-08e337258894/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.392 2 DEBUG oslo_concurrency.processutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/8ac83a4c-49bf-4b6d-94cb-08e337258894/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.451 2 DEBUG oslo_concurrency.processutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/8ac83a4c-49bf-4b6d-94cb-08e337258894/disk --force-share --output=json" returned: 0 in 0.059s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.452 2 DEBUG nova.virt.disk.api [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Cannot resize image /var/lib/nova/instances/8ac83a4c-49bf-4b6d-94cb-08e337258894/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.452 2 DEBUG nova.objects.instance [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Lazy-loading 'migration_context' on Instance uuid 8ac83a4c-49bf-4b6d-94cb-08e337258894 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.507 2 DEBUG nova.virt.libvirt.driver [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.508 2 DEBUG nova.virt.libvirt.driver [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Ensure instance console log exists: /var/lib/nova/instances/8ac83a4c-49bf-4b6d-94cb-08e337258894/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.508 2 DEBUG oslo_concurrency.lockutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.509 2 DEBUG oslo_concurrency.lockutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.509 2 DEBUG oslo_concurrency.lockutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:10:06 compute-0 nova_compute[192079]: 2025-10-02 12:10:06.758 2 DEBUG nova.network.neutron [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Successfully created port: 49c09300-dc39-461c-bec6-9b97a063d6e7 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:10:07 compute-0 nova_compute[192079]: 2025-10-02 12:10:07.475 2 DEBUG nova.network.neutron [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Successfully updated port: 49c09300-dc39-461c-bec6-9b97a063d6e7 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:10:07 compute-0 nova_compute[192079]: 2025-10-02 12:10:07.493 2 DEBUG oslo_concurrency.lockutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Acquiring lock "refresh_cache-8ac83a4c-49bf-4b6d-94cb-08e337258894" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:10:07 compute-0 nova_compute[192079]: 2025-10-02 12:10:07.494 2 DEBUG oslo_concurrency.lockutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Acquired lock "refresh_cache-8ac83a4c-49bf-4b6d-94cb-08e337258894" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:10:07 compute-0 nova_compute[192079]: 2025-10-02 12:10:07.494 2 DEBUG nova.network.neutron [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:10:07 compute-0 nova_compute[192079]: 2025-10-02 12:10:07.579 2 DEBUG nova.compute.manager [req-5c16e4d0-4ba6-4c75-94a4-dbf8a4e18b14 req-3e9d3529-2f76-4b2d-a008-e98d87d2e9c3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Received event network-changed-49c09300-dc39-461c-bec6-9b97a063d6e7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:10:07 compute-0 nova_compute[192079]: 2025-10-02 12:10:07.579 2 DEBUG nova.compute.manager [req-5c16e4d0-4ba6-4c75-94a4-dbf8a4e18b14 req-3e9d3529-2f76-4b2d-a008-e98d87d2e9c3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Refreshing instance network info cache due to event network-changed-49c09300-dc39-461c-bec6-9b97a063d6e7. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:10:07 compute-0 nova_compute[192079]: 2025-10-02 12:10:07.580 2 DEBUG oslo_concurrency.lockutils [req-5c16e4d0-4ba6-4c75-94a4-dbf8a4e18b14 req-3e9d3529-2f76-4b2d-a008-e98d87d2e9c3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-8ac83a4c-49bf-4b6d-94cb-08e337258894" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:10:07 compute-0 nova_compute[192079]: 2025-10-02 12:10:07.844 2 DEBUG nova.network.neutron [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.524 2 DEBUG nova.network.neutron [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Updating instance_info_cache with network_info: [{"id": "49c09300-dc39-461c-bec6-9b97a063d6e7", "address": "fa:16:3e:aa:d5:33", "network": {"id": "d6b20961-3c12-47aa-aeda-262b5ab075ae", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1638524493-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "cacc76a138fb435ea3d69dd8c9682958", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap49c09300-dc", "ovs_interfaceid": "49c09300-dc39-461c-bec6-9b97a063d6e7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.543 2 DEBUG oslo_concurrency.lockutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Releasing lock "refresh_cache-8ac83a4c-49bf-4b6d-94cb-08e337258894" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.544 2 DEBUG nova.compute.manager [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Instance network_info: |[{"id": "49c09300-dc39-461c-bec6-9b97a063d6e7", "address": "fa:16:3e:aa:d5:33", "network": {"id": "d6b20961-3c12-47aa-aeda-262b5ab075ae", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1638524493-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "cacc76a138fb435ea3d69dd8c9682958", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap49c09300-dc", "ovs_interfaceid": "49c09300-dc39-461c-bec6-9b97a063d6e7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.544 2 DEBUG oslo_concurrency.lockutils [req-5c16e4d0-4ba6-4c75-94a4-dbf8a4e18b14 req-3e9d3529-2f76-4b2d-a008-e98d87d2e9c3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-8ac83a4c-49bf-4b6d-94cb-08e337258894" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.544 2 DEBUG nova.network.neutron [req-5c16e4d0-4ba6-4c75-94a4-dbf8a4e18b14 req-3e9d3529-2f76-4b2d-a008-e98d87d2e9c3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Refreshing network info cache for port 49c09300-dc39-461c-bec6-9b97a063d6e7 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.547 2 DEBUG nova.virt.libvirt.driver [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Start _get_guest_xml network_info=[{"id": "49c09300-dc39-461c-bec6-9b97a063d6e7", "address": "fa:16:3e:aa:d5:33", "network": {"id": "d6b20961-3c12-47aa-aeda-262b5ab075ae", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1638524493-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "cacc76a138fb435ea3d69dd8c9682958", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap49c09300-dc", "ovs_interfaceid": "49c09300-dc39-461c-bec6-9b97a063d6e7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.551 2 WARNING nova.virt.libvirt.driver [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.555 2 DEBUG nova.virt.libvirt.host [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.556 2 DEBUG nova.virt.libvirt.host [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.562 2 DEBUG nova.virt.libvirt.host [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.562 2 DEBUG nova.virt.libvirt.host [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.563 2 DEBUG nova.virt.libvirt.driver [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.564 2 DEBUG nova.virt.hardware [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.564 2 DEBUG nova.virt.hardware [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.564 2 DEBUG nova.virt.hardware [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.564 2 DEBUG nova.virt.hardware [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.565 2 DEBUG nova.virt.hardware [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.565 2 DEBUG nova.virt.hardware [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.565 2 DEBUG nova.virt.hardware [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.565 2 DEBUG nova.virt.hardware [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.565 2 DEBUG nova.virt.hardware [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.565 2 DEBUG nova.virt.hardware [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.566 2 DEBUG nova.virt.hardware [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.569 2 DEBUG nova.virt.libvirt.vif [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:10:02Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-InstanceActionsNegativeTestJSON-server-1910785266',display_name='tempest-InstanceActionsNegativeTestJSON-server-1910785266',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-instanceactionsnegativetestjson-server-1910785266',id=51,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='cacc76a138fb435ea3d69dd8c9682958',ramdisk_id='',reservation_id='r-0t91p1d0',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-InstanceActionsNegativeTestJSON-725117245',owner_user_na
me='tempest-InstanceActionsNegativeTestJSON-725117245-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:10:06Z,user_data=None,user_id='5230b7fc4a3b4c28ac245755d300b23a',uuid=8ac83a4c-49bf-4b6d-94cb-08e337258894,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "49c09300-dc39-461c-bec6-9b97a063d6e7", "address": "fa:16:3e:aa:d5:33", "network": {"id": "d6b20961-3c12-47aa-aeda-262b5ab075ae", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1638524493-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "cacc76a138fb435ea3d69dd8c9682958", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap49c09300-dc", "ovs_interfaceid": "49c09300-dc39-461c-bec6-9b97a063d6e7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.570 2 DEBUG nova.network.os_vif_util [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Converting VIF {"id": "49c09300-dc39-461c-bec6-9b97a063d6e7", "address": "fa:16:3e:aa:d5:33", "network": {"id": "d6b20961-3c12-47aa-aeda-262b5ab075ae", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1638524493-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "cacc76a138fb435ea3d69dd8c9682958", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap49c09300-dc", "ovs_interfaceid": "49c09300-dc39-461c-bec6-9b97a063d6e7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.570 2 DEBUG nova.network.os_vif_util [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:aa:d5:33,bridge_name='br-int',has_traffic_filtering=True,id=49c09300-dc39-461c-bec6-9b97a063d6e7,network=Network(d6b20961-3c12-47aa-aeda-262b5ab075ae),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap49c09300-dc') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.571 2 DEBUG nova.objects.instance [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Lazy-loading 'pci_devices' on Instance uuid 8ac83a4c-49bf-4b6d-94cb-08e337258894 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.583 2 DEBUG nova.virt.libvirt.driver [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:10:08 compute-0 nova_compute[192079]:   <uuid>8ac83a4c-49bf-4b6d-94cb-08e337258894</uuid>
Oct 02 12:10:08 compute-0 nova_compute[192079]:   <name>instance-00000033</name>
Oct 02 12:10:08 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:10:08 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:10:08 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:10:08 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:       <nova:name>tempest-InstanceActionsNegativeTestJSON-server-1910785266</nova:name>
Oct 02 12:10:08 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:10:08</nova:creationTime>
Oct 02 12:10:08 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:10:08 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:10:08 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:10:08 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:10:08 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:10:08 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:10:08 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:10:08 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:10:08 compute-0 nova_compute[192079]:         <nova:user uuid="5230b7fc4a3b4c28ac245755d300b23a">tempest-InstanceActionsNegativeTestJSON-725117245-project-member</nova:user>
Oct 02 12:10:08 compute-0 nova_compute[192079]:         <nova:project uuid="cacc76a138fb435ea3d69dd8c9682958">tempest-InstanceActionsNegativeTestJSON-725117245</nova:project>
Oct 02 12:10:08 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:10:08 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:10:08 compute-0 nova_compute[192079]:         <nova:port uuid="49c09300-dc39-461c-bec6-9b97a063d6e7">
Oct 02 12:10:08 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.14" ipVersion="4"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:10:08 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:10:08 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:10:08 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <system>
Oct 02 12:10:08 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:10:08 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:10:08 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:10:08 compute-0 nova_compute[192079]:       <entry name="serial">8ac83a4c-49bf-4b6d-94cb-08e337258894</entry>
Oct 02 12:10:08 compute-0 nova_compute[192079]:       <entry name="uuid">8ac83a4c-49bf-4b6d-94cb-08e337258894</entry>
Oct 02 12:10:08 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     </system>
Oct 02 12:10:08 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:10:08 compute-0 nova_compute[192079]:   <os>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:   </os>
Oct 02 12:10:08 compute-0 nova_compute[192079]:   <features>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:   </features>
Oct 02 12:10:08 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:10:08 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:10:08 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:10:08 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/8ac83a4c-49bf-4b6d-94cb-08e337258894/disk"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:10:08 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/8ac83a4c-49bf-4b6d-94cb-08e337258894/disk.config"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:10:08 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:aa:d5:33"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:       <target dev="tap49c09300-dc"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:10:08 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/8ac83a4c-49bf-4b6d-94cb-08e337258894/console.log" append="off"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <video>
Oct 02 12:10:08 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     </video>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:10:08 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:10:08 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:10:08 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:10:08 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:10:08 compute-0 nova_compute[192079]: </domain>
Oct 02 12:10:08 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.584 2 DEBUG nova.compute.manager [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Preparing to wait for external event network-vif-plugged-49c09300-dc39-461c-bec6-9b97a063d6e7 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.584 2 DEBUG oslo_concurrency.lockutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Acquiring lock "8ac83a4c-49bf-4b6d-94cb-08e337258894-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.585 2 DEBUG oslo_concurrency.lockutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Lock "8ac83a4c-49bf-4b6d-94cb-08e337258894-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.585 2 DEBUG oslo_concurrency.lockutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Lock "8ac83a4c-49bf-4b6d-94cb-08e337258894-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.586 2 DEBUG nova.virt.libvirt.vif [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:10:02Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-InstanceActionsNegativeTestJSON-server-1910785266',display_name='tempest-InstanceActionsNegativeTestJSON-server-1910785266',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-instanceactionsnegativetestjson-server-1910785266',id=51,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='cacc76a138fb435ea3d69dd8c9682958',ramdisk_id='',reservation_id='r-0t91p1d0',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-InstanceActionsNegativeTestJSON-725117245',own
er_user_name='tempest-InstanceActionsNegativeTestJSON-725117245-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:10:06Z,user_data=None,user_id='5230b7fc4a3b4c28ac245755d300b23a',uuid=8ac83a4c-49bf-4b6d-94cb-08e337258894,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "49c09300-dc39-461c-bec6-9b97a063d6e7", "address": "fa:16:3e:aa:d5:33", "network": {"id": "d6b20961-3c12-47aa-aeda-262b5ab075ae", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1638524493-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "cacc76a138fb435ea3d69dd8c9682958", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap49c09300-dc", "ovs_interfaceid": "49c09300-dc39-461c-bec6-9b97a063d6e7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.586 2 DEBUG nova.network.os_vif_util [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Converting VIF {"id": "49c09300-dc39-461c-bec6-9b97a063d6e7", "address": "fa:16:3e:aa:d5:33", "network": {"id": "d6b20961-3c12-47aa-aeda-262b5ab075ae", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1638524493-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "cacc76a138fb435ea3d69dd8c9682958", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap49c09300-dc", "ovs_interfaceid": "49c09300-dc39-461c-bec6-9b97a063d6e7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.587 2 DEBUG nova.network.os_vif_util [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:aa:d5:33,bridge_name='br-int',has_traffic_filtering=True,id=49c09300-dc39-461c-bec6-9b97a063d6e7,network=Network(d6b20961-3c12-47aa-aeda-262b5ab075ae),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap49c09300-dc') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.587 2 DEBUG os_vif [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:aa:d5:33,bridge_name='br-int',has_traffic_filtering=True,id=49c09300-dc39-461c-bec6-9b97a063d6e7,network=Network(d6b20961-3c12-47aa-aeda-262b5ab075ae),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap49c09300-dc') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.588 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.588 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.589 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.593 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.593 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap49c09300-dc, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.593 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap49c09300-dc, col_values=(('external_ids', {'iface-id': '49c09300-dc39-461c-bec6-9b97a063d6e7', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:aa:d5:33', 'vm-uuid': '8ac83a4c-49bf-4b6d-94cb-08e337258894'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.595 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:08 compute-0 NetworkManager[51160]: <info>  [1759407008.5964] manager: (tap49c09300-dc): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/76)
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.597 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.603 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.603 2 INFO os_vif [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:aa:d5:33,bridge_name='br-int',has_traffic_filtering=True,id=49c09300-dc39-461c-bec6-9b97a063d6e7,network=Network(d6b20961-3c12-47aa-aeda-262b5ab075ae),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap49c09300-dc')
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.657 2 DEBUG nova.virt.libvirt.driver [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.657 2 DEBUG nova.virt.libvirt.driver [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.657 2 DEBUG nova.virt.libvirt.driver [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] No VIF found with MAC fa:16:3e:aa:d5:33, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:10:08 compute-0 nova_compute[192079]: 2025-10-02 12:10:08.659 2 INFO nova.virt.libvirt.driver [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Using config drive
Oct 02 12:10:09 compute-0 nova_compute[192079]: 2025-10-02 12:10:09.835 2 INFO nova.virt.libvirt.driver [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Creating config drive at /var/lib/nova/instances/8ac83a4c-49bf-4b6d-94cb-08e337258894/disk.config
Oct 02 12:10:09 compute-0 nova_compute[192079]: 2025-10-02 12:10:09.841 2 DEBUG oslo_concurrency.processutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/8ac83a4c-49bf-4b6d-94cb-08e337258894/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpzb2zqray execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:10:09 compute-0 nova_compute[192079]: 2025-10-02 12:10:09.967 2 DEBUG oslo_concurrency.processutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/8ac83a4c-49bf-4b6d-94cb-08e337258894/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpzb2zqray" returned: 0 in 0.127s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:10:10 compute-0 kernel: tap49c09300-dc: entered promiscuous mode
Oct 02 12:10:10 compute-0 NetworkManager[51160]: <info>  [1759407010.0512] manager: (tap49c09300-dc): new Tun device (/org/freedesktop/NetworkManager/Devices/77)
Oct 02 12:10:10 compute-0 ovn_controller[94336]: 2025-10-02T12:10:10Z|00154|binding|INFO|Claiming lport 49c09300-dc39-461c-bec6-9b97a063d6e7 for this chassis.
Oct 02 12:10:10 compute-0 ovn_controller[94336]: 2025-10-02T12:10:10Z|00155|binding|INFO|49c09300-dc39-461c-bec6-9b97a063d6e7: Claiming fa:16:3e:aa:d5:33 10.100.0.14
Oct 02 12:10:10 compute-0 nova_compute[192079]: 2025-10-02 12:10:10.057 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:10 compute-0 nova_compute[192079]: 2025-10-02 12:10:10.061 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:10 compute-0 systemd-udevd[226981]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:10:10 compute-0 NetworkManager[51160]: <info>  [1759407010.0922] device (tap49c09300-dc): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:10:10 compute-0 systemd-machined[152150]: New machine qemu-26-instance-00000033.
Oct 02 12:10:10 compute-0 NetworkManager[51160]: <info>  [1759407010.0945] device (tap49c09300-dc): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:10:10 compute-0 nova_compute[192079]: 2025-10-02 12:10:10.112 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:10 compute-0 ovn_controller[94336]: 2025-10-02T12:10:10Z|00156|binding|INFO|Setting lport 49c09300-dc39-461c-bec6-9b97a063d6e7 ovn-installed in OVS
Oct 02 12:10:10 compute-0 ovn_controller[94336]: 2025-10-02T12:10:10Z|00157|binding|INFO|Setting lport 49c09300-dc39-461c-bec6-9b97a063d6e7 up in Southbound
Oct 02 12:10:10 compute-0 nova_compute[192079]: 2025-10-02 12:10:10.116 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:10 compute-0 systemd[1]: Started Virtual Machine qemu-26-instance-00000033.
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:10.116 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:aa:d5:33 10.100.0.14'], port_security=['fa:16:3e:aa:d5:33 10.100.0.14'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.14/28', 'neutron:device_id': '8ac83a4c-49bf-4b6d-94cb-08e337258894', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-d6b20961-3c12-47aa-aeda-262b5ab075ae', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'cacc76a138fb435ea3d69dd8c9682958', 'neutron:revision_number': '2', 'neutron:security_group_ids': '21894bf7-b45f-48da-9f74-06b9213e547e', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=b1ef904b-f8ba-4627-bbb4-2c765c6f8cd7, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=49c09300-dc39-461c-bec6-9b97a063d6e7) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:10.118 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 49c09300-dc39-461c-bec6-9b97a063d6e7 in datapath d6b20961-3c12-47aa-aeda-262b5ab075ae bound to our chassis
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:10.119 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network d6b20961-3c12-47aa-aeda-262b5ab075ae
Oct 02 12:10:10 compute-0 podman[226961]: 2025-10-02 12:10:10.129651463 +0000 UTC m=+0.088992504 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=edpm, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, container_name=ceilometer_agent_compute, 
org.label-schema.build-date=20251001, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:10.132 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2639695f-46e2-4ee9-b9a0-530e7e0e8bfa]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:10.132 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tapd6b20961-31 in ovnmeta-d6b20961-3c12-47aa-aeda-262b5ab075ae namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:10.134 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tapd6b20961-30 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:10.134 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[75e3e872-8fa2-4f08-b08f-47dbb0f82a9f]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:10.135 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[91aef33d-7fcb-4672-a86d-df7e8ab5b0d8]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:10.144 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[a23e8f0e-0743-4c3a-b4a3-d393cbdaec1d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:10.156 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[17ae6493-38a5-4ece-b290-c1a0f06ef09d]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:10.181 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[b9986bda-9361-4a5a-8a2a-23d6e19f4491]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:10 compute-0 NetworkManager[51160]: <info>  [1759407010.1873] manager: (tapd6b20961-30): new Veth device (/org/freedesktop/NetworkManager/Devices/78)
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:10.188 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[33c16e3e-99e5-4e68-9807-70b46eeed081]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:10 compute-0 nova_compute[192079]: 2025-10-02 12:10:10.189 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:10.214 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[0326063f-c4e7-4f37-9b6b-2a8923457524]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:10.217 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[d3e4cfe0-d347-44ff-92d2-9ba50ef17578]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:10 compute-0 NetworkManager[51160]: <info>  [1759407010.2379] device (tapd6b20961-30): carrier: link connected
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:10.243 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[abf86777-a7bd-48de-94e8-fe969e2875ba]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:10.260 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1cd42685-f32a-4663-9bf4-cdf20f11145a]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapd6b20961-31'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:7e:81:1b'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 47], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 500786, 'reachable_time': 32385, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 227022, 'error': None, 'target': 'ovnmeta-d6b20961-3c12-47aa-aeda-262b5ab075ae', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:10.272 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c33be4fb-3360-4d41-88f4-5cd575c887cc]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe7e:811b'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 500786, 'tstamp': 500786}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 227023, 'error': None, 'target': 'ovnmeta-d6b20961-3c12-47aa-aeda-262b5ab075ae', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:10.288 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[cdb1b19a-a9f0-4702-8c3a-e62a13b6aafd]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapd6b20961-31'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:7e:81:1b'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 47], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 500786, 'reachable_time': 32385, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 227024, 'error': None, 'target': 'ovnmeta-d6b20961-3c12-47aa-aeda-262b5ab075ae', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:10.322 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[cacf9741-129d-4208-b1bd-2cd08a3541e2]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:10.385 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7aba8a08-1603-47cc-8b72-ff11f1059b2b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:10.386 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapd6b20961-30, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:10.387 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:10.387 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapd6b20961-30, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:10:10 compute-0 nova_compute[192079]: 2025-10-02 12:10:10.429 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:10 compute-0 NetworkManager[51160]: <info>  [1759407010.4298] manager: (tapd6b20961-30): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/79)
Oct 02 12:10:10 compute-0 kernel: tapd6b20961-30: entered promiscuous mode
Oct 02 12:10:10 compute-0 nova_compute[192079]: 2025-10-02 12:10:10.430 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:10.434 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tapd6b20961-30, col_values=(('external_ids', {'iface-id': '67eb39c8-6c2d-49f0-b6ad-bece3fed40ac'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:10:10 compute-0 nova_compute[192079]: 2025-10-02 12:10:10.435 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:10 compute-0 ovn_controller[94336]: 2025-10-02T12:10:10Z|00158|binding|INFO|Releasing lport 67eb39c8-6c2d-49f0-b6ad-bece3fed40ac from this chassis (sb_readonly=0)
Oct 02 12:10:10 compute-0 nova_compute[192079]: 2025-10-02 12:10:10.436 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:10.438 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/d6b20961-3c12-47aa-aeda-262b5ab075ae.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/d6b20961-3c12-47aa-aeda-262b5ab075ae.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:10.439 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7ffa3156-2565-46e0-ac37-2873741eb10f]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:10.440 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-d6b20961-3c12-47aa-aeda-262b5ab075ae
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/d6b20961-3c12-47aa-aeda-262b5ab075ae.pid.haproxy
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID d6b20961-3c12-47aa-aeda-262b5ab075ae
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:10:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:10.442 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-d6b20961-3c12-47aa-aeda-262b5ab075ae', 'env', 'PROCESS_TAG=haproxy-d6b20961-3c12-47aa-aeda-262b5ab075ae', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/d6b20961-3c12-47aa-aeda-262b5ab075ae.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:10:10 compute-0 nova_compute[192079]: 2025-10-02 12:10:10.448 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:10 compute-0 nova_compute[192079]: 2025-10-02 12:10:10.669 2 DEBUG nova.compute.manager [req-abe6f2c1-c9f5-4c47-a33d-61c901c7fb83 req-780d4a98-6a00-4aa3-8fa1-acf682c6e2e7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Received event network-vif-plugged-49c09300-dc39-461c-bec6-9b97a063d6e7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:10:10 compute-0 nova_compute[192079]: 2025-10-02 12:10:10.675 2 DEBUG oslo_concurrency.lockutils [req-abe6f2c1-c9f5-4c47-a33d-61c901c7fb83 req-780d4a98-6a00-4aa3-8fa1-acf682c6e2e7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "8ac83a4c-49bf-4b6d-94cb-08e337258894-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:10:10 compute-0 nova_compute[192079]: 2025-10-02 12:10:10.675 2 DEBUG oslo_concurrency.lockutils [req-abe6f2c1-c9f5-4c47-a33d-61c901c7fb83 req-780d4a98-6a00-4aa3-8fa1-acf682c6e2e7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ac83a4c-49bf-4b6d-94cb-08e337258894-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:10:10 compute-0 nova_compute[192079]: 2025-10-02 12:10:10.676 2 DEBUG oslo_concurrency.lockutils [req-abe6f2c1-c9f5-4c47-a33d-61c901c7fb83 req-780d4a98-6a00-4aa3-8fa1-acf682c6e2e7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ac83a4c-49bf-4b6d-94cb-08e337258894-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:10:10 compute-0 nova_compute[192079]: 2025-10-02 12:10:10.676 2 DEBUG nova.compute.manager [req-abe6f2c1-c9f5-4c47-a33d-61c901c7fb83 req-780d4a98-6a00-4aa3-8fa1-acf682c6e2e7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Processing event network-vif-plugged-49c09300-dc39-461c-bec6-9b97a063d6e7 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:10:10 compute-0 podman[227063]: 2025-10-02 12:10:10.811373068 +0000 UTC m=+0.058445379 container create 9d211802c5a5f6bb0312a0851166e93215cf7573b5fd4ef6fa4d3acbafc6ef89 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d6b20961-3c12-47aa-aeda-262b5ab075ae, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:10:10 compute-0 systemd[1]: Started libpod-conmon-9d211802c5a5f6bb0312a0851166e93215cf7573b5fd4ef6fa4d3acbafc6ef89.scope.
Oct 02 12:10:10 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:10:10 compute-0 podman[227063]: 2025-10-02 12:10:10.773154085 +0000 UTC m=+0.020226426 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:10:10 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/bd127fabb091844cd3cbd693885e391a1ad834e57238dc36e2d55eb2a3611ca5/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:10:10 compute-0 podman[227063]: 2025-10-02 12:10:10.885339794 +0000 UTC m=+0.132412125 container init 9d211802c5a5f6bb0312a0851166e93215cf7573b5fd4ef6fa4d3acbafc6ef89 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d6b20961-3c12-47aa-aeda-262b5ab075ae, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0)
Oct 02 12:10:10 compute-0 podman[227063]: 2025-10-02 12:10:10.891662896 +0000 UTC m=+0.138735207 container start 9d211802c5a5f6bb0312a0851166e93215cf7573b5fd4ef6fa4d3acbafc6ef89 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d6b20961-3c12-47aa-aeda-262b5ab075ae, org.label-schema.schema-version=1.0, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:10:10 compute-0 nova_compute[192079]: 2025-10-02 12:10:10.918 2 DEBUG nova.compute.manager [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:10:10 compute-0 nova_compute[192079]: 2025-10-02 12:10:10.919 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407010.9179251, 8ac83a4c-49bf-4b6d-94cb-08e337258894 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:10:10 compute-0 nova_compute[192079]: 2025-10-02 12:10:10.919 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] VM Started (Lifecycle Event)
Oct 02 12:10:10 compute-0 neutron-haproxy-ovnmeta-d6b20961-3c12-47aa-aeda-262b5ab075ae[227078]: [NOTICE]   (227082) : New worker (227084) forked
Oct 02 12:10:10 compute-0 neutron-haproxy-ovnmeta-d6b20961-3c12-47aa-aeda-262b5ab075ae[227078]: [NOTICE]   (227082) : Loading success.
Oct 02 12:10:10 compute-0 nova_compute[192079]: 2025-10-02 12:10:10.933 2 DEBUG nova.virt.libvirt.driver [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:10:10 compute-0 nova_compute[192079]: 2025-10-02 12:10:10.937 2 INFO nova.virt.libvirt.driver [-] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Instance spawned successfully.
Oct 02 12:10:10 compute-0 nova_compute[192079]: 2025-10-02 12:10:10.937 2 DEBUG nova.virt.libvirt.driver [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:10:10 compute-0 nova_compute[192079]: 2025-10-02 12:10:10.955 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:10:10 compute-0 nova_compute[192079]: 2025-10-02 12:10:10.959 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:10:10 compute-0 nova_compute[192079]: 2025-10-02 12:10:10.965 2 DEBUG nova.virt.libvirt.driver [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:10:10 compute-0 nova_compute[192079]: 2025-10-02 12:10:10.965 2 DEBUG nova.virt.libvirt.driver [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:10:10 compute-0 nova_compute[192079]: 2025-10-02 12:10:10.966 2 DEBUG nova.virt.libvirt.driver [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:10:10 compute-0 nova_compute[192079]: 2025-10-02 12:10:10.966 2 DEBUG nova.virt.libvirt.driver [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:10:10 compute-0 nova_compute[192079]: 2025-10-02 12:10:10.966 2 DEBUG nova.virt.libvirt.driver [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:10:10 compute-0 nova_compute[192079]: 2025-10-02 12:10:10.967 2 DEBUG nova.virt.libvirt.driver [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:10:11 compute-0 nova_compute[192079]: 2025-10-02 12:10:11.004 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:10:11 compute-0 nova_compute[192079]: 2025-10-02 12:10:11.005 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407010.9205225, 8ac83a4c-49bf-4b6d-94cb-08e337258894 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:10:11 compute-0 nova_compute[192079]: 2025-10-02 12:10:11.005 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] VM Paused (Lifecycle Event)
Oct 02 12:10:11 compute-0 nova_compute[192079]: 2025-10-02 12:10:11.095 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:10:11 compute-0 nova_compute[192079]: 2025-10-02 12:10:11.098 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407010.922337, 8ac83a4c-49bf-4b6d-94cb-08e337258894 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:10:11 compute-0 nova_compute[192079]: 2025-10-02 12:10:11.098 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] VM Resumed (Lifecycle Event)
Oct 02 12:10:11 compute-0 nova_compute[192079]: 2025-10-02 12:10:11.154 2 INFO nova.compute.manager [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Took 5.02 seconds to spawn the instance on the hypervisor.
Oct 02 12:10:11 compute-0 nova_compute[192079]: 2025-10-02 12:10:11.154 2 DEBUG nova.compute.manager [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:10:11 compute-0 nova_compute[192079]: 2025-10-02 12:10:11.162 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:10:11 compute-0 nova_compute[192079]: 2025-10-02 12:10:11.164 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:10:11 compute-0 nova_compute[192079]: 2025-10-02 12:10:11.234 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:10:11 compute-0 nova_compute[192079]: 2025-10-02 12:10:11.293 2 INFO nova.compute.manager [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Took 6.24 seconds to build instance.
Oct 02 12:10:11 compute-0 nova_compute[192079]: 2025-10-02 12:10:11.336 2 DEBUG oslo_concurrency.lockutils [None req-f77e44a0-e041-4ac5-b9ac-0709e4161ab8 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Lock "8ac83a4c-49bf-4b6d-94cb-08e337258894" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 6.549s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:10:12 compute-0 nova_compute[192079]: 2025-10-02 12:10:12.526 2 DEBUG nova.network.neutron [req-5c16e4d0-4ba6-4c75-94a4-dbf8a4e18b14 req-3e9d3529-2f76-4b2d-a008-e98d87d2e9c3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Updated VIF entry in instance network info cache for port 49c09300-dc39-461c-bec6-9b97a063d6e7. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:10:12 compute-0 nova_compute[192079]: 2025-10-02 12:10:12.527 2 DEBUG nova.network.neutron [req-5c16e4d0-4ba6-4c75-94a4-dbf8a4e18b14 req-3e9d3529-2f76-4b2d-a008-e98d87d2e9c3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Updating instance_info_cache with network_info: [{"id": "49c09300-dc39-461c-bec6-9b97a063d6e7", "address": "fa:16:3e:aa:d5:33", "network": {"id": "d6b20961-3c12-47aa-aeda-262b5ab075ae", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1638524493-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "cacc76a138fb435ea3d69dd8c9682958", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap49c09300-dc", "ovs_interfaceid": "49c09300-dc39-461c-bec6-9b97a063d6e7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:10:12 compute-0 nova_compute[192079]: 2025-10-02 12:10:12.615 2 DEBUG oslo_concurrency.lockutils [req-5c16e4d0-4ba6-4c75-94a4-dbf8a4e18b14 req-3e9d3529-2f76-4b2d-a008-e98d87d2e9c3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-8ac83a4c-49bf-4b6d-94cb-08e337258894" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:10:12 compute-0 nova_compute[192079]: 2025-10-02 12:10:12.680 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:10:12 compute-0 nova_compute[192079]: 2025-10-02 12:10:12.914 2 DEBUG nova.compute.manager [req-37a5fda0-3497-49d9-9b20-bd0f92b58d96 req-d87ec682-211b-4dda-8e66-17337c8fe943 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Received event network-vif-plugged-49c09300-dc39-461c-bec6-9b97a063d6e7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:10:12 compute-0 nova_compute[192079]: 2025-10-02 12:10:12.915 2 DEBUG oslo_concurrency.lockutils [req-37a5fda0-3497-49d9-9b20-bd0f92b58d96 req-d87ec682-211b-4dda-8e66-17337c8fe943 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "8ac83a4c-49bf-4b6d-94cb-08e337258894-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:10:12 compute-0 nova_compute[192079]: 2025-10-02 12:10:12.915 2 DEBUG oslo_concurrency.lockutils [req-37a5fda0-3497-49d9-9b20-bd0f92b58d96 req-d87ec682-211b-4dda-8e66-17337c8fe943 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ac83a4c-49bf-4b6d-94cb-08e337258894-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:10:12 compute-0 nova_compute[192079]: 2025-10-02 12:10:12.916 2 DEBUG oslo_concurrency.lockutils [req-37a5fda0-3497-49d9-9b20-bd0f92b58d96 req-d87ec682-211b-4dda-8e66-17337c8fe943 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ac83a4c-49bf-4b6d-94cb-08e337258894-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:10:12 compute-0 nova_compute[192079]: 2025-10-02 12:10:12.916 2 DEBUG nova.compute.manager [req-37a5fda0-3497-49d9-9b20-bd0f92b58d96 req-d87ec682-211b-4dda-8e66-17337c8fe943 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] No waiting events found dispatching network-vif-plugged-49c09300-dc39-461c-bec6-9b97a063d6e7 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:10:12 compute-0 nova_compute[192079]: 2025-10-02 12:10:12.916 2 WARNING nova.compute.manager [req-37a5fda0-3497-49d9-9b20-bd0f92b58d96 req-d87ec682-211b-4dda-8e66-17337c8fe943 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Received unexpected event network-vif-plugged-49c09300-dc39-461c-bec6-9b97a063d6e7 for instance with vm_state active and task_state None.
Oct 02 12:10:13 compute-0 nova_compute[192079]: 2025-10-02 12:10:13.595 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:13 compute-0 nova_compute[192079]: 2025-10-02 12:10:13.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:10:13 compute-0 nova_compute[192079]: 2025-10-02 12:10:13.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:10:13 compute-0 nova_compute[192079]: 2025-10-02 12:10:13.711 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:10:13 compute-0 nova_compute[192079]: 2025-10-02 12:10:13.711 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:10:13 compute-0 nova_compute[192079]: 2025-10-02 12:10:13.711 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:10:13 compute-0 nova_compute[192079]: 2025-10-02 12:10:13.712 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:10:13 compute-0 nova_compute[192079]: 2025-10-02 12:10:13.785 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/8ac83a4c-49bf-4b6d-94cb-08e337258894/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:10:13 compute-0 nova_compute[192079]: 2025-10-02 12:10:13.840 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/8ac83a4c-49bf-4b6d-94cb-08e337258894/disk --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:10:13 compute-0 nova_compute[192079]: 2025-10-02 12:10:13.841 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/8ac83a4c-49bf-4b6d-94cb-08e337258894/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:10:13 compute-0 nova_compute[192079]: 2025-10-02 12:10:13.894 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/8ac83a4c-49bf-4b6d-94cb-08e337258894/disk --force-share --output=json" returned: 0 in 0.053s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:10:14 compute-0 nova_compute[192079]: 2025-10-02 12:10:14.041 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:10:14 compute-0 nova_compute[192079]: 2025-10-02 12:10:14.042 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5591MB free_disk=73.3564682006836GB free_vcpus=7 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:10:14 compute-0 nova_compute[192079]: 2025-10-02 12:10:14.042 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:10:14 compute-0 nova_compute[192079]: 2025-10-02 12:10:14.043 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:10:14 compute-0 nova_compute[192079]: 2025-10-02 12:10:14.221 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance 8ac83a4c-49bf-4b6d-94cb-08e337258894 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:10:14 compute-0 nova_compute[192079]: 2025-10-02 12:10:14.222 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 1 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:10:14 compute-0 nova_compute[192079]: 2025-10-02 12:10:14.222 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=640MB phys_disk=79GB used_disk=1GB total_vcpus=8 used_vcpus=1 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:10:14 compute-0 nova_compute[192079]: 2025-10-02 12:10:14.405 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:10:14 compute-0 nova_compute[192079]: 2025-10-02 12:10:14.427 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:10:14 compute-0 nova_compute[192079]: 2025-10-02 12:10:14.456 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:10:14 compute-0 nova_compute[192079]: 2025-10-02 12:10:14.456 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.414s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:10:15 compute-0 podman[227100]: 2025-10-02 12:10:15.134950874 +0000 UTC m=+0.051172432 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=openstack_network_exporter, release=1755695350, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, config_id=edpm, architecture=x86_64, version=9.6, distribution-scope=public, io.openshift.expose-services=, com.redhat.component=ubi9-minimal-container, vendor=Red Hat, Inc., com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.buildah.version=1.33.7, url=https://catalog.redhat.com/en/search?searchType=containers, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, managed_by=edpm_ansible, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.openshift.tags=minimal rhel9, maintainer=Red Hat, Inc., name=ubi9-minimal, vcs-type=git, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., build-date=2025-08-20T13:12:41, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly.)
Oct 02 12:10:15 compute-0 podman[227101]: 2025-10-02 12:10:15.154007829 +0000 UTC m=+0.065750606 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, container_name=multipathd, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_id=multipathd, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS)
Oct 02 12:10:15 compute-0 nova_compute[192079]: 2025-10-02 12:10:15.191 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:15 compute-0 nova_compute[192079]: 2025-10-02 12:10:15.451 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:10:15 compute-0 nova_compute[192079]: 2025-10-02 12:10:15.659 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.111 2 DEBUG oslo_concurrency.lockutils [None req-673530c5-792c-483d-bdae-e3bd2a2559b0 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Acquiring lock "8ac83a4c-49bf-4b6d-94cb-08e337258894" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.111 2 DEBUG oslo_concurrency.lockutils [None req-673530c5-792c-483d-bdae-e3bd2a2559b0 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Lock "8ac83a4c-49bf-4b6d-94cb-08e337258894" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.112 2 DEBUG oslo_concurrency.lockutils [None req-673530c5-792c-483d-bdae-e3bd2a2559b0 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Acquiring lock "8ac83a4c-49bf-4b6d-94cb-08e337258894-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.112 2 DEBUG oslo_concurrency.lockutils [None req-673530c5-792c-483d-bdae-e3bd2a2559b0 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Lock "8ac83a4c-49bf-4b6d-94cb-08e337258894-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.112 2 DEBUG oslo_concurrency.lockutils [None req-673530c5-792c-483d-bdae-e3bd2a2559b0 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Lock "8ac83a4c-49bf-4b6d-94cb-08e337258894-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.134 2 INFO nova.compute.manager [None req-673530c5-792c-483d-bdae-e3bd2a2559b0 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Terminating instance
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.143 2 DEBUG nova.compute.manager [None req-673530c5-792c-483d-bdae-e3bd2a2559b0 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:10:16 compute-0 kernel: tap49c09300-dc (unregistering): left promiscuous mode
Oct 02 12:10:16 compute-0 NetworkManager[51160]: <info>  [1759407016.1582] device (tap49c09300-dc): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:10:16 compute-0 ovn_controller[94336]: 2025-10-02T12:10:16Z|00159|binding|INFO|Releasing lport 49c09300-dc39-461c-bec6-9b97a063d6e7 from this chassis (sb_readonly=0)
Oct 02 12:10:16 compute-0 ovn_controller[94336]: 2025-10-02T12:10:16Z|00160|binding|INFO|Setting lport 49c09300-dc39-461c-bec6-9b97a063d6e7 down in Southbound
Oct 02 12:10:16 compute-0 ovn_controller[94336]: 2025-10-02T12:10:16Z|00161|binding|INFO|Removing iface tap49c09300-dc ovn-installed in OVS
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.167 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.170 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:16.178 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:aa:d5:33 10.100.0.14'], port_security=['fa:16:3e:aa:d5:33 10.100.0.14'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.14/28', 'neutron:device_id': '8ac83a4c-49bf-4b6d-94cb-08e337258894', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-d6b20961-3c12-47aa-aeda-262b5ab075ae', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'cacc76a138fb435ea3d69dd8c9682958', 'neutron:revision_number': '4', 'neutron:security_group_ids': '21894bf7-b45f-48da-9f74-06b9213e547e', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=b1ef904b-f8ba-4627-bbb4-2c765c6f8cd7, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=49c09300-dc39-461c-bec6-9b97a063d6e7) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:10:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:16.180 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 49c09300-dc39-461c-bec6-9b97a063d6e7 in datapath d6b20961-3c12-47aa-aeda-262b5ab075ae unbound from our chassis
Oct 02 12:10:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:16.181 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network d6b20961-3c12-47aa-aeda-262b5ab075ae, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.182 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:16.182 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ffaffe9f-945b-4e2b-83df-8bb6fd20a2b4]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:16.183 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-d6b20961-3c12-47aa-aeda-262b5ab075ae namespace which is not needed anymore
Oct 02 12:10:16 compute-0 systemd[1]: machine-qemu\x2d26\x2dinstance\x2d00000033.scope: Deactivated successfully.
Oct 02 12:10:16 compute-0 systemd[1]: machine-qemu\x2d26\x2dinstance\x2d00000033.scope: Consumed 5.957s CPU time.
Oct 02 12:10:16 compute-0 systemd-machined[152150]: Machine qemu-26-instance-00000033 terminated.
Oct 02 12:10:16 compute-0 neutron-haproxy-ovnmeta-d6b20961-3c12-47aa-aeda-262b5ab075ae[227078]: [NOTICE]   (227082) : haproxy version is 2.8.14-c23fe91
Oct 02 12:10:16 compute-0 neutron-haproxy-ovnmeta-d6b20961-3c12-47aa-aeda-262b5ab075ae[227078]: [NOTICE]   (227082) : path to executable is /usr/sbin/haproxy
Oct 02 12:10:16 compute-0 neutron-haproxy-ovnmeta-d6b20961-3c12-47aa-aeda-262b5ab075ae[227078]: [WARNING]  (227082) : Exiting Master process...
Oct 02 12:10:16 compute-0 neutron-haproxy-ovnmeta-d6b20961-3c12-47aa-aeda-262b5ab075ae[227078]: [WARNING]  (227082) : Exiting Master process...
Oct 02 12:10:16 compute-0 neutron-haproxy-ovnmeta-d6b20961-3c12-47aa-aeda-262b5ab075ae[227078]: [ALERT]    (227082) : Current worker (227084) exited with code 143 (Terminated)
Oct 02 12:10:16 compute-0 neutron-haproxy-ovnmeta-d6b20961-3c12-47aa-aeda-262b5ab075ae[227078]: [WARNING]  (227082) : All workers exited. Exiting... (0)
Oct 02 12:10:16 compute-0 systemd[1]: libpod-9d211802c5a5f6bb0312a0851166e93215cf7573b5fd4ef6fa4d3acbafc6ef89.scope: Deactivated successfully.
Oct 02 12:10:16 compute-0 podman[227160]: 2025-10-02 12:10:16.329482794 +0000 UTC m=+0.069707473 container died 9d211802c5a5f6bb0312a0851166e93215cf7573b5fd4ef6fa4d3acbafc6ef89 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d6b20961-3c12-47aa-aeda-262b5ab075ae, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:10:16 compute-0 kernel: tap49c09300-dc: entered promiscuous mode
Oct 02 12:10:16 compute-0 NetworkManager[51160]: <info>  [1759407016.3644] manager: (tap49c09300-dc): new Tun device (/org/freedesktop/NetworkManager/Devices/80)
Oct 02 12:10:16 compute-0 systemd-udevd[227142]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:10:16 compute-0 kernel: tap49c09300-dc (unregistering): left promiscuous mode
Oct 02 12:10:16 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-9d211802c5a5f6bb0312a0851166e93215cf7573b5fd4ef6fa4d3acbafc6ef89-userdata-shm.mount: Deactivated successfully.
Oct 02 12:10:16 compute-0 systemd[1]: var-lib-containers-storage-overlay-bd127fabb091844cd3cbd693885e391a1ad834e57238dc36e2d55eb2a3611ca5-merged.mount: Deactivated successfully.
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.426 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:16 compute-0 ovn_controller[94336]: 2025-10-02T12:10:16Z|00162|binding|INFO|Claiming lport 49c09300-dc39-461c-bec6-9b97a063d6e7 for this chassis.
Oct 02 12:10:16 compute-0 ovn_controller[94336]: 2025-10-02T12:10:16Z|00163|binding|INFO|49c09300-dc39-461c-bec6-9b97a063d6e7: Claiming fa:16:3e:aa:d5:33 10.100.0.14
Oct 02 12:10:16 compute-0 ovn_controller[94336]: 2025-10-02T12:10:16Z|00164|binding|INFO|Setting lport 49c09300-dc39-461c-bec6-9b97a063d6e7 up in Southbound
Oct 02 12:10:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:16.443 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:aa:d5:33 10.100.0.14'], port_security=['fa:16:3e:aa:d5:33 10.100.0.14'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.14/28', 'neutron:device_id': '8ac83a4c-49bf-4b6d-94cb-08e337258894', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-d6b20961-3c12-47aa-aeda-262b5ab075ae', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'cacc76a138fb435ea3d69dd8c9682958', 'neutron:revision_number': '4', 'neutron:security_group_ids': '21894bf7-b45f-48da-9f74-06b9213e547e', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=b1ef904b-f8ba-4627-bbb4-2c765c6f8cd7, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=49c09300-dc39-461c-bec6-9b97a063d6e7) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:10:16 compute-0 ovn_controller[94336]: 2025-10-02T12:10:16Z|00165|binding|INFO|Setting lport 49c09300-dc39-461c-bec6-9b97a063d6e7 ovn-installed in OVS
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.445 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.446 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:16 compute-0 podman[227160]: 2025-10-02 12:10:16.463191953 +0000 UTC m=+0.203416632 container cleanup 9d211802c5a5f6bb0312a0851166e93215cf7573b5fd4ef6fa4d3acbafc6ef89 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d6b20961-3c12-47aa-aeda-262b5ab075ae, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:10:16 compute-0 systemd[1]: libpod-conmon-9d211802c5a5f6bb0312a0851166e93215cf7573b5fd4ef6fa4d3acbafc6ef89.scope: Deactivated successfully.
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.476 2 INFO nova.virt.libvirt.driver [-] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Instance destroyed successfully.
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.476 2 DEBUG nova.objects.instance [None req-673530c5-792c-483d-bdae-e3bd2a2559b0 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Lazy-loading 'resources' on Instance uuid 8ac83a4c-49bf-4b6d-94cb-08e337258894 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.488 2 DEBUG nova.virt.libvirt.vif [None req-673530c5-792c-483d-bdae-e3bd2a2559b0 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:10:02Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-InstanceActionsNegativeTestJSON-server-1910785266',display_name='tempest-InstanceActionsNegativeTestJSON-server-1910785266',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-instanceactionsnegativetestjson-server-1910785266',id=51,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:10:11Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='cacc76a138fb435ea3d69dd8c9682958',ramdisk_id='',reservation_id='r-0t91p1d0',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif
_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-InstanceActionsNegativeTestJSON-725117245',owner_user_name='tempest-InstanceActionsNegativeTestJSON-725117245-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:10:11Z,user_data=None,user_id='5230b7fc4a3b4c28ac245755d300b23a',uuid=8ac83a4c-49bf-4b6d-94cb-08e337258894,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "49c09300-dc39-461c-bec6-9b97a063d6e7", "address": "fa:16:3e:aa:d5:33", "network": {"id": "d6b20961-3c12-47aa-aeda-262b5ab075ae", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1638524493-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "cacc76a138fb435ea3d69dd8c9682958", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap49c09300-dc", "ovs_interfaceid": "49c09300-dc39-461c-bec6-9b97a063d6e7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.489 2 DEBUG nova.network.os_vif_util [None req-673530c5-792c-483d-bdae-e3bd2a2559b0 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Converting VIF {"id": "49c09300-dc39-461c-bec6-9b97a063d6e7", "address": "fa:16:3e:aa:d5:33", "network": {"id": "d6b20961-3c12-47aa-aeda-262b5ab075ae", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1638524493-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "cacc76a138fb435ea3d69dd8c9682958", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap49c09300-dc", "ovs_interfaceid": "49c09300-dc39-461c-bec6-9b97a063d6e7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.490 2 DEBUG nova.network.os_vif_util [None req-673530c5-792c-483d-bdae-e3bd2a2559b0 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:aa:d5:33,bridge_name='br-int',has_traffic_filtering=True,id=49c09300-dc39-461c-bec6-9b97a063d6e7,network=Network(d6b20961-3c12-47aa-aeda-262b5ab075ae),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap49c09300-dc') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.490 2 DEBUG os_vif [None req-673530c5-792c-483d-bdae-e3bd2a2559b0 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:aa:d5:33,bridge_name='br-int',has_traffic_filtering=True,id=49c09300-dc39-461c-bec6-9b97a063d6e7,network=Network(d6b20961-3c12-47aa-aeda-262b5ab075ae),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap49c09300-dc') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.491 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.492 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap49c09300-dc, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:10:16 compute-0 ovn_controller[94336]: 2025-10-02T12:10:16Z|00166|binding|INFO|Releasing lport 49c09300-dc39-461c-bec6-9b97a063d6e7 from this chassis (sb_readonly=0)
Oct 02 12:10:16 compute-0 ovn_controller[94336]: 2025-10-02T12:10:16Z|00167|binding|INFO|Setting lport 49c09300-dc39-461c-bec6-9b97a063d6e7 down in Southbound
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.495 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.497 2 INFO os_vif [None req-673530c5-792c-483d-bdae-e3bd2a2559b0 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:aa:d5:33,bridge_name='br-int',has_traffic_filtering=True,id=49c09300-dc39-461c-bec6-9b97a063d6e7,network=Network(d6b20961-3c12-47aa-aeda-262b5ab075ae),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap49c09300-dc')
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.497 2 INFO nova.virt.libvirt.driver [None req-673530c5-792c-483d-bdae-e3bd2a2559b0 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Deleting instance files /var/lib/nova/instances/8ac83a4c-49bf-4b6d-94cb-08e337258894_del
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.498 2 INFO nova.virt.libvirt.driver [None req-673530c5-792c-483d-bdae-e3bd2a2559b0 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Deletion of /var/lib/nova/instances/8ac83a4c-49bf-4b6d-94cb-08e337258894_del complete
Oct 02 12:10:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:16.502 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:aa:d5:33 10.100.0.14'], port_security=['fa:16:3e:aa:d5:33 10.100.0.14'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.14/28', 'neutron:device_id': '8ac83a4c-49bf-4b6d-94cb-08e337258894', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-d6b20961-3c12-47aa-aeda-262b5ab075ae', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'cacc76a138fb435ea3d69dd8c9682958', 'neutron:revision_number': '4', 'neutron:security_group_ids': '21894bf7-b45f-48da-9f74-06b9213e547e', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=b1ef904b-f8ba-4627-bbb4-2c765c6f8cd7, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=49c09300-dc39-461c-bec6-9b97a063d6e7) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.507 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:16 compute-0 podman[227202]: 2025-10-02 12:10:16.534947971 +0000 UTC m=+0.052199331 container remove 9d211802c5a5f6bb0312a0851166e93215cf7573b5fd4ef6fa4d3acbafc6ef89 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d6b20961-3c12-47aa-aeda-262b5ab075ae, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:10:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:16.539 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a40d1de9-7a2d-49ac-b46c-75f27cef1c4b]: (4, ('Thu Oct  2 12:10:16 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-d6b20961-3c12-47aa-aeda-262b5ab075ae (9d211802c5a5f6bb0312a0851166e93215cf7573b5fd4ef6fa4d3acbafc6ef89)\n9d211802c5a5f6bb0312a0851166e93215cf7573b5fd4ef6fa4d3acbafc6ef89\nThu Oct  2 12:10:16 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-d6b20961-3c12-47aa-aeda-262b5ab075ae (9d211802c5a5f6bb0312a0851166e93215cf7573b5fd4ef6fa4d3acbafc6ef89)\n9d211802c5a5f6bb0312a0851166e93215cf7573b5fd4ef6fa4d3acbafc6ef89\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:16.541 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c4bd9547-3815-432e-9a78-d128f63037d6]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:16.542 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapd6b20961-30, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.543 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:16 compute-0 kernel: tapd6b20961-30: left promiscuous mode
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.554 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:16.557 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c96dbdc6-1aaf-4c87-beb6-a0f94f294220]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:16.585 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1ebf333f-73f3-4940-bd99-65f0816d2850]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:16.587 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c04d366b-f897-469c-a645-717d15ffabf3]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:16.601 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[554f3dbb-4162-4d8d-b3ca-cfa12446a4dc]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 500780, 'reachable_time': 16556, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 227217, 'error': None, 'target': 'ovnmeta-d6b20961-3c12-47aa-aeda-262b5ab075ae', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:16.603 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-d6b20961-3c12-47aa-aeda-262b5ab075ae deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:10:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:16.603 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[0a7baf9d-8f94-4ea8-a50d-f4910cd12bb6]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:16.604 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 49c09300-dc39-461c-bec6-9b97a063d6e7 in datapath d6b20961-3c12-47aa-aeda-262b5ab075ae unbound from our chassis
Oct 02 12:10:16 compute-0 systemd[1]: run-netns-ovnmeta\x2dd6b20961\x2d3c12\x2d47aa\x2daeda\x2d262b5ab075ae.mount: Deactivated successfully.
Oct 02 12:10:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:16.605 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network d6b20961-3c12-47aa-aeda-262b5ab075ae, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:10:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:16.606 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3e8b37c1-c8bf-4079-a381-a6af7da064ac]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:16.606 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 49c09300-dc39-461c-bec6-9b97a063d6e7 in datapath d6b20961-3c12-47aa-aeda-262b5ab075ae unbound from our chassis
Oct 02 12:10:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:16.608 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network d6b20961-3c12-47aa-aeda-262b5ab075ae, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:10:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:16.608 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4efa30a4-10d7-4d72-9bfc-dc6cc8b80633]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.645 2 INFO nova.compute.manager [None req-673530c5-792c-483d-bdae-e3bd2a2559b0 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Took 0.50 seconds to destroy the instance on the hypervisor.
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.646 2 DEBUG oslo.service.loopingcall [None req-673530c5-792c-483d-bdae-e3bd2a2559b0 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.647 2 DEBUG nova.compute.manager [-] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:10:16 compute-0 nova_compute[192079]: 2025-10-02 12:10:16.648 2 DEBUG nova.network.neutron [-] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:10:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:17.070 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=14, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=13) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:10:17 compute-0 nova_compute[192079]: 2025-10-02 12:10:17.071 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:17.071 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 10 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:10:17 compute-0 nova_compute[192079]: 2025-10-02 12:10:17.473 2 DEBUG nova.compute.manager [req-3840ee7c-32df-4667-a0fc-6d4a087a68f0 req-bc44d333-b4a1-4c9e-9032-7e584091ecdd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Received event network-vif-unplugged-49c09300-dc39-461c-bec6-9b97a063d6e7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:10:17 compute-0 nova_compute[192079]: 2025-10-02 12:10:17.473 2 DEBUG oslo_concurrency.lockutils [req-3840ee7c-32df-4667-a0fc-6d4a087a68f0 req-bc44d333-b4a1-4c9e-9032-7e584091ecdd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "8ac83a4c-49bf-4b6d-94cb-08e337258894-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:10:17 compute-0 nova_compute[192079]: 2025-10-02 12:10:17.474 2 DEBUG oslo_concurrency.lockutils [req-3840ee7c-32df-4667-a0fc-6d4a087a68f0 req-bc44d333-b4a1-4c9e-9032-7e584091ecdd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ac83a4c-49bf-4b6d-94cb-08e337258894-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:10:17 compute-0 nova_compute[192079]: 2025-10-02 12:10:17.474 2 DEBUG oslo_concurrency.lockutils [req-3840ee7c-32df-4667-a0fc-6d4a087a68f0 req-bc44d333-b4a1-4c9e-9032-7e584091ecdd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ac83a4c-49bf-4b6d-94cb-08e337258894-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:10:17 compute-0 nova_compute[192079]: 2025-10-02 12:10:17.475 2 DEBUG nova.compute.manager [req-3840ee7c-32df-4667-a0fc-6d4a087a68f0 req-bc44d333-b4a1-4c9e-9032-7e584091ecdd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] No waiting events found dispatching network-vif-unplugged-49c09300-dc39-461c-bec6-9b97a063d6e7 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:10:17 compute-0 nova_compute[192079]: 2025-10-02 12:10:17.475 2 DEBUG nova.compute.manager [req-3840ee7c-32df-4667-a0fc-6d4a087a68f0 req-bc44d333-b4a1-4c9e-9032-7e584091ecdd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Received event network-vif-unplugged-49c09300-dc39-461c-bec6-9b97a063d6e7 for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:10:17 compute-0 nova_compute[192079]: 2025-10-02 12:10:17.475 2 DEBUG nova.compute.manager [req-3840ee7c-32df-4667-a0fc-6d4a087a68f0 req-bc44d333-b4a1-4c9e-9032-7e584091ecdd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Received event network-vif-plugged-49c09300-dc39-461c-bec6-9b97a063d6e7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:10:17 compute-0 nova_compute[192079]: 2025-10-02 12:10:17.476 2 DEBUG oslo_concurrency.lockutils [req-3840ee7c-32df-4667-a0fc-6d4a087a68f0 req-bc44d333-b4a1-4c9e-9032-7e584091ecdd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "8ac83a4c-49bf-4b6d-94cb-08e337258894-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:10:17 compute-0 nova_compute[192079]: 2025-10-02 12:10:17.476 2 DEBUG oslo_concurrency.lockutils [req-3840ee7c-32df-4667-a0fc-6d4a087a68f0 req-bc44d333-b4a1-4c9e-9032-7e584091ecdd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ac83a4c-49bf-4b6d-94cb-08e337258894-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:10:17 compute-0 nova_compute[192079]: 2025-10-02 12:10:17.476 2 DEBUG oslo_concurrency.lockutils [req-3840ee7c-32df-4667-a0fc-6d4a087a68f0 req-bc44d333-b4a1-4c9e-9032-7e584091ecdd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ac83a4c-49bf-4b6d-94cb-08e337258894-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:10:17 compute-0 nova_compute[192079]: 2025-10-02 12:10:17.477 2 DEBUG nova.compute.manager [req-3840ee7c-32df-4667-a0fc-6d4a087a68f0 req-bc44d333-b4a1-4c9e-9032-7e584091ecdd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] No waiting events found dispatching network-vif-plugged-49c09300-dc39-461c-bec6-9b97a063d6e7 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:10:17 compute-0 nova_compute[192079]: 2025-10-02 12:10:17.477 2 WARNING nova.compute.manager [req-3840ee7c-32df-4667-a0fc-6d4a087a68f0 req-bc44d333-b4a1-4c9e-9032-7e584091ecdd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Received unexpected event network-vif-plugged-49c09300-dc39-461c-bec6-9b97a063d6e7 for instance with vm_state active and task_state deleting.
Oct 02 12:10:17 compute-0 nova_compute[192079]: 2025-10-02 12:10:17.477 2 DEBUG nova.compute.manager [req-3840ee7c-32df-4667-a0fc-6d4a087a68f0 req-bc44d333-b4a1-4c9e-9032-7e584091ecdd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Received event network-vif-plugged-49c09300-dc39-461c-bec6-9b97a063d6e7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:10:17 compute-0 nova_compute[192079]: 2025-10-02 12:10:17.478 2 DEBUG oslo_concurrency.lockutils [req-3840ee7c-32df-4667-a0fc-6d4a087a68f0 req-bc44d333-b4a1-4c9e-9032-7e584091ecdd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "8ac83a4c-49bf-4b6d-94cb-08e337258894-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:10:17 compute-0 nova_compute[192079]: 2025-10-02 12:10:17.478 2 DEBUG oslo_concurrency.lockutils [req-3840ee7c-32df-4667-a0fc-6d4a087a68f0 req-bc44d333-b4a1-4c9e-9032-7e584091ecdd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ac83a4c-49bf-4b6d-94cb-08e337258894-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:10:17 compute-0 nova_compute[192079]: 2025-10-02 12:10:17.478 2 DEBUG oslo_concurrency.lockutils [req-3840ee7c-32df-4667-a0fc-6d4a087a68f0 req-bc44d333-b4a1-4c9e-9032-7e584091ecdd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ac83a4c-49bf-4b6d-94cb-08e337258894-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:10:17 compute-0 nova_compute[192079]: 2025-10-02 12:10:17.479 2 DEBUG nova.compute.manager [req-3840ee7c-32df-4667-a0fc-6d4a087a68f0 req-bc44d333-b4a1-4c9e-9032-7e584091ecdd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] No waiting events found dispatching network-vif-plugged-49c09300-dc39-461c-bec6-9b97a063d6e7 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:10:17 compute-0 nova_compute[192079]: 2025-10-02 12:10:17.479 2 WARNING nova.compute.manager [req-3840ee7c-32df-4667-a0fc-6d4a087a68f0 req-bc44d333-b4a1-4c9e-9032-7e584091ecdd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Received unexpected event network-vif-plugged-49c09300-dc39-461c-bec6-9b97a063d6e7 for instance with vm_state active and task_state deleting.
Oct 02 12:10:17 compute-0 nova_compute[192079]: 2025-10-02 12:10:17.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:10:17 compute-0 nova_compute[192079]: 2025-10-02 12:10:17.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:10:18 compute-0 nova_compute[192079]: 2025-10-02 12:10:18.059 2 DEBUG nova.network.neutron [-] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:10:18 compute-0 nova_compute[192079]: 2025-10-02 12:10:18.090 2 INFO nova.compute.manager [-] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Took 1.44 seconds to deallocate network for instance.
Oct 02 12:10:18 compute-0 nova_compute[192079]: 2025-10-02 12:10:18.131 2 DEBUG nova.compute.manager [req-e05c5891-9490-4d09-ac78-0548e10f01c9 req-608af279-5be4-4ed4-b84b-70cd1ee961a8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Received event network-vif-deleted-49c09300-dc39-461c-bec6-9b97a063d6e7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:10:18 compute-0 nova_compute[192079]: 2025-10-02 12:10:18.184 2 DEBUG oslo_concurrency.lockutils [None req-673530c5-792c-483d-bdae-e3bd2a2559b0 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:10:18 compute-0 nova_compute[192079]: 2025-10-02 12:10:18.185 2 DEBUG oslo_concurrency.lockutils [None req-673530c5-792c-483d-bdae-e3bd2a2559b0 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:10:18 compute-0 nova_compute[192079]: 2025-10-02 12:10:18.267 2 DEBUG nova.compute.provider_tree [None req-673530c5-792c-483d-bdae-e3bd2a2559b0 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:10:18 compute-0 nova_compute[192079]: 2025-10-02 12:10:18.304 2 DEBUG nova.scheduler.client.report [None req-673530c5-792c-483d-bdae-e3bd2a2559b0 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:10:18 compute-0 nova_compute[192079]: 2025-10-02 12:10:18.334 2 DEBUG oslo_concurrency.lockutils [None req-673530c5-792c-483d-bdae-e3bd2a2559b0 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.149s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:10:18 compute-0 nova_compute[192079]: 2025-10-02 12:10:18.394 2 INFO nova.scheduler.client.report [None req-673530c5-792c-483d-bdae-e3bd2a2559b0 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Deleted allocations for instance 8ac83a4c-49bf-4b6d-94cb-08e337258894
Oct 02 12:10:18 compute-0 nova_compute[192079]: 2025-10-02 12:10:18.544 2 DEBUG oslo_concurrency.lockutils [None req-673530c5-792c-483d-bdae-e3bd2a2559b0 5230b7fc4a3b4c28ac245755d300b23a cacc76a138fb435ea3d69dd8c9682958 - - default default] Lock "8ac83a4c-49bf-4b6d-94cb-08e337258894" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 2.433s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:10:18 compute-0 nova_compute[192079]: 2025-10-02 12:10:18.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:10:18 compute-0 nova_compute[192079]: 2025-10-02 12:10:18.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:10:18 compute-0 nova_compute[192079]: 2025-10-02 12:10:18.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:10:18 compute-0 nova_compute[192079]: 2025-10-02 12:10:18.687 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:10:18 compute-0 nova_compute[192079]: 2025-10-02 12:10:18.688 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:10:19 compute-0 nova_compute[192079]: 2025-10-02 12:10:19.602 2 DEBUG nova.compute.manager [req-1e991f87-706c-4b7a-8db2-83a6174f7639 req-c39ad481-a764-4262-9439-efc38e932d8f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Received event network-vif-plugged-49c09300-dc39-461c-bec6-9b97a063d6e7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:10:19 compute-0 nova_compute[192079]: 2025-10-02 12:10:19.603 2 DEBUG oslo_concurrency.lockutils [req-1e991f87-706c-4b7a-8db2-83a6174f7639 req-c39ad481-a764-4262-9439-efc38e932d8f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "8ac83a4c-49bf-4b6d-94cb-08e337258894-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:10:19 compute-0 nova_compute[192079]: 2025-10-02 12:10:19.603 2 DEBUG oslo_concurrency.lockutils [req-1e991f87-706c-4b7a-8db2-83a6174f7639 req-c39ad481-a764-4262-9439-efc38e932d8f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ac83a4c-49bf-4b6d-94cb-08e337258894-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:10:19 compute-0 nova_compute[192079]: 2025-10-02 12:10:19.603 2 DEBUG oslo_concurrency.lockutils [req-1e991f87-706c-4b7a-8db2-83a6174f7639 req-c39ad481-a764-4262-9439-efc38e932d8f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ac83a4c-49bf-4b6d-94cb-08e337258894-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:10:19 compute-0 nova_compute[192079]: 2025-10-02 12:10:19.603 2 DEBUG nova.compute.manager [req-1e991f87-706c-4b7a-8db2-83a6174f7639 req-c39ad481-a764-4262-9439-efc38e932d8f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] No waiting events found dispatching network-vif-plugged-49c09300-dc39-461c-bec6-9b97a063d6e7 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:10:19 compute-0 nova_compute[192079]: 2025-10-02 12:10:19.603 2 WARNING nova.compute.manager [req-1e991f87-706c-4b7a-8db2-83a6174f7639 req-c39ad481-a764-4262-9439-efc38e932d8f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Received unexpected event network-vif-plugged-49c09300-dc39-461c-bec6-9b97a063d6e7 for instance with vm_state deleted and task_state None.
Oct 02 12:10:19 compute-0 nova_compute[192079]: 2025-10-02 12:10:19.604 2 DEBUG nova.compute.manager [req-1e991f87-706c-4b7a-8db2-83a6174f7639 req-c39ad481-a764-4262-9439-efc38e932d8f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Received event network-vif-plugged-49c09300-dc39-461c-bec6-9b97a063d6e7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:10:19 compute-0 nova_compute[192079]: 2025-10-02 12:10:19.604 2 DEBUG oslo_concurrency.lockutils [req-1e991f87-706c-4b7a-8db2-83a6174f7639 req-c39ad481-a764-4262-9439-efc38e932d8f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "8ac83a4c-49bf-4b6d-94cb-08e337258894-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:10:19 compute-0 nova_compute[192079]: 2025-10-02 12:10:19.604 2 DEBUG oslo_concurrency.lockutils [req-1e991f87-706c-4b7a-8db2-83a6174f7639 req-c39ad481-a764-4262-9439-efc38e932d8f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ac83a4c-49bf-4b6d-94cb-08e337258894-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:10:19 compute-0 nova_compute[192079]: 2025-10-02 12:10:19.604 2 DEBUG oslo_concurrency.lockutils [req-1e991f87-706c-4b7a-8db2-83a6174f7639 req-c39ad481-a764-4262-9439-efc38e932d8f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ac83a4c-49bf-4b6d-94cb-08e337258894-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:10:19 compute-0 nova_compute[192079]: 2025-10-02 12:10:19.604 2 DEBUG nova.compute.manager [req-1e991f87-706c-4b7a-8db2-83a6174f7639 req-c39ad481-a764-4262-9439-efc38e932d8f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] No waiting events found dispatching network-vif-plugged-49c09300-dc39-461c-bec6-9b97a063d6e7 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:10:19 compute-0 nova_compute[192079]: 2025-10-02 12:10:19.605 2 WARNING nova.compute.manager [req-1e991f87-706c-4b7a-8db2-83a6174f7639 req-c39ad481-a764-4262-9439-efc38e932d8f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Received unexpected event network-vif-plugged-49c09300-dc39-461c-bec6-9b97a063d6e7 for instance with vm_state deleted and task_state None.
Oct 02 12:10:19 compute-0 nova_compute[192079]: 2025-10-02 12:10:19.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:10:20 compute-0 nova_compute[192079]: 2025-10-02 12:10:20.193 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:20 compute-0 nova_compute[192079]: 2025-10-02 12:10:20.663 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:10:21 compute-0 podman[227218]: 2025-10-02 12:10:21.139639757 +0000 UTC m=+0.058373987 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter)
Oct 02 12:10:21 compute-0 podman[227219]: 2025-10-02 12:10:21.140055128 +0000 UTC m=+0.056920507 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=iscsid, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, config_id=iscsid, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, org.label-schema.vendor=CentOS)
Oct 02 12:10:21 compute-0 nova_compute[192079]: 2025-10-02 12:10:21.535 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:25 compute-0 nova_compute[192079]: 2025-10-02 12:10:25.195 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:26 compute-0 nova_compute[192079]: 2025-10-02 12:10:26.538 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:26 compute-0 nova_compute[192079]: 2025-10-02 12:10:26.871 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:27 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:27.073 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '14'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:10:30 compute-0 nova_compute[192079]: 2025-10-02 12:10:30.222 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:31 compute-0 nova_compute[192079]: 2025-10-02 12:10:31.476 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759407016.4740715, 8ac83a4c-49bf-4b6d-94cb-08e337258894 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:10:31 compute-0 nova_compute[192079]: 2025-10-02 12:10:31.476 2 INFO nova.compute.manager [-] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] VM Stopped (Lifecycle Event)
Oct 02 12:10:31 compute-0 nova_compute[192079]: 2025-10-02 12:10:31.545 2 DEBUG nova.compute.manager [None req-b674655d-dae7-4060-aec6-81375561cd02 - - - - - -] [instance: 8ac83a4c-49bf-4b6d-94cb-08e337258894] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:10:31 compute-0 nova_compute[192079]: 2025-10-02 12:10:31.589 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:32 compute-0 podman[227259]: 2025-10-02 12:10:32.145278883 +0000 UTC m=+0.060769141 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.license=GPLv2, container_name=ovn_metadata_agent, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_managed=true, config_id=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']})
Oct 02 12:10:32 compute-0 podman[227261]: 2025-10-02 12:10:32.14922521 +0000 UTC m=+0.055019836 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>)
Oct 02 12:10:32 compute-0 podman[227260]: 2025-10-02 12:10:32.177798272 +0000 UTC m=+0.088010277 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, tcib_managed=true, org.label-schema.build-date=20251001)
Oct 02 12:10:35 compute-0 nova_compute[192079]: 2025-10-02 12:10:35.224 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:36 compute-0 nova_compute[192079]: 2025-10-02 12:10:36.592 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:40 compute-0 nova_compute[192079]: 2025-10-02 12:10:40.226 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:41 compute-0 podman[227326]: 2025-10-02 12:10:41.151828769 +0000 UTC m=+0.064791140 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, container_name=ceilometer_agent_compute, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', 
'/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:10:41 compute-0 nova_compute[192079]: 2025-10-02 12:10:41.602 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:45 compute-0 nova_compute[192079]: 2025-10-02 12:10:45.257 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:46 compute-0 podman[227345]: 2025-10-02 12:10:46.16668462 +0000 UTC m=+0.064705518 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, version=9.6, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., managed_by=edpm_ansible, build-date=2025-08-20T13:12:41, io.openshift.tags=minimal rhel9, release=1755695350, architecture=x86_64, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, container_name=openstack_network_exporter, url=https://catalog.redhat.com/en/search?searchType=containers, config_id=edpm, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, io.k8s.description=The Universal Base Image Minimal is a stripped down image 
that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., maintainer=Red Hat, Inc., name=ubi9-minimal, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.buildah.version=1.33.7, io.openshift.expose-services=, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vcs-type=git, distribution-scope=public, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, vendor=Red Hat, Inc., com.redhat.component=ubi9-minimal-container)
Oct 02 12:10:46 compute-0 podman[227346]: 2025-10-02 12:10:46.173837393 +0000 UTC m=+0.061243085 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, container_name=multipathd, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, config_id=multipathd, managed_by=edpm_ansible, org.label-schema.license=GPLv2, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']})
Oct 02 12:10:46 compute-0 nova_compute[192079]: 2025-10-02 12:10:46.650 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:49 compute-0 nova_compute[192079]: 2025-10-02 12:10:49.391 2 DEBUG oslo_concurrency.lockutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Acquiring lock "a454ffff-18eb-45a3-a5f4-84882f050b4f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:10:49 compute-0 nova_compute[192079]: 2025-10-02 12:10:49.392 2 DEBUG oslo_concurrency.lockutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Lock "a454ffff-18eb-45a3-a5f4-84882f050b4f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:10:49 compute-0 nova_compute[192079]: 2025-10-02 12:10:49.414 2 DEBUG nova.compute.manager [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:10:49 compute-0 nova_compute[192079]: 2025-10-02 12:10:49.558 2 DEBUG oslo_concurrency.lockutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:10:49 compute-0 nova_compute[192079]: 2025-10-02 12:10:49.558 2 DEBUG oslo_concurrency.lockutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:10:49 compute-0 nova_compute[192079]: 2025-10-02 12:10:49.564 2 DEBUG nova.virt.hardware [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:10:49 compute-0 nova_compute[192079]: 2025-10-02 12:10:49.564 2 INFO nova.compute.claims [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:10:49 compute-0 nova_compute[192079]: 2025-10-02 12:10:49.801 2 DEBUG nova.compute.provider_tree [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:10:49 compute-0 nova_compute[192079]: 2025-10-02 12:10:49.819 2 DEBUG nova.scheduler.client.report [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:10:49 compute-0 nova_compute[192079]: 2025-10-02 12:10:49.846 2 DEBUG oslo_concurrency.lockutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.288s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:10:49 compute-0 nova_compute[192079]: 2025-10-02 12:10:49.847 2 DEBUG nova.compute.manager [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.007 2 DEBUG nova.compute.manager [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.007 2 DEBUG nova.network.neutron [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.039 2 INFO nova.virt.libvirt.driver [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.057 2 DEBUG nova.compute.manager [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.209 2 DEBUG nova.compute.manager [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.210 2 DEBUG nova.virt.libvirt.driver [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.210 2 INFO nova.virt.libvirt.driver [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Creating image(s)
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.212 2 DEBUG oslo_concurrency.lockutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Acquiring lock "/var/lib/nova/instances/a454ffff-18eb-45a3-a5f4-84882f050b4f/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.212 2 DEBUG oslo_concurrency.lockutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Lock "/var/lib/nova/instances/a454ffff-18eb-45a3-a5f4-84882f050b4f/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.213 2 DEBUG oslo_concurrency.lockutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Lock "/var/lib/nova/instances/a454ffff-18eb-45a3-a5f4-84882f050b4f/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.225 2 DEBUG oslo_concurrency.processutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.295 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.313 2 DEBUG oslo_concurrency.processutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.089s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.314 2 DEBUG oslo_concurrency.lockutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.315 2 DEBUG oslo_concurrency.lockutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.326 2 DEBUG oslo_concurrency.processutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.407 2 DEBUG oslo_concurrency.processutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.081s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.408 2 DEBUG oslo_concurrency.processutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/a454ffff-18eb-45a3-a5f4-84882f050b4f/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.435 2 DEBUG nova.policy [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.459 2 DEBUG oslo_concurrency.processutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/a454ffff-18eb-45a3-a5f4-84882f050b4f/disk 1073741824" returned: 0 in 0.052s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.461 2 DEBUG oslo_concurrency.lockutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.146s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.462 2 DEBUG oslo_concurrency.processutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.545 2 DEBUG oslo_concurrency.processutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.083s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.546 2 DEBUG nova.virt.disk.api [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Checking if we can resize image /var/lib/nova/instances/a454ffff-18eb-45a3-a5f4-84882f050b4f/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.547 2 DEBUG oslo_concurrency.processutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a454ffff-18eb-45a3-a5f4-84882f050b4f/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.636 2 DEBUG oslo_concurrency.processutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a454ffff-18eb-45a3-a5f4-84882f050b4f/disk --force-share --output=json" returned: 0 in 0.089s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.638 2 DEBUG nova.virt.disk.api [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Cannot resize image /var/lib/nova/instances/a454ffff-18eb-45a3-a5f4-84882f050b4f/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.639 2 DEBUG nova.objects.instance [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Lazy-loading 'migration_context' on Instance uuid a454ffff-18eb-45a3-a5f4-84882f050b4f obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.688 2 DEBUG nova.virt.libvirt.driver [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.688 2 DEBUG nova.virt.libvirt.driver [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Ensure instance console log exists: /var/lib/nova/instances/a454ffff-18eb-45a3-a5f4-84882f050b4f/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.689 2 DEBUG oslo_concurrency.lockutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.689 2 DEBUG oslo_concurrency.lockutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:10:50 compute-0 nova_compute[192079]: 2025-10-02 12:10:50.689 2 DEBUG oslo_concurrency.lockutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:10:51 compute-0 nova_compute[192079]: 2025-10-02 12:10:51.653 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:51 compute-0 nova_compute[192079]: 2025-10-02 12:10:51.877 2 DEBUG nova.network.neutron [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Successfully created port: 92134be3-e745-4df5-8491-81ceeaf0c20e _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:10:52 compute-0 podman[227400]: 2025-10-02 12:10:52.166757569 +0000 UTC m=+0.073368142 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']})
Oct 02 12:10:52 compute-0 podman[227401]: 2025-10-02 12:10:52.173366098 +0000 UTC m=+0.083774823 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, config_id=iscsid, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, container_name=iscsid, managed_by=edpm_ansible, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:10:54 compute-0 nova_compute[192079]: 2025-10-02 12:10:54.102 2 DEBUG nova.network.neutron [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Successfully updated port: 92134be3-e745-4df5-8491-81ceeaf0c20e _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:10:54 compute-0 nova_compute[192079]: 2025-10-02 12:10:54.122 2 DEBUG oslo_concurrency.lockutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Acquiring lock "refresh_cache-a454ffff-18eb-45a3-a5f4-84882f050b4f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:10:54 compute-0 nova_compute[192079]: 2025-10-02 12:10:54.122 2 DEBUG oslo_concurrency.lockutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Acquired lock "refresh_cache-a454ffff-18eb-45a3-a5f4-84882f050b4f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:10:54 compute-0 nova_compute[192079]: 2025-10-02 12:10:54.122 2 DEBUG nova.network.neutron [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:10:54 compute-0 nova_compute[192079]: 2025-10-02 12:10:54.358 2 DEBUG nova.network.neutron [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.297 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.681 2 DEBUG nova.network.neutron [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Updating instance_info_cache with network_info: [{"id": "92134be3-e745-4df5-8491-81ceeaf0c20e", "address": "fa:16:3e:42:b4:4f", "network": {"id": "a95667f3-b3ee-4e6a-b815-a7243d0bf012", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-301403611-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "d7dda73e7f3d4e05b0a8d0d1602f6b94", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92134be3-e7", "ovs_interfaceid": "92134be3-e745-4df5-8491-81ceeaf0c20e", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.721 2 DEBUG oslo_concurrency.lockutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Releasing lock "refresh_cache-a454ffff-18eb-45a3-a5f4-84882f050b4f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.722 2 DEBUG nova.compute.manager [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Instance network_info: |[{"id": "92134be3-e745-4df5-8491-81ceeaf0c20e", "address": "fa:16:3e:42:b4:4f", "network": {"id": "a95667f3-b3ee-4e6a-b815-a7243d0bf012", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-301403611-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "d7dda73e7f3d4e05b0a8d0d1602f6b94", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92134be3-e7", "ovs_interfaceid": "92134be3-e745-4df5-8491-81ceeaf0c20e", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.724 2 DEBUG nova.virt.libvirt.driver [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Start _get_guest_xml network_info=[{"id": "92134be3-e745-4df5-8491-81ceeaf0c20e", "address": "fa:16:3e:42:b4:4f", "network": {"id": "a95667f3-b3ee-4e6a-b815-a7243d0bf012", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-301403611-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "d7dda73e7f3d4e05b0a8d0d1602f6b94", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92134be3-e7", "ovs_interfaceid": "92134be3-e745-4df5-8491-81ceeaf0c20e", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.729 2 WARNING nova.virt.libvirt.driver [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.736 2 DEBUG nova.virt.libvirt.host [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.736 2 DEBUG nova.virt.libvirt.host [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.740 2 DEBUG nova.virt.libvirt.host [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.741 2 DEBUG nova.virt.libvirt.host [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.742 2 DEBUG nova.virt.libvirt.driver [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.742 2 DEBUG nova.virt.hardware [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.742 2 DEBUG nova.virt.hardware [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.742 2 DEBUG nova.virt.hardware [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.743 2 DEBUG nova.virt.hardware [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.743 2 DEBUG nova.virt.hardware [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.743 2 DEBUG nova.virt.hardware [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.743 2 DEBUG nova.virt.hardware [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.744 2 DEBUG nova.virt.hardware [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.744 2 DEBUG nova.virt.hardware [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.744 2 DEBUG nova.virt.hardware [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.744 2 DEBUG nova.virt.hardware [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.747 2 DEBUG nova.virt.libvirt.vif [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:10:47Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651',display_name='tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-floatingipsassociationnegativetestjson-server-894962651',id=54,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='d7dda73e7f3d4e05b0a8d0d1602f6b94',ramdisk_id='',reservation_id='r-23wlj9ye',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-FloatingIPsAssociationNegativeTestJSON
-611843646',owner_user_name='tempest-FloatingIPsAssociationNegativeTestJSON-611843646-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:10:50Z,user_data=None,user_id='53ded9f664b84e7d85d0944e0b4ecb31',uuid=a454ffff-18eb-45a3-a5f4-84882f050b4f,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "92134be3-e745-4df5-8491-81ceeaf0c20e", "address": "fa:16:3e:42:b4:4f", "network": {"id": "a95667f3-b3ee-4e6a-b815-a7243d0bf012", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-301403611-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "d7dda73e7f3d4e05b0a8d0d1602f6b94", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92134be3-e7", "ovs_interfaceid": "92134be3-e745-4df5-8491-81ceeaf0c20e", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.748 2 DEBUG nova.network.os_vif_util [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Converting VIF {"id": "92134be3-e745-4df5-8491-81ceeaf0c20e", "address": "fa:16:3e:42:b4:4f", "network": {"id": "a95667f3-b3ee-4e6a-b815-a7243d0bf012", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-301403611-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "d7dda73e7f3d4e05b0a8d0d1602f6b94", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92134be3-e7", "ovs_interfaceid": "92134be3-e745-4df5-8491-81ceeaf0c20e", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.748 2 DEBUG nova.network.os_vif_util [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:42:b4:4f,bridge_name='br-int',has_traffic_filtering=True,id=92134be3-e745-4df5-8491-81ceeaf0c20e,network=Network(a95667f3-b3ee-4e6a-b815-a7243d0bf012),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap92134be3-e7') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.749 2 DEBUG nova.objects.instance [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Lazy-loading 'pci_devices' on Instance uuid a454ffff-18eb-45a3-a5f4-84882f050b4f obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.764 2 DEBUG nova.virt.libvirt.driver [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:10:55 compute-0 nova_compute[192079]:   <uuid>a454ffff-18eb-45a3-a5f4-84882f050b4f</uuid>
Oct 02 12:10:55 compute-0 nova_compute[192079]:   <name>instance-00000036</name>
Oct 02 12:10:55 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:10:55 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:10:55 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:10:55 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:       <nova:name>tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651</nova:name>
Oct 02 12:10:55 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:10:55</nova:creationTime>
Oct 02 12:10:55 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:10:55 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:10:55 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:10:55 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:10:55 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:10:55 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:10:55 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:10:55 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:10:55 compute-0 nova_compute[192079]:         <nova:user uuid="53ded9f664b84e7d85d0944e0b4ecb31">tempest-FloatingIPsAssociationNegativeTestJSON-611843646-project-member</nova:user>
Oct 02 12:10:55 compute-0 nova_compute[192079]:         <nova:project uuid="d7dda73e7f3d4e05b0a8d0d1602f6b94">tempest-FloatingIPsAssociationNegativeTestJSON-611843646</nova:project>
Oct 02 12:10:55 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:10:55 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:10:55 compute-0 nova_compute[192079]:         <nova:port uuid="92134be3-e745-4df5-8491-81ceeaf0c20e">
Oct 02 12:10:55 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.7" ipVersion="4"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:10:55 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:10:55 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:10:55 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <system>
Oct 02 12:10:55 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:10:55 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:10:55 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:10:55 compute-0 nova_compute[192079]:       <entry name="serial">a454ffff-18eb-45a3-a5f4-84882f050b4f</entry>
Oct 02 12:10:55 compute-0 nova_compute[192079]:       <entry name="uuid">a454ffff-18eb-45a3-a5f4-84882f050b4f</entry>
Oct 02 12:10:55 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     </system>
Oct 02 12:10:55 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:10:55 compute-0 nova_compute[192079]:   <os>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:   </os>
Oct 02 12:10:55 compute-0 nova_compute[192079]:   <features>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:   </features>
Oct 02 12:10:55 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:10:55 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:10:55 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:10:55 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/a454ffff-18eb-45a3-a5f4-84882f050b4f/disk"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:10:55 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/a454ffff-18eb-45a3-a5f4-84882f050b4f/disk.config"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:10:55 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:42:b4:4f"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:       <target dev="tap92134be3-e7"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:10:55 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/a454ffff-18eb-45a3-a5f4-84882f050b4f/console.log" append="off"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <video>
Oct 02 12:10:55 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     </video>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:10:55 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:10:55 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:10:55 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:10:55 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:10:55 compute-0 nova_compute[192079]: </domain>
Oct 02 12:10:55 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.765 2 DEBUG nova.compute.manager [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Preparing to wait for external event network-vif-plugged-92134be3-e745-4df5-8491-81ceeaf0c20e prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.765 2 DEBUG oslo_concurrency.lockutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Acquiring lock "a454ffff-18eb-45a3-a5f4-84882f050b4f-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.766 2 DEBUG oslo_concurrency.lockutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Lock "a454ffff-18eb-45a3-a5f4-84882f050b4f-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.766 2 DEBUG oslo_concurrency.lockutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Lock "a454ffff-18eb-45a3-a5f4-84882f050b4f-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.766 2 DEBUG nova.virt.libvirt.vif [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:10:47Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651',display_name='tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-floatingipsassociationnegativetestjson-server-894962651',id=54,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='d7dda73e7f3d4e05b0a8d0d1602f6b94',ramdisk_id='',reservation_id='r-23wlj9ye',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-FloatingIPsAssociationNegati
veTestJSON-611843646',owner_user_name='tempest-FloatingIPsAssociationNegativeTestJSON-611843646-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:10:50Z,user_data=None,user_id='53ded9f664b84e7d85d0944e0b4ecb31',uuid=a454ffff-18eb-45a3-a5f4-84882f050b4f,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "92134be3-e745-4df5-8491-81ceeaf0c20e", "address": "fa:16:3e:42:b4:4f", "network": {"id": "a95667f3-b3ee-4e6a-b815-a7243d0bf012", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-301403611-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "d7dda73e7f3d4e05b0a8d0d1602f6b94", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92134be3-e7", "ovs_interfaceid": "92134be3-e745-4df5-8491-81ceeaf0c20e", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.767 2 DEBUG nova.network.os_vif_util [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Converting VIF {"id": "92134be3-e745-4df5-8491-81ceeaf0c20e", "address": "fa:16:3e:42:b4:4f", "network": {"id": "a95667f3-b3ee-4e6a-b815-a7243d0bf012", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-301403611-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "d7dda73e7f3d4e05b0a8d0d1602f6b94", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92134be3-e7", "ovs_interfaceid": "92134be3-e745-4df5-8491-81ceeaf0c20e", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.767 2 DEBUG nova.network.os_vif_util [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:42:b4:4f,bridge_name='br-int',has_traffic_filtering=True,id=92134be3-e745-4df5-8491-81ceeaf0c20e,network=Network(a95667f3-b3ee-4e6a-b815-a7243d0bf012),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap92134be3-e7') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.767 2 DEBUG os_vif [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:42:b4:4f,bridge_name='br-int',has_traffic_filtering=True,id=92134be3-e745-4df5-8491-81ceeaf0c20e,network=Network(a95667f3-b3ee-4e6a-b815-a7243d0bf012),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap92134be3-e7') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.768 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.768 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.768 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.771 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.771 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap92134be3-e7, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.771 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap92134be3-e7, col_values=(('external_ids', {'iface-id': '92134be3-e745-4df5-8491-81ceeaf0c20e', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:42:b4:4f', 'vm-uuid': 'a454ffff-18eb-45a3-a5f4-84882f050b4f'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.773 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:55 compute-0 NetworkManager[51160]: <info>  [1759407055.7737] manager: (tap92134be3-e7): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/81)
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.776 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.782 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.783 2 INFO os_vif [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:42:b4:4f,bridge_name='br-int',has_traffic_filtering=True,id=92134be3-e745-4df5-8491-81ceeaf0c20e,network=Network(a95667f3-b3ee-4e6a-b815-a7243d0bf012),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap92134be3-e7')
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.841 2 DEBUG nova.virt.libvirt.driver [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.842 2 DEBUG nova.virt.libvirt.driver [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.842 2 DEBUG nova.virt.libvirt.driver [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] No VIF found with MAC fa:16:3e:42:b4:4f, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.842 2 INFO nova.virt.libvirt.driver [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Using config drive
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.882 2 DEBUG nova.compute.manager [req-d2913431-7d40-4726-a5da-d7ec63ea7486 req-0557fed9-4e43-429d-9880-aa10b92d39bf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Received event network-changed-92134be3-e745-4df5-8491-81ceeaf0c20e external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.882 2 DEBUG nova.compute.manager [req-d2913431-7d40-4726-a5da-d7ec63ea7486 req-0557fed9-4e43-429d-9880-aa10b92d39bf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Refreshing instance network info cache due to event network-changed-92134be3-e745-4df5-8491-81ceeaf0c20e. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.882 2 DEBUG oslo_concurrency.lockutils [req-d2913431-7d40-4726-a5da-d7ec63ea7486 req-0557fed9-4e43-429d-9880-aa10b92d39bf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-a454ffff-18eb-45a3-a5f4-84882f050b4f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.882 2 DEBUG oslo_concurrency.lockutils [req-d2913431-7d40-4726-a5da-d7ec63ea7486 req-0557fed9-4e43-429d-9880-aa10b92d39bf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-a454ffff-18eb-45a3-a5f4-84882f050b4f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:10:55 compute-0 nova_compute[192079]: 2025-10-02 12:10:55.883 2 DEBUG nova.network.neutron [req-d2913431-7d40-4726-a5da-d7ec63ea7486 req-0557fed9-4e43-429d-9880-aa10b92d39bf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Refreshing network info cache for port 92134be3-e745-4df5-8491-81ceeaf0c20e _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:10:56 compute-0 nova_compute[192079]: 2025-10-02 12:10:56.208 2 INFO nova.virt.libvirt.driver [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Creating config drive at /var/lib/nova/instances/a454ffff-18eb-45a3-a5f4-84882f050b4f/disk.config
Oct 02 12:10:56 compute-0 nova_compute[192079]: 2025-10-02 12:10:56.214 2 DEBUG oslo_concurrency.processutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/a454ffff-18eb-45a3-a5f4-84882f050b4f/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp3cc9m51_ execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:10:56 compute-0 nova_compute[192079]: 2025-10-02 12:10:56.343 2 DEBUG oslo_concurrency.processutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/a454ffff-18eb-45a3-a5f4-84882f050b4f/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp3cc9m51_" returned: 0 in 0.129s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:10:56 compute-0 kernel: tap92134be3-e7: entered promiscuous mode
Oct 02 12:10:56 compute-0 NetworkManager[51160]: <info>  [1759407056.4229] manager: (tap92134be3-e7): new Tun device (/org/freedesktop/NetworkManager/Devices/82)
Oct 02 12:10:56 compute-0 nova_compute[192079]: 2025-10-02 12:10:56.472 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:56 compute-0 ovn_controller[94336]: 2025-10-02T12:10:56Z|00168|binding|INFO|Claiming lport 92134be3-e745-4df5-8491-81ceeaf0c20e for this chassis.
Oct 02 12:10:56 compute-0 ovn_controller[94336]: 2025-10-02T12:10:56Z|00169|binding|INFO|92134be3-e745-4df5-8491-81ceeaf0c20e: Claiming fa:16:3e:42:b4:4f 10.100.0.7
Oct 02 12:10:56 compute-0 nova_compute[192079]: 2025-10-02 12:10:56.476 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:56 compute-0 systemd-machined[152150]: New machine qemu-27-instance-00000036.
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:56.495 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:42:b4:4f 10.100.0.7'], port_security=['fa:16:3e:42:b4:4f 10.100.0.7'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.7/28', 'neutron:device_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-a95667f3-b3ee-4e6a-b815-a7243d0bf012', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 'neutron:revision_number': '2', 'neutron:security_group_ids': '5c6b9803-4b78-4b5f-8eec-2999cf25dd71', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=116eade2-49ab-4232-a054-5dd54644a8e9, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=92134be3-e745-4df5-8491-81ceeaf0c20e) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:56.496 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 92134be3-e745-4df5-8491-81ceeaf0c20e in datapath a95667f3-b3ee-4e6a-b815-a7243d0bf012 bound to our chassis
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:56.498 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network a95667f3-b3ee-4e6a-b815-a7243d0bf012
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:56.507 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0aced2b2-73f0-4f12-802e-49ab738fb533]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:56.508 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tapa95667f3-b1 in ovnmeta-a95667f3-b3ee-4e6a-b815-a7243d0bf012 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:56.512 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tapa95667f3-b0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:56.512 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[de98d1a2-a4eb-424e-92f6-6d4afa8d11e6]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:56.514 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[025b9138-335c-4f89-8465-922566942d02]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:56.524 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[df455b6b-ee68-44da-92a0-247c31727ae8]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:56.544 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6f5d4907-05d9-4f9f-a4f1-60144a52b48c]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:56 compute-0 ovn_controller[94336]: 2025-10-02T12:10:56Z|00170|binding|INFO|Setting lport 92134be3-e745-4df5-8491-81ceeaf0c20e ovn-installed in OVS
Oct 02 12:10:56 compute-0 ovn_controller[94336]: 2025-10-02T12:10:56Z|00171|binding|INFO|Setting lport 92134be3-e745-4df5-8491-81ceeaf0c20e up in Southbound
Oct 02 12:10:56 compute-0 systemd[1]: Started Virtual Machine qemu-27-instance-00000036.
Oct 02 12:10:56 compute-0 nova_compute[192079]: 2025-10-02 12:10:56.551 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:56 compute-0 systemd-udevd[227468]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:10:56 compute-0 NetworkManager[51160]: <info>  [1759407056.5690] device (tap92134be3-e7): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:10:56 compute-0 NetworkManager[51160]: <info>  [1759407056.5698] device (tap92134be3-e7): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:56.584 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[5557d98e-de53-430f-9453-9bd05d11c914]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:56 compute-0 NetworkManager[51160]: <info>  [1759407056.5905] manager: (tapa95667f3-b0): new Veth device (/org/freedesktop/NetworkManager/Devices/83)
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:56.589 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[21dc7124-e45f-4d69-9f01-e65d805a121b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:56.618 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[a4a5a5e3-0534-4007-8eaf-57a0efa1cba2]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:56.622 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[3286c9bb-2f65-41ba-95c1-fe3425bc689c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:56 compute-0 NetworkManager[51160]: <info>  [1759407056.6413] device (tapa95667f3-b0): carrier: link connected
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:56.646 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[e75b1619-c0df-46e9-a77d-cd9a6c5e6991]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:56.661 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9105b960-cb69-41d6-9b64-509a4759cd66]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapa95667f3-b1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:5a:ae:a9'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 50], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 505426, 'reachable_time': 20698, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 227497, 'error': None, 'target': 'ovnmeta-a95667f3-b3ee-4e6a-b815-a7243d0bf012', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:56.676 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a6ef7e82-0b92-493f-ba40-282445f63a9c]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe5a:aea9'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 505426, 'tstamp': 505426}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 227498, 'error': None, 'target': 'ovnmeta-a95667f3-b3ee-4e6a-b815-a7243d0bf012', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:56.691 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7aa81b32-7999-49b4-956a-fe6d29c5aeaf]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapa95667f3-b1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:5a:ae:a9'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 50], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 505426, 'reachable_time': 20698, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 227499, 'error': None, 'target': 'ovnmeta-a95667f3-b3ee-4e6a-b815-a7243d0bf012', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:56.713 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[37661512-7112-4165-bfde-00ed16f622c0]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:56.765 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3d7af2c4-b411-42c0-b7f6-07e669110c6e]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:56.766 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapa95667f3-b0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:56.767 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:56.767 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapa95667f3-b0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:10:56 compute-0 kernel: tapa95667f3-b0: entered promiscuous mode
Oct 02 12:10:56 compute-0 nova_compute[192079]: 2025-10-02 12:10:56.768 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:56 compute-0 nova_compute[192079]: 2025-10-02 12:10:56.770 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:56.771 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tapa95667f3-b0, col_values=(('external_ids', {'iface-id': 'dfe1a22b-746e-4d91-bcf5-16d45608b6ed'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:10:56 compute-0 ovn_controller[94336]: 2025-10-02T12:10:56Z|00172|binding|INFO|Releasing lport dfe1a22b-746e-4d91-bcf5-16d45608b6ed from this chassis (sb_readonly=0)
Oct 02 12:10:56 compute-0 NetworkManager[51160]: <info>  [1759407056.7730] manager: (tapa95667f3-b0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/84)
Oct 02 12:10:56 compute-0 nova_compute[192079]: 2025-10-02 12:10:56.773 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:56 compute-0 nova_compute[192079]: 2025-10-02 12:10:56.783 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:56.784 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/a95667f3-b3ee-4e6a-b815-a7243d0bf012.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/a95667f3-b3ee-4e6a-b815-a7243d0bf012.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:56.785 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1501d3ca-a90c-4b69-ae43-15cb4c18072b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:56.786 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-a95667f3-b3ee-4e6a-b815-a7243d0bf012
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/a95667f3-b3ee-4e6a-b815-a7243d0bf012.pid.haproxy
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID a95667f3-b3ee-4e6a-b815-a7243d0bf012
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:10:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:10:56.787 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-a95667f3-b3ee-4e6a-b815-a7243d0bf012', 'env', 'PROCESS_TAG=haproxy-a95667f3-b3ee-4e6a-b815-a7243d0bf012', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/a95667f3-b3ee-4e6a-b815-a7243d0bf012.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:10:57 compute-0 podman[227537]: 2025-10-02 12:10:57.168953327 +0000 UTC m=+0.044433771 container create d6370f8d9ebadae2cf70d4a4ce25fe5f3380588d2c1b5121e10633afc508cb7e (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a95667f3-b3ee-4e6a-b815-a7243d0bf012, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:10:57 compute-0 systemd[1]: Started libpod-conmon-d6370f8d9ebadae2cf70d4a4ce25fe5f3380588d2c1b5121e10633afc508cb7e.scope.
Oct 02 12:10:57 compute-0 nova_compute[192079]: 2025-10-02 12:10:57.236 2 DEBUG nova.network.neutron [req-d2913431-7d40-4726-a5da-d7ec63ea7486 req-0557fed9-4e43-429d-9880-aa10b92d39bf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Updated VIF entry in instance network info cache for port 92134be3-e745-4df5-8491-81ceeaf0c20e. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:10:57 compute-0 nova_compute[192079]: 2025-10-02 12:10:57.237 2 DEBUG nova.network.neutron [req-d2913431-7d40-4726-a5da-d7ec63ea7486 req-0557fed9-4e43-429d-9880-aa10b92d39bf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Updating instance_info_cache with network_info: [{"id": "92134be3-e745-4df5-8491-81ceeaf0c20e", "address": "fa:16:3e:42:b4:4f", "network": {"id": "a95667f3-b3ee-4e6a-b815-a7243d0bf012", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-301403611-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "d7dda73e7f3d4e05b0a8d0d1602f6b94", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92134be3-e7", "ovs_interfaceid": "92134be3-e745-4df5-8491-81ceeaf0c20e", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:10:57 compute-0 podman[227537]: 2025-10-02 12:10:57.145176845 +0000 UTC m=+0.020657269 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:10:57 compute-0 nova_compute[192079]: 2025-10-02 12:10:57.253 2 DEBUG oslo_concurrency.lockutils [req-d2913431-7d40-4726-a5da-d7ec63ea7486 req-0557fed9-4e43-429d-9880-aa10b92d39bf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-a454ffff-18eb-45a3-a5f4-84882f050b4f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:10:57 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:10:57 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/32a0f1bcff81f207dcdea285b2e3d2ed9cd8cb943b0d4f90b1b9604f98bbe639/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:10:57 compute-0 podman[227537]: 2025-10-02 12:10:57.272753989 +0000 UTC m=+0.148234443 container init d6370f8d9ebadae2cf70d4a4ce25fe5f3380588d2c1b5121e10633afc508cb7e (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a95667f3-b3ee-4e6a-b815-a7243d0bf012, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:10:57 compute-0 podman[227537]: 2025-10-02 12:10:57.277665892 +0000 UTC m=+0.153146306 container start d6370f8d9ebadae2cf70d4a4ce25fe5f3380588d2c1b5121e10633afc508cb7e (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a95667f3-b3ee-4e6a-b815-a7243d0bf012, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.license=GPLv2, io.buildah.version=1.41.3)
Oct 02 12:10:57 compute-0 neutron-haproxy-ovnmeta-a95667f3-b3ee-4e6a-b815-a7243d0bf012[227552]: [NOTICE]   (227556) : New worker (227558) forked
Oct 02 12:10:57 compute-0 neutron-haproxy-ovnmeta-a95667f3-b3ee-4e6a-b815-a7243d0bf012[227552]: [NOTICE]   (227556) : Loading success.
Oct 02 12:10:57 compute-0 nova_compute[192079]: 2025-10-02 12:10:57.334 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407057.334022, a454ffff-18eb-45a3-a5f4-84882f050b4f => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:10:57 compute-0 nova_compute[192079]: 2025-10-02 12:10:57.335 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] VM Started (Lifecycle Event)
Oct 02 12:10:57 compute-0 nova_compute[192079]: 2025-10-02 12:10:57.358 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:10:57 compute-0 nova_compute[192079]: 2025-10-02 12:10:57.363 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407057.3342352, a454ffff-18eb-45a3-a5f4-84882f050b4f => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:10:57 compute-0 nova_compute[192079]: 2025-10-02 12:10:57.363 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] VM Paused (Lifecycle Event)
Oct 02 12:10:57 compute-0 nova_compute[192079]: 2025-10-02 12:10:57.386 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:10:57 compute-0 nova_compute[192079]: 2025-10-02 12:10:57.390 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:10:57 compute-0 nova_compute[192079]: 2025-10-02 12:10:57.411 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.001 2 DEBUG nova.compute.manager [req-428a3fde-fab2-4c6c-bdd2-a5e942d7510c req-377896e7-6f12-4b10-8b59-87510a71099a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Received event network-vif-plugged-92134be3-e745-4df5-8491-81ceeaf0c20e external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.001 2 DEBUG oslo_concurrency.lockutils [req-428a3fde-fab2-4c6c-bdd2-a5e942d7510c req-377896e7-6f12-4b10-8b59-87510a71099a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "a454ffff-18eb-45a3-a5f4-84882f050b4f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.002 2 DEBUG oslo_concurrency.lockutils [req-428a3fde-fab2-4c6c-bdd2-a5e942d7510c req-377896e7-6f12-4b10-8b59-87510a71099a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a454ffff-18eb-45a3-a5f4-84882f050b4f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.002 2 DEBUG oslo_concurrency.lockutils [req-428a3fde-fab2-4c6c-bdd2-a5e942d7510c req-377896e7-6f12-4b10-8b59-87510a71099a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a454ffff-18eb-45a3-a5f4-84882f050b4f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.002 2 DEBUG nova.compute.manager [req-428a3fde-fab2-4c6c-bdd2-a5e942d7510c req-377896e7-6f12-4b10-8b59-87510a71099a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Processing event network-vif-plugged-92134be3-e745-4df5-8491-81ceeaf0c20e _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.002 2 DEBUG nova.compute.manager [req-428a3fde-fab2-4c6c-bdd2-a5e942d7510c req-377896e7-6f12-4b10-8b59-87510a71099a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Received event network-vif-plugged-92134be3-e745-4df5-8491-81ceeaf0c20e external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.002 2 DEBUG oslo_concurrency.lockutils [req-428a3fde-fab2-4c6c-bdd2-a5e942d7510c req-377896e7-6f12-4b10-8b59-87510a71099a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "a454ffff-18eb-45a3-a5f4-84882f050b4f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.002 2 DEBUG oslo_concurrency.lockutils [req-428a3fde-fab2-4c6c-bdd2-a5e942d7510c req-377896e7-6f12-4b10-8b59-87510a71099a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a454ffff-18eb-45a3-a5f4-84882f050b4f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.003 2 DEBUG oslo_concurrency.lockutils [req-428a3fde-fab2-4c6c-bdd2-a5e942d7510c req-377896e7-6f12-4b10-8b59-87510a71099a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a454ffff-18eb-45a3-a5f4-84882f050b4f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.003 2 DEBUG nova.compute.manager [req-428a3fde-fab2-4c6c-bdd2-a5e942d7510c req-377896e7-6f12-4b10-8b59-87510a71099a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] No waiting events found dispatching network-vif-plugged-92134be3-e745-4df5-8491-81ceeaf0c20e pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.003 2 WARNING nova.compute.manager [req-428a3fde-fab2-4c6c-bdd2-a5e942d7510c req-377896e7-6f12-4b10-8b59-87510a71099a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Received unexpected event network-vif-plugged-92134be3-e745-4df5-8491-81ceeaf0c20e for instance with vm_state building and task_state spawning.
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.003 2 DEBUG nova.compute.manager [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.006 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407058.006713, a454ffff-18eb-45a3-a5f4-84882f050b4f => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.007 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] VM Resumed (Lifecycle Event)
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.009 2 DEBUG nova.virt.libvirt.driver [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.012 2 INFO nova.virt.libvirt.driver [-] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Instance spawned successfully.
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.012 2 DEBUG nova.virt.libvirt.driver [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.075 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.077 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.146 2 DEBUG nova.virt.libvirt.driver [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.147 2 DEBUG nova.virt.libvirt.driver [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.147 2 DEBUG nova.virt.libvirt.driver [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.148 2 DEBUG nova.virt.libvirt.driver [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.148 2 DEBUG nova.virt.libvirt.driver [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.148 2 DEBUG nova.virt.libvirt.driver [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.244 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.291 2 INFO nova.compute.manager [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Took 8.08 seconds to spawn the instance on the hypervisor.
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.291 2 DEBUG nova.compute.manager [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.402 2 INFO nova.compute.manager [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Took 8.92 seconds to build instance.
Oct 02 12:10:58 compute-0 nova_compute[192079]: 2025-10-02 12:10:58.422 2 DEBUG oslo_concurrency.lockutils [None req-8bdf6f1f-962c-41cd-8646-6cdaca1b3476 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Lock "a454ffff-18eb-45a3-a5f4-84882f050b4f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 9.029s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:11:00 compute-0 nova_compute[192079]: 2025-10-02 12:11:00.299 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:00 compute-0 nova_compute[192079]: 2025-10-02 12:11:00.773 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:02.212 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:11:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:02.214 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.002s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:11:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:02.215 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:11:03 compute-0 podman[227570]: 2025-10-02 12:11:03.184731999 +0000 UTC m=+0.057262497 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']})
Oct 02 12:11:03 compute-0 podman[227568]: 2025-10-02 12:11:03.188207123 +0000 UTC m=+0.059516888 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, tcib_managed=true, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, 
org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:11:03 compute-0 podman[227569]: 2025-10-02 12:11:03.217014371 +0000 UTC m=+0.092236722 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, config_id=ovn_controller, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, managed_by=edpm_ansible, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, container_name=ovn_controller, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:11:05 compute-0 NetworkManager[51160]: <info>  [1759407065.2714] manager: (patch-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d-to-br-int): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/85)
Oct 02 12:11:05 compute-0 NetworkManager[51160]: <info>  [1759407065.2721] manager: (patch-br-int-to-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/86)
Oct 02 12:11:05 compute-0 nova_compute[192079]: 2025-10-02 12:11:05.270 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:05 compute-0 nova_compute[192079]: 2025-10-02 12:11:05.390 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:05 compute-0 ovn_controller[94336]: 2025-10-02T12:11:05Z|00173|binding|INFO|Releasing lport dfe1a22b-746e-4d91-bcf5-16d45608b6ed from this chassis (sb_readonly=0)
Oct 02 12:11:05 compute-0 nova_compute[192079]: 2025-10-02 12:11:05.416 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:05 compute-0 nova_compute[192079]: 2025-10-02 12:11:05.773 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:07 compute-0 nova_compute[192079]: 2025-10-02 12:11:07.704 2 DEBUG nova.compute.manager [req-24f74341-8e36-40dd-a896-21504c75bd7f req-6e4b9bd4-c06e-4ef9-a85d-b0792b297383 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Received event network-changed-92134be3-e745-4df5-8491-81ceeaf0c20e external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:11:07 compute-0 nova_compute[192079]: 2025-10-02 12:11:07.704 2 DEBUG nova.compute.manager [req-24f74341-8e36-40dd-a896-21504c75bd7f req-6e4b9bd4-c06e-4ef9-a85d-b0792b297383 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Refreshing instance network info cache due to event network-changed-92134be3-e745-4df5-8491-81ceeaf0c20e. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:11:07 compute-0 nova_compute[192079]: 2025-10-02 12:11:07.704 2 DEBUG oslo_concurrency.lockutils [req-24f74341-8e36-40dd-a896-21504c75bd7f req-6e4b9bd4-c06e-4ef9-a85d-b0792b297383 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-a454ffff-18eb-45a3-a5f4-84882f050b4f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:11:07 compute-0 nova_compute[192079]: 2025-10-02 12:11:07.704 2 DEBUG oslo_concurrency.lockutils [req-24f74341-8e36-40dd-a896-21504c75bd7f req-6e4b9bd4-c06e-4ef9-a85d-b0792b297383 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-a454ffff-18eb-45a3-a5f4-84882f050b4f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:11:07 compute-0 nova_compute[192079]: 2025-10-02 12:11:07.705 2 DEBUG nova.network.neutron [req-24f74341-8e36-40dd-a896-21504c75bd7f req-6e4b9bd4-c06e-4ef9-a85d-b0792b297383 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Refreshing network info cache for port 92134be3-e745-4df5-8491-81ceeaf0c20e _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:11:09 compute-0 nova_compute[192079]: 2025-10-02 12:11:09.907 2 DEBUG nova.network.neutron [req-24f74341-8e36-40dd-a896-21504c75bd7f req-6e4b9bd4-c06e-4ef9-a85d-b0792b297383 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Updated VIF entry in instance network info cache for port 92134be3-e745-4df5-8491-81ceeaf0c20e. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:11:09 compute-0 nova_compute[192079]: 2025-10-02 12:11:09.909 2 DEBUG nova.network.neutron [req-24f74341-8e36-40dd-a896-21504c75bd7f req-6e4b9bd4-c06e-4ef9-a85d-b0792b297383 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Updating instance_info_cache with network_info: [{"id": "92134be3-e745-4df5-8491-81ceeaf0c20e", "address": "fa:16:3e:42:b4:4f", "network": {"id": "a95667f3-b3ee-4e6a-b815-a7243d0bf012", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-301403611-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "d7dda73e7f3d4e05b0a8d0d1602f6b94", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92134be3-e7", "ovs_interfaceid": "92134be3-e745-4df5-8491-81ceeaf0c20e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:11:09 compute-0 nova_compute[192079]: 2025-10-02 12:11:09.930 2 DEBUG oslo_concurrency.lockutils [req-24f74341-8e36-40dd-a896-21504c75bd7f req-6e4b9bd4-c06e-4ef9-a85d-b0792b297383 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-a454ffff-18eb-45a3-a5f4-84882f050b4f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:11:10 compute-0 nova_compute[192079]: 2025-10-02 12:11:10.394 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:10 compute-0 nova_compute[192079]: 2025-10-02 12:11:10.776 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:11 compute-0 ovn_controller[94336]: 2025-10-02T12:11:11Z|00016|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:42:b4:4f 10.100.0.7
Oct 02 12:11:11 compute-0 ovn_controller[94336]: 2025-10-02T12:11:11Z|00017|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:42:b4:4f 10.100.0.7
Oct 02 12:11:12 compute-0 podman[227650]: 2025-10-02 12:11:12.140949138 +0000 UTC m=+0.059913569 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, config_id=edpm, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ceilometer_agent_compute, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', 
'/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, org.label-schema.build-date=20251001, tcib_managed=true)
Oct 02 12:11:14 compute-0 nova_compute[192079]: 2025-10-02 12:11:14.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:11:14 compute-0 nova_compute[192079]: 2025-10-02 12:11:14.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:11:15 compute-0 nova_compute[192079]: 2025-10-02 12:11:15.034 2 DEBUG nova.compute.manager [req-74bfa25c-4dcb-4385-a16f-c789b9f089e7 req-5d4e04de-bc4e-46d5-8fe7-738910816897 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Received event network-changed-92134be3-e745-4df5-8491-81ceeaf0c20e external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:11:15 compute-0 nova_compute[192079]: 2025-10-02 12:11:15.034 2 DEBUG nova.compute.manager [req-74bfa25c-4dcb-4385-a16f-c789b9f089e7 req-5d4e04de-bc4e-46d5-8fe7-738910816897 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Refreshing instance network info cache due to event network-changed-92134be3-e745-4df5-8491-81ceeaf0c20e. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:11:15 compute-0 nova_compute[192079]: 2025-10-02 12:11:15.035 2 DEBUG oslo_concurrency.lockutils [req-74bfa25c-4dcb-4385-a16f-c789b9f089e7 req-5d4e04de-bc4e-46d5-8fe7-738910816897 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-a454ffff-18eb-45a3-a5f4-84882f050b4f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:11:15 compute-0 nova_compute[192079]: 2025-10-02 12:11:15.035 2 DEBUG oslo_concurrency.lockutils [req-74bfa25c-4dcb-4385-a16f-c789b9f089e7 req-5d4e04de-bc4e-46d5-8fe7-738910816897 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-a454ffff-18eb-45a3-a5f4-84882f050b4f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:11:15 compute-0 nova_compute[192079]: 2025-10-02 12:11:15.035 2 DEBUG nova.network.neutron [req-74bfa25c-4dcb-4385-a16f-c789b9f089e7 req-5d4e04de-bc4e-46d5-8fe7-738910816897 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Refreshing network info cache for port 92134be3-e745-4df5-8491-81ceeaf0c20e _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:11:15 compute-0 nova_compute[192079]: 2025-10-02 12:11:15.395 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:15 compute-0 nova_compute[192079]: 2025-10-02 12:11:15.660 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:11:15 compute-0 nova_compute[192079]: 2025-10-02 12:11:15.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:11:15 compute-0 nova_compute[192079]: 2025-10-02 12:11:15.691 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:11:15 compute-0 nova_compute[192079]: 2025-10-02 12:11:15.692 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:11:15 compute-0 nova_compute[192079]: 2025-10-02 12:11:15.692 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:11:15 compute-0 nova_compute[192079]: 2025-10-02 12:11:15.692 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:11:15 compute-0 nova_compute[192079]: 2025-10-02 12:11:15.756 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a454ffff-18eb-45a3-a5f4-84882f050b4f/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:11:15 compute-0 nova_compute[192079]: 2025-10-02 12:11:15.777 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:15 compute-0 nova_compute[192079]: 2025-10-02 12:11:15.809 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a454ffff-18eb-45a3-a5f4-84882f050b4f/disk --force-share --output=json" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:11:15 compute-0 nova_compute[192079]: 2025-10-02 12:11:15.810 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a454ffff-18eb-45a3-a5f4-84882f050b4f/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:11:15 compute-0 nova_compute[192079]: 2025-10-02 12:11:15.865 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a454ffff-18eb-45a3-a5f4-84882f050b4f/disk --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:11:16 compute-0 nova_compute[192079]: 2025-10-02 12:11:16.020 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:11:16 compute-0 nova_compute[192079]: 2025-10-02 12:11:16.022 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5575MB free_disk=73.3282241821289GB free_vcpus=7 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:11:16 compute-0 nova_compute[192079]: 2025-10-02 12:11:16.023 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:11:16 compute-0 nova_compute[192079]: 2025-10-02 12:11:16.023 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:11:16 compute-0 nova_compute[192079]: 2025-10-02 12:11:16.106 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance a454ffff-18eb-45a3-a5f4-84882f050b4f actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:11:16 compute-0 nova_compute[192079]: 2025-10-02 12:11:16.107 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 1 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:11:16 compute-0 nova_compute[192079]: 2025-10-02 12:11:16.107 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=640MB phys_disk=79GB used_disk=1GB total_vcpus=8 used_vcpus=1 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:11:16 compute-0 nova_compute[192079]: 2025-10-02 12:11:16.237 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing inventories for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708 _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:804
Oct 02 12:11:16 compute-0 nova_compute[192079]: 2025-10-02 12:11:16.268 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Updating ProviderTree inventory for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 from _refresh_and_get_inventory using data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} _refresh_and_get_inventory /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:768
Oct 02 12:11:16 compute-0 nova_compute[192079]: 2025-10-02 12:11:16.269 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Updating inventory in ProviderTree for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 with inventory: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:176
Oct 02 12:11:16 compute-0 nova_compute[192079]: 2025-10-02 12:11:16.281 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing aggregate associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, aggregates: None _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:813
Oct 02 12:11:16 compute-0 nova_compute[192079]: 2025-10-02 12:11:16.313 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing trait associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, traits: COMPUTE_SECURITY_UEFI_SECURE_BOOT,COMPUTE_VIOMMU_MODEL_VIRTIO,COMPUTE_VIOMMU_MODEL_AUTO,COMPUTE_IMAGE_TYPE_AKI,COMPUTE_GRAPHICS_MODEL_VIRTIO,COMPUTE_NET_VIF_MODEL_PCNET,HW_CPU_X86_SSE42,COMPUTE_RESCUE_BFV,COMPUTE_VOLUME_EXTEND,COMPUTE_IMAGE_TYPE_QCOW2,COMPUTE_TRUSTED_CERTS,COMPUTE_SOCKET_PCI_NUMA_AFFINITY,COMPUTE_GRAPHICS_MODEL_CIRRUS,HW_CPU_X86_MMX,COMPUTE_STORAGE_BUS_VIRTIO,COMPUTE_NET_ATTACH_INTERFACE_WITH_TAG,COMPUTE_STORAGE_BUS_FDC,COMPUTE_STORAGE_BUS_USB,COMPUTE_NODE,HW_CPU_X86_SSSE3,HW_CPU_X86_SSE2,COMPUTE_GRAPHICS_MODEL_BOCHS,COMPUTE_NET_VIF_MODEL_E1000E,COMPUTE_IMAGE_TYPE_RAW,COMPUTE_NET_VIF_MODEL_NE2K_PCI,COMPUTE_IMAGE_TYPE_AMI,COMPUTE_VIOMMU_MODEL_INTEL,COMPUTE_SECURITY_TPM_2_0,COMPUTE_STORAGE_BUS_SCSI,COMPUTE_IMAGE_TYPE_ARI,COMPUTE_NET_VIF_MODEL_VMXNET3,COMPUTE_SECURITY_TPM_1_2,COMPUTE_NET_VIF_MODEL_E1000,HW_CPU_X86_SSE,COMPUTE_VOLUME_MULTI_ATTACH,COMPUTE_STORAGE_BUS_IDE,COMPUTE_GRAPHICS_MODEL_NONE,COMPUTE_VOLUME_ATTACH_WITH_TAG,COMPUTE_NET_VIF_MODEL_VIRTIO,HW_CPU_X86_SSE41,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_DEVICE_TAGGING,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_ACCELERATORS,COMPUTE_NET_VIF_MODEL_RTL8139,COMPUTE_GRAPHICS_MODEL_VGA,COMPUTE_STORAGE_BUS_SATA,COMPUTE_NET_VIF_MODEL_SPAPR_VLAN _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:825
Oct 02 12:11:16 compute-0 nova_compute[192079]: 2025-10-02 12:11:16.362 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:11:16 compute-0 nova_compute[192079]: 2025-10-02 12:11:16.380 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:11:16 compute-0 nova_compute[192079]: 2025-10-02 12:11:16.405 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:11:16 compute-0 nova_compute[192079]: 2025-10-02 12:11:16.405 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.382s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:11:16 compute-0 nova_compute[192079]: 2025-10-02 12:11:16.459 2 DEBUG nova.network.neutron [req-74bfa25c-4dcb-4385-a16f-c789b9f089e7 req-5d4e04de-bc4e-46d5-8fe7-738910816897 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Updated VIF entry in instance network info cache for port 92134be3-e745-4df5-8491-81ceeaf0c20e. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:11:16 compute-0 nova_compute[192079]: 2025-10-02 12:11:16.459 2 DEBUG nova.network.neutron [req-74bfa25c-4dcb-4385-a16f-c789b9f089e7 req-5d4e04de-bc4e-46d5-8fe7-738910816897 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Updating instance_info_cache with network_info: [{"id": "92134be3-e745-4df5-8491-81ceeaf0c20e", "address": "fa:16:3e:42:b4:4f", "network": {"id": "a95667f3-b3ee-4e6a-b815-a7243d0bf012", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-301403611-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "d7dda73e7f3d4e05b0a8d0d1602f6b94", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92134be3-e7", "ovs_interfaceid": "92134be3-e745-4df5-8491-81ceeaf0c20e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:11:16 compute-0 nova_compute[192079]: 2025-10-02 12:11:16.482 2 DEBUG oslo_concurrency.lockutils [req-74bfa25c-4dcb-4385-a16f-c789b9f089e7 req-5d4e04de-bc4e-46d5-8fe7-738910816897 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-a454ffff-18eb-45a3-a5f4-84882f050b4f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.103 12 DEBUG ceilometer.compute.discovery [-] instance data: {'id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'os_type': 'hvm', 'architecture': 'x86_64', 'OS-EXT-SRV-ATTR:instance_name': 'instance-00000036', 'OS-EXT-SRV-ATTR:host': 'compute-0.ctlplane.example.com', 'OS-EXT-STS:vm_state': 'running', 'tenant_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'hostId': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'status': 'active', 'metadata': {}} discover_libvirt_polling /usr/lib/python3.9/site-packages/ceilometer/compute/discovery.py:228
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.104 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.bytes in the context of pollsters
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.125 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/disk.device.read.bytes volume: 30747136 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.125 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/disk.device.read.bytes volume: 274750 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'fbfe22fe-d800-4c23-9222-bb8175e95c18', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 30747136, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 'project_name': None, 'resource_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f-vda', 'timestamp': '2025-10-02T12:11:17.104416', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'instance-00000036', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'e5fa8016-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.791484677, 'message_signature': 'ec558e89eb2873cb760786fa6de2b1af05953783e8f9df183f0ae0913a40126b'}, {'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 274750, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 
'project_name': None, 'resource_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f-sda', 'timestamp': '2025-10-02T12:11:17.104416', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'instance-00000036', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'e5fa8ac0-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.791484677, 'message_signature': 'd98933fd387c8cdb58bbae635ee1d5e06c30f26f7e4ea8ac89d8e84917ecc5c0'}]}, 'timestamp': '2025-10-02 12:11:17.125710', '_unique_id': '895c5aaf2829473dbcda8932ae5cf9d6'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.126 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.127 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets.error in the context of pollsters
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.130 12 DEBUG ceilometer.compute.virt.libvirt.inspector [-] No delta meter predecessor for a454ffff-18eb-45a3-a5f4-84882f050b4f / tap92134be3-e7 inspect_vnics /usr/lib/python3.9/site-packages/ceilometer/compute/virt/libvirt/inspector.py:136
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.131 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/network.incoming.packets.error volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'e8c62edd-78b0-4c27-872b-102558222254', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets.error', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 'project_name': None, 'resource_id': 'instance-00000036-a454ffff-18eb-45a3-a5f4-84882f050b4f-tap92134be3-e7', 'timestamp': '2025-10-02T12:11:17.128016', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'tap92134be3-e7', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:42:b4:4f', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap92134be3-e7'}, 'message_id': 'e5fb68b4-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.815110065, 'message_signature': 'e1423c3b4fff6ea7475c324c384137acde7ed11953445beaefab395260bed21b'}]}, 'timestamp': '2025-10-02 12:11:17.131511', '_unique_id': 'e99a6c8e961e4fb9a41d1a7fa9dd9087'}: kombu.exceptions.OperationalError: 
[Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.132 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.133 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.bytes in the context of pollsters
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.133 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/disk.device.write.bytes volume: 72851456 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.133 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/disk.device.write.bytes volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'b857c150-8c00-4f2c-b369-634ddc585b34', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 72851456, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 'project_name': None, 'resource_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f-vda', 'timestamp': '2025-10-02T12:11:17.133449', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'instance-00000036', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'e5fbc2c8-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.791484677, 'message_signature': '8195e1e7a3a10759b64238385b4df495daf2b5a2f83ccdbaeb5ae762a63a97e9'}, {'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 'project_name': 
None, 'resource_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f-sda', 'timestamp': '2025-10-02T12:11:17.133449', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'instance-00000036', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'e5fbcaf2-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.791484677, 'message_signature': '050c2bbf0b66fcaa7b833c6c1acb7792729760984d6b39f8de445fdc8b806390'}]}, 'timestamp': '2025-10-02 12:11:17.133892', '_unique_id': '75e1f2c8b76642599e04b81fe8ed7fc7'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.134 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.135 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.usage in the context of pollsters
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.147 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/disk.device.usage volume: 29884416 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 podman[227677]: 2025-10-02 12:11:17.147451872 +0000 UTC m=+0.062134380 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, url=https://catalog.redhat.com/en/search?searchType=containers, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, vendor=Red Hat, Inc., io.openshift.expose-services=, maintainer=Red Hat, Inc., release=1755695350, build-date=2025-08-20T13:12:41, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, distribution-scope=public, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., name=ubi9-minimal, io.buildah.version=1.33.7, config_id=edpm, container_name=openstack_network_exporter, managed_by=edpm_ansible, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, version=9.6, io.openshift.tags=minimal rhel9, architecture=x86_64, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., vcs-type=git, com.redhat.component=ubi9-minimal-container, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9.)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.147 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/disk.device.usage volume: 485376 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'e0145efa-54b4-4057-9a63-4d1b1478aaa1', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 29884416, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 'project_name': None, 'resource_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f-vda', 'timestamp': '2025-10-02T12:11:17.135113', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'instance-00000036', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'e5fde170-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.822210896, 'message_signature': 'e14aea7e030c5c30cc5cd1f4ff359633d1e2d482eb42975aa5000b8919b96e4f'}, {'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 485376, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 'project_name': None, 
'resource_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f-sda', 'timestamp': '2025-10-02T12:11:17.135113', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'instance-00000036', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'e5fdeb66-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.822210896, 'message_signature': '603094002cc803b130b01d384f18b5488720d2e0dc16690a30bc46c60639e55d'}]}, 'timestamp': '2025-10-02 12:11:17.147838', '_unique_id': '3227ded8965846f9a4f5936306513eee'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.148 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.149 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets in the context of pollsters
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.149 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/network.incoming.packets volume: 11 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 podman[227678]: 2025-10-02 12:11:17.151348407 +0000 UTC m=+0.061952964 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=multipathd, container_name=multipathd, managed_by=edpm_ansible, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'e9fbc145-61fd-435a-bb31-524e45050f50', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 11, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 'project_name': None, 'resource_id': 'instance-00000036-a454ffff-18eb-45a3-a5f4-84882f050b4f-tap92134be3-e7', 'timestamp': '2025-10-02T12:11:17.149317', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'tap92134be3-e7', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:42:b4:4f', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap92134be3-e7'}, 'message_id': 'e5fe2e78-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.815110065, 'message_signature': 'c8ce4090d321fe37624edc6d0f5a7589057bb89c5abadbdbe0c1f413d137113e'}]}, 'timestamp': '2025-10-02 12:11:17.149562', '_unique_id': '8b62402d90f3480588b5a744696567e7'}: kombu.exceptions.OperationalError: [Errno 
111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.150 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.151 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.latency in the context of pollsters
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.152 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for PerDeviceDiskLatencyPollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.152 12 ERROR ceilometer.polling.manager [-] Prevent pollster disk.device.latency from polling [<NovaLikeServer: tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651>]
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.152 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes.rate in the context of pollsters
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.152 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for IncomingBytesRatePollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.152 12 ERROR ceilometer.polling.manager [-] Prevent pollster network.incoming.bytes.rate from polling [<NovaLikeServer: tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651>]
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.152 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes.delta in the context of pollsters
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.152 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/network.outgoing.bytes.delta volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '27bf3dfa-7a20-446c-8c7c-b44b2a9564e9', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.bytes.delta', 'counter_type': 'delta', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 'project_name': None, 'resource_id': 'instance-00000036-a454ffff-18eb-45a3-a5f4-84882f050b4f-tap92134be3-e7', 'timestamp': '2025-10-02T12:11:17.152668', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'tap92134be3-e7', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:42:b4:4f', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap92134be3-e7'}, 'message_id': 'e5feb28a-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.815110065, 'message_signature': '50b619c5e4708041a430fa636e9f3266acc76436fd54c9a5e597a18531cf04f3'}]}, 'timestamp': '2025-10-02 12:11:17.153010', '_unique_id': '8420b60c9bd24ced9591ab49859f63dd'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.153 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.154 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes.rate in the context of pollsters
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.154 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for OutgoingBytesRatePollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.154 12 ERROR ceilometer.polling.manager [-] Prevent pollster network.outgoing.bytes.rate from polling [<NovaLikeServer: tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651>]
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.154 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.requests in the context of pollsters
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.154 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/disk.device.write.requests volume: 312 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/disk.device.write.requests volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '0dd95287-480c-4060-8a54-f700460302bd', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 312, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 'project_name': None, 'resource_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f-vda', 'timestamp': '2025-10-02T12:11:17.154920', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'instance-00000036', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'e5ff0a46-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.791484677, 'message_signature': '15a487bc309466d7a315eed36e13a413e85ffdb3ca87375ac62b011a0dedee3a'}, {'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 0, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 
'project_name': None, 'resource_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f-sda', 'timestamp': '2025-10-02T12:11:17.154920', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'instance-00000036', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'e5ff128e-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.791484677, 'message_signature': '9377ebda146cdafbc68420e304b92a7e96a7b5e336b6db83453b0578890babf2'}]}, 'timestamp': '2025-10-02 12:11:17.155387', '_unique_id': '40fbb6dee2f148ef8f9be8dd22d38711'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.155 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.156 12 INFO ceilometer.polling.manager [-] Polling pollster cpu in the context of pollsters
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.170 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/cpu volume: 11840000000 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '7564e903-682e-4e02-b96f-bdbb140a50d7', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'cpu', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 11840000000, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 'project_name': None, 'resource_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'timestamp': '2025-10-02T12:11:17.156527', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'instance-00000036', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'cpu_number': 1}, 'message_id': 'e60160de-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.857110519, 'message_signature': 'b7a0647f24d481845aea42e698a9eee37603a1604af8bd7d27135a3bdfb9166a'}]}, 'timestamp': '2025-10-02 12:11:17.170558', '_unique_id': 'c2be02c1905b49f1ad4aa381c127c621'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.171 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes in the context of pollsters
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/network.outgoing.bytes volume: 1438 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '98cf02ed-0f49-45ae-bf96-a3550bad82da', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 1438, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 'project_name': None, 'resource_id': 'instance-00000036-a454ffff-18eb-45a3-a5f4-84882f050b4f-tap92134be3-e7', 'timestamp': '2025-10-02T12:11:17.172073', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'tap92134be3-e7', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:42:b4:4f', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap92134be3-e7'}, 'message_id': 'e601a710-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.815110065, 'message_signature': 'ea632fe584472144f0b7c7c85f8ac8a01912492ee497dc9aeec9b547a5c719db'}]}, 'timestamp': '2025-10-02 12:11:17.172311', '_unique_id': 'acc23b5b35c54b7cba72a3e6a214f72f'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.172 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.173 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets.error in the context of pollsters
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.173 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/network.outgoing.packets.error volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '7eb41eb4-06d6-43f9-96e3-a2ee5f09b1d7', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets.error', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 'project_name': None, 'resource_id': 'instance-00000036-a454ffff-18eb-45a3-a5f4-84882f050b4f-tap92134be3-e7', 'timestamp': '2025-10-02T12:11:17.173524', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'tap92134be3-e7', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:42:b4:4f', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap92134be3-e7'}, 'message_id': 'e601e0ae-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.815110065, 'message_signature': 'b188a85184001ac9e644d91726bede71fbd7acc5a967e835752c45e08bff8f68'}]}, 'timestamp': '2025-10-02 12:11:17.173805', '_unique_id': '40ef0f4e9ff3470bb9ca00f2c387c0f3'}: kombu.exceptions.OperationalError: 
[Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets.drop in the context of pollsters
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.174 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/network.incoming.packets.drop volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '9044432f-f7af-47c7-acba-05dc542fe195', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets.drop', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 'project_name': None, 'resource_id': 'instance-00000036-a454ffff-18eb-45a3-a5f4-84882f050b4f-tap92134be3-e7', 'timestamp': '2025-10-02T12:11:17.174922', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'tap92134be3-e7', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:42:b4:4f', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap92134be3-e7'}, 'message_id': 'e60216aa-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.815110065, 'message_signature': '92cf9b9a9d913db0906dda7dd06d1452d62b4d20a0b8972a1479fcc91ab7ce48'}]}, 'timestamp': '2025-10-02 12:11:17.175165', '_unique_id': 'f612cc2e047c44e6b6c194ed4d15b8a1'}: kombu.exceptions.OperationalError: 
[Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.175 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes in the context of pollsters
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/network.incoming.bytes volume: 1652 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '993c2aed-9a34-4919-89fa-984048d3ed05', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 1652, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 'project_name': None, 'resource_id': 'instance-00000036-a454ffff-18eb-45a3-a5f4-84882f050b4f-tap92134be3-e7', 'timestamp': '2025-10-02T12:11:17.176226', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'tap92134be3-e7', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:42:b4:4f', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap92134be3-e7'}, 'message_id': 'e60248f0-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.815110065, 'message_signature': 'fbe6434fa2d91132f142042690059bbf5f7011bb9f37c696d338f14b3b0a4e28'}]}, 'timestamp': '2025-10-02 12:11:17.176460', '_unique_id': '91a0be4e950944f68e0ec3478d998422'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.176 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.177 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.latency in the context of pollsters
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.177 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/disk.device.read.latency volume: 548539706 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.177 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/disk.device.read.latency volume: 52622302 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '70bc3762-28e1-4781-9d8d-894e20819870', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 548539706, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 'project_name': None, 'resource_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f-vda', 'timestamp': '2025-10-02T12:11:17.177499', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'instance-00000036', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'e6027aa0-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.791484677, 'message_signature': '27be138d9459433899cde9a19940469cc49a682aec958569e061543793a798f3'}, {'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 52622302, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 
'project_name': None, 'resource_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f-sda', 'timestamp': '2025-10-02T12:11:17.177499', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'instance-00000036', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'e602837e-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.791484677, 'message_signature': '01b40f57dfb104e7ab3a0fba20571e4b9ae8effc1d9bf8f61eb0ca2c8b6f8dc6'}]}, 'timestamp': '2025-10-02 12:11:17.177977', '_unique_id': 'eecf1996f9cb40fcb27e754f6b057a34'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.178 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes.delta in the context of pollsters
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/network.incoming.bytes.delta volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '4b3d5a79-b8b1-44ea-975d-a0876bc530ec', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.bytes.delta', 'counter_type': 'delta', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 'project_name': None, 'resource_id': 'instance-00000036-a454ffff-18eb-45a3-a5f4-84882f050b4f-tap92134be3-e7', 'timestamp': '2025-10-02T12:11:17.179052', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'tap92134be3-e7', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:42:b4:4f', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap92134be3-e7'}, 'message_id': 'e602b754-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.815110065, 'message_signature': '2ce9fdbf4bc1161a961c7d7363ed60871b17b5afdf8e8c491607b89d91535a27'}]}, 'timestamp': '2025-10-02 12:11:17.179278', '_unique_id': '87bc75ef2d8e447ca87d6f51a7341274'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.179 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.180 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.capacity in the context of pollsters
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.180 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/disk.device.capacity volume: 1073741824 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.180 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/disk.device.capacity volume: 485376 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '023035f3-a188-41c5-840d-f9093285721f', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 1073741824, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 'project_name': None, 'resource_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f-vda', 'timestamp': '2025-10-02T12:11:17.180340', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'instance-00000036', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'e602e990-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.822210896, 'message_signature': '290a1e1b5e04856fc561d3aa279744c4843b88da3d37d75cc906008c9c4bd57b'}, {'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 485376, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 'project_name': None, 
'resource_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f-sda', 'timestamp': '2025-10-02T12:11:17.180340', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'instance-00000036', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'e602f16a-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.822210896, 'message_signature': '778b877ab79145ed128eeae72fea14d806b58419aaf34f84df797e61e4f78b48'}]}, 'timestamp': '2025-10-02 12:11:17.180751', '_unique_id': '5953fe0e68fe46d3a97bc8f5db6144a1'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets in the context of pollsters
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.181 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/network.outgoing.packets volume: 13 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '06f01ee5-d669-4b96-9816-0fe9521aa8ba', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 13, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 'project_name': None, 'resource_id': 'instance-00000036-a454ffff-18eb-45a3-a5f4-84882f050b4f-tap92134be3-e7', 'timestamp': '2025-10-02T12:11:17.181827', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'tap92134be3-e7', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:42:b4:4f', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap92134be3-e7'}, 'message_id': 'e60324d2-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.815110065, 'message_signature': '7c134ad00ed4b52c6ecc4154780d2a58e689efc445060199c4feb348423b86ff'}]}, 'timestamp': '2025-10-02 12:11:17.182098', '_unique_id': 'a13c8e9f782b4b6a8455a9f4814cdf57'}: kombu.exceptions.OperationalError: [Errno 
111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.182 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets.drop in the context of pollsters
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/network.outgoing.packets.drop volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '3c94e8d3-995f-4428-9685-acd96860628c', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets.drop', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 'project_name': None, 'resource_id': 'instance-00000036-a454ffff-18eb-45a3-a5f4-84882f050b4f-tap92134be3-e7', 'timestamp': '2025-10-02T12:11:17.183185', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'tap92134be3-e7', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:42:b4:4f', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap92134be3-e7'}, 'message_id': 'e603592a-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.815110065, 'message_signature': '7a33fba12f2d25600f68c8b00ad5042bf355e4e09cd0c008b59d7c170ad58f25'}]}, 'timestamp': '2025-10-02 12:11:17.183423', '_unique_id': 'f74ff85b227844c9828c16abbbe52dc4'}: kombu.exceptions.OperationalError: 
[Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.183 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.184 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.allocation in the context of pollsters
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.184 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/disk.device.allocation volume: 30547968 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.184 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/disk.device.allocation volume: 487424 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '9654ad81-90e2-47ae-bf3a-5b42899e0522', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 30547968, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 'project_name': None, 'resource_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f-vda', 'timestamp': '2025-10-02T12:11:17.184457', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'instance-00000036', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'e6038a44-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.822210896, 'message_signature': '25cfa4a242e1f9889af1ff60c3e66effa1d7b2ca4034dc4db1df00bbef265cab'}, {'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 487424, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 'project_name': None, 
'resource_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f-sda', 'timestamp': '2025-10-02T12:11:17.184457', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'instance-00000036', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'e60391e2-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.822210896, 'message_signature': 'e20e36c3255ca98705e6ea60692b22fe6b61d582a6cd147066b01bc021e592ff'}]}, 'timestamp': '2025-10-02 12:11:17.184859', '_unique_id': 'eff6cf9ef0554a659fcf2afdf43d622e'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.185 12 INFO ceilometer.polling.manager [-] Polling pollster memory.usage in the context of pollsters
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/memory.usage volume: 40.421875 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '77e7f061-b0e7-41f3-a15b-120d46820c12', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'memory.usage', 'counter_type': 'gauge', 'counter_unit': 'MB', 'counter_volume': 40.421875, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 'project_name': None, 'resource_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'timestamp': '2025-10-02T12:11:17.185977', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'instance-00000036', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1}, 'message_id': 'e603c716-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.857110519, 'message_signature': 'a5be285e506a5aadb25bbb4e75629b06e47f99a902b13d0d6e25e3c14a481adc'}]}, 'timestamp': '2025-10-02 12:11:17.186227', '_unique_id': '4f71d8804d46420298fb32c19902fede'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.186 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.187 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.latency in the context of pollsters
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.187 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/disk.device.write.latency volume: 2945102848 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.187 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/disk.device.write.latency volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '2eb9222e-7d2f-42a8-8a3d-0de87aac3dda', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 2945102848, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 'project_name': None, 'resource_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f-vda', 'timestamp': '2025-10-02T12:11:17.187305', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'instance-00000036', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'e603f984-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.791484677, 'message_signature': 'bad66461236844dbaf48243073e3d2cec43fa55f5f8e89db829709a3a8240bca'}, {'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 0, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 
'project_name': None, 'resource_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f-sda', 'timestamp': '2025-10-02T12:11:17.187305', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'instance-00000036', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'e604010e-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.791484677, 'message_signature': '235e9a4888615da4647efd3c36eb3d1f006dade98381538ecc1dd20fa7c4ae9f'}]}, 'timestamp': '2025-10-02 12:11:17.187705', '_unique_id': '96e8ef1196c1488381afb72c09f3fed1'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.iops in the context of pollsters
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for PerDeviceDiskIOPSPollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.188 12 ERROR ceilometer.polling.manager [-] Prevent pollster disk.device.iops from polling [<NovaLikeServer: tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651>]
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.189 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.requests in the context of pollsters
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.189 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/disk.device.read.requests volume: 1108 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.189 12 DEBUG ceilometer.compute.pollsters [-] a454ffff-18eb-45a3-a5f4-84882f050b4f/disk.device.read.requests volume: 108 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '9ee16d1a-b7ac-4f2f-9660-1b769c45c7cb', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 1108, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 'project_name': None, 'resource_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f-vda', 'timestamp': '2025-10-02T12:11:17.189144', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'instance-00000036', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'e6044196-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.791484677, 'message_signature': '1e5bd666650028afaed0f155688caf8f2125164a1874894814c69f69898c46da'}, {'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 108, 'user_id': '53ded9f664b84e7d85d0944e0b4ecb31', 'user_name': None, 'project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 
'project_name': None, 'resource_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f-sda', 'timestamp': '2025-10-02T12:11:17.189144', 'resource_metadata': {'display_name': 'tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651', 'name': 'instance-00000036', 'instance_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'instance_type': 'm1.nano', 'host': 'ebe87bfd65dba35d315c5caf88f51a3f0e011b08f69eaa7bac7a3174', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'e6044970-9f88-11f0-af18-fa163efc5e78', 'monotonic_time': 5074.791484677, 'message_signature': '41322e6263e97b9f81ee21d75d9490b445c04f07dd377d5e3f6578bc34fdc1bb'}]}, 'timestamp': '2025-10-02 12:11:17.189557', '_unique_id': '605e415480b04af4bf0907754977fe69'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:11:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:11:17.190 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:11:17 compute-0 nova_compute[192079]: 2025-10-02 12:11:17.421 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:17.422 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=15, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=14) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:11:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:17.423 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 5 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.220 2 DEBUG oslo_concurrency.lockutils [None req-0f789aec-d70d-4009-aae3-9482fb324c9b 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Acquiring lock "a454ffff-18eb-45a3-a5f4-84882f050b4f" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.221 2 DEBUG oslo_concurrency.lockutils [None req-0f789aec-d70d-4009-aae3-9482fb324c9b 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Lock "a454ffff-18eb-45a3-a5f4-84882f050b4f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.221 2 DEBUG oslo_concurrency.lockutils [None req-0f789aec-d70d-4009-aae3-9482fb324c9b 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Acquiring lock "a454ffff-18eb-45a3-a5f4-84882f050b4f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.221 2 DEBUG oslo_concurrency.lockutils [None req-0f789aec-d70d-4009-aae3-9482fb324c9b 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Lock "a454ffff-18eb-45a3-a5f4-84882f050b4f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.221 2 DEBUG oslo_concurrency.lockutils [None req-0f789aec-d70d-4009-aae3-9482fb324c9b 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Lock "a454ffff-18eb-45a3-a5f4-84882f050b4f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.234 2 INFO nova.compute.manager [None req-0f789aec-d70d-4009-aae3-9482fb324c9b 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Terminating instance
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.244 2 DEBUG nova.compute.manager [None req-0f789aec-d70d-4009-aae3-9482fb324c9b 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:11:18 compute-0 kernel: tap92134be3-e7 (unregistering): left promiscuous mode
Oct 02 12:11:18 compute-0 NetworkManager[51160]: <info>  [1759407078.2742] device (tap92134be3-e7): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:11:18 compute-0 ovn_controller[94336]: 2025-10-02T12:11:18Z|00174|binding|INFO|Releasing lport 92134be3-e745-4df5-8491-81ceeaf0c20e from this chassis (sb_readonly=0)
Oct 02 12:11:18 compute-0 ovn_controller[94336]: 2025-10-02T12:11:18Z|00175|binding|INFO|Setting lport 92134be3-e745-4df5-8491-81ceeaf0c20e down in Southbound
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.282 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:18 compute-0 ovn_controller[94336]: 2025-10-02T12:11:18Z|00176|binding|INFO|Removing iface tap92134be3-e7 ovn-installed in OVS
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.283 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.297 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:18.299 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:42:b4:4f 10.100.0.7'], port_security=['fa:16:3e:42:b4:4f 10.100.0.7'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.7/28', 'neutron:device_id': 'a454ffff-18eb-45a3-a5f4-84882f050b4f', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-a95667f3-b3ee-4e6a-b815-a7243d0bf012', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'd7dda73e7f3d4e05b0a8d0d1602f6b94', 'neutron:revision_number': '4', 'neutron:security_group_ids': '5c6b9803-4b78-4b5f-8eec-2999cf25dd71', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=116eade2-49ab-4232-a054-5dd54644a8e9, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=92134be3-e745-4df5-8491-81ceeaf0c20e) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:11:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:18.301 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 92134be3-e745-4df5-8491-81ceeaf0c20e in datapath a95667f3-b3ee-4e6a-b815-a7243d0bf012 unbound from our chassis
Oct 02 12:11:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:18.302 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network a95667f3-b3ee-4e6a-b815-a7243d0bf012, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:11:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:18.303 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[473860db-4d6b-4c08-a6fd-e04b362742e2]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:11:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:18.304 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-a95667f3-b3ee-4e6a-b815-a7243d0bf012 namespace which is not needed anymore
Oct 02 12:11:18 compute-0 systemd[1]: machine-qemu\x2d27\x2dinstance\x2d00000036.scope: Deactivated successfully.
Oct 02 12:11:18 compute-0 systemd[1]: machine-qemu\x2d27\x2dinstance\x2d00000036.scope: Consumed 13.465s CPU time.
Oct 02 12:11:18 compute-0 systemd-machined[152150]: Machine qemu-27-instance-00000036 terminated.
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.405 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.406 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:11:18 compute-0 neutron-haproxy-ovnmeta-a95667f3-b3ee-4e6a-b815-a7243d0bf012[227552]: [NOTICE]   (227556) : haproxy version is 2.8.14-c23fe91
Oct 02 12:11:18 compute-0 neutron-haproxy-ovnmeta-a95667f3-b3ee-4e6a-b815-a7243d0bf012[227552]: [NOTICE]   (227556) : path to executable is /usr/sbin/haproxy
Oct 02 12:11:18 compute-0 neutron-haproxy-ovnmeta-a95667f3-b3ee-4e6a-b815-a7243d0bf012[227552]: [WARNING]  (227556) : Exiting Master process...
Oct 02 12:11:18 compute-0 neutron-haproxy-ovnmeta-a95667f3-b3ee-4e6a-b815-a7243d0bf012[227552]: [ALERT]    (227556) : Current worker (227558) exited with code 143 (Terminated)
Oct 02 12:11:18 compute-0 neutron-haproxy-ovnmeta-a95667f3-b3ee-4e6a-b815-a7243d0bf012[227552]: [WARNING]  (227556) : All workers exited. Exiting... (0)
Oct 02 12:11:18 compute-0 systemd[1]: libpod-d6370f8d9ebadae2cf70d4a4ce25fe5f3380588d2c1b5121e10633afc508cb7e.scope: Deactivated successfully.
Oct 02 12:11:18 compute-0 podman[227741]: 2025-10-02 12:11:18.433574645 +0000 UTC m=+0.044414411 container died d6370f8d9ebadae2cf70d4a4ce25fe5f3380588d2c1b5121e10633afc508cb7e (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a95667f3-b3ee-4e6a-b815-a7243d0bf012, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0)
Oct 02 12:11:18 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-d6370f8d9ebadae2cf70d4a4ce25fe5f3380588d2c1b5121e10633afc508cb7e-userdata-shm.mount: Deactivated successfully.
Oct 02 12:11:18 compute-0 systemd[1]: var-lib-containers-storage-overlay-32a0f1bcff81f207dcdea285b2e3d2ed9cd8cb943b0d4f90b1b9604f98bbe639-merged.mount: Deactivated successfully.
Oct 02 12:11:18 compute-0 podman[227741]: 2025-10-02 12:11:18.484231542 +0000 UTC m=+0.095071308 container cleanup d6370f8d9ebadae2cf70d4a4ce25fe5f3380588d2c1b5121e10633afc508cb7e (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a95667f3-b3ee-4e6a-b815-a7243d0bf012, org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0)
Oct 02 12:11:18 compute-0 systemd[1]: libpod-conmon-d6370f8d9ebadae2cf70d4a4ce25fe5f3380588d2c1b5121e10633afc508cb7e.scope: Deactivated successfully.
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.504 2 INFO nova.virt.libvirt.driver [-] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Instance destroyed successfully.
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.504 2 DEBUG nova.objects.instance [None req-0f789aec-d70d-4009-aae3-9482fb324c9b 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Lazy-loading 'resources' on Instance uuid a454ffff-18eb-45a3-a5f4-84882f050b4f obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.519 2 DEBUG nova.virt.libvirt.vif [None req-0f789aec-d70d-4009-aae3-9482fb324c9b 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:10:47Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651',display_name='tempest-FloatingIPsAssociationNegativeTestJSON-server-894962651',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-floatingipsassociationnegativetestjson-server-894962651',id=54,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:10:58Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='d7dda73e7f3d4e05b0a8d0d1602f6b94',ramdisk_id='',reservation_id='r-23wlj9ye',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='vi
rtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-FloatingIPsAssociationNegativeTestJSON-611843646',owner_user_name='tempest-FloatingIPsAssociationNegativeTestJSON-611843646-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:10:58Z,user_data=None,user_id='53ded9f664b84e7d85d0944e0b4ecb31',uuid=a454ffff-18eb-45a3-a5f4-84882f050b4f,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "92134be3-e745-4df5-8491-81ceeaf0c20e", "address": "fa:16:3e:42:b4:4f", "network": {"id": "a95667f3-b3ee-4e6a-b815-a7243d0bf012", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-301403611-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "d7dda73e7f3d4e05b0a8d0d1602f6b94", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92134be3-e7", "ovs_interfaceid": "92134be3-e745-4df5-8491-81ceeaf0c20e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.520 2 DEBUG nova.network.os_vif_util [None req-0f789aec-d70d-4009-aae3-9482fb324c9b 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Converting VIF {"id": "92134be3-e745-4df5-8491-81ceeaf0c20e", "address": "fa:16:3e:42:b4:4f", "network": {"id": "a95667f3-b3ee-4e6a-b815-a7243d0bf012", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-301403611-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "d7dda73e7f3d4e05b0a8d0d1602f6b94", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92134be3-e7", "ovs_interfaceid": "92134be3-e745-4df5-8491-81ceeaf0c20e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.520 2 DEBUG nova.network.os_vif_util [None req-0f789aec-d70d-4009-aae3-9482fb324c9b 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:42:b4:4f,bridge_name='br-int',has_traffic_filtering=True,id=92134be3-e745-4df5-8491-81ceeaf0c20e,network=Network(a95667f3-b3ee-4e6a-b815-a7243d0bf012),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap92134be3-e7') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.521 2 DEBUG os_vif [None req-0f789aec-d70d-4009-aae3-9482fb324c9b 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:42:b4:4f,bridge_name='br-int',has_traffic_filtering=True,id=92134be3-e745-4df5-8491-81ceeaf0c20e,network=Network(a95667f3-b3ee-4e6a-b815-a7243d0bf012),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap92134be3-e7') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.522 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.522 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap92134be3-e7, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.524 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.525 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.527 2 INFO os_vif [None req-0f789aec-d70d-4009-aae3-9482fb324c9b 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:42:b4:4f,bridge_name='br-int',has_traffic_filtering=True,id=92134be3-e745-4df5-8491-81ceeaf0c20e,network=Network(a95667f3-b3ee-4e6a-b815-a7243d0bf012),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap92134be3-e7')
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.528 2 INFO nova.virt.libvirt.driver [None req-0f789aec-d70d-4009-aae3-9482fb324c9b 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Deleting instance files /var/lib/nova/instances/a454ffff-18eb-45a3-a5f4-84882f050b4f_del
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.528 2 INFO nova.virt.libvirt.driver [None req-0f789aec-d70d-4009-aae3-9482fb324c9b 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Deletion of /var/lib/nova/instances/a454ffff-18eb-45a3-a5f4-84882f050b4f_del complete
Oct 02 12:11:18 compute-0 podman[227785]: 2025-10-02 12:11:18.550267034 +0000 UTC m=+0.042387955 container remove d6370f8d9ebadae2cf70d4a4ce25fe5f3380588d2c1b5121e10633afc508cb7e (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a95667f3-b3ee-4e6a-b815-a7243d0bf012, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, tcib_managed=true, org.label-schema.license=GPLv2)
Oct 02 12:11:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:18.555 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[548048b5-1b39-4c2f-a49b-1f7225c8046c]: (4, ('Thu Oct  2 12:11:18 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-a95667f3-b3ee-4e6a-b815-a7243d0bf012 (d6370f8d9ebadae2cf70d4a4ce25fe5f3380588d2c1b5121e10633afc508cb7e)\nd6370f8d9ebadae2cf70d4a4ce25fe5f3380588d2c1b5121e10633afc508cb7e\nThu Oct  2 12:11:18 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-a95667f3-b3ee-4e6a-b815-a7243d0bf012 (d6370f8d9ebadae2cf70d4a4ce25fe5f3380588d2c1b5121e10633afc508cb7e)\nd6370f8d9ebadae2cf70d4a4ce25fe5f3380588d2c1b5121e10633afc508cb7e\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:11:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:18.556 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3ff66559-3ec3-4bf0-a8f6-a488d3f91cb2]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:11:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:18.557 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapa95667f3-b0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.605 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:18 compute-0 kernel: tapa95667f3-b0: left promiscuous mode
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.607 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:18.609 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1be2a72b-d58b-40ba-ac46-653fa48964f8]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.617 2 INFO nova.compute.manager [None req-0f789aec-d70d-4009-aae3-9482fb324c9b 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Took 0.37 seconds to destroy the instance on the hypervisor.
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.617 2 DEBUG oslo.service.loopingcall [None req-0f789aec-d70d-4009-aae3-9482fb324c9b 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.617 2 DEBUG nova.compute.manager [-] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.618 2 DEBUG nova.network.neutron [-] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:11:18 compute-0 nova_compute[192079]: 2025-10-02 12:11:18.620 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:18.644 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ca301c18-9217-4f2d-a15d-61fc74382431]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:11:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:18.646 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6a1f19f7-d4a3-4763-b2da-0453d640f522]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:11:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:18.663 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c5cc6239-04e5-48f2-95a7-0a54e416a1ae]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 505420, 'reachable_time': 15232, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 227802, 'error': None, 'target': 'ovnmeta-a95667f3-b3ee-4e6a-b815-a7243d0bf012', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:11:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:18.665 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-a95667f3-b3ee-4e6a-b815-a7243d0bf012 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:11:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:18.665 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[814dca57-e93a-46c4-9389-f9dad1265968]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:11:18 compute-0 systemd[1]: run-netns-ovnmeta\x2da95667f3\x2db3ee\x2d4e6a\x2db815\x2da7243d0bf012.mount: Deactivated successfully.
Oct 02 12:11:19 compute-0 nova_compute[192079]: 2025-10-02 12:11:19.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:11:19 compute-0 nova_compute[192079]: 2025-10-02 12:11:19.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:11:19 compute-0 nova_compute[192079]: 2025-10-02 12:11:19.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:11:19 compute-0 nova_compute[192079]: 2025-10-02 12:11:19.680 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Skipping network cache update for instance because it is being deleted. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9875
Oct 02 12:11:19 compute-0 nova_compute[192079]: 2025-10-02 12:11:19.681 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:11:19 compute-0 nova_compute[192079]: 2025-10-02 12:11:19.878 2 DEBUG nova.compute.manager [req-5d949426-f42a-4dd9-8ba0-cb36f7b95ecb req-0f1f49c9-c3cc-4a8a-8a56-941a1ce10521 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Received event network-vif-unplugged-92134be3-e745-4df5-8491-81ceeaf0c20e external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:11:19 compute-0 nova_compute[192079]: 2025-10-02 12:11:19.878 2 DEBUG oslo_concurrency.lockutils [req-5d949426-f42a-4dd9-8ba0-cb36f7b95ecb req-0f1f49c9-c3cc-4a8a-8a56-941a1ce10521 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "a454ffff-18eb-45a3-a5f4-84882f050b4f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:11:19 compute-0 nova_compute[192079]: 2025-10-02 12:11:19.879 2 DEBUG oslo_concurrency.lockutils [req-5d949426-f42a-4dd9-8ba0-cb36f7b95ecb req-0f1f49c9-c3cc-4a8a-8a56-941a1ce10521 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a454ffff-18eb-45a3-a5f4-84882f050b4f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:11:19 compute-0 nova_compute[192079]: 2025-10-02 12:11:19.879 2 DEBUG oslo_concurrency.lockutils [req-5d949426-f42a-4dd9-8ba0-cb36f7b95ecb req-0f1f49c9-c3cc-4a8a-8a56-941a1ce10521 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a454ffff-18eb-45a3-a5f4-84882f050b4f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:11:19 compute-0 nova_compute[192079]: 2025-10-02 12:11:19.879 2 DEBUG nova.compute.manager [req-5d949426-f42a-4dd9-8ba0-cb36f7b95ecb req-0f1f49c9-c3cc-4a8a-8a56-941a1ce10521 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] No waiting events found dispatching network-vif-unplugged-92134be3-e745-4df5-8491-81ceeaf0c20e pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:11:19 compute-0 nova_compute[192079]: 2025-10-02 12:11:19.879 2 DEBUG nova.compute.manager [req-5d949426-f42a-4dd9-8ba0-cb36f7b95ecb req-0f1f49c9-c3cc-4a8a-8a56-941a1ce10521 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Received event network-vif-unplugged-92134be3-e745-4df5-8491-81ceeaf0c20e for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:11:19 compute-0 nova_compute[192079]: 2025-10-02 12:11:19.880 2 DEBUG nova.compute.manager [req-5d949426-f42a-4dd9-8ba0-cb36f7b95ecb req-0f1f49c9-c3cc-4a8a-8a56-941a1ce10521 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Received event network-vif-plugged-92134be3-e745-4df5-8491-81ceeaf0c20e external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:11:19 compute-0 nova_compute[192079]: 2025-10-02 12:11:19.880 2 DEBUG oslo_concurrency.lockutils [req-5d949426-f42a-4dd9-8ba0-cb36f7b95ecb req-0f1f49c9-c3cc-4a8a-8a56-941a1ce10521 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "a454ffff-18eb-45a3-a5f4-84882f050b4f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:11:19 compute-0 nova_compute[192079]: 2025-10-02 12:11:19.880 2 DEBUG oslo_concurrency.lockutils [req-5d949426-f42a-4dd9-8ba0-cb36f7b95ecb req-0f1f49c9-c3cc-4a8a-8a56-941a1ce10521 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a454ffff-18eb-45a3-a5f4-84882f050b4f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:11:19 compute-0 nova_compute[192079]: 2025-10-02 12:11:19.880 2 DEBUG oslo_concurrency.lockutils [req-5d949426-f42a-4dd9-8ba0-cb36f7b95ecb req-0f1f49c9-c3cc-4a8a-8a56-941a1ce10521 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a454ffff-18eb-45a3-a5f4-84882f050b4f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:11:19 compute-0 nova_compute[192079]: 2025-10-02 12:11:19.881 2 DEBUG nova.compute.manager [req-5d949426-f42a-4dd9-8ba0-cb36f7b95ecb req-0f1f49c9-c3cc-4a8a-8a56-941a1ce10521 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] No waiting events found dispatching network-vif-plugged-92134be3-e745-4df5-8491-81ceeaf0c20e pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:11:19 compute-0 nova_compute[192079]: 2025-10-02 12:11:19.881 2 WARNING nova.compute.manager [req-5d949426-f42a-4dd9-8ba0-cb36f7b95ecb req-0f1f49c9-c3cc-4a8a-8a56-941a1ce10521 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Received unexpected event network-vif-plugged-92134be3-e745-4df5-8491-81ceeaf0c20e for instance with vm_state active and task_state deleting.
Oct 02 12:11:20 compute-0 nova_compute[192079]: 2025-10-02 12:11:20.397 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:20 compute-0 nova_compute[192079]: 2025-10-02 12:11:20.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:11:20 compute-0 nova_compute[192079]: 2025-10-02 12:11:20.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:11:21 compute-0 nova_compute[192079]: 2025-10-02 12:11:21.090 2 DEBUG nova.network.neutron [-] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:11:21 compute-0 nova_compute[192079]: 2025-10-02 12:11:21.107 2 INFO nova.compute.manager [-] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Took 2.49 seconds to deallocate network for instance.
Oct 02 12:11:21 compute-0 nova_compute[192079]: 2025-10-02 12:11:21.212 2 DEBUG oslo_concurrency.lockutils [None req-0f789aec-d70d-4009-aae3-9482fb324c9b 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:11:21 compute-0 nova_compute[192079]: 2025-10-02 12:11:21.212 2 DEBUG oslo_concurrency.lockutils [None req-0f789aec-d70d-4009-aae3-9482fb324c9b 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:11:21 compute-0 nova_compute[192079]: 2025-10-02 12:11:21.265 2 DEBUG nova.compute.provider_tree [None req-0f789aec-d70d-4009-aae3-9482fb324c9b 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:11:21 compute-0 nova_compute[192079]: 2025-10-02 12:11:21.280 2 DEBUG nova.scheduler.client.report [None req-0f789aec-d70d-4009-aae3-9482fb324c9b 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:11:21 compute-0 nova_compute[192079]: 2025-10-02 12:11:21.301 2 DEBUG oslo_concurrency.lockutils [None req-0f789aec-d70d-4009-aae3-9482fb324c9b 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.089s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:11:21 compute-0 nova_compute[192079]: 2025-10-02 12:11:21.334 2 INFO nova.scheduler.client.report [None req-0f789aec-d70d-4009-aae3-9482fb324c9b 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Deleted allocations for instance a454ffff-18eb-45a3-a5f4-84882f050b4f
Oct 02 12:11:21 compute-0 nova_compute[192079]: 2025-10-02 12:11:21.410 2 DEBUG oslo_concurrency.lockutils [None req-0f789aec-d70d-4009-aae3-9482fb324c9b 53ded9f664b84e7d85d0944e0b4ecb31 d7dda73e7f3d4e05b0a8d0d1602f6b94 - - default default] Lock "a454ffff-18eb-45a3-a5f4-84882f050b4f" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 3.190s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:11:21 compute-0 nova_compute[192079]: 2025-10-02 12:11:21.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:11:22 compute-0 nova_compute[192079]: 2025-10-02 12:11:22.092 2 DEBUG nova.compute.manager [req-ba0adce1-ed05-407e-aef2-97d5bb7bf6ef req-49b155f5-775c-45fe-a651-fa6117ea70d5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Received event network-vif-deleted-92134be3-e745-4df5-8491-81ceeaf0c20e external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:11:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:22.424 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '15'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:11:23 compute-0 podman[227803]: 2025-10-02 12:11:23.148853297 +0000 UTC m=+0.061975635 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']})
Oct 02 12:11:23 compute-0 podman[227804]: 2025-10-02 12:11:23.150952843 +0000 UTC m=+0.057993887 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_managed=true, config_id=iscsid, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:11:23 compute-0 nova_compute[192079]: 2025-10-02 12:11:23.570 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:25 compute-0 nova_compute[192079]: 2025-10-02 12:11:25.398 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:26 compute-0 nova_compute[192079]: 2025-10-02 12:11:26.093 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:26 compute-0 nova_compute[192079]: 2025-10-02 12:11:26.275 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:28 compute-0 nova_compute[192079]: 2025-10-02 12:11:28.571 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:30 compute-0 nova_compute[192079]: 2025-10-02 12:11:30.400 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:31 compute-0 nova_compute[192079]: 2025-10-02 12:11:31.253 2 DEBUG oslo_concurrency.lockutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Acquiring lock "8ca5f1bd-aa74-4790-92de-0c18657746f2" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:11:31 compute-0 nova_compute[192079]: 2025-10-02 12:11:31.253 2 DEBUG oslo_concurrency.lockutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:11:31 compute-0 nova_compute[192079]: 2025-10-02 12:11:31.277 2 DEBUG nova.compute.manager [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:11:31 compute-0 nova_compute[192079]: 2025-10-02 12:11:31.394 2 DEBUG oslo_concurrency.lockutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:11:31 compute-0 nova_compute[192079]: 2025-10-02 12:11:31.395 2 DEBUG oslo_concurrency.lockutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:11:31 compute-0 nova_compute[192079]: 2025-10-02 12:11:31.400 2 DEBUG nova.virt.hardware [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:11:31 compute-0 nova_compute[192079]: 2025-10-02 12:11:31.401 2 INFO nova.compute.claims [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:11:31 compute-0 nova_compute[192079]: 2025-10-02 12:11:31.577 2 DEBUG nova.compute.provider_tree [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:11:31 compute-0 nova_compute[192079]: 2025-10-02 12:11:31.589 2 DEBUG nova.scheduler.client.report [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:11:31 compute-0 nova_compute[192079]: 2025-10-02 12:11:31.616 2 DEBUG oslo_concurrency.lockutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.221s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:11:31 compute-0 nova_compute[192079]: 2025-10-02 12:11:31.617 2 DEBUG nova.compute.manager [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:11:31 compute-0 nova_compute[192079]: 2025-10-02 12:11:31.705 2 DEBUG nova.compute.manager [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:11:31 compute-0 nova_compute[192079]: 2025-10-02 12:11:31.705 2 DEBUG nova.network.neutron [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:11:31 compute-0 nova_compute[192079]: 2025-10-02 12:11:31.722 2 INFO nova.virt.libvirt.driver [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:11:31 compute-0 nova_compute[192079]: 2025-10-02 12:11:31.747 2 DEBUG nova.compute.manager [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:11:31 compute-0 nova_compute[192079]: 2025-10-02 12:11:31.865 2 DEBUG nova.compute.manager [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:11:31 compute-0 nova_compute[192079]: 2025-10-02 12:11:31.866 2 DEBUG nova.virt.libvirt.driver [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:11:31 compute-0 nova_compute[192079]: 2025-10-02 12:11:31.867 2 INFO nova.virt.libvirt.driver [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Creating image(s)
Oct 02 12:11:31 compute-0 nova_compute[192079]: 2025-10-02 12:11:31.867 2 DEBUG oslo_concurrency.lockutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Acquiring lock "/var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:11:31 compute-0 nova_compute[192079]: 2025-10-02 12:11:31.868 2 DEBUG oslo_concurrency.lockutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "/var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:11:31 compute-0 nova_compute[192079]: 2025-10-02 12:11:31.868 2 DEBUG oslo_concurrency.lockutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "/var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:11:31 compute-0 nova_compute[192079]: 2025-10-02 12:11:31.882 2 DEBUG oslo_concurrency.processutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:11:31 compute-0 nova_compute[192079]: 2025-10-02 12:11:31.941 2 DEBUG nova.policy [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c2b9eab3da414692b3942505e3441920', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '20417475a6a149d5bc47976f4da9a4ae', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:11:31 compute-0 nova_compute[192079]: 2025-10-02 12:11:31.944 2 DEBUG oslo_concurrency.processutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.062s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:11:31 compute-0 nova_compute[192079]: 2025-10-02 12:11:31.945 2 DEBUG oslo_concurrency.lockutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:11:31 compute-0 nova_compute[192079]: 2025-10-02 12:11:31.945 2 DEBUG oslo_concurrency.lockutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:11:31 compute-0 nova_compute[192079]: 2025-10-02 12:11:31.955 2 DEBUG oslo_concurrency.processutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:11:32 compute-0 nova_compute[192079]: 2025-10-02 12:11:32.008 2 DEBUG oslo_concurrency.processutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.052s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:11:32 compute-0 nova_compute[192079]: 2025-10-02 12:11:32.008 2 DEBUG oslo_concurrency.processutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:11:32 compute-0 nova_compute[192079]: 2025-10-02 12:11:32.145 2 DEBUG oslo_concurrency.processutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/disk 1073741824" returned: 0 in 0.137s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:11:32 compute-0 nova_compute[192079]: 2025-10-02 12:11:32.146 2 DEBUG oslo_concurrency.lockutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.201s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:11:32 compute-0 nova_compute[192079]: 2025-10-02 12:11:32.147 2 DEBUG oslo_concurrency.processutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:11:32 compute-0 nova_compute[192079]: 2025-10-02 12:11:32.198 2 DEBUG oslo_concurrency.processutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.051s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:11:32 compute-0 nova_compute[192079]: 2025-10-02 12:11:32.199 2 DEBUG nova.virt.disk.api [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Checking if we can resize image /var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:11:32 compute-0 nova_compute[192079]: 2025-10-02 12:11:32.200 2 DEBUG oslo_concurrency.processutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:11:32 compute-0 nova_compute[192079]: 2025-10-02 12:11:32.256 2 DEBUG oslo_concurrency.processutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/disk --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:11:32 compute-0 nova_compute[192079]: 2025-10-02 12:11:32.257 2 DEBUG nova.virt.disk.api [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Cannot resize image /var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:11:32 compute-0 nova_compute[192079]: 2025-10-02 12:11:32.258 2 DEBUG nova.objects.instance [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lazy-loading 'migration_context' on Instance uuid 8ca5f1bd-aa74-4790-92de-0c18657746f2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:11:32 compute-0 nova_compute[192079]: 2025-10-02 12:11:32.367 2 DEBUG nova.virt.libvirt.driver [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:11:32 compute-0 nova_compute[192079]: 2025-10-02 12:11:32.368 2 DEBUG nova.virt.libvirt.driver [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Ensure instance console log exists: /var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:11:32 compute-0 nova_compute[192079]: 2025-10-02 12:11:32.368 2 DEBUG oslo_concurrency.lockutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:11:32 compute-0 nova_compute[192079]: 2025-10-02 12:11:32.369 2 DEBUG oslo_concurrency.lockutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:11:32 compute-0 nova_compute[192079]: 2025-10-02 12:11:32.369 2 DEBUG oslo_concurrency.lockutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:11:33 compute-0 nova_compute[192079]: 2025-10-02 12:11:33.219 2 DEBUG nova.network.neutron [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Successfully created port: 4d037109-fde7-4c13-b8b9-598da6a9ad57 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:11:33 compute-0 nova_compute[192079]: 2025-10-02 12:11:33.503 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759407078.502067, a454ffff-18eb-45a3-a5f4-84882f050b4f => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:11:33 compute-0 nova_compute[192079]: 2025-10-02 12:11:33.503 2 INFO nova.compute.manager [-] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] VM Stopped (Lifecycle Event)
Oct 02 12:11:33 compute-0 nova_compute[192079]: 2025-10-02 12:11:33.527 2 DEBUG nova.compute.manager [None req-4a114a88-6ab1-4f35-a3d6-88775ddeac29 - - - - - -] [instance: a454ffff-18eb-45a3-a5f4-84882f050b4f] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:11:33 compute-0 nova_compute[192079]: 2025-10-02 12:11:33.573 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:33 compute-0 nova_compute[192079]: 2025-10-02 12:11:33.965 2 DEBUG nova.network.neutron [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Successfully updated port: 4d037109-fde7-4c13-b8b9-598da6a9ad57 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:11:33 compute-0 nova_compute[192079]: 2025-10-02 12:11:33.979 2 DEBUG oslo_concurrency.lockutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Acquiring lock "refresh_cache-8ca5f1bd-aa74-4790-92de-0c18657746f2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:11:33 compute-0 nova_compute[192079]: 2025-10-02 12:11:33.979 2 DEBUG oslo_concurrency.lockutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Acquired lock "refresh_cache-8ca5f1bd-aa74-4790-92de-0c18657746f2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:11:33 compute-0 nova_compute[192079]: 2025-10-02 12:11:33.980 2 DEBUG nova.network.neutron [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:11:34 compute-0 nova_compute[192079]: 2025-10-02 12:11:34.129 2 DEBUG nova.network.neutron [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:11:34 compute-0 podman[227864]: 2025-10-02 12:11:34.15478374 +0000 UTC m=+0.064060230 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, org.label-schema.name=CentOS Stream 9 Base Image, config_id=ovn_metadata_agent, container_name=ovn_metadata_agent, managed_by=edpm_ansible)
Oct 02 12:11:34 compute-0 podman[227866]: 2025-10-02 12:11:34.155052857 +0000 UTC m=+0.060154975 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:11:34 compute-0 podman[227865]: 2025-10-02 12:11:34.179616601 +0000 UTC m=+0.086944038 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, container_name=ovn_controller, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller)
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.356 2 DEBUG nova.network.neutron [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Updating instance_info_cache with network_info: [{"id": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "address": "fa:16:3e:51:b5:9f", "network": {"id": "2bdfd186-139e-456a-92e9-4dc9c37a846a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-953736127-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "20417475a6a149d5bc47976f4da9a4ae", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap4d037109-fd", "ovs_interfaceid": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.382 2 DEBUG oslo_concurrency.lockutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Releasing lock "refresh_cache-8ca5f1bd-aa74-4790-92de-0c18657746f2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.382 2 DEBUG nova.compute.manager [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Instance network_info: |[{"id": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "address": "fa:16:3e:51:b5:9f", "network": {"id": "2bdfd186-139e-456a-92e9-4dc9c37a846a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-953736127-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "20417475a6a149d5bc47976f4da9a4ae", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap4d037109-fd", "ovs_interfaceid": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.384 2 DEBUG nova.virt.libvirt.driver [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Start _get_guest_xml network_info=[{"id": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "address": "fa:16:3e:51:b5:9f", "network": {"id": "2bdfd186-139e-456a-92e9-4dc9c37a846a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-953736127-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "20417475a6a149d5bc47976f4da9a4ae", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap4d037109-fd", "ovs_interfaceid": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.389 2 WARNING nova.virt.libvirt.driver [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.394 2 DEBUG nova.virt.libvirt.host [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.394 2 DEBUG nova.virt.libvirt.host [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.397 2 DEBUG nova.virt.libvirt.host [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.398 2 DEBUG nova.virt.libvirt.host [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.399 2 DEBUG nova.virt.libvirt.driver [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.399 2 DEBUG nova.virt.hardware [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.399 2 DEBUG nova.virt.hardware [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.399 2 DEBUG nova.virt.hardware [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.400 2 DEBUG nova.virt.hardware [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.400 2 DEBUG nova.virt.hardware [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.400 2 DEBUG nova.virt.hardware [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.400 2 DEBUG nova.virt.hardware [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.400 2 DEBUG nova.virt.hardware [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.401 2 DEBUG nova.virt.hardware [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.401 2 DEBUG nova.virt.hardware [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.401 2 DEBUG nova.virt.hardware [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.404 2 DEBUG nova.virt.libvirt.vif [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:11:30Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-tempest.common.compute-instance-645901578',display_name='tempest-tempest.common.compute-instance-645901578',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-tempest-common-compute-instance-645901578',id=57,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='20417475a6a149d5bc47976f4da9a4ae',ramdisk_id='',reservation_id='r-rok8i8zh',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServerActionsTestOtherA-352727288',owner_user_name='tempest-ServerActionsTestOth
erA-352727288-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:11:31Z,user_data=None,user_id='c2b9eab3da414692b3942505e3441920',uuid=8ca5f1bd-aa74-4790-92de-0c18657746f2,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "address": "fa:16:3e:51:b5:9f", "network": {"id": "2bdfd186-139e-456a-92e9-4dc9c37a846a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-953736127-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "20417475a6a149d5bc47976f4da9a4ae", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap4d037109-fd", "ovs_interfaceid": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.404 2 DEBUG nova.network.os_vif_util [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Converting VIF {"id": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "address": "fa:16:3e:51:b5:9f", "network": {"id": "2bdfd186-139e-456a-92e9-4dc9c37a846a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-953736127-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "20417475a6a149d5bc47976f4da9a4ae", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap4d037109-fd", "ovs_interfaceid": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.405 2 DEBUG nova.network.os_vif_util [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:51:b5:9f,bridge_name='br-int',has_traffic_filtering=True,id=4d037109-fde7-4c13-b8b9-598da6a9ad57,network=Network(2bdfd186-139e-456a-92e9-4dc9c37a846a),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap4d037109-fd') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.406 2 DEBUG nova.objects.instance [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lazy-loading 'pci_devices' on Instance uuid 8ca5f1bd-aa74-4790-92de-0c18657746f2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.409 2 DEBUG nova.compute.manager [req-cb687539-2a31-4f3d-91ae-ba4801777620 req-079eb2b1-2c36-4f6f-a0d9-632132532e4c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Received event network-changed-4d037109-fde7-4c13-b8b9-598da6a9ad57 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.409 2 DEBUG nova.compute.manager [req-cb687539-2a31-4f3d-91ae-ba4801777620 req-079eb2b1-2c36-4f6f-a0d9-632132532e4c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Refreshing instance network info cache due to event network-changed-4d037109-fde7-4c13-b8b9-598da6a9ad57. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.409 2 DEBUG oslo_concurrency.lockutils [req-cb687539-2a31-4f3d-91ae-ba4801777620 req-079eb2b1-2c36-4f6f-a0d9-632132532e4c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-8ca5f1bd-aa74-4790-92de-0c18657746f2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.409 2 DEBUG oslo_concurrency.lockutils [req-cb687539-2a31-4f3d-91ae-ba4801777620 req-079eb2b1-2c36-4f6f-a0d9-632132532e4c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-8ca5f1bd-aa74-4790-92de-0c18657746f2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.410 2 DEBUG nova.network.neutron [req-cb687539-2a31-4f3d-91ae-ba4801777620 req-079eb2b1-2c36-4f6f-a0d9-632132532e4c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Refreshing network info cache for port 4d037109-fde7-4c13-b8b9-598da6a9ad57 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.433 2 DEBUG nova.virt.libvirt.driver [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:11:35 compute-0 nova_compute[192079]:   <uuid>8ca5f1bd-aa74-4790-92de-0c18657746f2</uuid>
Oct 02 12:11:35 compute-0 nova_compute[192079]:   <name>instance-00000039</name>
Oct 02 12:11:35 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:11:35 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:11:35 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:11:35 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:       <nova:name>tempest-tempest.common.compute-instance-645901578</nova:name>
Oct 02 12:11:35 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:11:35</nova:creationTime>
Oct 02 12:11:35 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:11:35 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:11:35 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:11:35 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:11:35 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:11:35 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:11:35 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:11:35 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:11:35 compute-0 nova_compute[192079]:         <nova:user uuid="c2b9eab3da414692b3942505e3441920">tempest-ServerActionsTestOtherA-352727288-project-member</nova:user>
Oct 02 12:11:35 compute-0 nova_compute[192079]:         <nova:project uuid="20417475a6a149d5bc47976f4da9a4ae">tempest-ServerActionsTestOtherA-352727288</nova:project>
Oct 02 12:11:35 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:11:35 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:11:35 compute-0 nova_compute[192079]:         <nova:port uuid="4d037109-fde7-4c13-b8b9-598da6a9ad57">
Oct 02 12:11:35 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.3" ipVersion="4"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:11:35 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:11:35 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:11:35 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <system>
Oct 02 12:11:35 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:11:35 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:11:35 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:11:35 compute-0 nova_compute[192079]:       <entry name="serial">8ca5f1bd-aa74-4790-92de-0c18657746f2</entry>
Oct 02 12:11:35 compute-0 nova_compute[192079]:       <entry name="uuid">8ca5f1bd-aa74-4790-92de-0c18657746f2</entry>
Oct 02 12:11:35 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     </system>
Oct 02 12:11:35 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:11:35 compute-0 nova_compute[192079]:   <os>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:   </os>
Oct 02 12:11:35 compute-0 nova_compute[192079]:   <features>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:   </features>
Oct 02 12:11:35 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:11:35 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:11:35 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:11:35 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/disk"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:11:35 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/disk.config"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:11:35 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:51:b5:9f"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:       <target dev="tap4d037109-fd"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:11:35 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/console.log" append="off"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <video>
Oct 02 12:11:35 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     </video>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:11:35 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:11:35 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:11:35 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:11:35 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:11:35 compute-0 nova_compute[192079]: </domain>
Oct 02 12:11:35 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.435 2 DEBUG nova.compute.manager [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Preparing to wait for external event network-vif-plugged-4d037109-fde7-4c13-b8b9-598da6a9ad57 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.435 2 DEBUG oslo_concurrency.lockutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Acquiring lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.435 2 DEBUG oslo_concurrency.lockutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.435 2 DEBUG oslo_concurrency.lockutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.436 2 DEBUG nova.virt.libvirt.vif [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:11:30Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-tempest.common.compute-instance-645901578',display_name='tempest-tempest.common.compute-instance-645901578',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-tempest-common-compute-instance-645901578',id=57,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='20417475a6a149d5bc47976f4da9a4ae',ramdisk_id='',reservation_id='r-rok8i8zh',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServerActionsTestOtherA-352727288',owner_user_name='tempest-ServerActi
onsTestOtherA-352727288-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:11:31Z,user_data=None,user_id='c2b9eab3da414692b3942505e3441920',uuid=8ca5f1bd-aa74-4790-92de-0c18657746f2,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "address": "fa:16:3e:51:b5:9f", "network": {"id": "2bdfd186-139e-456a-92e9-4dc9c37a846a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-953736127-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "20417475a6a149d5bc47976f4da9a4ae", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap4d037109-fd", "ovs_interfaceid": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.436 2 DEBUG nova.network.os_vif_util [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Converting VIF {"id": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "address": "fa:16:3e:51:b5:9f", "network": {"id": "2bdfd186-139e-456a-92e9-4dc9c37a846a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-953736127-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "20417475a6a149d5bc47976f4da9a4ae", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap4d037109-fd", "ovs_interfaceid": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.437 2 DEBUG nova.network.os_vif_util [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:51:b5:9f,bridge_name='br-int',has_traffic_filtering=True,id=4d037109-fde7-4c13-b8b9-598da6a9ad57,network=Network(2bdfd186-139e-456a-92e9-4dc9c37a846a),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap4d037109-fd') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.437 2 DEBUG os_vif [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:51:b5:9f,bridge_name='br-int',has_traffic_filtering=True,id=4d037109-fde7-4c13-b8b9-598da6a9ad57,network=Network(2bdfd186-139e-456a-92e9-4dc9c37a846a),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap4d037109-fd') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.438 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.438 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.439 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.441 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.441 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap4d037109-fd, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.442 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap4d037109-fd, col_values=(('external_ids', {'iface-id': '4d037109-fde7-4c13-b8b9-598da6a9ad57', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:51:b5:9f', 'vm-uuid': '8ca5f1bd-aa74-4790-92de-0c18657746f2'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.443 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:35 compute-0 NetworkManager[51160]: <info>  [1759407095.4453] manager: (tap4d037109-fd): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/87)
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.446 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.451 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.452 2 INFO os_vif [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:51:b5:9f,bridge_name='br-int',has_traffic_filtering=True,id=4d037109-fde7-4c13-b8b9-598da6a9ad57,network=Network(2bdfd186-139e-456a-92e9-4dc9c37a846a),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap4d037109-fd')
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.551 2 DEBUG nova.virt.libvirt.driver [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.551 2 DEBUG nova.virt.libvirt.driver [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.552 2 DEBUG nova.virt.libvirt.driver [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] No VIF found with MAC fa:16:3e:51:b5:9f, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.552 2 INFO nova.virt.libvirt.driver [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Using config drive
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.978 2 INFO nova.virt.libvirt.driver [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Creating config drive at /var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/disk.config
Oct 02 12:11:35 compute-0 nova_compute[192079]: 2025-10-02 12:11:35.982 2 DEBUG oslo_concurrency.processutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpa6ynt3xp execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:11:36 compute-0 nova_compute[192079]: 2025-10-02 12:11:36.106 2 DEBUG oslo_concurrency.processutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpa6ynt3xp" returned: 0 in 0.124s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:11:36 compute-0 kernel: tap4d037109-fd: entered promiscuous mode
Oct 02 12:11:36 compute-0 NetworkManager[51160]: <info>  [1759407096.1627] manager: (tap4d037109-fd): new Tun device (/org/freedesktop/NetworkManager/Devices/88)
Oct 02 12:11:36 compute-0 nova_compute[192079]: 2025-10-02 12:11:36.165 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:36 compute-0 ovn_controller[94336]: 2025-10-02T12:11:36Z|00177|binding|INFO|Claiming lport 4d037109-fde7-4c13-b8b9-598da6a9ad57 for this chassis.
Oct 02 12:11:36 compute-0 ovn_controller[94336]: 2025-10-02T12:11:36Z|00178|binding|INFO|4d037109-fde7-4c13-b8b9-598da6a9ad57: Claiming fa:16:3e:51:b5:9f 10.100.0.3
Oct 02 12:11:36 compute-0 nova_compute[192079]: 2025-10-02 12:11:36.166 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:36 compute-0 nova_compute[192079]: 2025-10-02 12:11:36.171 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:36 compute-0 nova_compute[192079]: 2025-10-02 12:11:36.174 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:36 compute-0 nova_compute[192079]: 2025-10-02 12:11:36.178 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:36 compute-0 NetworkManager[51160]: <info>  [1759407096.1792] manager: (patch-br-int-to-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/89)
Oct 02 12:11:36 compute-0 NetworkManager[51160]: <info>  [1759407096.1802] manager: (patch-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d-to-br-int): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/90)
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:36.185 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:51:b5:9f 10.100.0.3'], port_security=['fa:16:3e:51:b5:9f 10.100.0.3'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.3/28', 'neutron:device_id': '8ca5f1bd-aa74-4790-92de-0c18657746f2', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-2bdfd186-139e-456a-92e9-4dc9c37a846a', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '20417475a6a149d5bc47976f4da9a4ae', 'neutron:revision_number': '2', 'neutron:security_group_ids': 'c517fcc5-4e7c-4008-ac85-cb7cba93cd1e', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=c8a937e8-285b-47d1-b87a-47c75465be5a, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=4d037109-fde7-4c13-b8b9-598da6a9ad57) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:36.187 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 4d037109-fde7-4c13-b8b9-598da6a9ad57 in datapath 2bdfd186-139e-456a-92e9-4dc9c37a846a bound to our chassis
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:36.189 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 2bdfd186-139e-456a-92e9-4dc9c37a846a
Oct 02 12:11:36 compute-0 systemd-udevd[227950]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:11:36 compute-0 systemd-machined[152150]: New machine qemu-28-instance-00000039.
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:36.202 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[88770c8c-358a-4011-8822-a987634aa989]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:36.203 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap2bdfd186-11 in ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:36.205 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap2bdfd186-10 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:36.205 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[38a12f77-d994-4bbd-ad76-d7ba67cdf048]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:36.205 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2183a0eb-69f8-408e-8ac0-705fadefbd67]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:11:36 compute-0 NetworkManager[51160]: <info>  [1759407096.2073] device (tap4d037109-fd): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:11:36 compute-0 NetworkManager[51160]: <info>  [1759407096.2082] device (tap4d037109-fd): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:36.216 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[37e6fdea-01fd-4734-8f56-1aacfa5cda66]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:36.244 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[36d548be-e2ca-47a2-b266-46d9bad9d89e]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:11:36 compute-0 systemd[1]: Started Virtual Machine qemu-28-instance-00000039.
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:36.273 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[87c60198-1452-4c3c-8b8c-3eaccbe8c73d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:11:36 compute-0 NetworkManager[51160]: <info>  [1759407096.3006] manager: (tap2bdfd186-10): new Veth device (/org/freedesktop/NetworkManager/Devices/91)
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:36.301 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[dc1ec259-f2bc-4438-bc26-a289549d95b2]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:11:36 compute-0 nova_compute[192079]: 2025-10-02 12:11:36.323 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:36.336 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[6a0aef0b-ee80-4cef-a120-68b1b9558c17]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:36.339 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[a6b84660-92d5-467f-a1d6-8868e33932a0]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:11:36 compute-0 nova_compute[192079]: 2025-10-02 12:11:36.346 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:36 compute-0 ovn_controller[94336]: 2025-10-02T12:11:36Z|00179|binding|INFO|Setting lport 4d037109-fde7-4c13-b8b9-598da6a9ad57 ovn-installed in OVS
Oct 02 12:11:36 compute-0 ovn_controller[94336]: 2025-10-02T12:11:36Z|00180|binding|INFO|Setting lport 4d037109-fde7-4c13-b8b9-598da6a9ad57 up in Southbound
Oct 02 12:11:36 compute-0 nova_compute[192079]: 2025-10-02 12:11:36.357 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:36 compute-0 NetworkManager[51160]: <info>  [1759407096.3692] device (tap2bdfd186-10): carrier: link connected
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:36.377 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[2e3fc5e4-6b26-47e0-b5fe-8d67450cd2fa]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:36.396 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5c42dfb7-0204-4935-a378-58684714d560]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap2bdfd186-11'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:43:b7:89'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 53], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 509399, 'reachable_time': 24032, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 227986, 'error': None, 'target': 'ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:36.412 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[cd8bd0db-2a27-45d4-8ffa-8f4650be60ae]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe43:b789'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 509399, 'tstamp': 509399}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 227987, 'error': None, 'target': 'ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:36.428 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4d3e7502-fb06-44b1-aa74-ee1934a4f62c]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap2bdfd186-11'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:43:b7:89'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 53], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 509399, 'reachable_time': 24032, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 227988, 'error': None, 'target': 'ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:36.454 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5bf7ef41-374f-4477-9fbd-682270f4a3a9]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:36.506 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f70ac26a-341a-4938-9a8f-547656ad009a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:36.509 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap2bdfd186-10, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:36.510 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:36.510 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap2bdfd186-10, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:11:36 compute-0 nova_compute[192079]: 2025-10-02 12:11:36.553 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:36 compute-0 NetworkManager[51160]: <info>  [1759407096.5541] manager: (tap2bdfd186-10): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/92)
Oct 02 12:11:36 compute-0 kernel: tap2bdfd186-10: entered promiscuous mode
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:36.558 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap2bdfd186-10, col_values=(('external_ids', {'iface-id': '1e2d82b4-a363-4c19-94d1-e62c1ba8e34a'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:11:36 compute-0 nova_compute[192079]: 2025-10-02 12:11:36.560 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:36 compute-0 ovn_controller[94336]: 2025-10-02T12:11:36Z|00181|binding|INFO|Releasing lport 1e2d82b4-a363-4c19-94d1-e62c1ba8e34a from this chassis (sb_readonly=0)
Oct 02 12:11:36 compute-0 nova_compute[192079]: 2025-10-02 12:11:36.560 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:36.561 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/2bdfd186-139e-456a-92e9-4dc9c37a846a.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/2bdfd186-139e-456a-92e9-4dc9c37a846a.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:36.562 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9e3d7ea3-9fc8-411d-9c42-945503ff28dd]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:36.562 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-2bdfd186-139e-456a-92e9-4dc9c37a846a
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/2bdfd186-139e-456a-92e9-4dc9c37a846a.pid.haproxy
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 2bdfd186-139e-456a-92e9-4dc9c37a846a
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:11:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:11:36.563 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a', 'env', 'PROCESS_TAG=haproxy-2bdfd186-139e-456a-92e9-4dc9c37a846a', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/2bdfd186-139e-456a-92e9-4dc9c37a846a.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:11:36 compute-0 nova_compute[192079]: 2025-10-02 12:11:36.571 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:36 compute-0 nova_compute[192079]: 2025-10-02 12:11:36.823 2 DEBUG nova.network.neutron [req-cb687539-2a31-4f3d-91ae-ba4801777620 req-079eb2b1-2c36-4f6f-a0d9-632132532e4c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Updated VIF entry in instance network info cache for port 4d037109-fde7-4c13-b8b9-598da6a9ad57. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:11:36 compute-0 nova_compute[192079]: 2025-10-02 12:11:36.823 2 DEBUG nova.network.neutron [req-cb687539-2a31-4f3d-91ae-ba4801777620 req-079eb2b1-2c36-4f6f-a0d9-632132532e4c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Updating instance_info_cache with network_info: [{"id": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "address": "fa:16:3e:51:b5:9f", "network": {"id": "2bdfd186-139e-456a-92e9-4dc9c37a846a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-953736127-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "20417475a6a149d5bc47976f4da9a4ae", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap4d037109-fd", "ovs_interfaceid": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:11:36 compute-0 nova_compute[192079]: 2025-10-02 12:11:36.841 2 DEBUG oslo_concurrency.lockutils [req-cb687539-2a31-4f3d-91ae-ba4801777620 req-079eb2b1-2c36-4f6f-a0d9-632132532e4c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-8ca5f1bd-aa74-4790-92de-0c18657746f2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:11:36 compute-0 podman[228020]: 2025-10-02 12:11:36.894794764 +0000 UTC m=+0.020717041 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:11:37 compute-0 podman[228020]: 2025-10-02 12:11:37.0639053 +0000 UTC m=+0.189827587 container create 73211fb17aa0e5e2e3aa93ee97eb953c2e3a2e1e90045920dcfa17d5eaa174fd (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.license=GPLv2)
Oct 02 12:11:37 compute-0 ovn_controller[94336]: 2025-10-02T12:11:37Z|00182|binding|INFO|Releasing lport 1e2d82b4-a363-4c19-94d1-e62c1ba8e34a from this chassis (sb_readonly=0)
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.122 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:37 compute-0 systemd[1]: Started libpod-conmon-73211fb17aa0e5e2e3aa93ee97eb953c2e3a2e1e90045920dcfa17d5eaa174fd.scope.
Oct 02 12:11:37 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:11:37 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/405e760131fa1d5dbb8dc0d16e761f71a16501b14c792e9aef15b338123cab84/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:11:37 compute-0 podman[228020]: 2025-10-02 12:11:37.179469789 +0000 UTC m=+0.305392066 container init 73211fb17aa0e5e2e3aa93ee97eb953c2e3a2e1e90045920dcfa17d5eaa174fd (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:11:37 compute-0 podman[228020]: 2025-10-02 12:11:37.184552297 +0000 UTC m=+0.310474554 container start 73211fb17aa0e5e2e3aa93ee97eb953c2e3a2e1e90045920dcfa17d5eaa174fd (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3)
Oct 02 12:11:37 compute-0 neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a[228041]: [NOTICE]   (228045) : New worker (228047) forked
Oct 02 12:11:37 compute-0 neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a[228041]: [NOTICE]   (228045) : Loading success.
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.345 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.431 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407097.4308333, 8ca5f1bd-aa74-4790-92de-0c18657746f2 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.432 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] VM Started (Lifecycle Event)
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.461 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.464 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407097.4310358, 8ca5f1bd-aa74-4790-92de-0c18657746f2 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.464 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] VM Paused (Lifecycle Event)
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.483 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.487 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.490 2 DEBUG nova.compute.manager [req-57eac3f4-6b00-46de-b1a8-f1fabd59bfa6 req-d5febded-96cb-4a4d-9a17-c335b250f418 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Received event network-vif-plugged-4d037109-fde7-4c13-b8b9-598da6a9ad57 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.491 2 DEBUG oslo_concurrency.lockutils [req-57eac3f4-6b00-46de-b1a8-f1fabd59bfa6 req-d5febded-96cb-4a4d-9a17-c335b250f418 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.491 2 DEBUG oslo_concurrency.lockutils [req-57eac3f4-6b00-46de-b1a8-f1fabd59bfa6 req-d5febded-96cb-4a4d-9a17-c335b250f418 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.491 2 DEBUG oslo_concurrency.lockutils [req-57eac3f4-6b00-46de-b1a8-f1fabd59bfa6 req-d5febded-96cb-4a4d-9a17-c335b250f418 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.491 2 DEBUG nova.compute.manager [req-57eac3f4-6b00-46de-b1a8-f1fabd59bfa6 req-d5febded-96cb-4a4d-9a17-c335b250f418 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Processing event network-vif-plugged-4d037109-fde7-4c13-b8b9-598da6a9ad57 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.491 2 DEBUG nova.compute.manager [req-57eac3f4-6b00-46de-b1a8-f1fabd59bfa6 req-d5febded-96cb-4a4d-9a17-c335b250f418 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Received event network-vif-plugged-4d037109-fde7-4c13-b8b9-598da6a9ad57 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.492 2 DEBUG oslo_concurrency.lockutils [req-57eac3f4-6b00-46de-b1a8-f1fabd59bfa6 req-d5febded-96cb-4a4d-9a17-c335b250f418 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.492 2 DEBUG oslo_concurrency.lockutils [req-57eac3f4-6b00-46de-b1a8-f1fabd59bfa6 req-d5febded-96cb-4a4d-9a17-c335b250f418 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.492 2 DEBUG oslo_concurrency.lockutils [req-57eac3f4-6b00-46de-b1a8-f1fabd59bfa6 req-d5febded-96cb-4a4d-9a17-c335b250f418 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.492 2 DEBUG nova.compute.manager [req-57eac3f4-6b00-46de-b1a8-f1fabd59bfa6 req-d5febded-96cb-4a4d-9a17-c335b250f418 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] No waiting events found dispatching network-vif-plugged-4d037109-fde7-4c13-b8b9-598da6a9ad57 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.493 2 WARNING nova.compute.manager [req-57eac3f4-6b00-46de-b1a8-f1fabd59bfa6 req-d5febded-96cb-4a4d-9a17-c335b250f418 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Received unexpected event network-vif-plugged-4d037109-fde7-4c13-b8b9-598da6a9ad57 for instance with vm_state building and task_state spawning.
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.493 2 DEBUG nova.compute.manager [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.498 2 DEBUG nova.virt.libvirt.driver [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.500 2 INFO nova.virt.libvirt.driver [-] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Instance spawned successfully.
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.500 2 DEBUG nova.virt.libvirt.driver [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.505 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.505 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407097.4972482, 8ca5f1bd-aa74-4790-92de-0c18657746f2 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.506 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] VM Resumed (Lifecycle Event)
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.518 2 DEBUG nova.virt.libvirt.driver [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.519 2 DEBUG nova.virt.libvirt.driver [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.519 2 DEBUG nova.virt.libvirt.driver [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.519 2 DEBUG nova.virt.libvirt.driver [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.520 2 DEBUG nova.virt.libvirt.driver [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.520 2 DEBUG nova.virt.libvirt.driver [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.530 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.533 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.553 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.592 2 INFO nova.compute.manager [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Took 5.73 seconds to spawn the instance on the hypervisor.
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.592 2 DEBUG nova.compute.manager [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.677 2 INFO nova.compute.manager [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Took 6.32 seconds to build instance.
Oct 02 12:11:37 compute-0 nova_compute[192079]: 2025-10-02 12:11:37.694 2 DEBUG oslo_concurrency.lockutils [None req-56244ab4-0f3b-43de-a5fe-c118f2306bef c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 6.441s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:11:39 compute-0 nova_compute[192079]: 2025-10-02 12:11:39.903 2 DEBUG oslo_concurrency.lockutils [None req-59ae4dba-e4d1-42ca-9c0d-2bcd0851fbc6 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Acquiring lock "8ca5f1bd-aa74-4790-92de-0c18657746f2" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:11:39 compute-0 nova_compute[192079]: 2025-10-02 12:11:39.904 2 DEBUG oslo_concurrency.lockutils [None req-59ae4dba-e4d1-42ca-9c0d-2bcd0851fbc6 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:11:39 compute-0 nova_compute[192079]: 2025-10-02 12:11:39.904 2 DEBUG nova.compute.manager [None req-59ae4dba-e4d1-42ca-9c0d-2bcd0851fbc6 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:11:39 compute-0 nova_compute[192079]: 2025-10-02 12:11:39.908 2 DEBUG nova.compute.manager [None req-59ae4dba-e4d1-42ca-9c0d-2bcd0851fbc6 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 do_stop_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3338
Oct 02 12:11:39 compute-0 nova_compute[192079]: 2025-10-02 12:11:39.909 2 DEBUG nova.objects.instance [None req-59ae4dba-e4d1-42ca-9c0d-2bcd0851fbc6 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lazy-loading 'flavor' on Instance uuid 8ca5f1bd-aa74-4790-92de-0c18657746f2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:11:39 compute-0 nova_compute[192079]: 2025-10-02 12:11:39.941 2 DEBUG nova.objects.instance [None req-59ae4dba-e4d1-42ca-9c0d-2bcd0851fbc6 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lazy-loading 'info_cache' on Instance uuid 8ca5f1bd-aa74-4790-92de-0c18657746f2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:11:39 compute-0 nova_compute[192079]: 2025-10-02 12:11:39.969 2 DEBUG nova.virt.libvirt.driver [None req-59ae4dba-e4d1-42ca-9c0d-2bcd0851fbc6 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Shutting down instance from state 1 _clean_shutdown /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4071
Oct 02 12:11:40 compute-0 ovn_controller[94336]: 2025-10-02T12:11:40Z|00183|binding|INFO|Releasing lport 1e2d82b4-a363-4c19-94d1-e62c1ba8e34a from this chassis (sb_readonly=0)
Oct 02 12:11:40 compute-0 nova_compute[192079]: 2025-10-02 12:11:40.255 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:40 compute-0 nova_compute[192079]: 2025-10-02 12:11:40.444 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:43 compute-0 podman[228056]: 2025-10-02 12:11:43.155391407 +0000 UTC m=+0.064988917 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm, 
maintainer=OpenStack Kubernetes Operator team, container_name=ceilometer_agent_compute)
Oct 02 12:11:43 compute-0 nova_compute[192079]: 2025-10-02 12:11:43.196 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:45 compute-0 nova_compute[192079]: 2025-10-02 12:11:45.447 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:46 compute-0 nova_compute[192079]: 2025-10-02 12:11:46.210 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:46 compute-0 nova_compute[192079]: 2025-10-02 12:11:46.816 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:48 compute-0 podman[228077]: 2025-10-02 12:11:48.154113391 +0000 UTC m=+0.061715387 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_id=multipathd, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=multipathd, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0)
Oct 02 12:11:48 compute-0 podman[228076]: 2025-10-02 12:11:48.155032835 +0000 UTC m=+0.067138823 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.openshift.expose-services=, name=ubi9-minimal, managed_by=edpm_ansible, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., container_name=openstack_network_exporter, architecture=x86_64, com.redhat.component=ubi9-minimal-container, version=9.6, io.buildah.version=1.33.7, io.openshift.tags=minimal rhel9, vcs-type=git, distribution-scope=public, maintainer=Red Hat, Inc., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', 
'/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, vendor=Red Hat, Inc., build-date=2025-08-20T13:12:41, config_id=edpm, url=https://catalog.redhat.com/en/search?searchType=containers, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, release=1755695350)
Oct 02 12:11:49 compute-0 ovn_controller[94336]: 2025-10-02T12:11:49Z|00184|binding|INFO|Releasing lport 1e2d82b4-a363-4c19-94d1-e62c1ba8e34a from this chassis (sb_readonly=0)
Oct 02 12:11:50 compute-0 nova_compute[192079]: 2025-10-02 12:11:50.010 2 DEBUG nova.virt.libvirt.driver [None req-59ae4dba-e4d1-42ca-9c0d-2bcd0851fbc6 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Instance in state 1 after 10 seconds - resending shutdown _clean_shutdown /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4101
Oct 02 12:11:50 compute-0 nova_compute[192079]: 2025-10-02 12:11:50.015 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:50 compute-0 nova_compute[192079]: 2025-10-02 12:11:50.450 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:50 compute-0 nova_compute[192079]: 2025-10-02 12:11:50.452 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:50 compute-0 nova_compute[192079]: 2025-10-02 12:11:50.902 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:11:53 compute-0 ovn_controller[94336]: 2025-10-02T12:11:53Z|00018|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:51:b5:9f 10.100.0.3
Oct 02 12:11:53 compute-0 ovn_controller[94336]: 2025-10-02T12:11:53Z|00019|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:51:b5:9f 10.100.0.3
Oct 02 12:11:54 compute-0 podman[228136]: 2025-10-02 12:11:54.149327698 +0000 UTC m=+0.057568415 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:11:54 compute-0 podman[228137]: 2025-10-02 12:11:54.168966698 +0000 UTC m=+0.072103438 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid, container_name=iscsid, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_managed=true)
Oct 02 12:11:55 compute-0 nova_compute[192079]: 2025-10-02 12:11:55.454 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:00 compute-0 nova_compute[192079]: 2025-10-02 12:12:00.455 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:01 compute-0 nova_compute[192079]: 2025-10-02 12:12:01.060 2 DEBUG nova.virt.libvirt.driver [None req-59ae4dba-e4d1-42ca-9c0d-2bcd0851fbc6 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Instance in state 1 after 21 seconds - resending shutdown _clean_shutdown /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4101
Oct 02 12:12:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:02.213 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:02.214 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:02.214 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:03 compute-0 nova_compute[192079]: 2025-10-02 12:12:03.003 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:03 compute-0 kernel: tap4d037109-fd (unregistering): left promiscuous mode
Oct 02 12:12:03 compute-0 NetworkManager[51160]: <info>  [1759407123.2865] device (tap4d037109-fd): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:12:03 compute-0 ovn_controller[94336]: 2025-10-02T12:12:03Z|00185|binding|INFO|Releasing lport 4d037109-fde7-4c13-b8b9-598da6a9ad57 from this chassis (sb_readonly=0)
Oct 02 12:12:03 compute-0 ovn_controller[94336]: 2025-10-02T12:12:03Z|00186|binding|INFO|Setting lport 4d037109-fde7-4c13-b8b9-598da6a9ad57 down in Southbound
Oct 02 12:12:03 compute-0 ovn_controller[94336]: 2025-10-02T12:12:03Z|00187|binding|INFO|Removing iface tap4d037109-fd ovn-installed in OVS
Oct 02 12:12:03 compute-0 nova_compute[192079]: 2025-10-02 12:12:03.292 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:03 compute-0 nova_compute[192079]: 2025-10-02 12:12:03.295 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:03.302 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:51:b5:9f 10.100.0.3'], port_security=['fa:16:3e:51:b5:9f 10.100.0.3'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.3/28', 'neutron:device_id': '8ca5f1bd-aa74-4790-92de-0c18657746f2', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-2bdfd186-139e-456a-92e9-4dc9c37a846a', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '20417475a6a149d5bc47976f4da9a4ae', 'neutron:revision_number': '4', 'neutron:security_group_ids': 'c517fcc5-4e7c-4008-ac85-cb7cba93cd1e', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=c8a937e8-285b-47d1-b87a-47c75465be5a, chassis=[], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=4d037109-fde7-4c13-b8b9-598da6a9ad57) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:12:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:03.303 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 4d037109-fde7-4c13-b8b9-598da6a9ad57 in datapath 2bdfd186-139e-456a-92e9-4dc9c37a846a unbound from our chassis
Oct 02 12:12:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:03.304 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 2bdfd186-139e-456a-92e9-4dc9c37a846a, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:12:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:03.307 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b1dff13f-0782-4636-9571-af3655994acb]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:03.307 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a namespace which is not needed anymore
Oct 02 12:12:03 compute-0 nova_compute[192079]: 2025-10-02 12:12:03.311 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:03 compute-0 systemd[1]: machine-qemu\x2d28\x2dinstance\x2d00000039.scope: Deactivated successfully.
Oct 02 12:12:03 compute-0 systemd[1]: machine-qemu\x2d28\x2dinstance\x2d00000039.scope: Consumed 14.665s CPU time.
Oct 02 12:12:03 compute-0 systemd-machined[152150]: Machine qemu-28-instance-00000039 terminated.
Oct 02 12:12:03 compute-0 neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a[228041]: [NOTICE]   (228045) : haproxy version is 2.8.14-c23fe91
Oct 02 12:12:03 compute-0 neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a[228041]: [NOTICE]   (228045) : path to executable is /usr/sbin/haproxy
Oct 02 12:12:03 compute-0 neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a[228041]: [WARNING]  (228045) : Exiting Master process...
Oct 02 12:12:03 compute-0 neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a[228041]: [WARNING]  (228045) : Exiting Master process...
Oct 02 12:12:03 compute-0 neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a[228041]: [ALERT]    (228045) : Current worker (228047) exited with code 143 (Terminated)
Oct 02 12:12:03 compute-0 neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a[228041]: [WARNING]  (228045) : All workers exited. Exiting... (0)
Oct 02 12:12:03 compute-0 systemd[1]: libpod-73211fb17aa0e5e2e3aa93ee97eb953c2e3a2e1e90045920dcfa17d5eaa174fd.scope: Deactivated successfully.
Oct 02 12:12:03 compute-0 podman[228204]: 2025-10-02 12:12:03.597779865 +0000 UTC m=+0.193745881 container died 73211fb17aa0e5e2e3aa93ee97eb953c2e3a2e1e90045920dcfa17d5eaa174fd (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:12:03 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-73211fb17aa0e5e2e3aa93ee97eb953c2e3a2e1e90045920dcfa17d5eaa174fd-userdata-shm.mount: Deactivated successfully.
Oct 02 12:12:03 compute-0 systemd[1]: var-lib-containers-storage-overlay-405e760131fa1d5dbb8dc0d16e761f71a16501b14c792e9aef15b338123cab84-merged.mount: Deactivated successfully.
Oct 02 12:12:03 compute-0 podman[228204]: 2025-10-02 12:12:03.971757801 +0000 UTC m=+0.567723807 container cleanup 73211fb17aa0e5e2e3aa93ee97eb953c2e3a2e1e90045920dcfa17d5eaa174fd (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001)
Oct 02 12:12:03 compute-0 systemd[1]: libpod-conmon-73211fb17aa0e5e2e3aa93ee97eb953c2e3a2e1e90045920dcfa17d5eaa174fd.scope: Deactivated successfully.
Oct 02 12:12:04 compute-0 nova_compute[192079]: 2025-10-02 12:12:04.082 2 INFO nova.virt.libvirt.driver [None req-59ae4dba-e4d1-42ca-9c0d-2bcd0851fbc6 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Instance shutdown successfully after 24 seconds.
Oct 02 12:12:04 compute-0 nova_compute[192079]: 2025-10-02 12:12:04.090 2 INFO nova.virt.libvirt.driver [-] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Instance destroyed successfully.
Oct 02 12:12:04 compute-0 nova_compute[192079]: 2025-10-02 12:12:04.090 2 DEBUG nova.objects.instance [None req-59ae4dba-e4d1-42ca-9c0d-2bcd0851fbc6 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lazy-loading 'numa_topology' on Instance uuid 8ca5f1bd-aa74-4790-92de-0c18657746f2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:12:04 compute-0 nova_compute[192079]: 2025-10-02 12:12:04.111 2 DEBUG nova.compute.manager [None req-59ae4dba-e4d1-42ca-9c0d-2bcd0851fbc6 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:12:04 compute-0 nova_compute[192079]: 2025-10-02 12:12:04.203 2 DEBUG oslo_concurrency.lockutils [None req-59ae4dba-e4d1-42ca-9c0d-2bcd0851fbc6 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 24.299s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:04 compute-0 podman[228249]: 2025-10-02 12:12:04.260280861 +0000 UTC m=+0.253857064 container remove 73211fb17aa0e5e2e3aa93ee97eb953c2e3a2e1e90045920dcfa17d5eaa174fd (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001)
Oct 02 12:12:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:04.271 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8490d7a6-4786-4ae3-8982-cdea9774cffa]: (4, ('Thu Oct  2 12:12:03 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a (73211fb17aa0e5e2e3aa93ee97eb953c2e3a2e1e90045920dcfa17d5eaa174fd)\n73211fb17aa0e5e2e3aa93ee97eb953c2e3a2e1e90045920dcfa17d5eaa174fd\nThu Oct  2 12:12:03 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a (73211fb17aa0e5e2e3aa93ee97eb953c2e3a2e1e90045920dcfa17d5eaa174fd)\n73211fb17aa0e5e2e3aa93ee97eb953c2e3a2e1e90045920dcfa17d5eaa174fd\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:04.275 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[bc32e00b-2096-4942-9e95-8917b1a38fa0]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:04.277 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap2bdfd186-10, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:12:04 compute-0 nova_compute[192079]: 2025-10-02 12:12:04.281 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:04 compute-0 kernel: tap2bdfd186-10: left promiscuous mode
Oct 02 12:12:04 compute-0 nova_compute[192079]: 2025-10-02 12:12:04.313 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:04.317 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9ad6c177-33ad-4821-8c69-5fdc749f80a9]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:04.344 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3dc2c478-be0e-4b71-9cca-3d1792a4e8cc]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:04.345 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b92da8b4-8c4f-41d9-9721-1d2e31f45ae0]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:04.359 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7e65f718-0c77-44e8-b67d-b077c3ce9c57]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 509389, 'reachable_time': 30025, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 228292, 'error': None, 'target': 'ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:04.361 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:12:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:04.361 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[e28d5f2e-d7dc-4487-abfd-40b01643bc75]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:04 compute-0 systemd[1]: run-netns-ovnmeta\x2d2bdfd186\x2d139e\x2d456a\x2d92e9\x2d4dc9c37a846a.mount: Deactivated successfully.
Oct 02 12:12:04 compute-0 podman[228267]: 2025-10-02 12:12:04.391874133 +0000 UTC m=+0.055389395 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 12:12:04 compute-0 podman[228263]: 2025-10-02 12:12:04.411787831 +0000 UTC m=+0.069038384 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, 
tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_metadata_agent, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_managed=true, io.buildah.version=1.41.3, container_name=ovn_metadata_agent, org.label-schema.schema-version=1.0)
Oct 02 12:12:04 compute-0 podman[228265]: 2025-10-02 12:12:04.425272785 +0000 UTC m=+0.087698058 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, config_id=ovn_controller, container_name=ovn_controller, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, managed_by=edpm_ansible, org.label-schema.build-date=20251001)
Oct 02 12:12:05 compute-0 nova_compute[192079]: 2025-10-02 12:12:05.458 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:05 compute-0 nova_compute[192079]: 2025-10-02 12:12:05.469 2 DEBUG nova.compute.manager [req-d234c798-ed16-49a4-995b-706490750da0 req-244f57dc-74f3-4927-82bd-83bc38929419 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Received event network-vif-unplugged-4d037109-fde7-4c13-b8b9-598da6a9ad57 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:12:05 compute-0 nova_compute[192079]: 2025-10-02 12:12:05.470 2 DEBUG oslo_concurrency.lockutils [req-d234c798-ed16-49a4-995b-706490750da0 req-244f57dc-74f3-4927-82bd-83bc38929419 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:05 compute-0 nova_compute[192079]: 2025-10-02 12:12:05.470 2 DEBUG oslo_concurrency.lockutils [req-d234c798-ed16-49a4-995b-706490750da0 req-244f57dc-74f3-4927-82bd-83bc38929419 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:05 compute-0 nova_compute[192079]: 2025-10-02 12:12:05.470 2 DEBUG oslo_concurrency.lockutils [req-d234c798-ed16-49a4-995b-706490750da0 req-244f57dc-74f3-4927-82bd-83bc38929419 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:05 compute-0 nova_compute[192079]: 2025-10-02 12:12:05.471 2 DEBUG nova.compute.manager [req-d234c798-ed16-49a4-995b-706490750da0 req-244f57dc-74f3-4927-82bd-83bc38929419 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] No waiting events found dispatching network-vif-unplugged-4d037109-fde7-4c13-b8b9-598da6a9ad57 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:12:05 compute-0 nova_compute[192079]: 2025-10-02 12:12:05.471 2 WARNING nova.compute.manager [req-d234c798-ed16-49a4-995b-706490750da0 req-244f57dc-74f3-4927-82bd-83bc38929419 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Received unexpected event network-vif-unplugged-4d037109-fde7-4c13-b8b9-598da6a9ad57 for instance with vm_state stopped and task_state rebuilding.
Oct 02 12:12:05 compute-0 nova_compute[192079]: 2025-10-02 12:12:05.472 2 DEBUG nova.compute.manager [req-d234c798-ed16-49a4-995b-706490750da0 req-244f57dc-74f3-4927-82bd-83bc38929419 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Received event network-vif-plugged-4d037109-fde7-4c13-b8b9-598da6a9ad57 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:12:05 compute-0 nova_compute[192079]: 2025-10-02 12:12:05.472 2 DEBUG oslo_concurrency.lockutils [req-d234c798-ed16-49a4-995b-706490750da0 req-244f57dc-74f3-4927-82bd-83bc38929419 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:05 compute-0 nova_compute[192079]: 2025-10-02 12:12:05.472 2 DEBUG oslo_concurrency.lockutils [req-d234c798-ed16-49a4-995b-706490750da0 req-244f57dc-74f3-4927-82bd-83bc38929419 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:05 compute-0 nova_compute[192079]: 2025-10-02 12:12:05.473 2 DEBUG oslo_concurrency.lockutils [req-d234c798-ed16-49a4-995b-706490750da0 req-244f57dc-74f3-4927-82bd-83bc38929419 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:05 compute-0 nova_compute[192079]: 2025-10-02 12:12:05.473 2 DEBUG nova.compute.manager [req-d234c798-ed16-49a4-995b-706490750da0 req-244f57dc-74f3-4927-82bd-83bc38929419 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] No waiting events found dispatching network-vif-plugged-4d037109-fde7-4c13-b8b9-598da6a9ad57 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:12:05 compute-0 nova_compute[192079]: 2025-10-02 12:12:05.473 2 WARNING nova.compute.manager [req-d234c798-ed16-49a4-995b-706490750da0 req-244f57dc-74f3-4927-82bd-83bc38929419 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Received unexpected event network-vif-plugged-4d037109-fde7-4c13-b8b9-598da6a9ad57 for instance with vm_state stopped and task_state rebuilding.
Oct 02 12:12:05 compute-0 nova_compute[192079]: 2025-10-02 12:12:05.822 2 INFO nova.compute.manager [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Rebuilding instance
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.088 2 DEBUG nova.compute.manager [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.191 2 DEBUG nova.objects.instance [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lazy-loading 'pci_requests' on Instance uuid 8ca5f1bd-aa74-4790-92de-0c18657746f2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.203 2 DEBUG nova.objects.instance [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lazy-loading 'pci_devices' on Instance uuid 8ca5f1bd-aa74-4790-92de-0c18657746f2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.239 2 DEBUG nova.objects.instance [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lazy-loading 'resources' on Instance uuid 8ca5f1bd-aa74-4790-92de-0c18657746f2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.252 2 DEBUG nova.objects.instance [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lazy-loading 'migration_context' on Instance uuid 8ca5f1bd-aa74-4790-92de-0c18657746f2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.290 2 DEBUG nova.objects.instance [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Trying to apply a migration context that does not seem to be set for this instance apply_migration_context /usr/lib/python3.9/site-packages/nova/objects/instance.py:1032
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.295 2 INFO nova.virt.libvirt.driver [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Instance already shutdown.
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.302 2 INFO nova.virt.libvirt.driver [-] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Instance destroyed successfully.
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.310 2 INFO nova.virt.libvirt.driver [-] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Instance destroyed successfully.
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.311 2 DEBUG nova.virt.libvirt.vif [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:11:30Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-tempest.common.compute-instance-645901578',display_name='tempest-tempest.common.compute-instance-645901578',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-tempest-common-compute-instance-645901578',id=57,image_ref='062d9f80-76b6-42ce-bee7-0fb82a008353',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:11:37Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=4,progress=0,project_id='20417475a6a149d5bc47976f4da9a4ae',ramdisk_id='',reservation_id='r-rok8i8zh',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='062d9f80-76b6-42ce-bee7-0fb82a008353',image_container_format='bare',image_disk_format='qcow2',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-ServerActionsTestOtherA-352727288',owner_user_name='tempest-ServerActionsTestOtherA-352727288-project-member'},tags
=<?>,task_state='rebuilding',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:12:05Z,user_data=None,user_id='c2b9eab3da414692b3942505e3441920',uuid=8ca5f1bd-aa74-4790-92de-0c18657746f2,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='stopped') vif={"id": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "address": "fa:16:3e:51:b5:9f", "network": {"id": "2bdfd186-139e-456a-92e9-4dc9c37a846a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-953736127-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "20417475a6a149d5bc47976f4da9a4ae", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap4d037109-fd", "ovs_interfaceid": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.312 2 DEBUG nova.network.os_vif_util [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Converting VIF {"id": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "address": "fa:16:3e:51:b5:9f", "network": {"id": "2bdfd186-139e-456a-92e9-4dc9c37a846a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-953736127-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "20417475a6a149d5bc47976f4da9a4ae", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap4d037109-fd", "ovs_interfaceid": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.313 2 DEBUG nova.network.os_vif_util [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:51:b5:9f,bridge_name='br-int',has_traffic_filtering=True,id=4d037109-fde7-4c13-b8b9-598da6a9ad57,network=Network(2bdfd186-139e-456a-92e9-4dc9c37a846a),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap4d037109-fd') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.313 2 DEBUG os_vif [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:51:b5:9f,bridge_name='br-int',has_traffic_filtering=True,id=4d037109-fde7-4c13-b8b9-598da6a9ad57,network=Network(2bdfd186-139e-456a-92e9-4dc9c37a846a),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap4d037109-fd') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.315 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.316 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap4d037109-fd, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.317 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.321 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.325 2 INFO os_vif [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:51:b5:9f,bridge_name='br-int',has_traffic_filtering=True,id=4d037109-fde7-4c13-b8b9-598da6a9ad57,network=Network(2bdfd186-139e-456a-92e9-4dc9c37a846a),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap4d037109-fd')
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.326 2 INFO nova.virt.libvirt.driver [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Deleting instance files /var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2_del
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.328 2 INFO nova.virt.libvirt.driver [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Deletion of /var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2_del complete
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.643 2 DEBUG nova.virt.libvirt.driver [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.645 2 INFO nova.virt.libvirt.driver [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Creating image(s)
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.647 2 DEBUG oslo_concurrency.lockutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Acquiring lock "/var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.647 2 DEBUG oslo_concurrency.lockutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "/var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.649 2 DEBUG oslo_concurrency.lockutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "/var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.677 2 DEBUG oslo_concurrency.processutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.768 2 DEBUG oslo_concurrency.processutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2 --force-share --output=json" returned: 0 in 0.091s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.770 2 DEBUG oslo_concurrency.lockutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Acquiring lock "d7f074efa852dc950deac120296f6eecf48a40d2" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.771 2 DEBUG oslo_concurrency.lockutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "d7f074efa852dc950deac120296f6eecf48a40d2" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.796 2 DEBUG oslo_concurrency.processutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.870 2 DEBUG oslo_concurrency.processutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2 --force-share --output=json" returned: 0 in 0.074s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.871 2 DEBUG oslo_concurrency.processutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2,backing_fmt=raw /var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.988 2 DEBUG oslo_concurrency.processutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2,backing_fmt=raw /var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/disk 1073741824" returned: 0 in 0.117s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.990 2 DEBUG oslo_concurrency.lockutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "d7f074efa852dc950deac120296f6eecf48a40d2" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.219s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:06 compute-0 nova_compute[192079]: 2025-10-02 12:12:06.991 2 DEBUG oslo_concurrency.processutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.084 2 DEBUG oslo_concurrency.processutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2 --force-share --output=json" returned: 0 in 0.094s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.086 2 DEBUG nova.virt.disk.api [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Checking if we can resize image /var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.086 2 DEBUG oslo_concurrency.processutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.142 2 DEBUG oslo_concurrency.processutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/disk --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.143 2 DEBUG nova.virt.disk.api [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Cannot resize image /var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.144 2 DEBUG nova.virt.libvirt.driver [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.144 2 DEBUG nova.virt.libvirt.driver [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Ensure instance console log exists: /var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.144 2 DEBUG oslo_concurrency.lockutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.145 2 DEBUG oslo_concurrency.lockutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.145 2 DEBUG oslo_concurrency.lockutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.147 2 DEBUG nova.virt.libvirt.driver [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Start _get_guest_xml network_info=[{"id": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "address": "fa:16:3e:51:b5:9f", "network": {"id": "2bdfd186-139e-456a-92e9-4dc9c37a846a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-953736127-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "20417475a6a149d5bc47976f4da9a4ae", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap4d037109-fd", "ovs_interfaceid": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:28Z,direct_url=<?>,disk_format='qcow2',id=062d9f80-76b6-42ce-bee7-0fb82a008353,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img_alt',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:29Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.152 2 WARNING nova.virt.libvirt.driver [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.: NotImplementedError
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.157 2 DEBUG nova.virt.libvirt.host [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.158 2 DEBUG nova.virt.libvirt.host [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.161 2 DEBUG nova.virt.libvirt.host [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.162 2 DEBUG nova.virt.libvirt.host [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.163 2 DEBUG nova.virt.libvirt.driver [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.164 2 DEBUG nova.virt.hardware [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:28Z,direct_url=<?>,disk_format='qcow2',id=062d9f80-76b6-42ce-bee7-0fb82a008353,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img_alt',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:29Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.164 2 DEBUG nova.virt.hardware [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.164 2 DEBUG nova.virt.hardware [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.165 2 DEBUG nova.virt.hardware [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.165 2 DEBUG nova.virt.hardware [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.165 2 DEBUG nova.virt.hardware [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.166 2 DEBUG nova.virt.hardware [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.166 2 DEBUG nova.virt.hardware [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.166 2 DEBUG nova.virt.hardware [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.167 2 DEBUG nova.virt.hardware [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.167 2 DEBUG nova.virt.hardware [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.167 2 DEBUG nova.objects.instance [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lazy-loading 'vcpu_model' on Instance uuid 8ca5f1bd-aa74-4790-92de-0c18657746f2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.201 2 DEBUG nova.virt.libvirt.vif [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=True,config_drive='True',created_at=2025-10-02T12:11:30Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-tempest.common.compute-instance-645901578',display_name='tempest-tempest.common.compute-instance-645901578',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-tempest-common-compute-instance-645901578',id=57,image_ref='062d9f80-76b6-42ce-bee7-0fb82a008353',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:11:37Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=4,progress=0,project_id='20417475a6a149d5bc47976f4da9a4ae',ramdisk_id='',reservation_id='r-rok8i8zh',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',clean_attempts='1',image_base_image_ref='062d9f80-76b6-42ce-bee7-0fb82a008353',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-ServerActionsTestOtherA-352727288',owner_user_name='tempest-ServerAct
ionsTestOtherA-352727288-project-member'},tags=<?>,task_state='rebuild_spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:12:06Z,user_data=None,user_id='c2b9eab3da414692b3942505e3441920',uuid=8ca5f1bd-aa74-4790-92de-0c18657746f2,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='stopped') vif={"id": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "address": "fa:16:3e:51:b5:9f", "network": {"id": "2bdfd186-139e-456a-92e9-4dc9c37a846a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-953736127-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "20417475a6a149d5bc47976f4da9a4ae", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap4d037109-fd", "ovs_interfaceid": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.201 2 DEBUG nova.network.os_vif_util [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Converting VIF {"id": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "address": "fa:16:3e:51:b5:9f", "network": {"id": "2bdfd186-139e-456a-92e9-4dc9c37a846a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-953736127-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "20417475a6a149d5bc47976f4da9a4ae", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap4d037109-fd", "ovs_interfaceid": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.202 2 DEBUG nova.network.os_vif_util [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:51:b5:9f,bridge_name='br-int',has_traffic_filtering=True,id=4d037109-fde7-4c13-b8b9-598da6a9ad57,network=Network(2bdfd186-139e-456a-92e9-4dc9c37a846a),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap4d037109-fd') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.204 2 DEBUG nova.virt.libvirt.driver [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:12:07 compute-0 nova_compute[192079]:   <uuid>8ca5f1bd-aa74-4790-92de-0c18657746f2</uuid>
Oct 02 12:12:07 compute-0 nova_compute[192079]:   <name>instance-00000039</name>
Oct 02 12:12:07 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:12:07 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:12:07 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:12:07 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:       <nova:name>tempest-tempest.common.compute-instance-645901578</nova:name>
Oct 02 12:12:07 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:12:07</nova:creationTime>
Oct 02 12:12:07 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:12:07 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:12:07 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:12:07 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:12:07 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:12:07 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:12:07 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:12:07 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:12:07 compute-0 nova_compute[192079]:         <nova:user uuid="c2b9eab3da414692b3942505e3441920">tempest-ServerActionsTestOtherA-352727288-project-member</nova:user>
Oct 02 12:12:07 compute-0 nova_compute[192079]:         <nova:project uuid="20417475a6a149d5bc47976f4da9a4ae">tempest-ServerActionsTestOtherA-352727288</nova:project>
Oct 02 12:12:07 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:12:07 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="062d9f80-76b6-42ce-bee7-0fb82a008353"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:12:07 compute-0 nova_compute[192079]:         <nova:port uuid="4d037109-fde7-4c13-b8b9-598da6a9ad57">
Oct 02 12:12:07 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.3" ipVersion="4"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:12:07 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:12:07 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:12:07 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <system>
Oct 02 12:12:07 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:12:07 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:12:07 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:12:07 compute-0 nova_compute[192079]:       <entry name="serial">8ca5f1bd-aa74-4790-92de-0c18657746f2</entry>
Oct 02 12:12:07 compute-0 nova_compute[192079]:       <entry name="uuid">8ca5f1bd-aa74-4790-92de-0c18657746f2</entry>
Oct 02 12:12:07 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     </system>
Oct 02 12:12:07 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:12:07 compute-0 nova_compute[192079]:   <os>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:   </os>
Oct 02 12:12:07 compute-0 nova_compute[192079]:   <features>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:   </features>
Oct 02 12:12:07 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:12:07 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:12:07 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:12:07 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/disk"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:12:07 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/disk.config"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:12:07 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:51:b5:9f"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:       <target dev="tap4d037109-fd"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:12:07 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/console.log" append="off"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <video>
Oct 02 12:12:07 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     </video>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:12:07 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:12:07 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:12:07 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:12:07 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:12:07 compute-0 nova_compute[192079]: </domain>
Oct 02 12:12:07 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.206 2 DEBUG nova.compute.manager [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Preparing to wait for external event network-vif-plugged-4d037109-fde7-4c13-b8b9-598da6a9ad57 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.206 2 DEBUG oslo_concurrency.lockutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Acquiring lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.206 2 DEBUG oslo_concurrency.lockutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.206 2 DEBUG oslo_concurrency.lockutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.207 2 DEBUG nova.virt.libvirt.vif [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=True,config_drive='True',created_at=2025-10-02T12:11:30Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-tempest.common.compute-instance-645901578',display_name='tempest-tempest.common.compute-instance-645901578',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-tempest-common-compute-instance-645901578',id=57,image_ref='062d9f80-76b6-42ce-bee7-0fb82a008353',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:11:37Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=4,progress=0,project_id='20417475a6a149d5bc47976f4da9a4ae',ramdisk_id='',reservation_id='r-rok8i8zh',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',clean_attempts='1',image_base_image_ref='062d9f80-76b6-42ce-bee7-0fb82a008353',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-ServerActionsTestOtherA-352727288',owner_user_name='tempest-ServerAct
ionsTestOtherA-352727288-project-member'},tags=<?>,task_state='rebuild_spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:12:06Z,user_data=None,user_id='c2b9eab3da414692b3942505e3441920',uuid=8ca5f1bd-aa74-4790-92de-0c18657746f2,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='stopped') vif={"id": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "address": "fa:16:3e:51:b5:9f", "network": {"id": "2bdfd186-139e-456a-92e9-4dc9c37a846a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-953736127-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "20417475a6a149d5bc47976f4da9a4ae", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap4d037109-fd", "ovs_interfaceid": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.208 2 DEBUG nova.network.os_vif_util [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Converting VIF {"id": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "address": "fa:16:3e:51:b5:9f", "network": {"id": "2bdfd186-139e-456a-92e9-4dc9c37a846a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-953736127-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "20417475a6a149d5bc47976f4da9a4ae", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap4d037109-fd", "ovs_interfaceid": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.208 2 DEBUG nova.network.os_vif_util [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:51:b5:9f,bridge_name='br-int',has_traffic_filtering=True,id=4d037109-fde7-4c13-b8b9-598da6a9ad57,network=Network(2bdfd186-139e-456a-92e9-4dc9c37a846a),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap4d037109-fd') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.209 2 DEBUG os_vif [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:51:b5:9f,bridge_name='br-int',has_traffic_filtering=True,id=4d037109-fde7-4c13-b8b9-598da6a9ad57,network=Network(2bdfd186-139e-456a-92e9-4dc9c37a846a),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap4d037109-fd') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.209 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.210 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.210 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.213 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.213 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap4d037109-fd, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.214 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap4d037109-fd, col_values=(('external_ids', {'iface-id': '4d037109-fde7-4c13-b8b9-598da6a9ad57', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:51:b5:9f', 'vm-uuid': '8ca5f1bd-aa74-4790-92de-0c18657746f2'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.279 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:07 compute-0 NetworkManager[51160]: <info>  [1759407127.2803] manager: (tap4d037109-fd): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/93)
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.286 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.287 2 INFO os_vif [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:51:b5:9f,bridge_name='br-int',has_traffic_filtering=True,id=4d037109-fde7-4c13-b8b9-598da6a9ad57,network=Network(2bdfd186-139e-456a-92e9-4dc9c37a846a),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap4d037109-fd')
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.414 2 DEBUG nova.virt.libvirt.driver [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.414 2 DEBUG nova.virt.libvirt.driver [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.415 2 DEBUG nova.virt.libvirt.driver [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] No VIF found with MAC fa:16:3e:51:b5:9f, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.415 2 INFO nova.virt.libvirt.driver [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Using config drive
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.441 2 DEBUG nova.objects.instance [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lazy-loading 'ec2_ids' on Instance uuid 8ca5f1bd-aa74-4790-92de-0c18657746f2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:12:07 compute-0 nova_compute[192079]: 2025-10-02 12:12:07.494 2 DEBUG nova.objects.instance [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lazy-loading 'keypairs' on Instance uuid 8ca5f1bd-aa74-4790-92de-0c18657746f2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:12:08 compute-0 nova_compute[192079]: 2025-10-02 12:12:08.087 2 INFO nova.virt.libvirt.driver [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Creating config drive at /var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/disk.config
Oct 02 12:12:08 compute-0 nova_compute[192079]: 2025-10-02 12:12:08.097 2 DEBUG oslo_concurrency.processutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpox7_r35p execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:12:08 compute-0 nova_compute[192079]: 2025-10-02 12:12:08.123 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:08 compute-0 nova_compute[192079]: 2025-10-02 12:12:08.225 2 DEBUG oslo_concurrency.processutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpox7_r35p" returned: 0 in 0.128s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:12:08 compute-0 kernel: tap4d037109-fd: entered promiscuous mode
Oct 02 12:12:08 compute-0 NetworkManager[51160]: <info>  [1759407128.3037] manager: (tap4d037109-fd): new Tun device (/org/freedesktop/NetworkManager/Devices/94)
Oct 02 12:12:08 compute-0 ovn_controller[94336]: 2025-10-02T12:12:08Z|00188|binding|INFO|Claiming lport 4d037109-fde7-4c13-b8b9-598da6a9ad57 for this chassis.
Oct 02 12:12:08 compute-0 ovn_controller[94336]: 2025-10-02T12:12:08Z|00189|binding|INFO|4d037109-fde7-4c13-b8b9-598da6a9ad57: Claiming fa:16:3e:51:b5:9f 10.100.0.3
Oct 02 12:12:08 compute-0 nova_compute[192079]: 2025-10-02 12:12:08.305 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:08.314 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:51:b5:9f 10.100.0.3'], port_security=['fa:16:3e:51:b5:9f 10.100.0.3'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.3/28', 'neutron:device_id': '8ca5f1bd-aa74-4790-92de-0c18657746f2', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-2bdfd186-139e-456a-92e9-4dc9c37a846a', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '20417475a6a149d5bc47976f4da9a4ae', 'neutron:revision_number': '5', 'neutron:security_group_ids': 'c517fcc5-4e7c-4008-ac85-cb7cba93cd1e', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=c8a937e8-285b-47d1-b87a-47c75465be5a, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=4d037109-fde7-4c13-b8b9-598da6a9ad57) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:08.315 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 4d037109-fde7-4c13-b8b9-598da6a9ad57 in datapath 2bdfd186-139e-456a-92e9-4dc9c37a846a bound to our chassis
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:08.317 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 2bdfd186-139e-456a-92e9-4dc9c37a846a
Oct 02 12:12:08 compute-0 ovn_controller[94336]: 2025-10-02T12:12:08Z|00190|binding|INFO|Setting lport 4d037109-fde7-4c13-b8b9-598da6a9ad57 ovn-installed in OVS
Oct 02 12:12:08 compute-0 ovn_controller[94336]: 2025-10-02T12:12:08Z|00191|binding|INFO|Setting lport 4d037109-fde7-4c13-b8b9-598da6a9ad57 up in Southbound
Oct 02 12:12:08 compute-0 nova_compute[192079]: 2025-10-02 12:12:08.318 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:08 compute-0 nova_compute[192079]: 2025-10-02 12:12:08.325 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:08.326 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[95b42e43-c936-4fc3-ba85-6b11a4578032]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:08.327 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap2bdfd186-11 in ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:08.329 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap2bdfd186-10 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:08.329 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[fb3037bb-2982-487d-8ba1-9165c25a9027]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:08.330 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[68df7c96-4345-4ec5-af78-34a6e91dbd25]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:08 compute-0 systemd-udevd[228372]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:08.342 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[3d4659dc-f3f0-435f-9846-e89bfc695328]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:08 compute-0 systemd-machined[152150]: New machine qemu-29-instance-00000039.
Oct 02 12:12:08 compute-0 NetworkManager[51160]: <info>  [1759407128.3531] device (tap4d037109-fd): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:12:08 compute-0 NetworkManager[51160]: <info>  [1759407128.3544] device (tap4d037109-fd): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:12:08 compute-0 systemd[1]: Started Virtual Machine qemu-29-instance-00000039.
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:08.367 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c27c92e8-c119-4ee6-b3b0-a09d74995696]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:08.397 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[b5945ce7-c531-4a0e-90dd-a0a28d568f3b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:08.403 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[dd8cf9b0-c6ff-46eb-8bdc-53f79fe693ee]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:08 compute-0 systemd-udevd[228377]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:12:08 compute-0 NetworkManager[51160]: <info>  [1759407128.4042] manager: (tap2bdfd186-10): new Veth device (/org/freedesktop/NetworkManager/Devices/95)
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:08.434 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[cc178aba-4c2e-478f-b5ae-1b881d98f807]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:08.439 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[b4cd00b1-ed27-4fd5-8ec0-f3a871650cb4]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:08 compute-0 NetworkManager[51160]: <info>  [1759407128.4609] device (tap2bdfd186-10): carrier: link connected
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:08.470 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[1bc89403-a0d4-4c1f-a09e-66808fb59f1c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:08.490 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1464a286-731e-40b8-8801-8f33a0d1cccc]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap2bdfd186-11'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:43:b7:89'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 56], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 512608, 'reachable_time': 36456, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 228405, 'error': None, 'target': 'ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:08.511 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[33087875-3725-4db3-9969-a9706e27d56d]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe43:b789'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 512608, 'tstamp': 512608}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 228406, 'error': None, 'target': 'ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:08.525 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2953d6e8-57e7-4a9f-a476-5c29b90dbf1e]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap2bdfd186-11'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:43:b7:89'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 56], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 512608, 'reachable_time': 36456, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 228407, 'error': None, 'target': 'ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:08.563 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7ba18259-3cba-42bb-b6cd-4f945e24ef65]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:08.624 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[071875ff-8179-455b-8551-195b82b1e313]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:08.625 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap2bdfd186-10, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:08.626 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:08.626 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap2bdfd186-10, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:12:08 compute-0 NetworkManager[51160]: <info>  [1759407128.6291] manager: (tap2bdfd186-10): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/96)
Oct 02 12:12:08 compute-0 nova_compute[192079]: 2025-10-02 12:12:08.628 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:08 compute-0 kernel: tap2bdfd186-10: entered promiscuous mode
Oct 02 12:12:08 compute-0 nova_compute[192079]: 2025-10-02 12:12:08.632 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:08.633 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap2bdfd186-10, col_values=(('external_ids', {'iface-id': '1e2d82b4-a363-4c19-94d1-e62c1ba8e34a'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:12:08 compute-0 nova_compute[192079]: 2025-10-02 12:12:08.634 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:08 compute-0 ovn_controller[94336]: 2025-10-02T12:12:08Z|00192|binding|INFO|Releasing lport 1e2d82b4-a363-4c19-94d1-e62c1ba8e34a from this chassis (sb_readonly=0)
Oct 02 12:12:08 compute-0 nova_compute[192079]: 2025-10-02 12:12:08.652 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:08.655 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/2bdfd186-139e-456a-92e9-4dc9c37a846a.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/2bdfd186-139e-456a-92e9-4dc9c37a846a.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:08.656 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0cedca67-58f8-42fd-b95b-861c08ade59a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:08.658 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-2bdfd186-139e-456a-92e9-4dc9c37a846a
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/2bdfd186-139e-456a-92e9-4dc9c37a846a.pid.haproxy
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 2bdfd186-139e-456a-92e9-4dc9c37a846a
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:12:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:08.661 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a', 'env', 'PROCESS_TAG=haproxy-2bdfd186-139e-456a-92e9-4dc9c37a846a', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/2bdfd186-139e-456a-92e9-4dc9c37a846a.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:12:08 compute-0 nova_compute[192079]: 2025-10-02 12:12:08.726 2 DEBUG nova.compute.manager [req-3f56d06d-b831-4cbd-a161-bbe760a86f4d req-19e21bc9-3fe6-416d-8fc6-f09fbb620539 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Received event network-vif-plugged-4d037109-fde7-4c13-b8b9-598da6a9ad57 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:12:08 compute-0 nova_compute[192079]: 2025-10-02 12:12:08.727 2 DEBUG oslo_concurrency.lockutils [req-3f56d06d-b831-4cbd-a161-bbe760a86f4d req-19e21bc9-3fe6-416d-8fc6-f09fbb620539 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:08 compute-0 nova_compute[192079]: 2025-10-02 12:12:08.727 2 DEBUG oslo_concurrency.lockutils [req-3f56d06d-b831-4cbd-a161-bbe760a86f4d req-19e21bc9-3fe6-416d-8fc6-f09fbb620539 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:08 compute-0 nova_compute[192079]: 2025-10-02 12:12:08.727 2 DEBUG oslo_concurrency.lockutils [req-3f56d06d-b831-4cbd-a161-bbe760a86f4d req-19e21bc9-3fe6-416d-8fc6-f09fbb620539 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:08 compute-0 nova_compute[192079]: 2025-10-02 12:12:08.727 2 DEBUG nova.compute.manager [req-3f56d06d-b831-4cbd-a161-bbe760a86f4d req-19e21bc9-3fe6-416d-8fc6-f09fbb620539 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Processing event network-vif-plugged-4d037109-fde7-4c13-b8b9-598da6a9ad57 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:12:09 compute-0 podman[228446]: 2025-10-02 12:12:09.041455361 +0000 UTC m=+0.022271732 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.305 2 DEBUG nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Removed pending event for 8ca5f1bd-aa74-4790-92de-0c18657746f2 due to event _event_emit_delayed /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:438
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.306 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407129.3050265, 8ca5f1bd-aa74-4790-92de-0c18657746f2 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.306 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] VM Started (Lifecycle Event)
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.308 2 DEBUG nova.compute.manager [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.311 2 DEBUG nova.virt.libvirt.driver [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.316 2 INFO nova.virt.libvirt.driver [-] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Instance spawned successfully.
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.316 2 DEBUG nova.virt.libvirt.driver [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.335 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.342 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Synchronizing instance power state after lifecycle event "Started"; current vm_state: stopped, current task_state: rebuild_spawning, current DB power_state: 4, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.350 2 DEBUG nova.virt.libvirt.driver [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.351 2 DEBUG nova.virt.libvirt.driver [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.352 2 DEBUG nova.virt.libvirt.driver [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.353 2 DEBUG nova.virt.libvirt.driver [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.354 2 DEBUG nova.virt.libvirt.driver [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.355 2 DEBUG nova.virt.libvirt.driver [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.396 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] During sync_power_state the instance has a pending task (rebuild_spawning). Skip.
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.397 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407129.3052862, 8ca5f1bd-aa74-4790-92de-0c18657746f2 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.397 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] VM Paused (Lifecycle Event)
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.426 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.430 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407129.3109586, 8ca5f1bd-aa74-4790-92de-0c18657746f2 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.430 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] VM Resumed (Lifecycle Event)
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.459 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.464 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: stopped, current task_state: rebuild_spawning, current DB power_state: 4, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.470 2 DEBUG nova.compute.manager [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.526 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] During sync_power_state the instance has a pending task (rebuild_spawning). Skip.
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.576 2 INFO nova.compute.manager [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] bringing vm to original state: 'stopped'
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.660 2 DEBUG oslo_concurrency.lockutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Acquiring lock "8ca5f1bd-aa74-4790-92de-0c18657746f2" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.661 2 DEBUG oslo_concurrency.lockutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.661 2 DEBUG nova.compute.manager [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.665 2 DEBUG nova.compute.manager [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 do_stop_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3338
Oct 02 12:12:09 compute-0 kernel: tap4d037109-fd (unregistering): left promiscuous mode
Oct 02 12:12:09 compute-0 NetworkManager[51160]: <info>  [1759407129.8210] device (tap4d037109-fd): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.830 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:09 compute-0 ovn_controller[94336]: 2025-10-02T12:12:09Z|00193|binding|INFO|Releasing lport 4d037109-fde7-4c13-b8b9-598da6a9ad57 from this chassis (sb_readonly=0)
Oct 02 12:12:09 compute-0 ovn_controller[94336]: 2025-10-02T12:12:09Z|00194|binding|INFO|Setting lport 4d037109-fde7-4c13-b8b9-598da6a9ad57 down in Southbound
Oct 02 12:12:09 compute-0 ovn_controller[94336]: 2025-10-02T12:12:09Z|00195|binding|INFO|Removing iface tap4d037109-fd ovn-installed in OVS
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.834 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:09 compute-0 nova_compute[192079]: 2025-10-02 12:12:09.847 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:09.850 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:51:b5:9f 10.100.0.3'], port_security=['fa:16:3e:51:b5:9f 10.100.0.3'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.3/28', 'neutron:device_id': '8ca5f1bd-aa74-4790-92de-0c18657746f2', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-2bdfd186-139e-456a-92e9-4dc9c37a846a', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '20417475a6a149d5bc47976f4da9a4ae', 'neutron:revision_number': '6', 'neutron:security_group_ids': 'c517fcc5-4e7c-4008-ac85-cb7cba93cd1e', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=c8a937e8-285b-47d1-b87a-47c75465be5a, chassis=[], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=4d037109-fde7-4c13-b8b9-598da6a9ad57) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:12:09 compute-0 systemd[1]: machine-qemu\x2d29\x2dinstance\x2d00000039.scope: Deactivated successfully.
Oct 02 12:12:09 compute-0 systemd[1]: machine-qemu\x2d29\x2dinstance\x2d00000039.scope: Consumed 1.201s CPU time.
Oct 02 12:12:09 compute-0 systemd-machined[152150]: Machine qemu-29-instance-00000039 terminated.
Oct 02 12:12:10 compute-0 NetworkManager[51160]: <info>  [1759407130.0931] manager: (tap4d037109-fd): new Tun device (/org/freedesktop/NetworkManager/Devices/97)
Oct 02 12:12:10 compute-0 podman[228446]: 2025-10-02 12:12:10.097183443 +0000 UTC m=+1.077999824 container create 5ffff6b4a070b8ee1f7934a8e932d79f06b6f3382743b4d8e6eaa539333a5efa (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.build-date=20251001)
Oct 02 12:12:10 compute-0 nova_compute[192079]: 2025-10-02 12:12:10.140 2 INFO nova.virt.libvirt.driver [-] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Instance destroyed successfully.
Oct 02 12:12:10 compute-0 nova_compute[192079]: 2025-10-02 12:12:10.141 2 DEBUG nova.compute.manager [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:12:10 compute-0 systemd[1]: Started libpod-conmon-5ffff6b4a070b8ee1f7934a8e932d79f06b6f3382743b4d8e6eaa539333a5efa.scope.
Oct 02 12:12:10 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:12:10 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/0edeb2acddc24de87184bab54cf7f1f3a9ecc08d8b4287f0a1ca76f78ee4bda1/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:12:10 compute-0 nova_compute[192079]: 2025-10-02 12:12:10.221 2 DEBUG oslo_concurrency.lockutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 0.560s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:10 compute-0 nova_compute[192079]: 2025-10-02 12:12:10.253 2 DEBUG oslo_concurrency.lockutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:10 compute-0 nova_compute[192079]: 2025-10-02 12:12:10.254 2 DEBUG oslo_concurrency.lockutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:10 compute-0 nova_compute[192079]: 2025-10-02 12:12:10.254 2 DEBUG nova.objects.instance [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Trying to apply a migration context that does not seem to be set for this instance apply_migration_context /usr/lib/python3.9/site-packages/nova/objects/instance.py:1032
Oct 02 12:12:10 compute-0 podman[228446]: 2025-10-02 12:12:10.268328144 +0000 UTC m=+1.249144545 container init 5ffff6b4a070b8ee1f7934a8e932d79f06b6f3382743b4d8e6eaa539333a5efa (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, io.buildah.version=1.41.3)
Oct 02 12:12:10 compute-0 podman[228446]: 2025-10-02 12:12:10.275349714 +0000 UTC m=+1.256166075 container start 5ffff6b4a070b8ee1f7934a8e932d79f06b6f3382743b4d8e6eaa539333a5efa (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_managed=true, org.label-schema.schema-version=1.0)
Oct 02 12:12:10 compute-0 neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a[228483]: [NOTICE]   (228487) : New worker (228489) forked
Oct 02 12:12:10 compute-0 neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a[228483]: [NOTICE]   (228487) : Loading success.
Oct 02 12:12:10 compute-0 nova_compute[192079]: 2025-10-02 12:12:10.341 2 DEBUG oslo_concurrency.lockutils [None req-4c647878-a96c-40e2-9c12-ded6f5d4f862 c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 0.087s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:10.405 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 4d037109-fde7-4c13-b8b9-598da6a9ad57 in datapath 2bdfd186-139e-456a-92e9-4dc9c37a846a unbound from our chassis
Oct 02 12:12:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:10.409 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 2bdfd186-139e-456a-92e9-4dc9c37a846a, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:12:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:10.410 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[26392fea-65ba-4117-85fd-d468b524a60e]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:10.412 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a namespace which is not needed anymore
Oct 02 12:12:10 compute-0 nova_compute[192079]: 2025-10-02 12:12:10.461 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:10 compute-0 neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a[228483]: [NOTICE]   (228487) : haproxy version is 2.8.14-c23fe91
Oct 02 12:12:10 compute-0 neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a[228483]: [NOTICE]   (228487) : path to executable is /usr/sbin/haproxy
Oct 02 12:12:10 compute-0 neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a[228483]: [WARNING]  (228487) : Exiting Master process...
Oct 02 12:12:10 compute-0 neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a[228483]: [WARNING]  (228487) : Exiting Master process...
Oct 02 12:12:10 compute-0 neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a[228483]: [ALERT]    (228487) : Current worker (228489) exited with code 143 (Terminated)
Oct 02 12:12:10 compute-0 neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a[228483]: [WARNING]  (228487) : All workers exited. Exiting... (0)
Oct 02 12:12:10 compute-0 systemd[1]: libpod-5ffff6b4a070b8ee1f7934a8e932d79f06b6f3382743b4d8e6eaa539333a5efa.scope: Deactivated successfully.
Oct 02 12:12:10 compute-0 podman[228515]: 2025-10-02 12:12:10.764873099 +0000 UTC m=+0.250267017 container died 5ffff6b4a070b8ee1f7934a8e932d79f06b6f3382743b4d8e6eaa539333a5efa (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3)
Oct 02 12:12:10 compute-0 nova_compute[192079]: 2025-10-02 12:12:10.861 2 DEBUG nova.compute.manager [req-2d4053ca-2f07-428b-ab6e-b8abbc0a60e9 req-3c2117c7-d9d7-4b63-b40c-7fa155ab9c43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Received event network-vif-plugged-4d037109-fde7-4c13-b8b9-598da6a9ad57 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:12:10 compute-0 nova_compute[192079]: 2025-10-02 12:12:10.862 2 DEBUG oslo_concurrency.lockutils [req-2d4053ca-2f07-428b-ab6e-b8abbc0a60e9 req-3c2117c7-d9d7-4b63-b40c-7fa155ab9c43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:10 compute-0 nova_compute[192079]: 2025-10-02 12:12:10.862 2 DEBUG oslo_concurrency.lockutils [req-2d4053ca-2f07-428b-ab6e-b8abbc0a60e9 req-3c2117c7-d9d7-4b63-b40c-7fa155ab9c43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:10 compute-0 nova_compute[192079]: 2025-10-02 12:12:10.862 2 DEBUG oslo_concurrency.lockutils [req-2d4053ca-2f07-428b-ab6e-b8abbc0a60e9 req-3c2117c7-d9d7-4b63-b40c-7fa155ab9c43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:10 compute-0 nova_compute[192079]: 2025-10-02 12:12:10.863 2 DEBUG nova.compute.manager [req-2d4053ca-2f07-428b-ab6e-b8abbc0a60e9 req-3c2117c7-d9d7-4b63-b40c-7fa155ab9c43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] No waiting events found dispatching network-vif-plugged-4d037109-fde7-4c13-b8b9-598da6a9ad57 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:12:10 compute-0 nova_compute[192079]: 2025-10-02 12:12:10.863 2 WARNING nova.compute.manager [req-2d4053ca-2f07-428b-ab6e-b8abbc0a60e9 req-3c2117c7-d9d7-4b63-b40c-7fa155ab9c43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Received unexpected event network-vif-plugged-4d037109-fde7-4c13-b8b9-598da6a9ad57 for instance with vm_state stopped and task_state None.
Oct 02 12:12:10 compute-0 nova_compute[192079]: 2025-10-02 12:12:10.863 2 DEBUG nova.compute.manager [req-2d4053ca-2f07-428b-ab6e-b8abbc0a60e9 req-3c2117c7-d9d7-4b63-b40c-7fa155ab9c43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Received event network-vif-unplugged-4d037109-fde7-4c13-b8b9-598da6a9ad57 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:12:10 compute-0 nova_compute[192079]: 2025-10-02 12:12:10.863 2 DEBUG oslo_concurrency.lockutils [req-2d4053ca-2f07-428b-ab6e-b8abbc0a60e9 req-3c2117c7-d9d7-4b63-b40c-7fa155ab9c43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:10 compute-0 nova_compute[192079]: 2025-10-02 12:12:10.863 2 DEBUG oslo_concurrency.lockutils [req-2d4053ca-2f07-428b-ab6e-b8abbc0a60e9 req-3c2117c7-d9d7-4b63-b40c-7fa155ab9c43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:10 compute-0 nova_compute[192079]: 2025-10-02 12:12:10.864 2 DEBUG oslo_concurrency.lockutils [req-2d4053ca-2f07-428b-ab6e-b8abbc0a60e9 req-3c2117c7-d9d7-4b63-b40c-7fa155ab9c43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:10 compute-0 nova_compute[192079]: 2025-10-02 12:12:10.864 2 DEBUG nova.compute.manager [req-2d4053ca-2f07-428b-ab6e-b8abbc0a60e9 req-3c2117c7-d9d7-4b63-b40c-7fa155ab9c43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] No waiting events found dispatching network-vif-unplugged-4d037109-fde7-4c13-b8b9-598da6a9ad57 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:12:10 compute-0 nova_compute[192079]: 2025-10-02 12:12:10.864 2 WARNING nova.compute.manager [req-2d4053ca-2f07-428b-ab6e-b8abbc0a60e9 req-3c2117c7-d9d7-4b63-b40c-7fa155ab9c43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Received unexpected event network-vif-unplugged-4d037109-fde7-4c13-b8b9-598da6a9ad57 for instance with vm_state stopped and task_state None.
Oct 02 12:12:10 compute-0 nova_compute[192079]: 2025-10-02 12:12:10.864 2 DEBUG nova.compute.manager [req-2d4053ca-2f07-428b-ab6e-b8abbc0a60e9 req-3c2117c7-d9d7-4b63-b40c-7fa155ab9c43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Received event network-vif-plugged-4d037109-fde7-4c13-b8b9-598da6a9ad57 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:12:10 compute-0 nova_compute[192079]: 2025-10-02 12:12:10.864 2 DEBUG oslo_concurrency.lockutils [req-2d4053ca-2f07-428b-ab6e-b8abbc0a60e9 req-3c2117c7-d9d7-4b63-b40c-7fa155ab9c43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:10 compute-0 nova_compute[192079]: 2025-10-02 12:12:10.865 2 DEBUG oslo_concurrency.lockutils [req-2d4053ca-2f07-428b-ab6e-b8abbc0a60e9 req-3c2117c7-d9d7-4b63-b40c-7fa155ab9c43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:10 compute-0 nova_compute[192079]: 2025-10-02 12:12:10.865 2 DEBUG oslo_concurrency.lockutils [req-2d4053ca-2f07-428b-ab6e-b8abbc0a60e9 req-3c2117c7-d9d7-4b63-b40c-7fa155ab9c43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:10 compute-0 nova_compute[192079]: 2025-10-02 12:12:10.865 2 DEBUG nova.compute.manager [req-2d4053ca-2f07-428b-ab6e-b8abbc0a60e9 req-3c2117c7-d9d7-4b63-b40c-7fa155ab9c43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] No waiting events found dispatching network-vif-plugged-4d037109-fde7-4c13-b8b9-598da6a9ad57 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:12:10 compute-0 nova_compute[192079]: 2025-10-02 12:12:10.865 2 WARNING nova.compute.manager [req-2d4053ca-2f07-428b-ab6e-b8abbc0a60e9 req-3c2117c7-d9d7-4b63-b40c-7fa155ab9c43 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Received unexpected event network-vif-plugged-4d037109-fde7-4c13-b8b9-598da6a9ad57 for instance with vm_state stopped and task_state None.
Oct 02 12:12:11 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-5ffff6b4a070b8ee1f7934a8e932d79f06b6f3382743b4d8e6eaa539333a5efa-userdata-shm.mount: Deactivated successfully.
Oct 02 12:12:11 compute-0 systemd[1]: var-lib-containers-storage-overlay-0edeb2acddc24de87184bab54cf7f1f3a9ecc08d8b4287f0a1ca76f78ee4bda1-merged.mount: Deactivated successfully.
Oct 02 12:12:11 compute-0 podman[228515]: 2025-10-02 12:12:11.770161721 +0000 UTC m=+1.255555639 container cleanup 5ffff6b4a070b8ee1f7934a8e932d79f06b6f3382743b4d8e6eaa539333a5efa (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0)
Oct 02 12:12:11 compute-0 systemd[1]: libpod-conmon-5ffff6b4a070b8ee1f7934a8e932d79f06b6f3382743b4d8e6eaa539333a5efa.scope: Deactivated successfully.
Oct 02 12:12:12 compute-0 nova_compute[192079]: 2025-10-02 12:12:12.282 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:12 compute-0 podman[228546]: 2025-10-02 12:12:12.545483713 +0000 UTC m=+0.750942525 container remove 5ffff6b4a070b8ee1f7934a8e932d79f06b6f3382743b4d8e6eaa539333a5efa (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:12:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:12.554 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3d8526ed-c5b1-4463-900a-4a2a2b420e06]: (4, ('Thu Oct  2 12:12:10 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a (5ffff6b4a070b8ee1f7934a8e932d79f06b6f3382743b4d8e6eaa539333a5efa)\n5ffff6b4a070b8ee1f7934a8e932d79f06b6f3382743b4d8e6eaa539333a5efa\nThu Oct  2 12:12:11 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a (5ffff6b4a070b8ee1f7934a8e932d79f06b6f3382743b4d8e6eaa539333a5efa)\n5ffff6b4a070b8ee1f7934a8e932d79f06b6f3382743b4d8e6eaa539333a5efa\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:12.556 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[98130bee-88d6-49f9-b8a5-8b009f2ef691]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:12.557 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap2bdfd186-10, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:12:12 compute-0 nova_compute[192079]: 2025-10-02 12:12:12.559 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:12 compute-0 kernel: tap2bdfd186-10: left promiscuous mode
Oct 02 12:12:12 compute-0 nova_compute[192079]: 2025-10-02 12:12:12.589 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:12.592 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7b8779b6-4ad0-42a3-a7bc-57da33c8ca2a]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:12.628 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[06d0e9e7-3a3d-4ca3-9c59-f7949d4fd718]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:12.629 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[83c1d30f-d99d-417d-bfc3-bf2b68f3c664]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:12.653 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[df8d691e-8911-42cd-bf68-a4cffab2e7b1]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 512602, 'reachable_time': 25215, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 228564, 'error': None, 'target': 'ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:12.655 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-2bdfd186-139e-456a-92e9-4dc9c37a846a deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:12:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:12.656 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[3af75069-1a01-495f-af92-5dbe6741364c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:12 compute-0 systemd[1]: run-netns-ovnmeta\x2d2bdfd186\x2d139e\x2d456a\x2d92e9\x2d4dc9c37a846a.mount: Deactivated successfully.
Oct 02 12:12:13 compute-0 nova_compute[192079]: 2025-10-02 12:12:13.371 2 DEBUG oslo_concurrency.lockutils [None req-0bd25405-29fe-49b2-aa44-268acab57f9c c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Acquiring lock "8ca5f1bd-aa74-4790-92de-0c18657746f2" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:13 compute-0 nova_compute[192079]: 2025-10-02 12:12:13.372 2 DEBUG oslo_concurrency.lockutils [None req-0bd25405-29fe-49b2-aa44-268acab57f9c c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:13 compute-0 nova_compute[192079]: 2025-10-02 12:12:13.372 2 DEBUG oslo_concurrency.lockutils [None req-0bd25405-29fe-49b2-aa44-268acab57f9c c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Acquiring lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:13 compute-0 nova_compute[192079]: 2025-10-02 12:12:13.372 2 DEBUG oslo_concurrency.lockutils [None req-0bd25405-29fe-49b2-aa44-268acab57f9c c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:13 compute-0 nova_compute[192079]: 2025-10-02 12:12:13.372 2 DEBUG oslo_concurrency.lockutils [None req-0bd25405-29fe-49b2-aa44-268acab57f9c c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:13 compute-0 nova_compute[192079]: 2025-10-02 12:12:13.384 2 INFO nova.compute.manager [None req-0bd25405-29fe-49b2-aa44-268acab57f9c c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Terminating instance
Oct 02 12:12:13 compute-0 nova_compute[192079]: 2025-10-02 12:12:13.395 2 DEBUG nova.compute.manager [None req-0bd25405-29fe-49b2-aa44-268acab57f9c c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:12:13 compute-0 nova_compute[192079]: 2025-10-02 12:12:13.403 2 INFO nova.virt.libvirt.driver [-] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Instance destroyed successfully.
Oct 02 12:12:13 compute-0 nova_compute[192079]: 2025-10-02 12:12:13.403 2 DEBUG nova.objects.instance [None req-0bd25405-29fe-49b2-aa44-268acab57f9c c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lazy-loading 'resources' on Instance uuid 8ca5f1bd-aa74-4790-92de-0c18657746f2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:12:13 compute-0 nova_compute[192079]: 2025-10-02 12:12:13.424 2 DEBUG nova.virt.libvirt.vif [None req-0bd25405-29fe-49b2-aa44-268acab57f9c c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=True,config_drive='True',created_at=2025-10-02T12:11:30Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-tempest.common.compute-instance-645901578',display_name='tempest-tempest.common.compute-instance-645901578',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-tempest-common-compute-instance-645901578',id=57,image_ref='062d9f80-76b6-42ce-bee7-0fb82a008353',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:12:09Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=4,progress=0,project_id='20417475a6a149d5bc47976f4da9a4ae',ramdisk_id='',reservation_id='r-rok8i8zh',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',clean_attempts='1',image_base_image_ref='062d9f80-76b6-42ce-bee7-0fb82a008353',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model
='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-ServerActionsTestOtherA-352727288',owner_user_name='tempest-ServerActionsTestOtherA-352727288-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:12:10Z,user_data=None,user_id='c2b9eab3da414692b3942505e3441920',uuid=8ca5f1bd-aa74-4790-92de-0c18657746f2,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='stopped') vif={"id": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "address": "fa:16:3e:51:b5:9f", "network": {"id": "2bdfd186-139e-456a-92e9-4dc9c37a846a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-953736127-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "20417475a6a149d5bc47976f4da9a4ae", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap4d037109-fd", "ovs_interfaceid": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:12:13 compute-0 nova_compute[192079]: 2025-10-02 12:12:13.424 2 DEBUG nova.network.os_vif_util [None req-0bd25405-29fe-49b2-aa44-268acab57f9c c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Converting VIF {"id": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "address": "fa:16:3e:51:b5:9f", "network": {"id": "2bdfd186-139e-456a-92e9-4dc9c37a846a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-953736127-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "20417475a6a149d5bc47976f4da9a4ae", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap4d037109-fd", "ovs_interfaceid": "4d037109-fde7-4c13-b8b9-598da6a9ad57", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:12:13 compute-0 nova_compute[192079]: 2025-10-02 12:12:13.427 2 DEBUG nova.network.os_vif_util [None req-0bd25405-29fe-49b2-aa44-268acab57f9c c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:51:b5:9f,bridge_name='br-int',has_traffic_filtering=True,id=4d037109-fde7-4c13-b8b9-598da6a9ad57,network=Network(2bdfd186-139e-456a-92e9-4dc9c37a846a),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap4d037109-fd') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:12:13 compute-0 nova_compute[192079]: 2025-10-02 12:12:13.428 2 DEBUG os_vif [None req-0bd25405-29fe-49b2-aa44-268acab57f9c c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:51:b5:9f,bridge_name='br-int',has_traffic_filtering=True,id=4d037109-fde7-4c13-b8b9-598da6a9ad57,network=Network(2bdfd186-139e-456a-92e9-4dc9c37a846a),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap4d037109-fd') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:12:13 compute-0 nova_compute[192079]: 2025-10-02 12:12:13.431 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:13 compute-0 nova_compute[192079]: 2025-10-02 12:12:13.432 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap4d037109-fd, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:12:13 compute-0 nova_compute[192079]: 2025-10-02 12:12:13.435 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:13 compute-0 nova_compute[192079]: 2025-10-02 12:12:13.438 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:12:13 compute-0 nova_compute[192079]: 2025-10-02 12:12:13.440 2 INFO os_vif [None req-0bd25405-29fe-49b2-aa44-268acab57f9c c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:51:b5:9f,bridge_name='br-int',has_traffic_filtering=True,id=4d037109-fde7-4c13-b8b9-598da6a9ad57,network=Network(2bdfd186-139e-456a-92e9-4dc9c37a846a),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap4d037109-fd')
Oct 02 12:12:13 compute-0 nova_compute[192079]: 2025-10-02 12:12:13.441 2 INFO nova.virt.libvirt.driver [None req-0bd25405-29fe-49b2-aa44-268acab57f9c c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Deleting instance files /var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2_del
Oct 02 12:12:13 compute-0 nova_compute[192079]: 2025-10-02 12:12:13.442 2 INFO nova.virt.libvirt.driver [None req-0bd25405-29fe-49b2-aa44-268acab57f9c c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Deletion of /var/lib/nova/instances/8ca5f1bd-aa74-4790-92de-0c18657746f2_del complete
Oct 02 12:12:13 compute-0 nova_compute[192079]: 2025-10-02 12:12:13.572 2 INFO nova.compute.manager [None req-0bd25405-29fe-49b2-aa44-268acab57f9c c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Took 0.18 seconds to destroy the instance on the hypervisor.
Oct 02 12:12:13 compute-0 nova_compute[192079]: 2025-10-02 12:12:13.572 2 DEBUG oslo.service.loopingcall [None req-0bd25405-29fe-49b2-aa44-268acab57f9c c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:12:13 compute-0 nova_compute[192079]: 2025-10-02 12:12:13.573 2 DEBUG nova.compute.manager [-] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:12:13 compute-0 nova_compute[192079]: 2025-10-02 12:12:13.574 2 DEBUG nova.network.neutron [-] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:12:13 compute-0 nova_compute[192079]: 2025-10-02 12:12:13.952 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:14 compute-0 podman[228569]: 2025-10-02 12:12:14.160972697 +0000 UTC m=+0.078546162 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, 
config_id=edpm, container_name=ceilometer_agent_compute, org.label-schema.vendor=CentOS)
Oct 02 12:12:14 compute-0 nova_compute[192079]: 2025-10-02 12:12:14.921 2 DEBUG nova.network.neutron [-] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:12:14 compute-0 nova_compute[192079]: 2025-10-02 12:12:14.936 2 INFO nova.compute.manager [-] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Took 1.36 seconds to deallocate network for instance.
Oct 02 12:12:14 compute-0 nova_compute[192079]: 2025-10-02 12:12:14.973 2 DEBUG nova.compute.manager [req-7a760745-bac0-45af-af1d-e1c1f90a2aad req-0eb4ce51-2b0b-4b55-a7f4-7d2f4c00d0e7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Received event network-vif-deleted-4d037109-fde7-4c13-b8b9-598da6a9ad57 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:12:15 compute-0 nova_compute[192079]: 2025-10-02 12:12:15.007 2 DEBUG oslo_concurrency.lockutils [None req-0bd25405-29fe-49b2-aa44-268acab57f9c c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:15 compute-0 nova_compute[192079]: 2025-10-02 12:12:15.007 2 DEBUG oslo_concurrency.lockutils [None req-0bd25405-29fe-49b2-aa44-268acab57f9c c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:15 compute-0 nova_compute[192079]: 2025-10-02 12:12:15.057 2 DEBUG nova.compute.provider_tree [None req-0bd25405-29fe-49b2-aa44-268acab57f9c c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:12:15 compute-0 nova_compute[192079]: 2025-10-02 12:12:15.080 2 DEBUG nova.scheduler.client.report [None req-0bd25405-29fe-49b2-aa44-268acab57f9c c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:12:15 compute-0 nova_compute[192079]: 2025-10-02 12:12:15.147 2 DEBUG oslo_concurrency.lockutils [None req-0bd25405-29fe-49b2-aa44-268acab57f9c c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.140s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:15 compute-0 nova_compute[192079]: 2025-10-02 12:12:15.174 2 INFO nova.scheduler.client.report [None req-0bd25405-29fe-49b2-aa44-268acab57f9c c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Deleted allocations for instance 8ca5f1bd-aa74-4790-92de-0c18657746f2
Oct 02 12:12:15 compute-0 nova_compute[192079]: 2025-10-02 12:12:15.257 2 DEBUG oslo_concurrency.lockutils [None req-0bd25405-29fe-49b2-aa44-268acab57f9c c2b9eab3da414692b3942505e3441920 20417475a6a149d5bc47976f4da9a4ae - - default default] Lock "8ca5f1bd-aa74-4790-92de-0c18657746f2" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.885s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:15 compute-0 nova_compute[192079]: 2025-10-02 12:12:15.463 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:16 compute-0 nova_compute[192079]: 2025-10-02 12:12:16.660 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:12:16 compute-0 nova_compute[192079]: 2025-10-02 12:12:16.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:12:16 compute-0 nova_compute[192079]: 2025-10-02 12:12:16.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:12:16 compute-0 nova_compute[192079]: 2025-10-02 12:12:16.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:12:16 compute-0 nova_compute[192079]: 2025-10-02 12:12:16.691 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:16 compute-0 nova_compute[192079]: 2025-10-02 12:12:16.692 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:16 compute-0 nova_compute[192079]: 2025-10-02 12:12:16.692 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:16 compute-0 nova_compute[192079]: 2025-10-02 12:12:16.693 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:12:16 compute-0 nova_compute[192079]: 2025-10-02 12:12:16.913 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:12:16 compute-0 nova_compute[192079]: 2025-10-02 12:12:16.914 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5737MB free_disk=73.35735702514648GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:12:16 compute-0 nova_compute[192079]: 2025-10-02 12:12:16.915 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:16 compute-0 nova_compute[192079]: 2025-10-02 12:12:16.915 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:16 compute-0 nova_compute[192079]: 2025-10-02 12:12:16.974 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:12:16 compute-0 nova_compute[192079]: 2025-10-02 12:12:16.975 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:12:17 compute-0 nova_compute[192079]: 2025-10-02 12:12:17.000 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:12:17 compute-0 nova_compute[192079]: 2025-10-02 12:12:17.016 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:12:17 compute-0 nova_compute[192079]: 2025-10-02 12:12:17.036 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:12:17 compute-0 nova_compute[192079]: 2025-10-02 12:12:17.036 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.121s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:17 compute-0 nova_compute[192079]: 2025-10-02 12:12:17.454 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:18 compute-0 nova_compute[192079]: 2025-10-02 12:12:18.036 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:12:18 compute-0 nova_compute[192079]: 2025-10-02 12:12:18.037 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:12:18 compute-0 nova_compute[192079]: 2025-10-02 12:12:18.435 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:18 compute-0 nova_compute[192079]: 2025-10-02 12:12:18.661 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:12:19 compute-0 podman[228592]: 2025-10-02 12:12:19.192370953 +0000 UTC m=+0.077076633 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, config_id=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, container_name=multipathd, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:12:19 compute-0 podman[228591]: 2025-10-02 12:12:19.200500912 +0000 UTC m=+0.097648307 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Red Hat, Inc., vendor=Red Hat, Inc., name=ubi9-minimal, url=https://catalog.redhat.com/en/search?searchType=containers, container_name=openstack_network_exporter, managed_by=edpm_ansible, io.openshift.expose-services=, io.openshift.tags=minimal rhel9, release=1755695350, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., config_id=edpm, io.buildah.version=1.33.7, vcs-type=git, com.redhat.component=ubi9-minimal-container, distribution-scope=public, build-date=2025-08-20T13:12:41, version=9.6, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, architecture=x86_64, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly.)
Oct 02 12:12:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:20.390 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=16, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=15) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:12:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:20.391 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 2 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:12:20 compute-0 nova_compute[192079]: 2025-10-02 12:12:20.391 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:20 compute-0 nova_compute[192079]: 2025-10-02 12:12:20.464 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:20 compute-0 nova_compute[192079]: 2025-10-02 12:12:20.603 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:20 compute-0 nova_compute[192079]: 2025-10-02 12:12:20.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:12:20 compute-0 nova_compute[192079]: 2025-10-02 12:12:20.664 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:12:20 compute-0 nova_compute[192079]: 2025-10-02 12:12:20.664 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:12:20 compute-0 nova_compute[192079]: 2025-10-02 12:12:20.678 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:12:20 compute-0 nova_compute[192079]: 2025-10-02 12:12:20.678 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:12:20 compute-0 nova_compute[192079]: 2025-10-02 12:12:20.837 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:21 compute-0 nova_compute[192079]: 2025-10-02 12:12:21.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:12:21 compute-0 nova_compute[192079]: 2025-10-02 12:12:21.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:12:21 compute-0 nova_compute[192079]: 2025-10-02 12:12:21.728 2 DEBUG oslo_concurrency.lockutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "c383e430-a57f-4ac3-9bc4-03d1a4a0542f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:21 compute-0 nova_compute[192079]: 2025-10-02 12:12:21.728 2 DEBUG oslo_concurrency.lockutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "c383e430-a57f-4ac3-9bc4-03d1a4a0542f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:21 compute-0 nova_compute[192079]: 2025-10-02 12:12:21.743 2 DEBUG nova.compute.manager [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:12:21 compute-0 nova_compute[192079]: 2025-10-02 12:12:21.833 2 DEBUG oslo_concurrency.lockutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:21 compute-0 nova_compute[192079]: 2025-10-02 12:12:21.833 2 DEBUG oslo_concurrency.lockutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:21 compute-0 nova_compute[192079]: 2025-10-02 12:12:21.839 2 DEBUG nova.virt.hardware [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:12:21 compute-0 nova_compute[192079]: 2025-10-02 12:12:21.839 2 INFO nova.compute.claims [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.093 2 DEBUG nova.compute.provider_tree [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.115 2 DEBUG nova.scheduler.client.report [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.170 2 DEBUG oslo_concurrency.lockutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.337s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.171 2 DEBUG nova.compute.manager [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.296 2 DEBUG nova.compute.manager [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.297 2 DEBUG nova.network.neutron [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.328 2 INFO nova.virt.libvirt.driver [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.354 2 DEBUG nova.compute.manager [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:12:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:22.393 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '16'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.476 2 DEBUG nova.compute.manager [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.478 2 DEBUG nova.virt.libvirt.driver [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.478 2 INFO nova.virt.libvirt.driver [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Creating image(s)
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.479 2 DEBUG oslo_concurrency.lockutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "/var/lib/nova/instances/c383e430-a57f-4ac3-9bc4-03d1a4a0542f/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.480 2 DEBUG oslo_concurrency.lockutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "/var/lib/nova/instances/c383e430-a57f-4ac3-9bc4-03d1a4a0542f/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.482 2 DEBUG oslo_concurrency.lockutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "/var/lib/nova/instances/c383e430-a57f-4ac3-9bc4-03d1a4a0542f/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.508 2 DEBUG oslo_concurrency.processutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.562 2 DEBUG nova.policy [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dcdfc3c0f94e42cb931d27f2e3b5b12d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dcf78460093d411988a54040ea4c265a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.588 2 DEBUG oslo_concurrency.processutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.081s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.589 2 DEBUG oslo_concurrency.lockutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.590 2 DEBUG oslo_concurrency.lockutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.602 2 DEBUG oslo_concurrency.processutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.654 2 DEBUG oslo_concurrency.processutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.052s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.655 2 DEBUG oslo_concurrency.processutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/c383e430-a57f-4ac3-9bc4-03d1a4a0542f/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.787 2 DEBUG oslo_concurrency.processutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/c383e430-a57f-4ac3-9bc4-03d1a4a0542f/disk 1073741824" returned: 0 in 0.132s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.788 2 DEBUG oslo_concurrency.lockutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.199s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.789 2 DEBUG oslo_concurrency.processutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.867 2 DEBUG oslo_concurrency.processutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.078s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.868 2 DEBUG nova.virt.disk.api [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Checking if we can resize image /var/lib/nova/instances/c383e430-a57f-4ac3-9bc4-03d1a4a0542f/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.868 2 DEBUG oslo_concurrency.processutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/c383e430-a57f-4ac3-9bc4-03d1a4a0542f/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.923 2 DEBUG oslo_concurrency.processutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/c383e430-a57f-4ac3-9bc4-03d1a4a0542f/disk --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.925 2 DEBUG nova.virt.disk.api [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Cannot resize image /var/lib/nova/instances/c383e430-a57f-4ac3-9bc4-03d1a4a0542f/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.925 2 DEBUG nova.objects.instance [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lazy-loading 'migration_context' on Instance uuid c383e430-a57f-4ac3-9bc4-03d1a4a0542f obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.949 2 DEBUG nova.virt.libvirt.driver [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.949 2 DEBUG nova.virt.libvirt.driver [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Ensure instance console log exists: /var/lib/nova/instances/c383e430-a57f-4ac3-9bc4-03d1a4a0542f/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.950 2 DEBUG oslo_concurrency.lockutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.950 2 DEBUG oslo_concurrency.lockutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:22 compute-0 nova_compute[192079]: 2025-10-02 12:12:22.951 2 DEBUG oslo_concurrency.lockutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:23 compute-0 nova_compute[192079]: 2025-10-02 12:12:23.264 2 DEBUG nova.network.neutron [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Successfully created port: 57d72cf7-b016-42e3-884c-ec35f1924c71 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:12:23 compute-0 nova_compute[192079]: 2025-10-02 12:12:23.438 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:24 compute-0 nova_compute[192079]: 2025-10-02 12:12:24.285 2 DEBUG nova.network.neutron [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Successfully updated port: 57d72cf7-b016-42e3-884c-ec35f1924c71 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:12:24 compute-0 nova_compute[192079]: 2025-10-02 12:12:24.311 2 DEBUG oslo_concurrency.lockutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "refresh_cache-c383e430-a57f-4ac3-9bc4-03d1a4a0542f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:12:24 compute-0 nova_compute[192079]: 2025-10-02 12:12:24.312 2 DEBUG oslo_concurrency.lockutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquired lock "refresh_cache-c383e430-a57f-4ac3-9bc4-03d1a4a0542f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:12:24 compute-0 nova_compute[192079]: 2025-10-02 12:12:24.312 2 DEBUG nova.network.neutron [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:12:24 compute-0 nova_compute[192079]: 2025-10-02 12:12:24.393 2 DEBUG nova.compute.manager [req-a4e3e1c9-eea9-4f49-9064-900fda1bad52 req-1d959a1e-75d7-40ac-bcd3-b7cad0545d68 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Received event network-changed-57d72cf7-b016-42e3-884c-ec35f1924c71 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:12:24 compute-0 nova_compute[192079]: 2025-10-02 12:12:24.394 2 DEBUG nova.compute.manager [req-a4e3e1c9-eea9-4f49-9064-900fda1bad52 req-1d959a1e-75d7-40ac-bcd3-b7cad0545d68 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Refreshing instance network info cache due to event network-changed-57d72cf7-b016-42e3-884c-ec35f1924c71. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:12:24 compute-0 nova_compute[192079]: 2025-10-02 12:12:24.394 2 DEBUG oslo_concurrency.lockutils [req-a4e3e1c9-eea9-4f49-9064-900fda1bad52 req-1d959a1e-75d7-40ac-bcd3-b7cad0545d68 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-c383e430-a57f-4ac3-9bc4-03d1a4a0542f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:12:24 compute-0 nova_compute[192079]: 2025-10-02 12:12:24.464 2 DEBUG nova.network.neutron [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.139 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759407130.1374393, 8ca5f1bd-aa74-4790-92de-0c18657746f2 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.139 2 INFO nova.compute.manager [-] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] VM Stopped (Lifecycle Event)
Oct 02 12:12:25 compute-0 podman[228649]: 2025-10-02 12:12:25.140383886 +0000 UTC m=+0.055156915 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_id=iscsid, container_name=iscsid, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible)
Oct 02 12:12:25 compute-0 podman[228648]: 2025-10-02 12:12:25.140482989 +0000 UTC m=+0.058152897 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.164 2 DEBUG nova.compute.manager [None req-5e325be5-fa4a-48cf-9c92-b80a72196189 - - - - - -] [instance: 8ca5f1bd-aa74-4790-92de-0c18657746f2] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.466 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.856 2 DEBUG nova.network.neutron [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Updating instance_info_cache with network_info: [{"id": "57d72cf7-b016-42e3-884c-ec35f1924c71", "address": "fa:16:3e:f0:7a:c2", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap57d72cf7-b0", "ovs_interfaceid": "57d72cf7-b016-42e3-884c-ec35f1924c71", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.890 2 DEBUG oslo_concurrency.lockutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Releasing lock "refresh_cache-c383e430-a57f-4ac3-9bc4-03d1a4a0542f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.891 2 DEBUG nova.compute.manager [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Instance network_info: |[{"id": "57d72cf7-b016-42e3-884c-ec35f1924c71", "address": "fa:16:3e:f0:7a:c2", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap57d72cf7-b0", "ovs_interfaceid": "57d72cf7-b016-42e3-884c-ec35f1924c71", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.891 2 DEBUG oslo_concurrency.lockutils [req-a4e3e1c9-eea9-4f49-9064-900fda1bad52 req-1d959a1e-75d7-40ac-bcd3-b7cad0545d68 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-c383e430-a57f-4ac3-9bc4-03d1a4a0542f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.891 2 DEBUG nova.network.neutron [req-a4e3e1c9-eea9-4f49-9064-900fda1bad52 req-1d959a1e-75d7-40ac-bcd3-b7cad0545d68 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Refreshing network info cache for port 57d72cf7-b016-42e3-884c-ec35f1924c71 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.894 2 DEBUG nova.virt.libvirt.driver [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Start _get_guest_xml network_info=[{"id": "57d72cf7-b016-42e3-884c-ec35f1924c71", "address": "fa:16:3e:f0:7a:c2", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap57d72cf7-b0", "ovs_interfaceid": "57d72cf7-b016-42e3-884c-ec35f1924c71", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.898 2 WARNING nova.virt.libvirt.driver [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.903 2 DEBUG nova.virt.libvirt.host [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.904 2 DEBUG nova.virt.libvirt.host [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.906 2 DEBUG nova.virt.libvirt.host [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.907 2 DEBUG nova.virt.libvirt.host [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.907 2 DEBUG nova.virt.libvirt.driver [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.908 2 DEBUG nova.virt.hardware [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.908 2 DEBUG nova.virt.hardware [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.908 2 DEBUG nova.virt.hardware [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.908 2 DEBUG nova.virt.hardware [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.909 2 DEBUG nova.virt.hardware [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.909 2 DEBUG nova.virt.hardware [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.909 2 DEBUG nova.virt.hardware [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.909 2 DEBUG nova.virt.hardware [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.910 2 DEBUG nova.virt.hardware [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.910 2 DEBUG nova.virt.hardware [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.910 2 DEBUG nova.virt.hardware [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.914 2 DEBUG nova.virt.libvirt.vif [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:12:20Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ImagesTestJSON-server-266417438',display_name='tempest-ImagesTestJSON-server-266417438',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-imagestestjson-server-266417438',id=60,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='dcf78460093d411988a54040ea4c265a',ramdisk_id='',reservation_id='r-isj554mk',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ImagesTestJSON-437970487',owner_user_name='tempest-ImagesTestJSON-437970487-project-member'},tags=TagList,task
_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:12:22Z,user_data=None,user_id='dcdfc3c0f94e42cb931d27f2e3b5b12d',uuid=c383e430-a57f-4ac3-9bc4-03d1a4a0542f,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "57d72cf7-b016-42e3-884c-ec35f1924c71", "address": "fa:16:3e:f0:7a:c2", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap57d72cf7-b0", "ovs_interfaceid": "57d72cf7-b016-42e3-884c-ec35f1924c71", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.914 2 DEBUG nova.network.os_vif_util [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Converting VIF {"id": "57d72cf7-b016-42e3-884c-ec35f1924c71", "address": "fa:16:3e:f0:7a:c2", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap57d72cf7-b0", "ovs_interfaceid": "57d72cf7-b016-42e3-884c-ec35f1924c71", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.915 2 DEBUG nova.network.os_vif_util [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:f0:7a:c2,bridge_name='br-int',has_traffic_filtering=True,id=57d72cf7-b016-42e3-884c-ec35f1924c71,network=Network(4f195445-fd43-4b92-89dd-a1b2fe9ea8c2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap57d72cf7-b0') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.916 2 DEBUG nova.objects.instance [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lazy-loading 'pci_devices' on Instance uuid c383e430-a57f-4ac3-9bc4-03d1a4a0542f obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.936 2 DEBUG nova.virt.libvirt.driver [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:12:25 compute-0 nova_compute[192079]:   <uuid>c383e430-a57f-4ac3-9bc4-03d1a4a0542f</uuid>
Oct 02 12:12:25 compute-0 nova_compute[192079]:   <name>instance-0000003c</name>
Oct 02 12:12:25 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:12:25 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:12:25 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:12:25 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:       <nova:name>tempest-ImagesTestJSON-server-266417438</nova:name>
Oct 02 12:12:25 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:12:25</nova:creationTime>
Oct 02 12:12:25 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:12:25 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:12:25 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:12:25 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:12:25 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:12:25 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:12:25 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:12:25 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:12:25 compute-0 nova_compute[192079]:         <nova:user uuid="dcdfc3c0f94e42cb931d27f2e3b5b12d">tempest-ImagesTestJSON-437970487-project-member</nova:user>
Oct 02 12:12:25 compute-0 nova_compute[192079]:         <nova:project uuid="dcf78460093d411988a54040ea4c265a">tempest-ImagesTestJSON-437970487</nova:project>
Oct 02 12:12:25 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:12:25 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:12:25 compute-0 nova_compute[192079]:         <nova:port uuid="57d72cf7-b016-42e3-884c-ec35f1924c71">
Oct 02 12:12:25 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.13" ipVersion="4"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:12:25 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:12:25 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:12:25 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <system>
Oct 02 12:12:25 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:12:25 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:12:25 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:12:25 compute-0 nova_compute[192079]:       <entry name="serial">c383e430-a57f-4ac3-9bc4-03d1a4a0542f</entry>
Oct 02 12:12:25 compute-0 nova_compute[192079]:       <entry name="uuid">c383e430-a57f-4ac3-9bc4-03d1a4a0542f</entry>
Oct 02 12:12:25 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     </system>
Oct 02 12:12:25 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:12:25 compute-0 nova_compute[192079]:   <os>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:   </os>
Oct 02 12:12:25 compute-0 nova_compute[192079]:   <features>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:   </features>
Oct 02 12:12:25 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:12:25 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:12:25 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:12:25 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/c383e430-a57f-4ac3-9bc4-03d1a4a0542f/disk"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:12:25 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/c383e430-a57f-4ac3-9bc4-03d1a4a0542f/disk.config"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:12:25 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:f0:7a:c2"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:       <target dev="tap57d72cf7-b0"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:12:25 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/c383e430-a57f-4ac3-9bc4-03d1a4a0542f/console.log" append="off"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <video>
Oct 02 12:12:25 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     </video>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:12:25 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:12:25 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:12:25 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:12:25 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:12:25 compute-0 nova_compute[192079]: </domain>
Oct 02 12:12:25 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.937 2 DEBUG nova.compute.manager [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Preparing to wait for external event network-vif-plugged-57d72cf7-b016-42e3-884c-ec35f1924c71 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.938 2 DEBUG oslo_concurrency.lockutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "c383e430-a57f-4ac3-9bc4-03d1a4a0542f-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.938 2 DEBUG oslo_concurrency.lockutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "c383e430-a57f-4ac3-9bc4-03d1a4a0542f-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.938 2 DEBUG oslo_concurrency.lockutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "c383e430-a57f-4ac3-9bc4-03d1a4a0542f-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.939 2 DEBUG nova.virt.libvirt.vif [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:12:20Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ImagesTestJSON-server-266417438',display_name='tempest-ImagesTestJSON-server-266417438',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-imagestestjson-server-266417438',id=60,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='dcf78460093d411988a54040ea4c265a',ramdisk_id='',reservation_id='r-isj554mk',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ImagesTestJSON-437970487',owner_user_name='tempest-ImagesTestJSON-437970487-project-member'},tags=Ta
gList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:12:22Z,user_data=None,user_id='dcdfc3c0f94e42cb931d27f2e3b5b12d',uuid=c383e430-a57f-4ac3-9bc4-03d1a4a0542f,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "57d72cf7-b016-42e3-884c-ec35f1924c71", "address": "fa:16:3e:f0:7a:c2", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap57d72cf7-b0", "ovs_interfaceid": "57d72cf7-b016-42e3-884c-ec35f1924c71", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.939 2 DEBUG nova.network.os_vif_util [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Converting VIF {"id": "57d72cf7-b016-42e3-884c-ec35f1924c71", "address": "fa:16:3e:f0:7a:c2", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap57d72cf7-b0", "ovs_interfaceid": "57d72cf7-b016-42e3-884c-ec35f1924c71", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.940 2 DEBUG nova.network.os_vif_util [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:f0:7a:c2,bridge_name='br-int',has_traffic_filtering=True,id=57d72cf7-b016-42e3-884c-ec35f1924c71,network=Network(4f195445-fd43-4b92-89dd-a1b2fe9ea8c2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap57d72cf7-b0') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.940 2 DEBUG os_vif [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:f0:7a:c2,bridge_name='br-int',has_traffic_filtering=True,id=57d72cf7-b016-42e3-884c-ec35f1924c71,network=Network(4f195445-fd43-4b92-89dd-a1b2fe9ea8c2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap57d72cf7-b0') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.940 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.941 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.941 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.944 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.945 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap57d72cf7-b0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.945 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap57d72cf7-b0, col_values=(('external_ids', {'iface-id': '57d72cf7-b016-42e3-884c-ec35f1924c71', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:f0:7a:c2', 'vm-uuid': 'c383e430-a57f-4ac3-9bc4-03d1a4a0542f'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.947 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:25 compute-0 NetworkManager[51160]: <info>  [1759407145.9485] manager: (tap57d72cf7-b0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/98)
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.949 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.954 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:25 compute-0 nova_compute[192079]: 2025-10-02 12:12:25.955 2 INFO os_vif [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:f0:7a:c2,bridge_name='br-int',has_traffic_filtering=True,id=57d72cf7-b016-42e3-884c-ec35f1924c71,network=Network(4f195445-fd43-4b92-89dd-a1b2fe9ea8c2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap57d72cf7-b0')
Oct 02 12:12:26 compute-0 nova_compute[192079]: 2025-10-02 12:12:26.150 2 DEBUG nova.virt.libvirt.driver [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:12:26 compute-0 nova_compute[192079]: 2025-10-02 12:12:26.151 2 DEBUG nova.virt.libvirt.driver [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:12:26 compute-0 nova_compute[192079]: 2025-10-02 12:12:26.151 2 DEBUG nova.virt.libvirt.driver [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] No VIF found with MAC fa:16:3e:f0:7a:c2, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:12:26 compute-0 nova_compute[192079]: 2025-10-02 12:12:26.152 2 INFO nova.virt.libvirt.driver [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Using config drive
Oct 02 12:12:26 compute-0 nova_compute[192079]: 2025-10-02 12:12:26.821 2 INFO nova.virt.libvirt.driver [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Creating config drive at /var/lib/nova/instances/c383e430-a57f-4ac3-9bc4-03d1a4a0542f/disk.config
Oct 02 12:12:26 compute-0 nova_compute[192079]: 2025-10-02 12:12:26.826 2 DEBUG oslo_concurrency.processutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/c383e430-a57f-4ac3-9bc4-03d1a4a0542f/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpqzrq0o6i execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:12:26 compute-0 nova_compute[192079]: 2025-10-02 12:12:26.951 2 DEBUG oslo_concurrency.processutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/c383e430-a57f-4ac3-9bc4-03d1a4a0542f/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpqzrq0o6i" returned: 0 in 0.125s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:12:27 compute-0 kernel: tap57d72cf7-b0: entered promiscuous mode
Oct 02 12:12:27 compute-0 NetworkManager[51160]: <info>  [1759407147.0153] manager: (tap57d72cf7-b0): new Tun device (/org/freedesktop/NetworkManager/Devices/99)
Oct 02 12:12:27 compute-0 systemd-udevd[228706]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:12:27 compute-0 ovn_controller[94336]: 2025-10-02T12:12:27Z|00196|binding|INFO|Claiming lport 57d72cf7-b016-42e3-884c-ec35f1924c71 for this chassis.
Oct 02 12:12:27 compute-0 ovn_controller[94336]: 2025-10-02T12:12:27Z|00197|binding|INFO|57d72cf7-b016-42e3-884c-ec35f1924c71: Claiming fa:16:3e:f0:7a:c2 10.100.0.13
Oct 02 12:12:27 compute-0 nova_compute[192079]: 2025-10-02 12:12:27.087 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:27 compute-0 nova_compute[192079]: 2025-10-02 12:12:27.092 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:27 compute-0 NetworkManager[51160]: <info>  [1759407147.0999] device (tap57d72cf7-b0): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:12:27 compute-0 NetworkManager[51160]: <info>  [1759407147.1013] device (tap57d72cf7-b0): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:12:27 compute-0 systemd-machined[152150]: New machine qemu-30-instance-0000003c.
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:27.115 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:f0:7a:c2 10.100.0.13'], port_security=['fa:16:3e:f0:7a:c2 10.100.0.13'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.13/28', 'neutron:device_id': 'c383e430-a57f-4ac3-9bc4-03d1a4a0542f', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'dcf78460093d411988a54040ea4c265a', 'neutron:revision_number': '2', 'neutron:security_group_ids': 'aacce687-8b76-4e90-b19c-0dd006394188', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=24ae9888-31f5-4083-b5ee-e7ed6a1eee13, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=57d72cf7-b016-42e3-884c-ec35f1924c71) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:27.117 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 57d72cf7-b016-42e3-884c-ec35f1924c71 in datapath 4f195445-fd43-4b92-89dd-a1b2fe9ea8c2 bound to our chassis
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:27.118 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 4f195445-fd43-4b92-89dd-a1b2fe9ea8c2
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:27.129 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c4abda1e-db11-4911-8662-95f56d48934c]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:27.130 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap4f195445-f1 in ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:27.132 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap4f195445-f0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:27.132 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8ebca34b-7fba-46b7-857c-837c82fdcfbe]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:27.133 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[48b42b29-d05e-4f7f-bf95-08997f27b0da]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:27.144 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[9d563174-a8f4-4549-8a21-ea2481a48423]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:27 compute-0 ovn_controller[94336]: 2025-10-02T12:12:27Z|00198|binding|INFO|Setting lport 57d72cf7-b016-42e3-884c-ec35f1924c71 ovn-installed in OVS
Oct 02 12:12:27 compute-0 ovn_controller[94336]: 2025-10-02T12:12:27Z|00199|binding|INFO|Setting lport 57d72cf7-b016-42e3-884c-ec35f1924c71 up in Southbound
Oct 02 12:12:27 compute-0 systemd[1]: Started Virtual Machine qemu-30-instance-0000003c.
Oct 02 12:12:27 compute-0 nova_compute[192079]: 2025-10-02 12:12:27.146 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:27.165 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[bbf81099-13b2-46f6-9f89-335273a0a9ba]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:27.193 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[b869e277-4bab-4483-b423-178581421a17]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:27 compute-0 NetworkManager[51160]: <info>  [1759407147.1994] manager: (tap4f195445-f0): new Veth device (/org/freedesktop/NetworkManager/Devices/100)
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:27.198 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[59c9c078-5222-499c-bec3-fec0e900bc94]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:27.227 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[b1a5eee3-48f3-48e6-bfb3-456dbb474096]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:27.229 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[7b975668-b2bf-4ff7-a65a-5c345210b7f1]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:27 compute-0 NetworkManager[51160]: <info>  [1759407147.2505] device (tap4f195445-f0): carrier: link connected
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:27.256 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[5cefb36e-aac4-4771-b37b-20ca94084214]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:27.271 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[36337fc5-8c86-46b4-a99f-268066c65429]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap4f195445-f1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:65:93:03'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 59], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 514487, 'reachable_time': 26627, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 228742, 'error': None, 'target': 'ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:27.286 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1bc8eb37-ee59-4589-abd8-c80ebafe81d4]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe65:9303'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 514487, 'tstamp': 514487}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 228743, 'error': None, 'target': 'ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:27.300 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0965e876-ea79-45d5-9bd3-5b75a58c3a3d]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap4f195445-f1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:65:93:03'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 59], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 514487, 'reachable_time': 26627, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 228744, 'error': None, 'target': 'ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:27.324 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ba21dd9a-800a-47dd-aeb5-b499c2a3496e]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:27.378 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[162339d8-e2a2-4a72-9d56-1263a3151c83]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:27 compute-0 nova_compute[192079]: 2025-10-02 12:12:27.382 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:27.379 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap4f195445-f0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:27.380 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:27.380 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap4f195445-f0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:12:27 compute-0 NetworkManager[51160]: <info>  [1759407147.3835] manager: (tap4f195445-f0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/101)
Oct 02 12:12:27 compute-0 kernel: tap4f195445-f0: entered promiscuous mode
Oct 02 12:12:27 compute-0 nova_compute[192079]: 2025-10-02 12:12:27.387 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:27.389 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap4f195445-f0, col_values=(('external_ids', {'iface-id': 'd65a1bd0-87e2-4bbf-9945-dacace78444f'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:12:27 compute-0 nova_compute[192079]: 2025-10-02 12:12:27.390 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:27 compute-0 ovn_controller[94336]: 2025-10-02T12:12:27Z|00200|binding|INFO|Releasing lport d65a1bd0-87e2-4bbf-9945-dacace78444f from this chassis (sb_readonly=0)
Oct 02 12:12:27 compute-0 nova_compute[192079]: 2025-10-02 12:12:27.414 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:27.416 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/4f195445-fd43-4b92-89dd-a1b2fe9ea8c2.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/4f195445-fd43-4b92-89dd-a1b2fe9ea8c2.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:27.417 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d5e00396-2d14-4e1c-982f-c9bfaccd79bd]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:27.418 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/4f195445-fd43-4b92-89dd-a1b2fe9ea8c2.pid.haproxy
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 4f195445-fd43-4b92-89dd-a1b2fe9ea8c2
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:12:27 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:27.418 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'env', 'PROCESS_TAG=haproxy-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/4f195445-fd43-4b92-89dd-a1b2fe9ea8c2.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:12:27 compute-0 podman[228781]: 2025-10-02 12:12:27.779007671 +0000 UTC m=+0.023873883 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.133 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407148.1327822, c383e430-a57f-4ac3-9bc4-03d1a4a0542f => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.134 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] VM Started (Lifecycle Event)
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.195 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.199 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407148.133375, c383e430-a57f-4ac3-9bc4-03d1a4a0542f => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.199 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] VM Paused (Lifecycle Event)
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.227 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.231 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.275 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:12:28 compute-0 podman[228781]: 2025-10-02 12:12:28.341918381 +0000 UTC m=+0.586784623 container create 356af9db23980f7cdf8f860d2d0e0afe509e96b3055d2f5a59d2f4c085bc3917 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.399 2 DEBUG nova.compute.manager [req-edde5c2c-3366-4ab8-8f9b-0ed6239ab1f6 req-2fadc227-41b5-40ac-9395-4d82c388c773 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Received event network-vif-plugged-57d72cf7-b016-42e3-884c-ec35f1924c71 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.400 2 DEBUG oslo_concurrency.lockutils [req-edde5c2c-3366-4ab8-8f9b-0ed6239ab1f6 req-2fadc227-41b5-40ac-9395-4d82c388c773 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "c383e430-a57f-4ac3-9bc4-03d1a4a0542f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.400 2 DEBUG oslo_concurrency.lockutils [req-edde5c2c-3366-4ab8-8f9b-0ed6239ab1f6 req-2fadc227-41b5-40ac-9395-4d82c388c773 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "c383e430-a57f-4ac3-9bc4-03d1a4a0542f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.401 2 DEBUG oslo_concurrency.lockutils [req-edde5c2c-3366-4ab8-8f9b-0ed6239ab1f6 req-2fadc227-41b5-40ac-9395-4d82c388c773 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "c383e430-a57f-4ac3-9bc4-03d1a4a0542f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.401 2 DEBUG nova.compute.manager [req-edde5c2c-3366-4ab8-8f9b-0ed6239ab1f6 req-2fadc227-41b5-40ac-9395-4d82c388c773 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Processing event network-vif-plugged-57d72cf7-b016-42e3-884c-ec35f1924c71 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.402 2 DEBUG nova.compute.manager [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.407 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407148.4073193, c383e430-a57f-4ac3-9bc4-03d1a4a0542f => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.407 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] VM Resumed (Lifecycle Event)
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.410 2 DEBUG nova.virt.libvirt.driver [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.412 2 INFO nova.virt.libvirt.driver [-] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Instance spawned successfully.
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.413 2 DEBUG nova.virt.libvirt.driver [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.448 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:12:28 compute-0 systemd[1]: Started libpod-conmon-356af9db23980f7cdf8f860d2d0e0afe509e96b3055d2f5a59d2f4c085bc3917.scope.
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.454 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.458 2 DEBUG nova.virt.libvirt.driver [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.458 2 DEBUG nova.virt.libvirt.driver [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.459 2 DEBUG nova.virt.libvirt.driver [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.459 2 DEBUG nova.virt.libvirt.driver [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.460 2 DEBUG nova.virt.libvirt.driver [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.461 2 DEBUG nova.virt.libvirt.driver [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.488 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:12:28 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:12:28 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/335cb8038a0e9ea428aced7237ea7ce6dea2e8f9bd32367681edd54001ec6612/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.544 2 INFO nova.compute.manager [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Took 6.07 seconds to spawn the instance on the hypervisor.
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.545 2 DEBUG nova.compute.manager [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.546 2 DEBUG nova.network.neutron [req-a4e3e1c9-eea9-4f49-9064-900fda1bad52 req-1d959a1e-75d7-40ac-bcd3-b7cad0545d68 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Updated VIF entry in instance network info cache for port 57d72cf7-b016-42e3-884c-ec35f1924c71. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.547 2 DEBUG nova.network.neutron [req-a4e3e1c9-eea9-4f49-9064-900fda1bad52 req-1d959a1e-75d7-40ac-bcd3-b7cad0545d68 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Updating instance_info_cache with network_info: [{"id": "57d72cf7-b016-42e3-884c-ec35f1924c71", "address": "fa:16:3e:f0:7a:c2", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap57d72cf7-b0", "ovs_interfaceid": "57d72cf7-b016-42e3-884c-ec35f1924c71", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:12:28 compute-0 podman[228781]: 2025-10-02 12:12:28.56082292 +0000 UTC m=+0.805689152 container init 356af9db23980f7cdf8f860d2d0e0afe509e96b3055d2f5a59d2f4c085bc3917 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2, org.label-schema.build-date=20251001, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0)
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.564 2 DEBUG oslo_concurrency.lockutils [req-a4e3e1c9-eea9-4f49-9064-900fda1bad52 req-1d959a1e-75d7-40ac-bcd3-b7cad0545d68 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-c383e430-a57f-4ac3-9bc4-03d1a4a0542f" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:12:28 compute-0 podman[228781]: 2025-10-02 12:12:28.567493562 +0000 UTC m=+0.812359764 container start 356af9db23980f7cdf8f860d2d0e0afe509e96b3055d2f5a59d2f4c085bc3917 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0)
Oct 02 12:12:28 compute-0 neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2[228794]: [NOTICE]   (228798) : New worker (228800) forked
Oct 02 12:12:28 compute-0 neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2[228794]: [NOTICE]   (228798) : Loading success.
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.718 2 INFO nova.compute.manager [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Took 6.92 seconds to build instance.
Oct 02 12:12:28 compute-0 nova_compute[192079]: 2025-10-02 12:12:28.737 2 DEBUG oslo_concurrency.lockutils [None req-780f79d7-e411-425b-8cac-724c2239ef13 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "c383e430-a57f-4ac3-9bc4-03d1a4a0542f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 7.008s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:30 compute-0 nova_compute[192079]: 2025-10-02 12:12:30.469 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:30 compute-0 nova_compute[192079]: 2025-10-02 12:12:30.799 2 DEBUG nova.compute.manager [req-11b60710-211d-4aeb-8cb5-b5c23459d12f req-3366c7ae-e6a5-4feb-9b1c-5edd4b88562c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Received event network-vif-plugged-57d72cf7-b016-42e3-884c-ec35f1924c71 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:12:30 compute-0 nova_compute[192079]: 2025-10-02 12:12:30.801 2 DEBUG oslo_concurrency.lockutils [req-11b60710-211d-4aeb-8cb5-b5c23459d12f req-3366c7ae-e6a5-4feb-9b1c-5edd4b88562c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "c383e430-a57f-4ac3-9bc4-03d1a4a0542f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:30 compute-0 nova_compute[192079]: 2025-10-02 12:12:30.801 2 DEBUG oslo_concurrency.lockutils [req-11b60710-211d-4aeb-8cb5-b5c23459d12f req-3366c7ae-e6a5-4feb-9b1c-5edd4b88562c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "c383e430-a57f-4ac3-9bc4-03d1a4a0542f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:30 compute-0 nova_compute[192079]: 2025-10-02 12:12:30.802 2 DEBUG oslo_concurrency.lockutils [req-11b60710-211d-4aeb-8cb5-b5c23459d12f req-3366c7ae-e6a5-4feb-9b1c-5edd4b88562c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "c383e430-a57f-4ac3-9bc4-03d1a4a0542f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:30 compute-0 nova_compute[192079]: 2025-10-02 12:12:30.803 2 DEBUG nova.compute.manager [req-11b60710-211d-4aeb-8cb5-b5c23459d12f req-3366c7ae-e6a5-4feb-9b1c-5edd4b88562c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] No waiting events found dispatching network-vif-plugged-57d72cf7-b016-42e3-884c-ec35f1924c71 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:12:30 compute-0 nova_compute[192079]: 2025-10-02 12:12:30.803 2 WARNING nova.compute.manager [req-11b60710-211d-4aeb-8cb5-b5c23459d12f req-3366c7ae-e6a5-4feb-9b1c-5edd4b88562c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Received unexpected event network-vif-plugged-57d72cf7-b016-42e3-884c-ec35f1924c71 for instance with vm_state active and task_state None.
Oct 02 12:12:30 compute-0 nova_compute[192079]: 2025-10-02 12:12:30.947 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:31 compute-0 nova_compute[192079]: 2025-10-02 12:12:31.189 2 DEBUG oslo_concurrency.lockutils [None req-cf061fe0-73e0-472a-aeff-73d0fca7fca9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "c383e430-a57f-4ac3-9bc4-03d1a4a0542f" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:31 compute-0 nova_compute[192079]: 2025-10-02 12:12:31.190 2 DEBUG oslo_concurrency.lockutils [None req-cf061fe0-73e0-472a-aeff-73d0fca7fca9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "c383e430-a57f-4ac3-9bc4-03d1a4a0542f" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:31 compute-0 nova_compute[192079]: 2025-10-02 12:12:31.190 2 DEBUG nova.compute.manager [None req-cf061fe0-73e0-472a-aeff-73d0fca7fca9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:12:31 compute-0 nova_compute[192079]: 2025-10-02 12:12:31.195 2 DEBUG nova.compute.manager [None req-cf061fe0-73e0-472a-aeff-73d0fca7fca9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 do_stop_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3338
Oct 02 12:12:31 compute-0 nova_compute[192079]: 2025-10-02 12:12:31.197 2 DEBUG nova.objects.instance [None req-cf061fe0-73e0-472a-aeff-73d0fca7fca9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lazy-loading 'flavor' on Instance uuid c383e430-a57f-4ac3-9bc4-03d1a4a0542f obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:12:31 compute-0 nova_compute[192079]: 2025-10-02 12:12:31.226 2 DEBUG nova.objects.instance [None req-cf061fe0-73e0-472a-aeff-73d0fca7fca9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lazy-loading 'info_cache' on Instance uuid c383e430-a57f-4ac3-9bc4-03d1a4a0542f obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:12:31 compute-0 nova_compute[192079]: 2025-10-02 12:12:31.258 2 DEBUG nova.virt.libvirt.driver [None req-cf061fe0-73e0-472a-aeff-73d0fca7fca9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Shutting down instance from state 1 _clean_shutdown /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4071
Oct 02 12:12:35 compute-0 podman[228809]: 2025-10-02 12:12:35.132728836 +0000 UTC m=+0.050072137 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, container_name=ovn_metadata_agent, org.label-schema.license=GPLv2, config_id=ovn_metadata_agent, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, 
io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:12:35 compute-0 podman[228811]: 2025-10-02 12:12:35.134120823 +0000 UTC m=+0.047799654 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:12:35 compute-0 podman[228810]: 2025-10-02 12:12:35.168190312 +0000 UTC m=+0.084073794 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, container_name=ovn_controller, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=ovn_controller)
Oct 02 12:12:35 compute-0 nova_compute[192079]: 2025-10-02 12:12:35.470 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:35 compute-0 nova_compute[192079]: 2025-10-02 12:12:35.978 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:40 compute-0 nova_compute[192079]: 2025-10-02 12:12:40.472 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:40 compute-0 nova_compute[192079]: 2025-10-02 12:12:40.981 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:41 compute-0 nova_compute[192079]: 2025-10-02 12:12:41.298 2 DEBUG nova.virt.libvirt.driver [None req-cf061fe0-73e0-472a-aeff-73d0fca7fca9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Instance in state 1 after 10 seconds - resending shutdown _clean_shutdown /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4101
Oct 02 12:12:42 compute-0 ovn_controller[94336]: 2025-10-02T12:12:42Z|00020|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:f0:7a:c2 10.100.0.13
Oct 02 12:12:42 compute-0 ovn_controller[94336]: 2025-10-02T12:12:42Z|00021|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:f0:7a:c2 10.100.0.13
Oct 02 12:12:45 compute-0 podman[228890]: 2025-10-02 12:12:45.145707026 +0000 UTC m=+0.060690984 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, 
tcib_managed=true, container_name=ceilometer_agent_compute, managed_by=edpm_ansible)
Oct 02 12:12:45 compute-0 nova_compute[192079]: 2025-10-02 12:12:45.474 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:45 compute-0 kernel: tap57d72cf7-b0 (unregistering): left promiscuous mode
Oct 02 12:12:45 compute-0 NetworkManager[51160]: <info>  [1759407165.7397] device (tap57d72cf7-b0): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:12:45 compute-0 ovn_controller[94336]: 2025-10-02T12:12:45Z|00201|binding|INFO|Releasing lport 57d72cf7-b016-42e3-884c-ec35f1924c71 from this chassis (sb_readonly=0)
Oct 02 12:12:45 compute-0 ovn_controller[94336]: 2025-10-02T12:12:45Z|00202|binding|INFO|Setting lport 57d72cf7-b016-42e3-884c-ec35f1924c71 down in Southbound
Oct 02 12:12:45 compute-0 ovn_controller[94336]: 2025-10-02T12:12:45Z|00203|binding|INFO|Removing iface tap57d72cf7-b0 ovn-installed in OVS
Oct 02 12:12:45 compute-0 nova_compute[192079]: 2025-10-02 12:12:45.747 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:45 compute-0 nova_compute[192079]: 2025-10-02 12:12:45.749 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:45.757 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:f0:7a:c2 10.100.0.13'], port_security=['fa:16:3e:f0:7a:c2 10.100.0.13'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.13/28', 'neutron:device_id': 'c383e430-a57f-4ac3-9bc4-03d1a4a0542f', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'dcf78460093d411988a54040ea4c265a', 'neutron:revision_number': '4', 'neutron:security_group_ids': 'aacce687-8b76-4e90-b19c-0dd006394188', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=24ae9888-31f5-4083-b5ee-e7ed6a1eee13, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=57d72cf7-b016-42e3-884c-ec35f1924c71) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:12:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:45.759 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 57d72cf7-b016-42e3-884c-ec35f1924c71 in datapath 4f195445-fd43-4b92-89dd-a1b2fe9ea8c2 unbound from our chassis
Oct 02 12:12:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:45.762 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 4f195445-fd43-4b92-89dd-a1b2fe9ea8c2, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:12:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:45.765 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[851518d6-4c02-47b8-9505-a68bd642794e]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:45.766 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2 namespace which is not needed anymore
Oct 02 12:12:45 compute-0 nova_compute[192079]: 2025-10-02 12:12:45.777 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:45 compute-0 systemd[1]: machine-qemu\x2d30\x2dinstance\x2d0000003c.scope: Deactivated successfully.
Oct 02 12:12:45 compute-0 systemd[1]: machine-qemu\x2d30\x2dinstance\x2d0000003c.scope: Consumed 14.837s CPU time.
Oct 02 12:12:45 compute-0 systemd-machined[152150]: Machine qemu-30-instance-0000003c terminated.
Oct 02 12:12:45 compute-0 neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2[228794]: [NOTICE]   (228798) : haproxy version is 2.8.14-c23fe91
Oct 02 12:12:45 compute-0 neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2[228794]: [NOTICE]   (228798) : path to executable is /usr/sbin/haproxy
Oct 02 12:12:45 compute-0 neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2[228794]: [WARNING]  (228798) : Exiting Master process...
Oct 02 12:12:45 compute-0 neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2[228794]: [ALERT]    (228798) : Current worker (228800) exited with code 143 (Terminated)
Oct 02 12:12:45 compute-0 neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2[228794]: [WARNING]  (228798) : All workers exited. Exiting... (0)
Oct 02 12:12:45 compute-0 systemd[1]: libpod-356af9db23980f7cdf8f860d2d0e0afe509e96b3055d2f5a59d2f4c085bc3917.scope: Deactivated successfully.
Oct 02 12:12:45 compute-0 podman[228936]: 2025-10-02 12:12:45.905588824 +0000 UTC m=+0.042249171 container died 356af9db23980f7cdf8f860d2d0e0afe509e96b3055d2f5a59d2f4c085bc3917 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS)
Oct 02 12:12:45 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-356af9db23980f7cdf8f860d2d0e0afe509e96b3055d2f5a59d2f4c085bc3917-userdata-shm.mount: Deactivated successfully.
Oct 02 12:12:46 compute-0 systemd[1]: var-lib-containers-storage-overlay-335cb8038a0e9ea428aced7237ea7ce6dea2e8f9bd32367681edd54001ec6612-merged.mount: Deactivated successfully.
Oct 02 12:12:46 compute-0 nova_compute[192079]: 2025-10-02 12:12:46.001 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:46 compute-0 podman[228936]: 2025-10-02 12:12:46.061233345 +0000 UTC m=+0.197893672 container cleanup 356af9db23980f7cdf8f860d2d0e0afe509e96b3055d2f5a59d2f4c085bc3917 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:12:46 compute-0 systemd[1]: libpod-conmon-356af9db23980f7cdf8f860d2d0e0afe509e96b3055d2f5a59d2f4c085bc3917.scope: Deactivated successfully.
Oct 02 12:12:46 compute-0 podman[228983]: 2025-10-02 12:12:46.132552437 +0000 UTC m=+0.045028228 container remove 356af9db23980f7cdf8f860d2d0e0afe509e96b3055d2f5a59d2f4c085bc3917 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:12:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:46.138 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[38e81614-a13d-4b54-a799-7c11bbaa6f07]: (4, ('Thu Oct  2 12:12:45 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2 (356af9db23980f7cdf8f860d2d0e0afe509e96b3055d2f5a59d2f4c085bc3917)\n356af9db23980f7cdf8f860d2d0e0afe509e96b3055d2f5a59d2f4c085bc3917\nThu Oct  2 12:12:46 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2 (356af9db23980f7cdf8f860d2d0e0afe509e96b3055d2f5a59d2f4c085bc3917)\n356af9db23980f7cdf8f860d2d0e0afe509e96b3055d2f5a59d2f4c085bc3917\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:46.140 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f06ef926-fab6-40b9-9415-aff5787ca5b3]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:46.141 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap4f195445-f0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:12:46 compute-0 kernel: tap4f195445-f0: left promiscuous mode
Oct 02 12:12:46 compute-0 nova_compute[192079]: 2025-10-02 12:12:46.144 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:46 compute-0 nova_compute[192079]: 2025-10-02 12:12:46.159 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:46.164 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2fcd752f-473e-4ed1-ae50-d1482ae5c2d8]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:46.196 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8f045eca-f87e-4a75-a806-a01aca873d19]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:46.198 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[cb658a17-02a4-46d5-9b34-faf411575f6f]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:46.216 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[afaa74bc-9c5a-4c15-b511-703676c3fe0b]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 514481, 'reachable_time': 37578, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 229000, 'error': None, 'target': 'ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:46.218 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:12:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:12:46.219 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[b7570730-c1c2-4bc9-90a4-b4bc99e88114]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:12:46 compute-0 systemd[1]: run-netns-ovnmeta\x2d4f195445\x2dfd43\x2d4b92\x2d89dd\x2da1b2fe9ea8c2.mount: Deactivated successfully.
Oct 02 12:12:46 compute-0 nova_compute[192079]: 2025-10-02 12:12:46.322 2 INFO nova.virt.libvirt.driver [None req-cf061fe0-73e0-472a-aeff-73d0fca7fca9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Instance shutdown successfully after 15 seconds.
Oct 02 12:12:46 compute-0 nova_compute[192079]: 2025-10-02 12:12:46.327 2 INFO nova.virt.libvirt.driver [-] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Instance destroyed successfully.
Oct 02 12:12:46 compute-0 nova_compute[192079]: 2025-10-02 12:12:46.327 2 DEBUG nova.objects.instance [None req-cf061fe0-73e0-472a-aeff-73d0fca7fca9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lazy-loading 'numa_topology' on Instance uuid c383e430-a57f-4ac3-9bc4-03d1a4a0542f obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:12:46 compute-0 nova_compute[192079]: 2025-10-02 12:12:46.341 2 DEBUG nova.compute.manager [None req-cf061fe0-73e0-472a-aeff-73d0fca7fca9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:12:46 compute-0 nova_compute[192079]: 2025-10-02 12:12:46.437 2 DEBUG oslo_concurrency.lockutils [None req-cf061fe0-73e0-472a-aeff-73d0fca7fca9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "c383e430-a57f-4ac3-9bc4-03d1a4a0542f" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 15.247s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:46 compute-0 nova_compute[192079]: 2025-10-02 12:12:46.632 2 DEBUG nova.compute.manager [req-ee1df975-7875-4884-abcd-3a2ede34fe8d req-4d5309d2-2f83-4696-ab54-7691aa16f766 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Received event network-vif-unplugged-57d72cf7-b016-42e3-884c-ec35f1924c71 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:12:46 compute-0 nova_compute[192079]: 2025-10-02 12:12:46.633 2 DEBUG oslo_concurrency.lockutils [req-ee1df975-7875-4884-abcd-3a2ede34fe8d req-4d5309d2-2f83-4696-ab54-7691aa16f766 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "c383e430-a57f-4ac3-9bc4-03d1a4a0542f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:46 compute-0 nova_compute[192079]: 2025-10-02 12:12:46.634 2 DEBUG oslo_concurrency.lockutils [req-ee1df975-7875-4884-abcd-3a2ede34fe8d req-4d5309d2-2f83-4696-ab54-7691aa16f766 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "c383e430-a57f-4ac3-9bc4-03d1a4a0542f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:46 compute-0 nova_compute[192079]: 2025-10-02 12:12:46.634 2 DEBUG oslo_concurrency.lockutils [req-ee1df975-7875-4884-abcd-3a2ede34fe8d req-4d5309d2-2f83-4696-ab54-7691aa16f766 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "c383e430-a57f-4ac3-9bc4-03d1a4a0542f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:46 compute-0 nova_compute[192079]: 2025-10-02 12:12:46.635 2 DEBUG nova.compute.manager [req-ee1df975-7875-4884-abcd-3a2ede34fe8d req-4d5309d2-2f83-4696-ab54-7691aa16f766 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] No waiting events found dispatching network-vif-unplugged-57d72cf7-b016-42e3-884c-ec35f1924c71 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:12:46 compute-0 nova_compute[192079]: 2025-10-02 12:12:46.636 2 WARNING nova.compute.manager [req-ee1df975-7875-4884-abcd-3a2ede34fe8d req-4d5309d2-2f83-4696-ab54-7691aa16f766 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Received unexpected event network-vif-unplugged-57d72cf7-b016-42e3-884c-ec35f1924c71 for instance with vm_state stopped and task_state None.
Oct 02 12:12:48 compute-0 nova_compute[192079]: 2025-10-02 12:12:48.073 2 DEBUG nova.compute.manager [None req-809fdddc-2ecb-4838-beca-1f31c0f65fac dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:12:48 compute-0 nova_compute[192079]: 2025-10-02 12:12:48.131 2 INFO nova.compute.manager [None req-809fdddc-2ecb-4838-beca-1f31c0f65fac dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] instance snapshotting
Oct 02 12:12:48 compute-0 nova_compute[192079]: 2025-10-02 12:12:48.132 2 WARNING nova.compute.manager [None req-809fdddc-2ecb-4838-beca-1f31c0f65fac dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] trying to snapshot a non-running instance: (state: 4 expected: 1)
Oct 02 12:12:48 compute-0 nova_compute[192079]: 2025-10-02 12:12:48.368 2 INFO nova.virt.libvirt.driver [None req-809fdddc-2ecb-4838-beca-1f31c0f65fac dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Beginning cold snapshot process
Oct 02 12:12:48 compute-0 nova_compute[192079]: 2025-10-02 12:12:48.535 2 DEBUG nova.privsep.utils [None req-809fdddc-2ecb-4838-beca-1f31c0f65fac dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Path '/var/lib/nova/instances' supports direct I/O supports_direct_io /usr/lib/python3.9/site-packages/nova/privsep/utils.py:63
Oct 02 12:12:48 compute-0 nova_compute[192079]: 2025-10-02 12:12:48.536 2 DEBUG oslo_concurrency.processutils [None req-809fdddc-2ecb-4838-beca-1f31c0f65fac dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): qemu-img convert -t none -O qcow2 -f qcow2 /var/lib/nova/instances/c383e430-a57f-4ac3-9bc4-03d1a4a0542f/disk /var/lib/nova/instances/snapshots/tmpdj19sss_/76e10c7d0d5e4f38865172c47f456e65 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:12:48 compute-0 nova_compute[192079]: 2025-10-02 12:12:48.763 2 DEBUG nova.compute.manager [req-5c976a30-7fa2-4fd7-9c4b-ef50260e621b req-1d90ce78-8312-4d01-a23b-fa4b8c17458c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Received event network-vif-plugged-57d72cf7-b016-42e3-884c-ec35f1924c71 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:12:48 compute-0 nova_compute[192079]: 2025-10-02 12:12:48.764 2 DEBUG oslo_concurrency.lockutils [req-5c976a30-7fa2-4fd7-9c4b-ef50260e621b req-1d90ce78-8312-4d01-a23b-fa4b8c17458c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "c383e430-a57f-4ac3-9bc4-03d1a4a0542f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:48 compute-0 nova_compute[192079]: 2025-10-02 12:12:48.764 2 DEBUG oslo_concurrency.lockutils [req-5c976a30-7fa2-4fd7-9c4b-ef50260e621b req-1d90ce78-8312-4d01-a23b-fa4b8c17458c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "c383e430-a57f-4ac3-9bc4-03d1a4a0542f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:48 compute-0 nova_compute[192079]: 2025-10-02 12:12:48.764 2 DEBUG oslo_concurrency.lockutils [req-5c976a30-7fa2-4fd7-9c4b-ef50260e621b req-1d90ce78-8312-4d01-a23b-fa4b8c17458c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "c383e430-a57f-4ac3-9bc4-03d1a4a0542f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:48 compute-0 nova_compute[192079]: 2025-10-02 12:12:48.764 2 DEBUG nova.compute.manager [req-5c976a30-7fa2-4fd7-9c4b-ef50260e621b req-1d90ce78-8312-4d01-a23b-fa4b8c17458c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] No waiting events found dispatching network-vif-plugged-57d72cf7-b016-42e3-884c-ec35f1924c71 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:12:48 compute-0 nova_compute[192079]: 2025-10-02 12:12:48.764 2 WARNING nova.compute.manager [req-5c976a30-7fa2-4fd7-9c4b-ef50260e621b req-1d90ce78-8312-4d01-a23b-fa4b8c17458c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Received unexpected event network-vif-plugged-57d72cf7-b016-42e3-884c-ec35f1924c71 for instance with vm_state stopped and task_state image_pending_upload.
Oct 02 12:12:48 compute-0 nova_compute[192079]: 2025-10-02 12:12:48.964 2 DEBUG oslo_concurrency.processutils [None req-809fdddc-2ecb-4838-beca-1f31c0f65fac dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "qemu-img convert -t none -O qcow2 -f qcow2 /var/lib/nova/instances/c383e430-a57f-4ac3-9bc4-03d1a4a0542f/disk /var/lib/nova/instances/snapshots/tmpdj19sss_/76e10c7d0d5e4f38865172c47f456e65" returned: 0 in 0.428s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:12:48 compute-0 nova_compute[192079]: 2025-10-02 12:12:48.965 2 INFO nova.virt.libvirt.driver [None req-809fdddc-2ecb-4838-beca-1f31c0f65fac dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Snapshot extracted, beginning image upload
Oct 02 12:12:50 compute-0 podman[229011]: 2025-10-02 12:12:50.139913444 +0000 UTC m=+0.054732752 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., distribution-scope=public, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., managed_by=edpm_ansible, version=9.6, io.buildah.version=1.33.7, build-date=2025-08-20T13:12:41, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.openshift.expose-services=, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., url=https://catalog.redhat.com/en/search?searchType=containers, io.openshift.tags=minimal rhel9, vendor=Red Hat, Inc., com.redhat.component=ubi9-minimal-container, architecture=x86_64, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, release=1755695350, vcs-type=git, config_id=edpm, maintainer=Red Hat, Inc., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, container_name=openstack_network_exporter, name=ubi9-minimal)
Oct 02 12:12:50 compute-0 podman[229012]: 2025-10-02 12:12:50.143393579 +0000 UTC m=+0.056661824 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, container_name=multipathd, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=multipathd, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, tcib_managed=true)
Oct 02 12:12:50 compute-0 nova_compute[192079]: 2025-10-02 12:12:50.475 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:51 compute-0 nova_compute[192079]: 2025-10-02 12:12:51.004 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:52 compute-0 nova_compute[192079]: 2025-10-02 12:12:52.165 2 INFO nova.virt.libvirt.driver [None req-809fdddc-2ecb-4838-beca-1f31c0f65fac dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Snapshot image upload complete
Oct 02 12:12:52 compute-0 nova_compute[192079]: 2025-10-02 12:12:52.166 2 INFO nova.compute.manager [None req-809fdddc-2ecb-4838-beca-1f31c0f65fac dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Took 4.02 seconds to snapshot the instance on the hypervisor.
Oct 02 12:12:54 compute-0 nova_compute[192079]: 2025-10-02 12:12:54.806 2 DEBUG oslo_concurrency.lockutils [None req-96aec8ee-b35d-4934-b1fa-4970833a0f64 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "c383e430-a57f-4ac3-9bc4-03d1a4a0542f" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:54 compute-0 nova_compute[192079]: 2025-10-02 12:12:54.807 2 DEBUG oslo_concurrency.lockutils [None req-96aec8ee-b35d-4934-b1fa-4970833a0f64 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "c383e430-a57f-4ac3-9bc4-03d1a4a0542f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:54 compute-0 nova_compute[192079]: 2025-10-02 12:12:54.807 2 DEBUG oslo_concurrency.lockutils [None req-96aec8ee-b35d-4934-b1fa-4970833a0f64 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "c383e430-a57f-4ac3-9bc4-03d1a4a0542f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:54 compute-0 nova_compute[192079]: 2025-10-02 12:12:54.807 2 DEBUG oslo_concurrency.lockutils [None req-96aec8ee-b35d-4934-b1fa-4970833a0f64 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "c383e430-a57f-4ac3-9bc4-03d1a4a0542f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:54 compute-0 nova_compute[192079]: 2025-10-02 12:12:54.807 2 DEBUG oslo_concurrency.lockutils [None req-96aec8ee-b35d-4934-b1fa-4970833a0f64 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "c383e430-a57f-4ac3-9bc4-03d1a4a0542f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:55 compute-0 nova_compute[192079]: 2025-10-02 12:12:55.054 2 INFO nova.compute.manager [None req-96aec8ee-b35d-4934-b1fa-4970833a0f64 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Terminating instance
Oct 02 12:12:55 compute-0 nova_compute[192079]: 2025-10-02 12:12:55.201 2 DEBUG nova.compute.manager [None req-96aec8ee-b35d-4934-b1fa-4970833a0f64 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:12:55 compute-0 nova_compute[192079]: 2025-10-02 12:12:55.208 2 INFO nova.virt.libvirt.driver [-] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Instance destroyed successfully.
Oct 02 12:12:55 compute-0 nova_compute[192079]: 2025-10-02 12:12:55.208 2 DEBUG nova.objects.instance [None req-96aec8ee-b35d-4934-b1fa-4970833a0f64 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lazy-loading 'resources' on Instance uuid c383e430-a57f-4ac3-9bc4-03d1a4a0542f obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:12:55 compute-0 nova_compute[192079]: 2025-10-02 12:12:55.225 2 DEBUG nova.virt.libvirt.vif [None req-96aec8ee-b35d-4934-b1fa-4970833a0f64 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:12:20Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ImagesTestJSON-server-266417438',display_name='tempest-ImagesTestJSON-server-266417438',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-imagestestjson-server-266417438',id=60,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:12:28Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=4,progress=0,project_id='dcf78460093d411988a54040ea4c265a',ramdisk_id='',reservation_id='r-isj554mk',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',o
wner_project_name='tempest-ImagesTestJSON-437970487',owner_user_name='tempest-ImagesTestJSON-437970487-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:12:52Z,user_data=None,user_id='dcdfc3c0f94e42cb931d27f2e3b5b12d',uuid=c383e430-a57f-4ac3-9bc4-03d1a4a0542f,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='stopped') vif={"id": "57d72cf7-b016-42e3-884c-ec35f1924c71", "address": "fa:16:3e:f0:7a:c2", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap57d72cf7-b0", "ovs_interfaceid": "57d72cf7-b016-42e3-884c-ec35f1924c71", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:12:55 compute-0 nova_compute[192079]: 2025-10-02 12:12:55.226 2 DEBUG nova.network.os_vif_util [None req-96aec8ee-b35d-4934-b1fa-4970833a0f64 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Converting VIF {"id": "57d72cf7-b016-42e3-884c-ec35f1924c71", "address": "fa:16:3e:f0:7a:c2", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap57d72cf7-b0", "ovs_interfaceid": "57d72cf7-b016-42e3-884c-ec35f1924c71", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:12:55 compute-0 nova_compute[192079]: 2025-10-02 12:12:55.227 2 DEBUG nova.network.os_vif_util [None req-96aec8ee-b35d-4934-b1fa-4970833a0f64 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:f0:7a:c2,bridge_name='br-int',has_traffic_filtering=True,id=57d72cf7-b016-42e3-884c-ec35f1924c71,network=Network(4f195445-fd43-4b92-89dd-a1b2fe9ea8c2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap57d72cf7-b0') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:12:55 compute-0 nova_compute[192079]: 2025-10-02 12:12:55.227 2 DEBUG os_vif [None req-96aec8ee-b35d-4934-b1fa-4970833a0f64 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:f0:7a:c2,bridge_name='br-int',has_traffic_filtering=True,id=57d72cf7-b016-42e3-884c-ec35f1924c71,network=Network(4f195445-fd43-4b92-89dd-a1b2fe9ea8c2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap57d72cf7-b0') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:12:55 compute-0 nova_compute[192079]: 2025-10-02 12:12:55.229 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:55 compute-0 nova_compute[192079]: 2025-10-02 12:12:55.229 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap57d72cf7-b0, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:12:55 compute-0 nova_compute[192079]: 2025-10-02 12:12:55.231 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:55 compute-0 nova_compute[192079]: 2025-10-02 12:12:55.234 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:12:55 compute-0 nova_compute[192079]: 2025-10-02 12:12:55.237 2 INFO os_vif [None req-96aec8ee-b35d-4934-b1fa-4970833a0f64 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:f0:7a:c2,bridge_name='br-int',has_traffic_filtering=True,id=57d72cf7-b016-42e3-884c-ec35f1924c71,network=Network(4f195445-fd43-4b92-89dd-a1b2fe9ea8c2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap57d72cf7-b0')
Oct 02 12:12:55 compute-0 nova_compute[192079]: 2025-10-02 12:12:55.237 2 INFO nova.virt.libvirt.driver [None req-96aec8ee-b35d-4934-b1fa-4970833a0f64 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Deleting instance files /var/lib/nova/instances/c383e430-a57f-4ac3-9bc4-03d1a4a0542f_del
Oct 02 12:12:55 compute-0 nova_compute[192079]: 2025-10-02 12:12:55.249 2 INFO nova.virt.libvirt.driver [None req-96aec8ee-b35d-4934-b1fa-4970833a0f64 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Deletion of /var/lib/nova/instances/c383e430-a57f-4ac3-9bc4-03d1a4a0542f_del complete
Oct 02 12:12:55 compute-0 nova_compute[192079]: 2025-10-02 12:12:55.342 2 INFO nova.compute.manager [None req-96aec8ee-b35d-4934-b1fa-4970833a0f64 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Took 0.14 seconds to destroy the instance on the hypervisor.
Oct 02 12:12:55 compute-0 nova_compute[192079]: 2025-10-02 12:12:55.343 2 DEBUG oslo.service.loopingcall [None req-96aec8ee-b35d-4934-b1fa-4970833a0f64 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:12:55 compute-0 nova_compute[192079]: 2025-10-02 12:12:55.343 2 DEBUG nova.compute.manager [-] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:12:55 compute-0 nova_compute[192079]: 2025-10-02 12:12:55.343 2 DEBUG nova.network.neutron [-] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:12:55 compute-0 nova_compute[192079]: 2025-10-02 12:12:55.503 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:12:56 compute-0 nova_compute[192079]: 2025-10-02 12:12:56.079 2 DEBUG nova.network.neutron [-] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:12:56 compute-0 nova_compute[192079]: 2025-10-02 12:12:56.108 2 INFO nova.compute.manager [-] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Took 0.76 seconds to deallocate network for instance.
Oct 02 12:12:56 compute-0 podman[229052]: 2025-10-02 12:12:56.137258808 +0000 UTC m=+0.045156241 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:12:56 compute-0 nova_compute[192079]: 2025-10-02 12:12:56.179 2 DEBUG nova.compute.manager [req-f37a3f71-274c-461b-b1b7-1cf008ad96bd req-8848f195-2449-404d-83d4-0c5f6acd033a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Received event network-vif-deleted-57d72cf7-b016-42e3-884c-ec35f1924c71 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:12:56 compute-0 nova_compute[192079]: 2025-10-02 12:12:56.196 2 DEBUG oslo_concurrency.lockutils [None req-96aec8ee-b35d-4934-b1fa-4970833a0f64 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:56 compute-0 nova_compute[192079]: 2025-10-02 12:12:56.197 2 DEBUG oslo_concurrency.lockutils [None req-96aec8ee-b35d-4934-b1fa-4970833a0f64 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:56 compute-0 podman[229053]: 2025-10-02 12:12:56.223843286 +0000 UTC m=+0.129652013 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, config_id=iscsid, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, tcib_managed=true, container_name=iscsid, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:12:56 compute-0 nova_compute[192079]: 2025-10-02 12:12:56.294 2 DEBUG nova.compute.provider_tree [None req-96aec8ee-b35d-4934-b1fa-4970833a0f64 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:12:56 compute-0 nova_compute[192079]: 2025-10-02 12:12:56.347 2 DEBUG nova.scheduler.client.report [None req-96aec8ee-b35d-4934-b1fa-4970833a0f64 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:12:56 compute-0 nova_compute[192079]: 2025-10-02 12:12:56.430 2 DEBUG oslo_concurrency.lockutils [None req-96aec8ee-b35d-4934-b1fa-4970833a0f64 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.233s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:56 compute-0 nova_compute[192079]: 2025-10-02 12:12:56.460 2 INFO nova.scheduler.client.report [None req-96aec8ee-b35d-4934-b1fa-4970833a0f64 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Deleted allocations for instance c383e430-a57f-4ac3-9bc4-03d1a4a0542f
Oct 02 12:12:56 compute-0 nova_compute[192079]: 2025-10-02 12:12:56.542 2 DEBUG oslo_concurrency.lockutils [None req-96aec8ee-b35d-4934-b1fa-4970833a0f64 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "c383e430-a57f-4ac3-9bc4-03d1a4a0542f" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.735s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:57 compute-0 nova_compute[192079]: 2025-10-02 12:12:57.591 2 DEBUG oslo_concurrency.lockutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:57 compute-0 nova_compute[192079]: 2025-10-02 12:12:57.591 2 DEBUG oslo_concurrency.lockutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:57 compute-0 nova_compute[192079]: 2025-10-02 12:12:57.609 2 DEBUG nova.compute.manager [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:12:57 compute-0 nova_compute[192079]: 2025-10-02 12:12:57.719 2 DEBUG oslo_concurrency.lockutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:57 compute-0 nova_compute[192079]: 2025-10-02 12:12:57.720 2 DEBUG oslo_concurrency.lockutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:57 compute-0 nova_compute[192079]: 2025-10-02 12:12:57.736 2 DEBUG nova.virt.hardware [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:12:57 compute-0 nova_compute[192079]: 2025-10-02 12:12:57.736 2 INFO nova.compute.claims [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:12:57 compute-0 nova_compute[192079]: 2025-10-02 12:12:57.853 2 DEBUG nova.compute.provider_tree [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:12:57 compute-0 nova_compute[192079]: 2025-10-02 12:12:57.868 2 DEBUG nova.scheduler.client.report [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:12:57 compute-0 nova_compute[192079]: 2025-10-02 12:12:57.889 2 DEBUG oslo_concurrency.lockutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.169s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:57 compute-0 nova_compute[192079]: 2025-10-02 12:12:57.890 2 DEBUG nova.compute.manager [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:12:57 compute-0 nova_compute[192079]: 2025-10-02 12:12:57.943 2 DEBUG nova.compute.manager [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:12:57 compute-0 nova_compute[192079]: 2025-10-02 12:12:57.944 2 DEBUG nova.network.neutron [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:12:57 compute-0 nova_compute[192079]: 2025-10-02 12:12:57.967 2 INFO nova.virt.libvirt.driver [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:12:57 compute-0 nova_compute[192079]: 2025-10-02 12:12:57.984 2 DEBUG nova.compute.manager [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:12:58 compute-0 nova_compute[192079]: 2025-10-02 12:12:58.122 2 DEBUG nova.compute.manager [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:12:58 compute-0 nova_compute[192079]: 2025-10-02 12:12:58.124 2 DEBUG nova.virt.libvirt.driver [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:12:58 compute-0 nova_compute[192079]: 2025-10-02 12:12:58.125 2 INFO nova.virt.libvirt.driver [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Creating image(s)
Oct 02 12:12:58 compute-0 nova_compute[192079]: 2025-10-02 12:12:58.126 2 DEBUG oslo_concurrency.lockutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "/var/lib/nova/instances/f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:58 compute-0 nova_compute[192079]: 2025-10-02 12:12:58.126 2 DEBUG oslo_concurrency.lockutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "/var/lib/nova/instances/f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:58 compute-0 nova_compute[192079]: 2025-10-02 12:12:58.126 2 DEBUG oslo_concurrency.lockutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "/var/lib/nova/instances/f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:58 compute-0 nova_compute[192079]: 2025-10-02 12:12:58.144 2 DEBUG nova.policy [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dcdfc3c0f94e42cb931d27f2e3b5b12d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dcf78460093d411988a54040ea4c265a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:12:58 compute-0 nova_compute[192079]: 2025-10-02 12:12:58.147 2 DEBUG oslo_concurrency.processutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:12:58 compute-0 nova_compute[192079]: 2025-10-02 12:12:58.205 2 DEBUG oslo_concurrency.processutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.058s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:12:58 compute-0 nova_compute[192079]: 2025-10-02 12:12:58.206 2 DEBUG oslo_concurrency.lockutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:58 compute-0 nova_compute[192079]: 2025-10-02 12:12:58.207 2 DEBUG oslo_concurrency.lockutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:58 compute-0 nova_compute[192079]: 2025-10-02 12:12:58.217 2 DEBUG oslo_concurrency.processutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:12:58 compute-0 nova_compute[192079]: 2025-10-02 12:12:58.275 2 DEBUG oslo_concurrency.processutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.058s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:12:58 compute-0 nova_compute[192079]: 2025-10-02 12:12:58.276 2 DEBUG oslo_concurrency.processutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:12:58 compute-0 nova_compute[192079]: 2025-10-02 12:12:58.377 2 DEBUG oslo_concurrency.processutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386/disk 1073741824" returned: 0 in 0.101s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:12:58 compute-0 nova_compute[192079]: 2025-10-02 12:12:58.379 2 DEBUG oslo_concurrency.lockutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.172s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:58 compute-0 nova_compute[192079]: 2025-10-02 12:12:58.380 2 DEBUG oslo_concurrency.processutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:12:58 compute-0 nova_compute[192079]: 2025-10-02 12:12:58.454 2 DEBUG oslo_concurrency.processutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.074s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:12:58 compute-0 nova_compute[192079]: 2025-10-02 12:12:58.456 2 DEBUG nova.virt.disk.api [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Checking if we can resize image /var/lib/nova/instances/f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:12:58 compute-0 nova_compute[192079]: 2025-10-02 12:12:58.456 2 DEBUG oslo_concurrency.processutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:12:58 compute-0 nova_compute[192079]: 2025-10-02 12:12:58.524 2 DEBUG oslo_concurrency.processutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386/disk --force-share --output=json" returned: 0 in 0.068s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:12:58 compute-0 nova_compute[192079]: 2025-10-02 12:12:58.525 2 DEBUG nova.virt.disk.api [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Cannot resize image /var/lib/nova/instances/f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:12:58 compute-0 nova_compute[192079]: 2025-10-02 12:12:58.526 2 DEBUG nova.objects.instance [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lazy-loading 'migration_context' on Instance uuid f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:12:58 compute-0 nova_compute[192079]: 2025-10-02 12:12:58.545 2 DEBUG nova.virt.libvirt.driver [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:12:58 compute-0 nova_compute[192079]: 2025-10-02 12:12:58.545 2 DEBUG nova.virt.libvirt.driver [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Ensure instance console log exists: /var/lib/nova/instances/f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:12:58 compute-0 nova_compute[192079]: 2025-10-02 12:12:58.546 2 DEBUG oslo_concurrency.lockutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:12:58 compute-0 nova_compute[192079]: 2025-10-02 12:12:58.546 2 DEBUG oslo_concurrency.lockutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:12:58 compute-0 nova_compute[192079]: 2025-10-02 12:12:58.546 2 DEBUG oslo_concurrency.lockutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:12:59 compute-0 nova_compute[192079]: 2025-10-02 12:12:59.771 2 DEBUG nova.network.neutron [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Successfully created port: 669d1898-3fca-421e-86fb-0ef482f202e2 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:13:00 compute-0 nova_compute[192079]: 2025-10-02 12:13:00.232 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:00 compute-0 nova_compute[192079]: 2025-10-02 12:13:00.505 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:00 compute-0 nova_compute[192079]: 2025-10-02 12:13:00.934 2 DEBUG nova.network.neutron [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Successfully updated port: 669d1898-3fca-421e-86fb-0ef482f202e2 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:13:00 compute-0 nova_compute[192079]: 2025-10-02 12:13:00.952 2 DEBUG oslo_concurrency.lockutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "refresh_cache-f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:13:00 compute-0 nova_compute[192079]: 2025-10-02 12:13:00.952 2 DEBUG oslo_concurrency.lockutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquired lock "refresh_cache-f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:13:00 compute-0 nova_compute[192079]: 2025-10-02 12:13:00.953 2 DEBUG nova.network.neutron [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:13:01 compute-0 nova_compute[192079]: 2025-10-02 12:13:01.050 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759407166.0489528, c383e430-a57f-4ac3-9bc4-03d1a4a0542f => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:13:01 compute-0 nova_compute[192079]: 2025-10-02 12:13:01.051 2 INFO nova.compute.manager [-] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] VM Stopped (Lifecycle Event)
Oct 02 12:13:01 compute-0 nova_compute[192079]: 2025-10-02 12:13:01.077 2 DEBUG nova.compute.manager [None req-61180ee2-2ce2-4d31-aee3-bf480f49ab35 - - - - - -] [instance: c383e430-a57f-4ac3-9bc4-03d1a4a0542f] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:13:01 compute-0 nova_compute[192079]: 2025-10-02 12:13:01.166 2 DEBUG nova.network.neutron [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:13:01 compute-0 nova_compute[192079]: 2025-10-02 12:13:01.982 2 DEBUG nova.compute.manager [req-833c1b8c-d490-4de6-a6a9-cedac9baec55 req-15df68d1-fc11-4950-98af-c4e2a8d430b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Received event network-changed-669d1898-3fca-421e-86fb-0ef482f202e2 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:13:01 compute-0 nova_compute[192079]: 2025-10-02 12:13:01.983 2 DEBUG nova.compute.manager [req-833c1b8c-d490-4de6-a6a9-cedac9baec55 req-15df68d1-fc11-4950-98af-c4e2a8d430b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Refreshing instance network info cache due to event network-changed-669d1898-3fca-421e-86fb-0ef482f202e2. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:13:01 compute-0 nova_compute[192079]: 2025-10-02 12:13:01.984 2 DEBUG oslo_concurrency.lockutils [req-833c1b8c-d490-4de6-a6a9-cedac9baec55 req-15df68d1-fc11-4950-98af-c4e2a8d430b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.172 2 DEBUG nova.network.neutron [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Updating instance_info_cache with network_info: [{"id": "669d1898-3fca-421e-86fb-0ef482f202e2", "address": "fa:16:3e:3b:69:44", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap669d1898-3f", "ovs_interfaceid": "669d1898-3fca-421e-86fb-0ef482f202e2", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:13:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:02.213 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:13:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:02.214 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:13:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:02.214 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.221 2 DEBUG oslo_concurrency.lockutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Releasing lock "refresh_cache-f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.222 2 DEBUG nova.compute.manager [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Instance network_info: |[{"id": "669d1898-3fca-421e-86fb-0ef482f202e2", "address": "fa:16:3e:3b:69:44", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap669d1898-3f", "ovs_interfaceid": "669d1898-3fca-421e-86fb-0ef482f202e2", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.222 2 DEBUG oslo_concurrency.lockutils [req-833c1b8c-d490-4de6-a6a9-cedac9baec55 req-15df68d1-fc11-4950-98af-c4e2a8d430b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.222 2 DEBUG nova.network.neutron [req-833c1b8c-d490-4de6-a6a9-cedac9baec55 req-15df68d1-fc11-4950-98af-c4e2a8d430b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Refreshing network info cache for port 669d1898-3fca-421e-86fb-0ef482f202e2 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.225 2 DEBUG nova.virt.libvirt.driver [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Start _get_guest_xml network_info=[{"id": "669d1898-3fca-421e-86fb-0ef482f202e2", "address": "fa:16:3e:3b:69:44", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap669d1898-3f", "ovs_interfaceid": "669d1898-3fca-421e-86fb-0ef482f202e2", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.229 2 WARNING nova.virt.libvirt.driver [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.234 2 DEBUG nova.virt.libvirt.host [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.235 2 DEBUG nova.virt.libvirt.host [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.238 2 DEBUG nova.virt.libvirt.host [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.239 2 DEBUG nova.virt.libvirt.host [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.240 2 DEBUG nova.virt.libvirt.driver [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.240 2 DEBUG nova.virt.hardware [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.240 2 DEBUG nova.virt.hardware [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.241 2 DEBUG nova.virt.hardware [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.241 2 DEBUG nova.virt.hardware [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.241 2 DEBUG nova.virt.hardware [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.241 2 DEBUG nova.virt.hardware [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.241 2 DEBUG nova.virt.hardware [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.242 2 DEBUG nova.virt.hardware [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.242 2 DEBUG nova.virt.hardware [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.242 2 DEBUG nova.virt.hardware [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.242 2 DEBUG nova.virt.hardware [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.245 2 DEBUG nova.virt.libvirt.vif [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:12:56Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ImagesTestJSON-server-719598230',display_name='tempest-ImagesTestJSON-server-719598230',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-imagestestjson-server-719598230',id=65,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='dcf78460093d411988a54040ea4c265a',ramdisk_id='',reservation_id='r-2hxyaomx',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ImagesTestJSON-437970487',owner_user_name='tempest-ImagesTestJSON-437970487-project-member'},tags=TagList,task
_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:12:58Z,user_data=None,user_id='dcdfc3c0f94e42cb931d27f2e3b5b12d',uuid=f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "669d1898-3fca-421e-86fb-0ef482f202e2", "address": "fa:16:3e:3b:69:44", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap669d1898-3f", "ovs_interfaceid": "669d1898-3fca-421e-86fb-0ef482f202e2", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.246 2 DEBUG nova.network.os_vif_util [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Converting VIF {"id": "669d1898-3fca-421e-86fb-0ef482f202e2", "address": "fa:16:3e:3b:69:44", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap669d1898-3f", "ovs_interfaceid": "669d1898-3fca-421e-86fb-0ef482f202e2", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.247 2 DEBUG nova.network.os_vif_util [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:3b:69:44,bridge_name='br-int',has_traffic_filtering=True,id=669d1898-3fca-421e-86fb-0ef482f202e2,network=Network(4f195445-fd43-4b92-89dd-a1b2fe9ea8c2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap669d1898-3f') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.247 2 DEBUG nova.objects.instance [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lazy-loading 'pci_devices' on Instance uuid f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.261 2 DEBUG nova.virt.libvirt.driver [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:13:02 compute-0 nova_compute[192079]:   <uuid>f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386</uuid>
Oct 02 12:13:02 compute-0 nova_compute[192079]:   <name>instance-00000041</name>
Oct 02 12:13:02 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:13:02 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:13:02 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:13:02 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:       <nova:name>tempest-ImagesTestJSON-server-719598230</nova:name>
Oct 02 12:13:02 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:13:02</nova:creationTime>
Oct 02 12:13:02 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:13:02 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:13:02 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:13:02 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:13:02 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:13:02 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:13:02 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:13:02 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:13:02 compute-0 nova_compute[192079]:         <nova:user uuid="dcdfc3c0f94e42cb931d27f2e3b5b12d">tempest-ImagesTestJSON-437970487-project-member</nova:user>
Oct 02 12:13:02 compute-0 nova_compute[192079]:         <nova:project uuid="dcf78460093d411988a54040ea4c265a">tempest-ImagesTestJSON-437970487</nova:project>
Oct 02 12:13:02 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:13:02 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:13:02 compute-0 nova_compute[192079]:         <nova:port uuid="669d1898-3fca-421e-86fb-0ef482f202e2">
Oct 02 12:13:02 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.10" ipVersion="4"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:13:02 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:13:02 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:13:02 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <system>
Oct 02 12:13:02 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:13:02 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:13:02 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:13:02 compute-0 nova_compute[192079]:       <entry name="serial">f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386</entry>
Oct 02 12:13:02 compute-0 nova_compute[192079]:       <entry name="uuid">f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386</entry>
Oct 02 12:13:02 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     </system>
Oct 02 12:13:02 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:13:02 compute-0 nova_compute[192079]:   <os>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:   </os>
Oct 02 12:13:02 compute-0 nova_compute[192079]:   <features>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:   </features>
Oct 02 12:13:02 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:13:02 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:13:02 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:13:02 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386/disk"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:13:02 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386/disk.config"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:13:02 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:3b:69:44"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:       <target dev="tap669d1898-3f"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:13:02 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386/console.log" append="off"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <video>
Oct 02 12:13:02 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     </video>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:13:02 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:13:02 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:13:02 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:13:02 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:13:02 compute-0 nova_compute[192079]: </domain>
Oct 02 12:13:02 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.263 2 DEBUG nova.compute.manager [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Preparing to wait for external event network-vif-plugged-669d1898-3fca-421e-86fb-0ef482f202e2 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.263 2 DEBUG oslo_concurrency.lockutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.263 2 DEBUG oslo_concurrency.lockutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.263 2 DEBUG oslo_concurrency.lockutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.264 2 DEBUG nova.virt.libvirt.vif [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:12:56Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ImagesTestJSON-server-719598230',display_name='tempest-ImagesTestJSON-server-719598230',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-imagestestjson-server-719598230',id=65,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='dcf78460093d411988a54040ea4c265a',ramdisk_id='',reservation_id='r-2hxyaomx',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ImagesTestJSON-437970487',owner_user_name='tempest-ImagesTestJSON-437970487-project-member'},tags=Ta
gList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:12:58Z,user_data=None,user_id='dcdfc3c0f94e42cb931d27f2e3b5b12d',uuid=f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "669d1898-3fca-421e-86fb-0ef482f202e2", "address": "fa:16:3e:3b:69:44", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap669d1898-3f", "ovs_interfaceid": "669d1898-3fca-421e-86fb-0ef482f202e2", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.264 2 DEBUG nova.network.os_vif_util [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Converting VIF {"id": "669d1898-3fca-421e-86fb-0ef482f202e2", "address": "fa:16:3e:3b:69:44", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap669d1898-3f", "ovs_interfaceid": "669d1898-3fca-421e-86fb-0ef482f202e2", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.265 2 DEBUG nova.network.os_vif_util [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:3b:69:44,bridge_name='br-int',has_traffic_filtering=True,id=669d1898-3fca-421e-86fb-0ef482f202e2,network=Network(4f195445-fd43-4b92-89dd-a1b2fe9ea8c2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap669d1898-3f') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.265 2 DEBUG os_vif [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:3b:69:44,bridge_name='br-int',has_traffic_filtering=True,id=669d1898-3fca-421e-86fb-0ef482f202e2,network=Network(4f195445-fd43-4b92-89dd-a1b2fe9ea8c2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap669d1898-3f') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.266 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.266 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.267 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.270 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.271 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap669d1898-3f, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.271 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap669d1898-3f, col_values=(('external_ids', {'iface-id': '669d1898-3fca-421e-86fb-0ef482f202e2', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:3b:69:44', 'vm-uuid': 'f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.273 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:02 compute-0 NetworkManager[51160]: <info>  [1759407182.2743] manager: (tap669d1898-3f): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/102)
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.276 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.278 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.279 2 INFO os_vif [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:3b:69:44,bridge_name='br-int',has_traffic_filtering=True,id=669d1898-3fca-421e-86fb-0ef482f202e2,network=Network(4f195445-fd43-4b92-89dd-a1b2fe9ea8c2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap669d1898-3f')
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.362 2 DEBUG nova.virt.libvirt.driver [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.363 2 DEBUG nova.virt.libvirt.driver [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.363 2 DEBUG nova.virt.libvirt.driver [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] No VIF found with MAC fa:16:3e:3b:69:44, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.364 2 INFO nova.virt.libvirt.driver [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Using config drive
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.801 2 INFO nova.virt.libvirt.driver [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Creating config drive at /var/lib/nova/instances/f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386/disk.config
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.807 2 DEBUG oslo_concurrency.processutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpbsf_1vbj execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:13:02 compute-0 nova_compute[192079]: 2025-10-02 12:13:02.947 2 DEBUG oslo_concurrency.processutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpbsf_1vbj" returned: 0 in 0.140s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:13:03 compute-0 kernel: tap669d1898-3f: entered promiscuous mode
Oct 02 12:13:03 compute-0 NetworkManager[51160]: <info>  [1759407183.0136] manager: (tap669d1898-3f): new Tun device (/org/freedesktop/NetworkManager/Devices/103)
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.013 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:03 compute-0 ovn_controller[94336]: 2025-10-02T12:13:03Z|00204|binding|INFO|Claiming lport 669d1898-3fca-421e-86fb-0ef482f202e2 for this chassis.
Oct 02 12:13:03 compute-0 ovn_controller[94336]: 2025-10-02T12:13:03Z|00205|binding|INFO|669d1898-3fca-421e-86fb-0ef482f202e2: Claiming fa:16:3e:3b:69:44 10.100.0.10
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:03.021 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:3b:69:44 10.100.0.10'], port_security=['fa:16:3e:3b:69:44 10.100.0.10'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.10/28', 'neutron:device_id': 'f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'dcf78460093d411988a54040ea4c265a', 'neutron:revision_number': '2', 'neutron:security_group_ids': 'aacce687-8b76-4e90-b19c-0dd006394188', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=24ae9888-31f5-4083-b5ee-e7ed6a1eee13, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=669d1898-3fca-421e-86fb-0ef482f202e2) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:03.022 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 669d1898-3fca-421e-86fb-0ef482f202e2 in datapath 4f195445-fd43-4b92-89dd-a1b2fe9ea8c2 bound to our chassis
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:03.023 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 4f195445-fd43-4b92-89dd-a1b2fe9ea8c2
Oct 02 12:13:03 compute-0 ovn_controller[94336]: 2025-10-02T12:13:03Z|00206|binding|INFO|Setting lport 669d1898-3fca-421e-86fb-0ef482f202e2 ovn-installed in OVS
Oct 02 12:13:03 compute-0 ovn_controller[94336]: 2025-10-02T12:13:03Z|00207|binding|INFO|Setting lport 669d1898-3fca-421e-86fb-0ef482f202e2 up in Southbound
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.035 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:03.039 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b5010fc7-23f0-42a8-964e-6ba382d4ac0d]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:03.040 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap4f195445-f1 in ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:03.042 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap4f195445-f0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:03.043 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[76ab2947-64b4-4631-b072-c179923626f3]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:03.043 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9a7a0847-7be6-46ac-be3e-64e9768ae080]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:03 compute-0 systemd-udevd[229134]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:03.054 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[d73f6d98-e3f7-4181-b89c-24398b0a2962]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:03 compute-0 systemd-machined[152150]: New machine qemu-31-instance-00000041.
Oct 02 12:13:03 compute-0 systemd[1]: Started Virtual Machine qemu-31-instance-00000041.
Oct 02 12:13:03 compute-0 NetworkManager[51160]: <info>  [1759407183.0711] device (tap669d1898-3f): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:13:03 compute-0 NetworkManager[51160]: <info>  [1759407183.0721] device (tap669d1898-3f): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:03.080 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f6c48fc7-5dd7-4e4e-b453-ffbe62201bce]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:03.112 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[b7739c9b-f011-45ea-9b31-0b629e98eedd]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:03.116 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[70f8ca41-cceb-4b3e-8b6a-5a155350fd44]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:03 compute-0 NetworkManager[51160]: <info>  [1759407183.1177] manager: (tap4f195445-f0): new Veth device (/org/freedesktop/NetworkManager/Devices/104)
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:03.147 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[9634011b-5cea-4ca6-b45f-0bfea8058fe3]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:03.152 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[7e571c0d-0dd5-42e2-8282-8a93942b76b1]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:03 compute-0 NetworkManager[51160]: <info>  [1759407183.1722] device (tap4f195445-f0): carrier: link connected
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:03.177 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[ab464ea6-596b-4214-bd00-4e6fdc49d831]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:03.193 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ffd72e97-5310-44bf-b59e-9da49434e4e0]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap4f195445-f1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:65:93:03'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 62], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 518080, 'reachable_time': 41348, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 229167, 'error': None, 'target': 'ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:03.205 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[27aa8dcf-4dbc-4f44-a3f2-b17fcf45de4a]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe65:9303'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 518080, 'tstamp': 518080}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 229168, 'error': None, 'target': 'ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:03.222 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1dbce670-f57a-42ba-8aa8-27a77c23791c]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap4f195445-f1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:65:93:03'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 62], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 518080, 'reachable_time': 41348, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 229169, 'error': None, 'target': 'ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:03.252 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[cb1f827d-1655-4518-906c-8a9e642fab26]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:03.323 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[bdb4c5ba-f28f-4897-a086-a404b996d675]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:03.324 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap4f195445-f0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:03.324 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:03.325 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap4f195445-f0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:13:03 compute-0 kernel: tap4f195445-f0: entered promiscuous mode
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.358 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:03 compute-0 NetworkManager[51160]: <info>  [1759407183.3615] manager: (tap4f195445-f0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/105)
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.361 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:03.362 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap4f195445-f0, col_values=(('external_ids', {'iface-id': 'd65a1bd0-87e2-4bbf-9945-dacace78444f'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:13:03 compute-0 ovn_controller[94336]: 2025-10-02T12:13:03Z|00208|binding|INFO|Releasing lport d65a1bd0-87e2-4bbf-9945-dacace78444f from this chassis (sb_readonly=0)
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.363 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:03.365 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/4f195445-fd43-4b92-89dd-a1b2fe9ea8c2.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/4f195445-fd43-4b92-89dd-a1b2fe9ea8c2.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:03.365 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[fd66809d-15c2-4a0e-8498-7d161b23ca22]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:03.366 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/4f195445-fd43-4b92-89dd-a1b2fe9ea8c2.pid.haproxy
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 4f195445-fd43-4b92-89dd-a1b2fe9ea8c2
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:13:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:03.367 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'env', 'PROCESS_TAG=haproxy-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/4f195445-fd43-4b92-89dd-a1b2fe9ea8c2.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.377 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.547 2 DEBUG nova.compute.manager [req-a3c34a0c-8f8d-40a5-a630-aa0ec385523b req-d8fd8079-05f8-46a0-975b-f25e11bbab7a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Received event network-vif-plugged-669d1898-3fca-421e-86fb-0ef482f202e2 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.548 2 DEBUG oslo_concurrency.lockutils [req-a3c34a0c-8f8d-40a5-a630-aa0ec385523b req-d8fd8079-05f8-46a0-975b-f25e11bbab7a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.548 2 DEBUG oslo_concurrency.lockutils [req-a3c34a0c-8f8d-40a5-a630-aa0ec385523b req-d8fd8079-05f8-46a0-975b-f25e11bbab7a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.548 2 DEBUG oslo_concurrency.lockutils [req-a3c34a0c-8f8d-40a5-a630-aa0ec385523b req-d8fd8079-05f8-46a0-975b-f25e11bbab7a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.549 2 DEBUG nova.compute.manager [req-a3c34a0c-8f8d-40a5-a630-aa0ec385523b req-d8fd8079-05f8-46a0-975b-f25e11bbab7a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Processing event network-vif-plugged-669d1898-3fca-421e-86fb-0ef482f202e2 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.693 2 DEBUG nova.network.neutron [req-833c1b8c-d490-4de6-a6a9-cedac9baec55 req-15df68d1-fc11-4950-98af-c4e2a8d430b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Updated VIF entry in instance network info cache for port 669d1898-3fca-421e-86fb-0ef482f202e2. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.694 2 DEBUG nova.network.neutron [req-833c1b8c-d490-4de6-a6a9-cedac9baec55 req-15df68d1-fc11-4950-98af-c4e2a8d430b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Updating instance_info_cache with network_info: [{"id": "669d1898-3fca-421e-86fb-0ef482f202e2", "address": "fa:16:3e:3b:69:44", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap669d1898-3f", "ovs_interfaceid": "669d1898-3fca-421e-86fb-0ef482f202e2", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.713 2 DEBUG oslo_concurrency.lockutils [req-833c1b8c-d490-4de6-a6a9-cedac9baec55 req-15df68d1-fc11-4950-98af-c4e2a8d430b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:13:03 compute-0 podman[229207]: 2025-10-02 12:13:03.770259222 +0000 UTC m=+0.074759036 container create 046ba8ca89ae96efa95e50bc9ce5df80c98d009a95520bac3d0d2a13ffede14a (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3, tcib_managed=true, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 12:13:03 compute-0 podman[229207]: 2025-10-02 12:13:03.718184755 +0000 UTC m=+0.022684589 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:13:03 compute-0 systemd[1]: Started libpod-conmon-046ba8ca89ae96efa95e50bc9ce5df80c98d009a95520bac3d0d2a13ffede14a.scope.
Oct 02 12:13:03 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:13:03 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/c77b94591e0f8a21ae9ab3366d51eaed7f25f51c37fc0552d1bb69983ac1bac8/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.864 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407183.8634584, f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.865 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] VM Started (Lifecycle Event)
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.866 2 DEBUG nova.compute.manager [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.871 2 DEBUG nova.virt.libvirt.driver [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.875 2 INFO nova.virt.libvirt.driver [-] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Instance spawned successfully.
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.875 2 DEBUG nova.virt.libvirt.driver [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:13:03 compute-0 podman[229207]: 2025-10-02 12:13:03.882124599 +0000 UTC m=+0.186624433 container init 046ba8ca89ae96efa95e50bc9ce5df80c98d009a95520bac3d0d2a13ffede14a (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3, org.label-schema.build-date=20251001)
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.884 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.888 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:13:03 compute-0 podman[229207]: 2025-10-02 12:13:03.889366517 +0000 UTC m=+0.193866331 container start 046ba8ca89ae96efa95e50bc9ce5df80c98d009a95520bac3d0d2a13ffede14a (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.894 2 DEBUG nova.virt.libvirt.driver [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.895 2 DEBUG nova.virt.libvirt.driver [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.895 2 DEBUG nova.virt.libvirt.driver [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.895 2 DEBUG nova.virt.libvirt.driver [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.896 2 DEBUG nova.virt.libvirt.driver [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.896 2 DEBUG nova.virt.libvirt.driver [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.903 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.903 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407183.8635938, f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.904 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] VM Paused (Lifecycle Event)
Oct 02 12:13:03 compute-0 neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2[229223]: [NOTICE]   (229227) : New worker (229229) forked
Oct 02 12:13:03 compute-0 neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2[229223]: [NOTICE]   (229227) : Loading success.
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.929 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.932 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407183.8691602, f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.932 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] VM Resumed (Lifecycle Event)
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.949 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.955 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.969 2 INFO nova.compute.manager [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Took 5.85 seconds to spawn the instance on the hypervisor.
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.969 2 DEBUG nova.compute.manager [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:13:03 compute-0 nova_compute[192079]: 2025-10-02 12:13:03.974 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:13:04 compute-0 nova_compute[192079]: 2025-10-02 12:13:04.044 2 INFO nova.compute.manager [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Took 6.37 seconds to build instance.
Oct 02 12:13:04 compute-0 nova_compute[192079]: 2025-10-02 12:13:04.064 2 DEBUG oslo_concurrency.lockutils [None req-7ac53df3-baa1-4f68-90af-4b6a51e70724 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 6.472s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:13:05 compute-0 nova_compute[192079]: 2025-10-02 12:13:05.500 2 DEBUG nova.objects.instance [None req-7957b7b2-d973-4903-8df9-605673e8ef84 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lazy-loading 'pci_devices' on Instance uuid f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:13:05 compute-0 nova_compute[192079]: 2025-10-02 12:13:05.507 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:05 compute-0 nova_compute[192079]: 2025-10-02 12:13:05.521 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407185.5209744, f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:13:05 compute-0 nova_compute[192079]: 2025-10-02 12:13:05.521 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] VM Paused (Lifecycle Event)
Oct 02 12:13:05 compute-0 nova_compute[192079]: 2025-10-02 12:13:05.557 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:13:05 compute-0 nova_compute[192079]: 2025-10-02 12:13:05.562 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: active, current task_state: suspending, current DB power_state: 1, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:13:05 compute-0 nova_compute[192079]: 2025-10-02 12:13:05.595 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] During sync_power_state the instance has a pending task (suspending). Skip.
Oct 02 12:13:05 compute-0 kernel: tap669d1898-3f (unregistering): left promiscuous mode
Oct 02 12:13:05 compute-0 nova_compute[192079]: 2025-10-02 12:13:05.761 2 DEBUG nova.compute.manager [req-5272aa7b-1f11-457c-be7f-62fbff0ca986 req-c719f048-e470-43cc-bd68-9efa214c36e4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Received event network-vif-plugged-669d1898-3fca-421e-86fb-0ef482f202e2 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:13:05 compute-0 nova_compute[192079]: 2025-10-02 12:13:05.761 2 DEBUG oslo_concurrency.lockutils [req-5272aa7b-1f11-457c-be7f-62fbff0ca986 req-c719f048-e470-43cc-bd68-9efa214c36e4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:13:05 compute-0 nova_compute[192079]: 2025-10-02 12:13:05.761 2 DEBUG oslo_concurrency.lockutils [req-5272aa7b-1f11-457c-be7f-62fbff0ca986 req-c719f048-e470-43cc-bd68-9efa214c36e4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:13:05 compute-0 nova_compute[192079]: 2025-10-02 12:13:05.761 2 DEBUG oslo_concurrency.lockutils [req-5272aa7b-1f11-457c-be7f-62fbff0ca986 req-c719f048-e470-43cc-bd68-9efa214c36e4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:13:05 compute-0 nova_compute[192079]: 2025-10-02 12:13:05.762 2 DEBUG nova.compute.manager [req-5272aa7b-1f11-457c-be7f-62fbff0ca986 req-c719f048-e470-43cc-bd68-9efa214c36e4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] No waiting events found dispatching network-vif-plugged-669d1898-3fca-421e-86fb-0ef482f202e2 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:13:05 compute-0 nova_compute[192079]: 2025-10-02 12:13:05.762 2 WARNING nova.compute.manager [req-5272aa7b-1f11-457c-be7f-62fbff0ca986 req-c719f048-e470-43cc-bd68-9efa214c36e4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Received unexpected event network-vif-plugged-669d1898-3fca-421e-86fb-0ef482f202e2 for instance with vm_state active and task_state suspending.
Oct 02 12:13:05 compute-0 NetworkManager[51160]: <info>  [1759407185.7646] device (tap669d1898-3f): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:13:05 compute-0 nova_compute[192079]: 2025-10-02 12:13:05.774 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:05 compute-0 ovn_controller[94336]: 2025-10-02T12:13:05Z|00209|binding|INFO|Releasing lport 669d1898-3fca-421e-86fb-0ef482f202e2 from this chassis (sb_readonly=0)
Oct 02 12:13:05 compute-0 ovn_controller[94336]: 2025-10-02T12:13:05Z|00210|binding|INFO|Setting lport 669d1898-3fca-421e-86fb-0ef482f202e2 down in Southbound
Oct 02 12:13:05 compute-0 ovn_controller[94336]: 2025-10-02T12:13:05Z|00211|binding|INFO|Removing iface tap669d1898-3f ovn-installed in OVS
Oct 02 12:13:05 compute-0 nova_compute[192079]: 2025-10-02 12:13:05.782 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:05 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:05.791 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:3b:69:44 10.100.0.10'], port_security=['fa:16:3e:3b:69:44 10.100.0.10'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.10/28', 'neutron:device_id': 'f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'dcf78460093d411988a54040ea4c265a', 'neutron:revision_number': '4', 'neutron:security_group_ids': 'aacce687-8b76-4e90-b19c-0dd006394188', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=24ae9888-31f5-4083-b5ee-e7ed6a1eee13, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=669d1898-3fca-421e-86fb-0ef482f202e2) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:13:05 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:05.793 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 669d1898-3fca-421e-86fb-0ef482f202e2 in datapath 4f195445-fd43-4b92-89dd-a1b2fe9ea8c2 unbound from our chassis
Oct 02 12:13:05 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:05.794 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 4f195445-fd43-4b92-89dd-a1b2fe9ea8c2, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:13:05 compute-0 nova_compute[192079]: 2025-10-02 12:13:05.804 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:05 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:05.805 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[93aa10f7-32d4-48ba-847d-0b67d5497a23]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:05 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:05.806 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2 namespace which is not needed anymore
Oct 02 12:13:05 compute-0 systemd[1]: machine-qemu\x2d31\x2dinstance\x2d00000041.scope: Deactivated successfully.
Oct 02 12:13:05 compute-0 systemd[1]: machine-qemu\x2d31\x2dinstance\x2d00000041.scope: Consumed 2.441s CPU time.
Oct 02 12:13:05 compute-0 systemd-machined[152150]: Machine qemu-31-instance-00000041 terminated.
Oct 02 12:13:05 compute-0 podman[229241]: 2025-10-02 12:13:05.868326102 +0000 UTC m=+0.077121091 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, 
tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, container_name=ovn_metadata_agent, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2)
Oct 02 12:13:05 compute-0 podman[229246]: 2025-10-02 12:13:05.931789051 +0000 UTC m=+0.120479513 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_managed=true, config_id=ovn_controller, container_name=ovn_controller, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:13:05 compute-0 podman[229247]: 2025-10-02 12:13:05.93724866 +0000 UTC m=+0.121781219 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:13:05 compute-0 NetworkManager[51160]: <info>  [1759407185.9628] manager: (tap669d1898-3f): new Tun device (/org/freedesktop/NetworkManager/Devices/106)
Oct 02 12:13:05 compute-0 neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2[229223]: [NOTICE]   (229227) : haproxy version is 2.8.14-c23fe91
Oct 02 12:13:05 compute-0 neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2[229223]: [NOTICE]   (229227) : path to executable is /usr/sbin/haproxy
Oct 02 12:13:05 compute-0 neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2[229223]: [WARNING]  (229227) : Exiting Master process...
Oct 02 12:13:05 compute-0 neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2[229223]: [WARNING]  (229227) : Exiting Master process...
Oct 02 12:13:05 compute-0 neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2[229223]: [ALERT]    (229227) : Current worker (229229) exited with code 143 (Terminated)
Oct 02 12:13:05 compute-0 neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2[229223]: [WARNING]  (229227) : All workers exited. Exiting... (0)
Oct 02 12:13:05 compute-0 systemd[1]: libpod-046ba8ca89ae96efa95e50bc9ce5df80c98d009a95520bac3d0d2a13ffede14a.scope: Deactivated successfully.
Oct 02 12:13:05 compute-0 podman[229325]: 2025-10-02 12:13:05.976219711 +0000 UTC m=+0.071315324 container died 046ba8ca89ae96efa95e50bc9ce5df80c98d009a95520bac3d0d2a13ffede14a (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_managed=true, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3)
Oct 02 12:13:06 compute-0 nova_compute[192079]: 2025-10-02 12:13:06.006 2 DEBUG nova.compute.manager [None req-7957b7b2-d973-4903-8df9-605673e8ef84 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:13:06 compute-0 systemd[1]: var-lib-containers-storage-overlay-c77b94591e0f8a21ae9ab3366d51eaed7f25f51c37fc0552d1bb69983ac1bac8-merged.mount: Deactivated successfully.
Oct 02 12:13:06 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-046ba8ca89ae96efa95e50bc9ce5df80c98d009a95520bac3d0d2a13ffede14a-userdata-shm.mount: Deactivated successfully.
Oct 02 12:13:06 compute-0 podman[229325]: 2025-10-02 12:13:06.038191449 +0000 UTC m=+0.133287052 container cleanup 046ba8ca89ae96efa95e50bc9ce5df80c98d009a95520bac3d0d2a13ffede14a (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3)
Oct 02 12:13:06 compute-0 systemd[1]: libpod-conmon-046ba8ca89ae96efa95e50bc9ce5df80c98d009a95520bac3d0d2a13ffede14a.scope: Deactivated successfully.
Oct 02 12:13:06 compute-0 podman[229371]: 2025-10-02 12:13:06.10908893 +0000 UTC m=+0.051958756 container remove 046ba8ca89ae96efa95e50bc9ce5df80c98d009a95520bac3d0d2a13ffede14a (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 12:13:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:06.113 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5d8cb742-64b0-4360-a4ba-997845def5a0]: (4, ('Thu Oct  2 12:13:05 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2 (046ba8ca89ae96efa95e50bc9ce5df80c98d009a95520bac3d0d2a13ffede14a)\n046ba8ca89ae96efa95e50bc9ce5df80c98d009a95520bac3d0d2a13ffede14a\nThu Oct  2 12:13:06 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2 (046ba8ca89ae96efa95e50bc9ce5df80c98d009a95520bac3d0d2a13ffede14a)\n046ba8ca89ae96efa95e50bc9ce5df80c98d009a95520bac3d0d2a13ffede14a\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:06.115 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ecdb45df-2c7b-4360-b0b1-a5d83fd1339e]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:06.116 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap4f195445-f0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:13:06 compute-0 nova_compute[192079]: 2025-10-02 12:13:06.117 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:06 compute-0 kernel: tap4f195445-f0: left promiscuous mode
Oct 02 12:13:06 compute-0 nova_compute[192079]: 2025-10-02 12:13:06.132 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:06.135 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f35cc36f-e1f7-4e4b-bcd6-64b27fcb96ce]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:06.161 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9ac4a72d-bfe7-4326-9430-563156e41dd5]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:06.162 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[284b29ab-d827-40e1-a436-6db918fb220e]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:06.176 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[248ed29d-7dc9-465a-a025-5437eda75e3c]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 518073, 'reachable_time': 21263, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 229388, 'error': None, 'target': 'ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:06 compute-0 systemd[1]: run-netns-ovnmeta\x2d4f195445\x2dfd43\x2d4b92\x2d89dd\x2da1b2fe9ea8c2.mount: Deactivated successfully.
Oct 02 12:13:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:06.180 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:13:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:06.180 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[e51edd4a-6d6d-4e61-8bcf-a7c3a2ec7d94]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:07 compute-0 nova_compute[192079]: 2025-10-02 12:13:07.275 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:08 compute-0 nova_compute[192079]: 2025-10-02 12:13:08.057 2 DEBUG nova.compute.manager [req-6f12e6a7-7f7a-497f-92da-f02f1ed6f81c req-21126fcf-6ebb-4ded-ab81-fe60dabf53e0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Received event network-vif-unplugged-669d1898-3fca-421e-86fb-0ef482f202e2 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:13:08 compute-0 nova_compute[192079]: 2025-10-02 12:13:08.058 2 DEBUG oslo_concurrency.lockutils [req-6f12e6a7-7f7a-497f-92da-f02f1ed6f81c req-21126fcf-6ebb-4ded-ab81-fe60dabf53e0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:13:08 compute-0 nova_compute[192079]: 2025-10-02 12:13:08.058 2 DEBUG oslo_concurrency.lockutils [req-6f12e6a7-7f7a-497f-92da-f02f1ed6f81c req-21126fcf-6ebb-4ded-ab81-fe60dabf53e0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:13:08 compute-0 nova_compute[192079]: 2025-10-02 12:13:08.058 2 DEBUG oslo_concurrency.lockutils [req-6f12e6a7-7f7a-497f-92da-f02f1ed6f81c req-21126fcf-6ebb-4ded-ab81-fe60dabf53e0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:13:08 compute-0 nova_compute[192079]: 2025-10-02 12:13:08.059 2 DEBUG nova.compute.manager [req-6f12e6a7-7f7a-497f-92da-f02f1ed6f81c req-21126fcf-6ebb-4ded-ab81-fe60dabf53e0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] No waiting events found dispatching network-vif-unplugged-669d1898-3fca-421e-86fb-0ef482f202e2 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:13:08 compute-0 nova_compute[192079]: 2025-10-02 12:13:08.059 2 WARNING nova.compute.manager [req-6f12e6a7-7f7a-497f-92da-f02f1ed6f81c req-21126fcf-6ebb-4ded-ab81-fe60dabf53e0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Received unexpected event network-vif-unplugged-669d1898-3fca-421e-86fb-0ef482f202e2 for instance with vm_state suspended and task_state None.
Oct 02 12:13:08 compute-0 nova_compute[192079]: 2025-10-02 12:13:08.059 2 DEBUG nova.compute.manager [req-6f12e6a7-7f7a-497f-92da-f02f1ed6f81c req-21126fcf-6ebb-4ded-ab81-fe60dabf53e0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Received event network-vif-plugged-669d1898-3fca-421e-86fb-0ef482f202e2 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:13:08 compute-0 nova_compute[192079]: 2025-10-02 12:13:08.059 2 DEBUG oslo_concurrency.lockutils [req-6f12e6a7-7f7a-497f-92da-f02f1ed6f81c req-21126fcf-6ebb-4ded-ab81-fe60dabf53e0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:13:08 compute-0 nova_compute[192079]: 2025-10-02 12:13:08.059 2 DEBUG oslo_concurrency.lockutils [req-6f12e6a7-7f7a-497f-92da-f02f1ed6f81c req-21126fcf-6ebb-4ded-ab81-fe60dabf53e0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:13:08 compute-0 nova_compute[192079]: 2025-10-02 12:13:08.060 2 DEBUG oslo_concurrency.lockutils [req-6f12e6a7-7f7a-497f-92da-f02f1ed6f81c req-21126fcf-6ebb-4ded-ab81-fe60dabf53e0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:13:08 compute-0 nova_compute[192079]: 2025-10-02 12:13:08.060 2 DEBUG nova.compute.manager [req-6f12e6a7-7f7a-497f-92da-f02f1ed6f81c req-21126fcf-6ebb-4ded-ab81-fe60dabf53e0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] No waiting events found dispatching network-vif-plugged-669d1898-3fca-421e-86fb-0ef482f202e2 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:13:08 compute-0 nova_compute[192079]: 2025-10-02 12:13:08.060 2 WARNING nova.compute.manager [req-6f12e6a7-7f7a-497f-92da-f02f1ed6f81c req-21126fcf-6ebb-4ded-ab81-fe60dabf53e0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Received unexpected event network-vif-plugged-669d1898-3fca-421e-86fb-0ef482f202e2 for instance with vm_state suspended and task_state None.
Oct 02 12:13:09 compute-0 nova_compute[192079]: 2025-10-02 12:13:09.432 2 DEBUG nova.compute.manager [None req-a8e7f4ba-6b03-47a2-b326-89ea7456b8f5 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:13:09 compute-0 nova_compute[192079]: 2025-10-02 12:13:09.514 2 INFO nova.compute.manager [None req-a8e7f4ba-6b03-47a2-b326-89ea7456b8f5 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] instance snapshotting
Oct 02 12:13:09 compute-0 nova_compute[192079]: 2025-10-02 12:13:09.515 2 WARNING nova.compute.manager [None req-a8e7f4ba-6b03-47a2-b326-89ea7456b8f5 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] trying to snapshot a non-running instance: (state: 4 expected: 1)
Oct 02 12:13:09 compute-0 nova_compute[192079]: 2025-10-02 12:13:09.948 2 INFO nova.virt.libvirt.driver [None req-a8e7f4ba-6b03-47a2-b326-89ea7456b8f5 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Beginning cold snapshot process
Oct 02 12:13:10 compute-0 nova_compute[192079]: 2025-10-02 12:13:10.246 2 DEBUG nova.privsep.utils [None req-a8e7f4ba-6b03-47a2-b326-89ea7456b8f5 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Path '/var/lib/nova/instances' supports direct I/O supports_direct_io /usr/lib/python3.9/site-packages/nova/privsep/utils.py:63
Oct 02 12:13:10 compute-0 nova_compute[192079]: 2025-10-02 12:13:10.247 2 DEBUG oslo_concurrency.processutils [None req-a8e7f4ba-6b03-47a2-b326-89ea7456b8f5 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): qemu-img convert -t none -O qcow2 -f qcow2 /var/lib/nova/instances/f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386/disk /var/lib/nova/instances/snapshots/tmp2rcuh73l/c586de58f4d64434926774dc496be99d execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:13:10 compute-0 nova_compute[192079]: 2025-10-02 12:13:10.509 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:10 compute-0 nova_compute[192079]: 2025-10-02 12:13:10.718 2 DEBUG oslo_concurrency.processutils [None req-a8e7f4ba-6b03-47a2-b326-89ea7456b8f5 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "qemu-img convert -t none -O qcow2 -f qcow2 /var/lib/nova/instances/f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386/disk /var/lib/nova/instances/snapshots/tmp2rcuh73l/c586de58f4d64434926774dc496be99d" returned: 0 in 0.471s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:13:10 compute-0 nova_compute[192079]: 2025-10-02 12:13:10.719 2 INFO nova.virt.libvirt.driver [None req-a8e7f4ba-6b03-47a2-b326-89ea7456b8f5 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Snapshot extracted, beginning image upload
Oct 02 12:13:12 compute-0 nova_compute[192079]: 2025-10-02 12:13:12.277 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:13 compute-0 nova_compute[192079]: 2025-10-02 12:13:13.325 2 INFO nova.virt.libvirt.driver [None req-a8e7f4ba-6b03-47a2-b326-89ea7456b8f5 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Snapshot image upload complete
Oct 02 12:13:13 compute-0 nova_compute[192079]: 2025-10-02 12:13:13.326 2 INFO nova.compute.manager [None req-a8e7f4ba-6b03-47a2-b326-89ea7456b8f5 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Took 3.80 seconds to snapshot the instance on the hypervisor.
Oct 02 12:13:15 compute-0 nova_compute[192079]: 2025-10-02 12:13:15.510 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:16 compute-0 nova_compute[192079]: 2025-10-02 12:13:16.093 2 DEBUG oslo_concurrency.lockutils [None req-4689718d-c569-4c14-b7fc-1533a89a4d65 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:13:16 compute-0 nova_compute[192079]: 2025-10-02 12:13:16.093 2 DEBUG oslo_concurrency.lockutils [None req-4689718d-c569-4c14-b7fc-1533a89a4d65 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:13:16 compute-0 nova_compute[192079]: 2025-10-02 12:13:16.094 2 DEBUG oslo_concurrency.lockutils [None req-4689718d-c569-4c14-b7fc-1533a89a4d65 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:13:16 compute-0 nova_compute[192079]: 2025-10-02 12:13:16.094 2 DEBUG oslo_concurrency.lockutils [None req-4689718d-c569-4c14-b7fc-1533a89a4d65 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:13:16 compute-0 nova_compute[192079]: 2025-10-02 12:13:16.095 2 DEBUG oslo_concurrency.lockutils [None req-4689718d-c569-4c14-b7fc-1533a89a4d65 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:13:16 compute-0 nova_compute[192079]: 2025-10-02 12:13:16.109 2 INFO nova.compute.manager [None req-4689718d-c569-4c14-b7fc-1533a89a4d65 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Terminating instance
Oct 02 12:13:16 compute-0 nova_compute[192079]: 2025-10-02 12:13:16.121 2 DEBUG nova.compute.manager [None req-4689718d-c569-4c14-b7fc-1533a89a4d65 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:13:16 compute-0 nova_compute[192079]: 2025-10-02 12:13:16.129 2 INFO nova.virt.libvirt.driver [-] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Instance destroyed successfully.
Oct 02 12:13:16 compute-0 nova_compute[192079]: 2025-10-02 12:13:16.129 2 DEBUG nova.objects.instance [None req-4689718d-c569-4c14-b7fc-1533a89a4d65 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lazy-loading 'resources' on Instance uuid f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:13:16 compute-0 nova_compute[192079]: 2025-10-02 12:13:16.148 2 DEBUG nova.virt.libvirt.vif [None req-4689718d-c569-4c14-b7fc-1533a89a4d65 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:12:56Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ImagesTestJSON-server-719598230',display_name='tempest-ImagesTestJSON-server-719598230',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-imagestestjson-server-719598230',id=65,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:13:03Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=4,progress=0,project_id='dcf78460093d411988a54040ea4c265a',ramdisk_id='',reservation_id='r-2hxyaomx',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',o
ld_vm_state='active',owner_project_name='tempest-ImagesTestJSON-437970487',owner_user_name='tempest-ImagesTestJSON-437970487-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:13:13Z,user_data=None,user_id='dcdfc3c0f94e42cb931d27f2e3b5b12d',uuid=f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='suspended') vif={"id": "669d1898-3fca-421e-86fb-0ef482f202e2", "address": "fa:16:3e:3b:69:44", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap669d1898-3f", "ovs_interfaceid": "669d1898-3fca-421e-86fb-0ef482f202e2", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:13:16 compute-0 nova_compute[192079]: 2025-10-02 12:13:16.149 2 DEBUG nova.network.os_vif_util [None req-4689718d-c569-4c14-b7fc-1533a89a4d65 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Converting VIF {"id": "669d1898-3fca-421e-86fb-0ef482f202e2", "address": "fa:16:3e:3b:69:44", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap669d1898-3f", "ovs_interfaceid": "669d1898-3fca-421e-86fb-0ef482f202e2", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:13:16 compute-0 nova_compute[192079]: 2025-10-02 12:13:16.149 2 DEBUG nova.network.os_vif_util [None req-4689718d-c569-4c14-b7fc-1533a89a4d65 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:3b:69:44,bridge_name='br-int',has_traffic_filtering=True,id=669d1898-3fca-421e-86fb-0ef482f202e2,network=Network(4f195445-fd43-4b92-89dd-a1b2fe9ea8c2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap669d1898-3f') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:13:16 compute-0 nova_compute[192079]: 2025-10-02 12:13:16.149 2 DEBUG os_vif [None req-4689718d-c569-4c14-b7fc-1533a89a4d65 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:3b:69:44,bridge_name='br-int',has_traffic_filtering=True,id=669d1898-3fca-421e-86fb-0ef482f202e2,network=Network(4f195445-fd43-4b92-89dd-a1b2fe9ea8c2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap669d1898-3f') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:13:16 compute-0 nova_compute[192079]: 2025-10-02 12:13:16.151 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:16 compute-0 nova_compute[192079]: 2025-10-02 12:13:16.151 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap669d1898-3f, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:13:16 compute-0 nova_compute[192079]: 2025-10-02 12:13:16.153 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:16 compute-0 nova_compute[192079]: 2025-10-02 12:13:16.154 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:16 compute-0 nova_compute[192079]: 2025-10-02 12:13:16.156 2 INFO os_vif [None req-4689718d-c569-4c14-b7fc-1533a89a4d65 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:3b:69:44,bridge_name='br-int',has_traffic_filtering=True,id=669d1898-3fca-421e-86fb-0ef482f202e2,network=Network(4f195445-fd43-4b92-89dd-a1b2fe9ea8c2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap669d1898-3f')
Oct 02 12:13:16 compute-0 nova_compute[192079]: 2025-10-02 12:13:16.156 2 INFO nova.virt.libvirt.driver [None req-4689718d-c569-4c14-b7fc-1533a89a4d65 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Deleting instance files /var/lib/nova/instances/f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386_del
Oct 02 12:13:16 compute-0 nova_compute[192079]: 2025-10-02 12:13:16.157 2 INFO nova.virt.libvirt.driver [None req-4689718d-c569-4c14-b7fc-1533a89a4d65 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Deletion of /var/lib/nova/instances/f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386_del complete
Oct 02 12:13:16 compute-0 podman[229400]: 2025-10-02 12:13:16.185801822 +0000 UTC m=+0.093280781 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 
9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 12:13:16 compute-0 nova_compute[192079]: 2025-10-02 12:13:16.261 2 INFO nova.compute.manager [None req-4689718d-c569-4c14-b7fc-1533a89a4d65 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Took 0.14 seconds to destroy the instance on the hypervisor.
Oct 02 12:13:16 compute-0 nova_compute[192079]: 2025-10-02 12:13:16.262 2 DEBUG oslo.service.loopingcall [None req-4689718d-c569-4c14-b7fc-1533a89a4d65 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:13:16 compute-0 nova_compute[192079]: 2025-10-02 12:13:16.262 2 DEBUG nova.compute.manager [-] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:13:16 compute-0 nova_compute[192079]: 2025-10-02 12:13:16.262 2 DEBUG nova.network.neutron [-] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:13:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:13:17.101 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:13:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:13:17.101 12 DEBUG ceilometer.polling.manager [-] Skip pollster cpu, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:13:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:13:17.102 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:13:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:13:17.102 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:13:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:13:17.102 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:13:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:13:17.102 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:13:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:13:17.102 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:13:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:13:17.102 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:13:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:13:17.102 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.capacity, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:13:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:13:17.103 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.iops, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:13:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:13:17.103 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:13:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:13:17.103 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:13:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:13:17.103 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:13:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:13:17.103 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:13:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:13:17.103 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:13:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:13:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:13:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:13:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:13:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:13:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.allocation, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:13:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:13:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster memory.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:13:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:13:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:13:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:13:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:13:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:13:17.105 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:13:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:13:17.105 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:13:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:13:17.105 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:13:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:13:17.105 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:13:17 compute-0 nova_compute[192079]: 2025-10-02 12:13:17.361 2 DEBUG nova.network.neutron [-] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:13:17 compute-0 nova_compute[192079]: 2025-10-02 12:13:17.395 2 INFO nova.compute.manager [-] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Took 1.13 seconds to deallocate network for instance.
Oct 02 12:13:17 compute-0 nova_compute[192079]: 2025-10-02 12:13:17.496 2 DEBUG nova.compute.manager [req-7b8023ea-05d5-4654-93a0-2124e665d17d req-77b9c1dd-4ac1-459c-b4af-136fdf78f5cf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Received event network-vif-deleted-669d1898-3fca-421e-86fb-0ef482f202e2 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:13:17 compute-0 nova_compute[192079]: 2025-10-02 12:13:17.504 2 DEBUG oslo_concurrency.lockutils [None req-4689718d-c569-4c14-b7fc-1533a89a4d65 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:13:17 compute-0 nova_compute[192079]: 2025-10-02 12:13:17.504 2 DEBUG oslo_concurrency.lockutils [None req-4689718d-c569-4c14-b7fc-1533a89a4d65 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:13:17 compute-0 nova_compute[192079]: 2025-10-02 12:13:17.592 2 DEBUG nova.compute.provider_tree [None req-4689718d-c569-4c14-b7fc-1533a89a4d65 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:13:17 compute-0 nova_compute[192079]: 2025-10-02 12:13:17.614 2 DEBUG nova.scheduler.client.report [None req-4689718d-c569-4c14-b7fc-1533a89a4d65 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:13:17 compute-0 nova_compute[192079]: 2025-10-02 12:13:17.649 2 DEBUG oslo_concurrency.lockutils [None req-4689718d-c569-4c14-b7fc-1533a89a4d65 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.145s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:13:17 compute-0 nova_compute[192079]: 2025-10-02 12:13:17.660 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:13:17 compute-0 nova_compute[192079]: 2025-10-02 12:13:17.663 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:13:17 compute-0 nova_compute[192079]: 2025-10-02 12:13:17.689 2 INFO nova.scheduler.client.report [None req-4689718d-c569-4c14-b7fc-1533a89a4d65 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Deleted allocations for instance f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386
Oct 02 12:13:17 compute-0 nova_compute[192079]: 2025-10-02 12:13:17.814 2 DEBUG oslo_concurrency.lockutils [None req-4689718d-c569-4c14-b7fc-1533a89a4d65 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.721s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:13:18 compute-0 nova_compute[192079]: 2025-10-02 12:13:18.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:13:18 compute-0 nova_compute[192079]: 2025-10-02 12:13:18.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:13:18 compute-0 nova_compute[192079]: 2025-10-02 12:13:18.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:13:18 compute-0 nova_compute[192079]: 2025-10-02 12:13:18.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:13:18 compute-0 nova_compute[192079]: 2025-10-02 12:13:18.705 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:13:18 compute-0 nova_compute[192079]: 2025-10-02 12:13:18.705 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:13:18 compute-0 nova_compute[192079]: 2025-10-02 12:13:18.705 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:13:18 compute-0 nova_compute[192079]: 2025-10-02 12:13:18.706 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:13:18 compute-0 nova_compute[192079]: 2025-10-02 12:13:18.853 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:13:18 compute-0 nova_compute[192079]: 2025-10-02 12:13:18.855 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5674MB free_disk=73.35731887817383GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:13:18 compute-0 nova_compute[192079]: 2025-10-02 12:13:18.855 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:13:18 compute-0 nova_compute[192079]: 2025-10-02 12:13:18.855 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:13:18 compute-0 nova_compute[192079]: 2025-10-02 12:13:18.908 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:13:18 compute-0 nova_compute[192079]: 2025-10-02 12:13:18.909 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:13:18 compute-0 nova_compute[192079]: 2025-10-02 12:13:18.928 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:13:18 compute-0 nova_compute[192079]: 2025-10-02 12:13:18.944 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:13:18 compute-0 nova_compute[192079]: 2025-10-02 12:13:18.971 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:13:18 compute-0 nova_compute[192079]: 2025-10-02 12:13:18.971 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.116s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:13:19 compute-0 nova_compute[192079]: 2025-10-02 12:13:19.473 2 DEBUG oslo_concurrency.lockutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "3277cbd6-2706-4647-b0df-b789c49f80ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:13:19 compute-0 nova_compute[192079]: 2025-10-02 12:13:19.473 2 DEBUG oslo_concurrency.lockutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "3277cbd6-2706-4647-b0df-b789c49f80ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:13:19 compute-0 nova_compute[192079]: 2025-10-02 12:13:19.505 2 DEBUG nova.compute.manager [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:13:19 compute-0 nova_compute[192079]: 2025-10-02 12:13:19.646 2 DEBUG oslo_concurrency.lockutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:13:19 compute-0 nova_compute[192079]: 2025-10-02 12:13:19.647 2 DEBUG oslo_concurrency.lockutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:13:19 compute-0 nova_compute[192079]: 2025-10-02 12:13:19.654 2 DEBUG nova.virt.hardware [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:13:19 compute-0 nova_compute[192079]: 2025-10-02 12:13:19.654 2 INFO nova.compute.claims [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:13:19 compute-0 nova_compute[192079]: 2025-10-02 12:13:19.890 2 DEBUG nova.compute.provider_tree [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:13:19 compute-0 nova_compute[192079]: 2025-10-02 12:13:19.905 2 DEBUG nova.scheduler.client.report [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:13:19 compute-0 nova_compute[192079]: 2025-10-02 12:13:19.952 2 DEBUG oslo_concurrency.lockutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.306s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:13:19 compute-0 nova_compute[192079]: 2025-10-02 12:13:19.953 2 DEBUG nova.compute.manager [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.068 2 DEBUG nova.compute.manager [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.069 2 DEBUG nova.network.neutron [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.108 2 INFO nova.virt.libvirt.driver [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.181 2 DEBUG nova.compute.manager [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.337 2 DEBUG nova.compute.manager [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.338 2 DEBUG nova.virt.libvirt.driver [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.339 2 INFO nova.virt.libvirt.driver [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Creating image(s)
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.339 2 DEBUG oslo_concurrency.lockutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "/var/lib/nova/instances/3277cbd6-2706-4647-b0df-b789c49f80ea/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.339 2 DEBUG oslo_concurrency.lockutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "/var/lib/nova/instances/3277cbd6-2706-4647-b0df-b789c49f80ea/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.340 2 DEBUG oslo_concurrency.lockutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "/var/lib/nova/instances/3277cbd6-2706-4647-b0df-b789c49f80ea/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.351 2 DEBUG oslo_concurrency.processutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.423 2 DEBUG oslo_concurrency.processutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.072s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.424 2 DEBUG oslo_concurrency.lockutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.425 2 DEBUG oslo_concurrency.lockutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.436 2 DEBUG oslo_concurrency.processutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.493 2 DEBUG oslo_concurrency.processutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.057s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.494 2 DEBUG oslo_concurrency.processutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/3277cbd6-2706-4647-b0df-b789c49f80ea/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.512 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.576 2 DEBUG oslo_concurrency.processutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/3277cbd6-2706-4647-b0df-b789c49f80ea/disk 1073741824" returned: 0 in 0.082s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.577 2 DEBUG oslo_concurrency.lockutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.152s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.577 2 DEBUG oslo_concurrency.processutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.629 2 DEBUG oslo_concurrency.processutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.052s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.630 2 DEBUG nova.virt.disk.api [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Checking if we can resize image /var/lib/nova/instances/3277cbd6-2706-4647-b0df-b789c49f80ea/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.631 2 DEBUG oslo_concurrency.processutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/3277cbd6-2706-4647-b0df-b789c49f80ea/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.687 2 DEBUG oslo_concurrency.processutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/3277cbd6-2706-4647-b0df-b789c49f80ea/disk --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.688 2 DEBUG nova.virt.disk.api [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Cannot resize image /var/lib/nova/instances/3277cbd6-2706-4647-b0df-b789c49f80ea/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.689 2 DEBUG nova.objects.instance [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lazy-loading 'migration_context' on Instance uuid 3277cbd6-2706-4647-b0df-b789c49f80ea obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.710 2 DEBUG nova.virt.libvirt.driver [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.710 2 DEBUG nova.virt.libvirt.driver [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Ensure instance console log exists: /var/lib/nova/instances/3277cbd6-2706-4647-b0df-b789c49f80ea/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.711 2 DEBUG oslo_concurrency.lockutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.711 2 DEBUG oslo_concurrency.lockutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.711 2 DEBUG oslo_concurrency.lockutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:13:20 compute-0 nova_compute[192079]: 2025-10-02 12:13:20.988 2 DEBUG nova.policy [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dcdfc3c0f94e42cb931d27f2e3b5b12d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dcf78460093d411988a54040ea4c265a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:13:21 compute-0 nova_compute[192079]: 2025-10-02 12:13:21.007 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759407186.0061498, f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:13:21 compute-0 nova_compute[192079]: 2025-10-02 12:13:21.007 2 INFO nova.compute.manager [-] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] VM Stopped (Lifecycle Event)
Oct 02 12:13:21 compute-0 nova_compute[192079]: 2025-10-02 12:13:21.045 2 DEBUG nova.compute.manager [None req-f47d7ae7-50fb-47f7-b250-8ea95067873e - - - - - -] [instance: f0f2f9f9-dbfc-47d2-96e5-48d3b5a5e386] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:13:21 compute-0 podman[229434]: 2025-10-02 12:13:21.143896526 +0000 UTC m=+0.053997791 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, vcs-type=git, version=9.6, container_name=openstack_network_exporter, io.buildah.version=1.33.7, maintainer=Red Hat, Inc., managed_by=edpm_ansible, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.tags=minimal rhel9, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., url=https://catalog.redhat.com/en/search?searchType=containers, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, config_id=edpm, 
distribution-scope=public, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, build-date=2025-08-20T13:12:41, architecture=x86_64, com.redhat.component=ubi9-minimal-container, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, release=1755695350, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., vendor=Red Hat, Inc., io.openshift.expose-services=, name=ubi9-minimal)
Oct 02 12:13:21 compute-0 podman[229435]: 2025-10-02 12:13:21.146887018 +0000 UTC m=+0.053337344 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=multipathd, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_id=multipathd, org.label-schema.build-date=20251001, tcib_managed=true)
Oct 02 12:13:21 compute-0 nova_compute[192079]: 2025-10-02 12:13:21.153 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:21 compute-0 nova_compute[192079]: 2025-10-02 12:13:21.972 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:13:21 compute-0 nova_compute[192079]: 2025-10-02 12:13:21.972 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:13:21 compute-0 nova_compute[192079]: 2025-10-02 12:13:21.972 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:13:21 compute-0 nova_compute[192079]: 2025-10-02 12:13:21.997 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Skipping network cache update for instance because it is Building. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9871
Oct 02 12:13:21 compute-0 nova_compute[192079]: 2025-10-02 12:13:21.997 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:13:21 compute-0 nova_compute[192079]: 2025-10-02 12:13:21.998 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:13:22 compute-0 nova_compute[192079]: 2025-10-02 12:13:22.238 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:22.238 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=17, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=16) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:13:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:22.239 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 0 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:13:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:22.240 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '17'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:13:22 compute-0 nova_compute[192079]: 2025-10-02 12:13:22.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:13:23 compute-0 nova_compute[192079]: 2025-10-02 12:13:23.016 2 DEBUG nova.network.neutron [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Successfully created port: 3f80aeff-247c-4194-b243-35f8690fee57 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:13:23 compute-0 nova_compute[192079]: 2025-10-02 12:13:23.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:13:25 compute-0 nova_compute[192079]: 2025-10-02 12:13:25.202 2 DEBUG nova.network.neutron [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Successfully updated port: 3f80aeff-247c-4194-b243-35f8690fee57 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:13:25 compute-0 nova_compute[192079]: 2025-10-02 12:13:25.219 2 DEBUG oslo_concurrency.lockutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "refresh_cache-3277cbd6-2706-4647-b0df-b789c49f80ea" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:13:25 compute-0 nova_compute[192079]: 2025-10-02 12:13:25.219 2 DEBUG oslo_concurrency.lockutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquired lock "refresh_cache-3277cbd6-2706-4647-b0df-b789c49f80ea" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:13:25 compute-0 nova_compute[192079]: 2025-10-02 12:13:25.219 2 DEBUG nova.network.neutron [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:13:25 compute-0 nova_compute[192079]: 2025-10-02 12:13:25.342 2 DEBUG nova.compute.manager [req-83959ded-8335-462d-aeee-1bec13375889 req-ee8bcc10-e17a-4470-b5e2-0edd905b241f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Received event network-changed-3f80aeff-247c-4194-b243-35f8690fee57 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:13:25 compute-0 nova_compute[192079]: 2025-10-02 12:13:25.342 2 DEBUG nova.compute.manager [req-83959ded-8335-462d-aeee-1bec13375889 req-ee8bcc10-e17a-4470-b5e2-0edd905b241f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Refreshing instance network info cache due to event network-changed-3f80aeff-247c-4194-b243-35f8690fee57. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:13:25 compute-0 nova_compute[192079]: 2025-10-02 12:13:25.343 2 DEBUG oslo_concurrency.lockutils [req-83959ded-8335-462d-aeee-1bec13375889 req-ee8bcc10-e17a-4470-b5e2-0edd905b241f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-3277cbd6-2706-4647-b0df-b789c49f80ea" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:13:25 compute-0 nova_compute[192079]: 2025-10-02 12:13:25.511 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:25 compute-0 nova_compute[192079]: 2025-10-02 12:13:25.586 2 DEBUG nova.network.neutron [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:13:26 compute-0 nova_compute[192079]: 2025-10-02 12:13:26.155 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:27 compute-0 podman[229475]: 2025-10-02 12:13:27.142312798 +0000 UTC m=+0.058616767 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter)
Oct 02 12:13:27 compute-0 podman[229476]: 2025-10-02 12:13:27.157200324 +0000 UTC m=+0.072534647 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=iscsid, container_name=iscsid, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, io.buildah.version=1.41.3, managed_by=edpm_ansible)
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.346 2 DEBUG nova.network.neutron [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Updating instance_info_cache with network_info: [{"id": "3f80aeff-247c-4194-b243-35f8690fee57", "address": "fa:16:3e:9a:82:c5", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap3f80aeff-24", "ovs_interfaceid": "3f80aeff-247c-4194-b243-35f8690fee57", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.389 2 DEBUG oslo_concurrency.lockutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Releasing lock "refresh_cache-3277cbd6-2706-4647-b0df-b789c49f80ea" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.389 2 DEBUG nova.compute.manager [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Instance network_info: |[{"id": "3f80aeff-247c-4194-b243-35f8690fee57", "address": "fa:16:3e:9a:82:c5", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap3f80aeff-24", "ovs_interfaceid": "3f80aeff-247c-4194-b243-35f8690fee57", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.390 2 DEBUG oslo_concurrency.lockutils [req-83959ded-8335-462d-aeee-1bec13375889 req-ee8bcc10-e17a-4470-b5e2-0edd905b241f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-3277cbd6-2706-4647-b0df-b789c49f80ea" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.390 2 DEBUG nova.network.neutron [req-83959ded-8335-462d-aeee-1bec13375889 req-ee8bcc10-e17a-4470-b5e2-0edd905b241f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Refreshing network info cache for port 3f80aeff-247c-4194-b243-35f8690fee57 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.396 2 DEBUG nova.virt.libvirt.driver [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Start _get_guest_xml network_info=[{"id": "3f80aeff-247c-4194-b243-35f8690fee57", "address": "fa:16:3e:9a:82:c5", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap3f80aeff-24", "ovs_interfaceid": "3f80aeff-247c-4194-b243-35f8690fee57", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.403 2 WARNING nova.virt.libvirt.driver [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.412 2 DEBUG nova.virt.libvirt.host [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.413 2 DEBUG nova.virt.libvirt.host [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.420 2 DEBUG nova.virt.libvirt.host [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.420 2 DEBUG nova.virt.libvirt.host [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.422 2 DEBUG nova.virt.libvirt.driver [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.422 2 DEBUG nova.virt.hardware [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.422 2 DEBUG nova.virt.hardware [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.423 2 DEBUG nova.virt.hardware [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.423 2 DEBUG nova.virt.hardware [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.423 2 DEBUG nova.virt.hardware [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.423 2 DEBUG nova.virt.hardware [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.424 2 DEBUG nova.virt.hardware [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.424 2 DEBUG nova.virt.hardware [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.424 2 DEBUG nova.virt.hardware [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.424 2 DEBUG nova.virt.hardware [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.424 2 DEBUG nova.virt.hardware [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.428 2 DEBUG nova.virt.libvirt.vif [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:13:18Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ImagesTestJSON-server-2081809098',display_name='tempest-ImagesTestJSON-server-2081809098',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-imagestestjson-server-2081809098',id=66,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='dcf78460093d411988a54040ea4c265a',ramdisk_id='',reservation_id='r-hd1dbp0p',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ImagesTestJSON-437970487',owner_user_name='tempest-ImagesTestJSON-437970487-project-member'},tags=TagList,t
ask_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:13:20Z,user_data=None,user_id='dcdfc3c0f94e42cb931d27f2e3b5b12d',uuid=3277cbd6-2706-4647-b0df-b789c49f80ea,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "3f80aeff-247c-4194-b243-35f8690fee57", "address": "fa:16:3e:9a:82:c5", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap3f80aeff-24", "ovs_interfaceid": "3f80aeff-247c-4194-b243-35f8690fee57", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.428 2 DEBUG nova.network.os_vif_util [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Converting VIF {"id": "3f80aeff-247c-4194-b243-35f8690fee57", "address": "fa:16:3e:9a:82:c5", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap3f80aeff-24", "ovs_interfaceid": "3f80aeff-247c-4194-b243-35f8690fee57", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.429 2 DEBUG nova.network.os_vif_util [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:9a:82:c5,bridge_name='br-int',has_traffic_filtering=True,id=3f80aeff-247c-4194-b243-35f8690fee57,network=Network(4f195445-fd43-4b92-89dd-a1b2fe9ea8c2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap3f80aeff-24') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.430 2 DEBUG nova.objects.instance [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lazy-loading 'pci_devices' on Instance uuid 3277cbd6-2706-4647-b0df-b789c49f80ea obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.443 2 DEBUG nova.virt.libvirt.driver [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:13:27 compute-0 nova_compute[192079]:   <uuid>3277cbd6-2706-4647-b0df-b789c49f80ea</uuid>
Oct 02 12:13:27 compute-0 nova_compute[192079]:   <name>instance-00000042</name>
Oct 02 12:13:27 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:13:27 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:13:27 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:13:27 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:       <nova:name>tempest-ImagesTestJSON-server-2081809098</nova:name>
Oct 02 12:13:27 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:13:27</nova:creationTime>
Oct 02 12:13:27 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:13:27 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:13:27 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:13:27 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:13:27 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:13:27 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:13:27 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:13:27 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:13:27 compute-0 nova_compute[192079]:         <nova:user uuid="dcdfc3c0f94e42cb931d27f2e3b5b12d">tempest-ImagesTestJSON-437970487-project-member</nova:user>
Oct 02 12:13:27 compute-0 nova_compute[192079]:         <nova:project uuid="dcf78460093d411988a54040ea4c265a">tempest-ImagesTestJSON-437970487</nova:project>
Oct 02 12:13:27 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:13:27 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:13:27 compute-0 nova_compute[192079]:         <nova:port uuid="3f80aeff-247c-4194-b243-35f8690fee57">
Oct 02 12:13:27 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.7" ipVersion="4"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:13:27 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:13:27 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:13:27 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <system>
Oct 02 12:13:27 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:13:27 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:13:27 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:13:27 compute-0 nova_compute[192079]:       <entry name="serial">3277cbd6-2706-4647-b0df-b789c49f80ea</entry>
Oct 02 12:13:27 compute-0 nova_compute[192079]:       <entry name="uuid">3277cbd6-2706-4647-b0df-b789c49f80ea</entry>
Oct 02 12:13:27 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     </system>
Oct 02 12:13:27 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:13:27 compute-0 nova_compute[192079]:   <os>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:   </os>
Oct 02 12:13:27 compute-0 nova_compute[192079]:   <features>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:   </features>
Oct 02 12:13:27 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:13:27 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:13:27 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:13:27 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/3277cbd6-2706-4647-b0df-b789c49f80ea/disk"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:13:27 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/3277cbd6-2706-4647-b0df-b789c49f80ea/disk.config"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:13:27 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:9a:82:c5"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:       <target dev="tap3f80aeff-24"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:13:27 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/3277cbd6-2706-4647-b0df-b789c49f80ea/console.log" append="off"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <video>
Oct 02 12:13:27 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     </video>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:13:27 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:13:27 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:13:27 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:13:27 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:13:27 compute-0 nova_compute[192079]: </domain>
Oct 02 12:13:27 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.445 2 DEBUG nova.compute.manager [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Preparing to wait for external event network-vif-plugged-3f80aeff-247c-4194-b243-35f8690fee57 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.445 2 DEBUG oslo_concurrency.lockutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "3277cbd6-2706-4647-b0df-b789c49f80ea-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.445 2 DEBUG oslo_concurrency.lockutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "3277cbd6-2706-4647-b0df-b789c49f80ea-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.445 2 DEBUG oslo_concurrency.lockutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "3277cbd6-2706-4647-b0df-b789c49f80ea-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.446 2 DEBUG nova.virt.libvirt.vif [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:13:18Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ImagesTestJSON-server-2081809098',display_name='tempest-ImagesTestJSON-server-2081809098',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-imagestestjson-server-2081809098',id=66,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='dcf78460093d411988a54040ea4c265a',ramdisk_id='',reservation_id='r-hd1dbp0p',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ImagesTestJSON-437970487',owner_user_name='tempest-ImagesTestJSON-437970487-project-member'},tags
=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:13:20Z,user_data=None,user_id='dcdfc3c0f94e42cb931d27f2e3b5b12d',uuid=3277cbd6-2706-4647-b0df-b789c49f80ea,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "3f80aeff-247c-4194-b243-35f8690fee57", "address": "fa:16:3e:9a:82:c5", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap3f80aeff-24", "ovs_interfaceid": "3f80aeff-247c-4194-b243-35f8690fee57", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.446 2 DEBUG nova.network.os_vif_util [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Converting VIF {"id": "3f80aeff-247c-4194-b243-35f8690fee57", "address": "fa:16:3e:9a:82:c5", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap3f80aeff-24", "ovs_interfaceid": "3f80aeff-247c-4194-b243-35f8690fee57", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.447 2 DEBUG nova.network.os_vif_util [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:9a:82:c5,bridge_name='br-int',has_traffic_filtering=True,id=3f80aeff-247c-4194-b243-35f8690fee57,network=Network(4f195445-fd43-4b92-89dd-a1b2fe9ea8c2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap3f80aeff-24') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.447 2 DEBUG os_vif [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:9a:82:c5,bridge_name='br-int',has_traffic_filtering=True,id=3f80aeff-247c-4194-b243-35f8690fee57,network=Network(4f195445-fd43-4b92-89dd-a1b2fe9ea8c2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap3f80aeff-24') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.448 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.448 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.448 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.451 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.452 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap3f80aeff-24, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.452 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap3f80aeff-24, col_values=(('external_ids', {'iface-id': '3f80aeff-247c-4194-b243-35f8690fee57', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:9a:82:c5', 'vm-uuid': '3277cbd6-2706-4647-b0df-b789c49f80ea'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:13:27 compute-0 NetworkManager[51160]: <info>  [1759407207.4542] manager: (tap3f80aeff-24): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/107)
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.453 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.454 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.460 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.461 2 INFO os_vif [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:9a:82:c5,bridge_name='br-int',has_traffic_filtering=True,id=3f80aeff-247c-4194-b243-35f8690fee57,network=Network(4f195445-fd43-4b92-89dd-a1b2fe9ea8c2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap3f80aeff-24')
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.609 2 DEBUG nova.virt.libvirt.driver [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.610 2 DEBUG nova.virt.libvirt.driver [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.610 2 DEBUG nova.virt.libvirt.driver [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] No VIF found with MAC fa:16:3e:9a:82:c5, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:13:27 compute-0 nova_compute[192079]: 2025-10-02 12:13:27.610 2 INFO nova.virt.libvirt.driver [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Using config drive
Oct 02 12:13:28 compute-0 nova_compute[192079]: 2025-10-02 12:13:28.201 2 INFO nova.virt.libvirt.driver [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Creating config drive at /var/lib/nova/instances/3277cbd6-2706-4647-b0df-b789c49f80ea/disk.config
Oct 02 12:13:28 compute-0 nova_compute[192079]: 2025-10-02 12:13:28.207 2 DEBUG oslo_concurrency.processutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/3277cbd6-2706-4647-b0df-b789c49f80ea/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpnnuui74q execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:13:28 compute-0 nova_compute[192079]: 2025-10-02 12:13:28.334 2 DEBUG oslo_concurrency.processutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/3277cbd6-2706-4647-b0df-b789c49f80ea/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpnnuui74q" returned: 0 in 0.126s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:13:28 compute-0 systemd[1]: Starting dnf makecache...
Oct 02 12:13:28 compute-0 kernel: tap3f80aeff-24: entered promiscuous mode
Oct 02 12:13:28 compute-0 NetworkManager[51160]: <info>  [1759407208.4015] manager: (tap3f80aeff-24): new Tun device (/org/freedesktop/NetworkManager/Devices/108)
Oct 02 12:13:28 compute-0 ovn_controller[94336]: 2025-10-02T12:13:28Z|00212|binding|INFO|Claiming lport 3f80aeff-247c-4194-b243-35f8690fee57 for this chassis.
Oct 02 12:13:28 compute-0 ovn_controller[94336]: 2025-10-02T12:13:28Z|00213|binding|INFO|3f80aeff-247c-4194-b243-35f8690fee57: Claiming fa:16:3e:9a:82:c5 10.100.0.7
Oct 02 12:13:28 compute-0 nova_compute[192079]: 2025-10-02 12:13:28.405 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:28.412 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:9a:82:c5 10.100.0.7'], port_security=['fa:16:3e:9a:82:c5 10.100.0.7'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.7/28', 'neutron:device_id': '3277cbd6-2706-4647-b0df-b789c49f80ea', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'dcf78460093d411988a54040ea4c265a', 'neutron:revision_number': '2', 'neutron:security_group_ids': 'aacce687-8b76-4e90-b19c-0dd006394188', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=24ae9888-31f5-4083-b5ee-e7ed6a1eee13, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=3f80aeff-247c-4194-b243-35f8690fee57) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:28.413 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 3f80aeff-247c-4194-b243-35f8690fee57 in datapath 4f195445-fd43-4b92-89dd-a1b2fe9ea8c2 bound to our chassis
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:28.415 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 4f195445-fd43-4b92-89dd-a1b2fe9ea8c2
Oct 02 12:13:28 compute-0 ovn_controller[94336]: 2025-10-02T12:13:28Z|00214|binding|INFO|Setting lport 3f80aeff-247c-4194-b243-35f8690fee57 ovn-installed in OVS
Oct 02 12:13:28 compute-0 ovn_controller[94336]: 2025-10-02T12:13:28Z|00215|binding|INFO|Setting lport 3f80aeff-247c-4194-b243-35f8690fee57 up in Southbound
Oct 02 12:13:28 compute-0 nova_compute[192079]: 2025-10-02 12:13:28.422 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:28 compute-0 nova_compute[192079]: 2025-10-02 12:13:28.429 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:28.434 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[38bfd503-e770-4591-9992-22ba9c6a5694]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:28.435 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap4f195445-f1 in ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:28.437 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap4f195445-f0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:28.438 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[32def0be-2075-44e8-8c70-99cd4c64d65f]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:28.439 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[12029585-64fd-4bb3-b70d-abfd69871b9a]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:28 compute-0 systemd-udevd[229539]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:28.455 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[fee1be90-5858-4bf6-8c1c-d01423c2c077]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:28 compute-0 systemd-machined[152150]: New machine qemu-32-instance-00000042.
Oct 02 12:13:28 compute-0 NetworkManager[51160]: <info>  [1759407208.4730] device (tap3f80aeff-24): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:13:28 compute-0 NetworkManager[51160]: <info>  [1759407208.4737] device (tap3f80aeff-24): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:28.478 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3277d211-51b0-4e1f-9f7b-190879bc1ca0]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:28 compute-0 systemd[1]: Started Virtual Machine qemu-32-instance-00000042.
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:28.510 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[767e8c6f-9cd2-4693-aed8-ef341e977c16]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:28.515 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8e0c3892-caa9-421d-a1c8-1a9f94eb1dd1]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:28 compute-0 NetworkManager[51160]: <info>  [1759407208.5169] manager: (tap4f195445-f0): new Veth device (/org/freedesktop/NetworkManager/Devices/109)
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:28.549 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[1cd2f12b-fbc6-4e49-9e6f-6412e4ef786f]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:28.552 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[65e0af08-49f8-4c1b-a161-15f11c55422d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:28 compute-0 NetworkManager[51160]: <info>  [1759407208.5731] device (tap4f195445-f0): carrier: link connected
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:28.578 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[34aa0951-c8c1-4b2e-a87d-43d2167e9b0d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:28.597 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6b270c2a-3374-4730-8756-1d2c0e349d15]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap4f195445-f1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:65:93:03'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 65], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 520620, 'reachable_time': 33561, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 229570, 'error': None, 'target': 'ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:28.612 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[962e2c08-5a7f-4156-a5b6-04ec7f0e3da3]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe65:9303'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 520620, 'tstamp': 520620}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 229571, 'error': None, 'target': 'ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:28.626 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[711157a1-f284-4b81-8280-f6e2675abe03]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap4f195445-f1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:65:93:03'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 65], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 520620, 'reachable_time': 33561, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 229572, 'error': None, 'target': 'ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:28.661 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1d7de77b-3916-49aa-bc79-2f1169ddfcaf]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:28 compute-0 dnf[229528]: Metadata cache refreshed recently.
Oct 02 12:13:28 compute-0 systemd[1]: dnf-makecache.service: Deactivated successfully.
Oct 02 12:13:28 compute-0 systemd[1]: Finished dnf makecache.
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:28.729 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d4b74701-0400-4ad5-b80c-01c452949d58]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:28.730 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap4f195445-f0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:28.731 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:28.731 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap4f195445-f0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:13:28 compute-0 kernel: tap4f195445-f0: entered promiscuous mode
Oct 02 12:13:28 compute-0 nova_compute[192079]: 2025-10-02 12:13:28.733 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:28.736 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap4f195445-f0, col_values=(('external_ids', {'iface-id': 'd65a1bd0-87e2-4bbf-9945-dacace78444f'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:13:28 compute-0 NetworkManager[51160]: <info>  [1759407208.7366] manager: (tap4f195445-f0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/110)
Oct 02 12:13:28 compute-0 ovn_controller[94336]: 2025-10-02T12:13:28Z|00216|binding|INFO|Releasing lport d65a1bd0-87e2-4bbf-9945-dacace78444f from this chassis (sb_readonly=0)
Oct 02 12:13:28 compute-0 nova_compute[192079]: 2025-10-02 12:13:28.754 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:28.755 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/4f195445-fd43-4b92-89dd-a1b2fe9ea8c2.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/4f195445-fd43-4b92-89dd-a1b2fe9ea8c2.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:28.756 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[56df7ed9-b772-47b6-8ba3-212ac41c3f18]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:28.756 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/4f195445-fd43-4b92-89dd-a1b2fe9ea8c2.pid.haproxy
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 4f195445-fd43-4b92-89dd-a1b2fe9ea8c2
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:13:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:28.757 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'env', 'PROCESS_TAG=haproxy-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/4f195445-fd43-4b92-89dd-a1b2fe9ea8c2.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:13:28 compute-0 nova_compute[192079]: 2025-10-02 12:13:28.847 2 DEBUG nova.compute.manager [req-bbac2eb8-0b93-4254-a5cf-e3f388e64287 req-6f277e80-48de-4355-a80d-e14ed8297d8b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Received event network-vif-plugged-3f80aeff-247c-4194-b243-35f8690fee57 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:13:28 compute-0 nova_compute[192079]: 2025-10-02 12:13:28.847 2 DEBUG oslo_concurrency.lockutils [req-bbac2eb8-0b93-4254-a5cf-e3f388e64287 req-6f277e80-48de-4355-a80d-e14ed8297d8b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "3277cbd6-2706-4647-b0df-b789c49f80ea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:13:28 compute-0 nova_compute[192079]: 2025-10-02 12:13:28.848 2 DEBUG oslo_concurrency.lockutils [req-bbac2eb8-0b93-4254-a5cf-e3f388e64287 req-6f277e80-48de-4355-a80d-e14ed8297d8b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "3277cbd6-2706-4647-b0df-b789c49f80ea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:13:28 compute-0 nova_compute[192079]: 2025-10-02 12:13:28.848 2 DEBUG oslo_concurrency.lockutils [req-bbac2eb8-0b93-4254-a5cf-e3f388e64287 req-6f277e80-48de-4355-a80d-e14ed8297d8b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "3277cbd6-2706-4647-b0df-b789c49f80ea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:13:28 compute-0 nova_compute[192079]: 2025-10-02 12:13:28.849 2 DEBUG nova.compute.manager [req-bbac2eb8-0b93-4254-a5cf-e3f388e64287 req-6f277e80-48de-4355-a80d-e14ed8297d8b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Processing event network-vif-plugged-3f80aeff-247c-4194-b243-35f8690fee57 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:13:29 compute-0 podman[229604]: 2025-10-02 12:13:29.08991141 +0000 UTC m=+0.023668476 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:13:29 compute-0 podman[229604]: 2025-10-02 12:13:29.288453158 +0000 UTC m=+0.222210174 container create 66f6ed517867365bda9971012fafcbe55842d30091e7163d9a4a7d896f47f026 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, tcib_managed=true, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3)
Oct 02 12:13:29 compute-0 systemd[1]: Started libpod-conmon-66f6ed517867365bda9971012fafcbe55842d30091e7163d9a4a7d896f47f026.scope.
Oct 02 12:13:29 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:13:29 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/e396b660c75596730a767aba29d884f0f50aafa547ae0c00a8b0974991fb639f/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:13:29 compute-0 podman[229604]: 2025-10-02 12:13:29.491590521 +0000 UTC m=+0.425347617 container init 66f6ed517867365bda9971012fafcbe55842d30091e7163d9a4a7d896f47f026 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:13:29 compute-0 podman[229604]: 2025-10-02 12:13:29.498407507 +0000 UTC m=+0.432164553 container start 66f6ed517867365bda9971012fafcbe55842d30091e7163d9a4a7d896f47f026 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, org.label-schema.build-date=20251001)
Oct 02 12:13:29 compute-0 neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2[229626]: [NOTICE]   (229630) : New worker (229632) forked
Oct 02 12:13:29 compute-0 neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2[229626]: [NOTICE]   (229630) : Loading success.
Oct 02 12:13:29 compute-0 nova_compute[192079]: 2025-10-02 12:13:29.533 2 DEBUG nova.network.neutron [req-83959ded-8335-462d-aeee-1bec13375889 req-ee8bcc10-e17a-4470-b5e2-0edd905b241f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Updated VIF entry in instance network info cache for port 3f80aeff-247c-4194-b243-35f8690fee57. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:13:29 compute-0 nova_compute[192079]: 2025-10-02 12:13:29.534 2 DEBUG nova.network.neutron [req-83959ded-8335-462d-aeee-1bec13375889 req-ee8bcc10-e17a-4470-b5e2-0edd905b241f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Updating instance_info_cache with network_info: [{"id": "3f80aeff-247c-4194-b243-35f8690fee57", "address": "fa:16:3e:9a:82:c5", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap3f80aeff-24", "ovs_interfaceid": "3f80aeff-247c-4194-b243-35f8690fee57", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:13:29 compute-0 nova_compute[192079]: 2025-10-02 12:13:29.553 2 DEBUG oslo_concurrency.lockutils [req-83959ded-8335-462d-aeee-1bec13375889 req-ee8bcc10-e17a-4470-b5e2-0edd905b241f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-3277cbd6-2706-4647-b0df-b789c49f80ea" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:13:29 compute-0 nova_compute[192079]: 2025-10-02 12:13:29.737 2 DEBUG nova.compute.manager [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:13:29 compute-0 nova_compute[192079]: 2025-10-02 12:13:29.738 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407209.736955, 3277cbd6-2706-4647-b0df-b789c49f80ea => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:13:29 compute-0 nova_compute[192079]: 2025-10-02 12:13:29.738 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] VM Started (Lifecycle Event)
Oct 02 12:13:29 compute-0 nova_compute[192079]: 2025-10-02 12:13:29.743 2 DEBUG nova.virt.libvirt.driver [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:13:29 compute-0 nova_compute[192079]: 2025-10-02 12:13:29.747 2 INFO nova.virt.libvirt.driver [-] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Instance spawned successfully.
Oct 02 12:13:29 compute-0 nova_compute[192079]: 2025-10-02 12:13:29.747 2 DEBUG nova.virt.libvirt.driver [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:13:29 compute-0 nova_compute[192079]: 2025-10-02 12:13:29.780 2 DEBUG nova.virt.libvirt.driver [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:13:29 compute-0 nova_compute[192079]: 2025-10-02 12:13:29.780 2 DEBUG nova.virt.libvirt.driver [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:13:29 compute-0 nova_compute[192079]: 2025-10-02 12:13:29.781 2 DEBUG nova.virt.libvirt.driver [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:13:29 compute-0 nova_compute[192079]: 2025-10-02 12:13:29.781 2 DEBUG nova.virt.libvirt.driver [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:13:29 compute-0 nova_compute[192079]: 2025-10-02 12:13:29.782 2 DEBUG nova.virt.libvirt.driver [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:13:29 compute-0 nova_compute[192079]: 2025-10-02 12:13:29.782 2 DEBUG nova.virt.libvirt.driver [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:13:29 compute-0 nova_compute[192079]: 2025-10-02 12:13:29.788 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:13:29 compute-0 nova_compute[192079]: 2025-10-02 12:13:29.790 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:13:29 compute-0 nova_compute[192079]: 2025-10-02 12:13:29.821 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:13:29 compute-0 nova_compute[192079]: 2025-10-02 12:13:29.821 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407209.7404141, 3277cbd6-2706-4647-b0df-b789c49f80ea => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:13:29 compute-0 nova_compute[192079]: 2025-10-02 12:13:29.822 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] VM Paused (Lifecycle Event)
Oct 02 12:13:29 compute-0 nova_compute[192079]: 2025-10-02 12:13:29.852 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:13:29 compute-0 nova_compute[192079]: 2025-10-02 12:13:29.855 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407209.74195, 3277cbd6-2706-4647-b0df-b789c49f80ea => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:13:29 compute-0 nova_compute[192079]: 2025-10-02 12:13:29.855 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] VM Resumed (Lifecycle Event)
Oct 02 12:13:29 compute-0 nova_compute[192079]: 2025-10-02 12:13:29.883 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:13:29 compute-0 nova_compute[192079]: 2025-10-02 12:13:29.888 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:13:29 compute-0 nova_compute[192079]: 2025-10-02 12:13:29.916 2 INFO nova.compute.manager [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Took 9.58 seconds to spawn the instance on the hypervisor.
Oct 02 12:13:29 compute-0 nova_compute[192079]: 2025-10-02 12:13:29.917 2 DEBUG nova.compute.manager [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:13:29 compute-0 nova_compute[192079]: 2025-10-02 12:13:29.922 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:13:30 compute-0 nova_compute[192079]: 2025-10-02 12:13:30.011 2 INFO nova.compute.manager [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Took 10.40 seconds to build instance.
Oct 02 12:13:30 compute-0 nova_compute[192079]: 2025-10-02 12:13:30.041 2 DEBUG oslo_concurrency.lockutils [None req-a4cd85e1-e4b7-4a7e-8d72-6f28637ad945 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "3277cbd6-2706-4647-b0df-b789c49f80ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 10.568s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:13:30 compute-0 nova_compute[192079]: 2025-10-02 12:13:30.513 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:30 compute-0 nova_compute[192079]: 2025-10-02 12:13:30.957 2 DEBUG nova.compute.manager [req-ae57b75a-b609-4d79-80a7-a6c755505792 req-40089786-70c3-475d-ad5b-cb1923cc870a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Received event network-vif-plugged-3f80aeff-247c-4194-b243-35f8690fee57 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:13:30 compute-0 nova_compute[192079]: 2025-10-02 12:13:30.958 2 DEBUG oslo_concurrency.lockutils [req-ae57b75a-b609-4d79-80a7-a6c755505792 req-40089786-70c3-475d-ad5b-cb1923cc870a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "3277cbd6-2706-4647-b0df-b789c49f80ea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:13:30 compute-0 nova_compute[192079]: 2025-10-02 12:13:30.958 2 DEBUG oslo_concurrency.lockutils [req-ae57b75a-b609-4d79-80a7-a6c755505792 req-40089786-70c3-475d-ad5b-cb1923cc870a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "3277cbd6-2706-4647-b0df-b789c49f80ea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:13:30 compute-0 nova_compute[192079]: 2025-10-02 12:13:30.958 2 DEBUG oslo_concurrency.lockutils [req-ae57b75a-b609-4d79-80a7-a6c755505792 req-40089786-70c3-475d-ad5b-cb1923cc870a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "3277cbd6-2706-4647-b0df-b789c49f80ea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:13:30 compute-0 nova_compute[192079]: 2025-10-02 12:13:30.958 2 DEBUG nova.compute.manager [req-ae57b75a-b609-4d79-80a7-a6c755505792 req-40089786-70c3-475d-ad5b-cb1923cc870a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] No waiting events found dispatching network-vif-plugged-3f80aeff-247c-4194-b243-35f8690fee57 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:13:30 compute-0 nova_compute[192079]: 2025-10-02 12:13:30.959 2 WARNING nova.compute.manager [req-ae57b75a-b609-4d79-80a7-a6c755505792 req-40089786-70c3-475d-ad5b-cb1923cc870a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Received unexpected event network-vif-plugged-3f80aeff-247c-4194-b243-35f8690fee57 for instance with vm_state active and task_state None.
Oct 02 12:13:32 compute-0 nova_compute[192079]: 2025-10-02 12:13:32.455 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:32 compute-0 nova_compute[192079]: 2025-10-02 12:13:32.861 2 DEBUG nova.compute.manager [None req-8509c398-e5e7-4a35-b5e6-b148d83ebda8 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:13:32 compute-0 nova_compute[192079]: 2025-10-02 12:13:32.926 2 INFO nova.compute.manager [None req-8509c398-e5e7-4a35-b5e6-b148d83ebda8 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] instance snapshotting
Oct 02 12:13:33 compute-0 nova_compute[192079]: 2025-10-02 12:13:33.247 2 INFO nova.virt.libvirt.driver [None req-8509c398-e5e7-4a35-b5e6-b148d83ebda8 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Beginning live snapshot process
Oct 02 12:13:34 compute-0 virtqemud[191807]: invalid argument: disk vda does not have an active block job
Oct 02 12:13:34 compute-0 nova_compute[192079]: 2025-10-02 12:13:34.018 2 DEBUG oslo_concurrency.processutils [None req-8509c398-e5e7-4a35-b5e6-b148d83ebda8 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/3277cbd6-2706-4647-b0df-b789c49f80ea/disk --force-share --output=json -f qcow2 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:13:34 compute-0 nova_compute[192079]: 2025-10-02 12:13:34.076 2 DEBUG oslo_concurrency.processutils [None req-8509c398-e5e7-4a35-b5e6-b148d83ebda8 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/3277cbd6-2706-4647-b0df-b789c49f80ea/disk --force-share --output=json -f qcow2" returned: 0 in 0.058s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:13:34 compute-0 nova_compute[192079]: 2025-10-02 12:13:34.078 2 DEBUG oslo_concurrency.processutils [None req-8509c398-e5e7-4a35-b5e6-b148d83ebda8 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/3277cbd6-2706-4647-b0df-b789c49f80ea/disk --force-share --output=json -f qcow2 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:13:34 compute-0 nova_compute[192079]: 2025-10-02 12:13:34.132 2 DEBUG oslo_concurrency.processutils [None req-8509c398-e5e7-4a35-b5e6-b148d83ebda8 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/3277cbd6-2706-4647-b0df-b789c49f80ea/disk --force-share --output=json -f qcow2" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:13:34 compute-0 nova_compute[192079]: 2025-10-02 12:13:34.153 2 DEBUG oslo_concurrency.processutils [None req-8509c398-e5e7-4a35-b5e6-b148d83ebda8 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:13:34 compute-0 nova_compute[192079]: 2025-10-02 12:13:34.207 2 DEBUG oslo_concurrency.processutils [None req-8509c398-e5e7-4a35-b5e6-b148d83ebda8 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.053s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:13:34 compute-0 nova_compute[192079]: 2025-10-02 12:13:34.208 2 DEBUG oslo_concurrency.processutils [None req-8509c398-e5e7-4a35-b5e6-b148d83ebda8 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/snapshots/tmpenxpghyh/6305f5dd7730495691852e67b186d56a.delta 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:13:34 compute-0 nova_compute[192079]: 2025-10-02 12:13:34.582 2 DEBUG oslo_concurrency.processutils [None req-8509c398-e5e7-4a35-b5e6-b148d83ebda8 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/snapshots/tmpenxpghyh/6305f5dd7730495691852e67b186d56a.delta 1073741824" returned: 0 in 0.374s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:13:34 compute-0 nova_compute[192079]: 2025-10-02 12:13:34.582 2 INFO nova.virt.libvirt.driver [None req-8509c398-e5e7-4a35-b5e6-b148d83ebda8 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Quiescing instance not available: QEMU guest agent is not enabled.
Oct 02 12:13:34 compute-0 nova_compute[192079]: 2025-10-02 12:13:34.631 2 DEBUG nova.virt.libvirt.guest [None req-8509c398-e5e7-4a35-b5e6-b148d83ebda8 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] COPY block job progress, current cursor: 1 final cursor: 1 is_job_complete /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:846
Oct 02 12:13:34 compute-0 nova_compute[192079]: 2025-10-02 12:13:34.633 2 INFO nova.virt.libvirt.driver [None req-8509c398-e5e7-4a35-b5e6-b148d83ebda8 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Skipping quiescing instance: QEMU guest agent is not enabled.
Oct 02 12:13:34 compute-0 nova_compute[192079]: 2025-10-02 12:13:34.944 2 DEBUG nova.privsep.utils [None req-8509c398-e5e7-4a35-b5e6-b148d83ebda8 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Path '/var/lib/nova/instances' supports direct I/O supports_direct_io /usr/lib/python3.9/site-packages/nova/privsep/utils.py:63
Oct 02 12:13:34 compute-0 nova_compute[192079]: 2025-10-02 12:13:34.945 2 DEBUG oslo_concurrency.processutils [None req-8509c398-e5e7-4a35-b5e6-b148d83ebda8 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): qemu-img convert -t none -O qcow2 -f qcow2 /var/lib/nova/instances/snapshots/tmpenxpghyh/6305f5dd7730495691852e67b186d56a.delta /var/lib/nova/instances/snapshots/tmpenxpghyh/6305f5dd7730495691852e67b186d56a execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:13:35 compute-0 nova_compute[192079]: 2025-10-02 12:13:35.515 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:36 compute-0 nova_compute[192079]: 2025-10-02 12:13:36.093 2 DEBUG oslo_concurrency.processutils [None req-8509c398-e5e7-4a35-b5e6-b148d83ebda8 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "qemu-img convert -t none -O qcow2 -f qcow2 /var/lib/nova/instances/snapshots/tmpenxpghyh/6305f5dd7730495691852e67b186d56a.delta /var/lib/nova/instances/snapshots/tmpenxpghyh/6305f5dd7730495691852e67b186d56a" returned: 0 in 1.147s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:13:36 compute-0 nova_compute[192079]: 2025-10-02 12:13:36.094 2 INFO nova.virt.libvirt.driver [None req-8509c398-e5e7-4a35-b5e6-b148d83ebda8 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Snapshot extracted, beginning image upload
Oct 02 12:13:36 compute-0 podman[229667]: 2025-10-02 12:13:36.138238881 +0000 UTC m=+0.048780031 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_metadata_agent, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, org.label-schema.schema-version=1.0)
Oct 02 12:13:36 compute-0 podman[229669]: 2025-10-02 12:13:36.171882317 +0000 UTC m=+0.075434336 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:13:36 compute-0 podman[229668]: 2025-10-02 12:13:36.185270681 +0000 UTC m=+0.090634049 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible, tcib_managed=true, config_id=ovn_controller, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, container_name=ovn_controller, io.buildah.version=1.41.3)
Oct 02 12:13:37 compute-0 nova_compute[192079]: 2025-10-02 12:13:37.458 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:39 compute-0 nova_compute[192079]: 2025-10-02 12:13:39.199 2 INFO nova.virt.libvirt.driver [None req-8509c398-e5e7-4a35-b5e6-b148d83ebda8 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Snapshot image upload complete
Oct 02 12:13:39 compute-0 nova_compute[192079]: 2025-10-02 12:13:39.200 2 INFO nova.compute.manager [None req-8509c398-e5e7-4a35-b5e6-b148d83ebda8 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Took 6.26 seconds to snapshot the instance on the hypervisor.
Oct 02 12:13:40 compute-0 nova_compute[192079]: 2025-10-02 12:13:40.517 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:42 compute-0 nova_compute[192079]: 2025-10-02 12:13:42.462 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:45 compute-0 ovn_controller[94336]: 2025-10-02T12:13:45Z|00022|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:9a:82:c5 10.100.0.7
Oct 02 12:13:45 compute-0 ovn_controller[94336]: 2025-10-02T12:13:45Z|00023|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:9a:82:c5 10.100.0.7
Oct 02 12:13:45 compute-0 nova_compute[192079]: 2025-10-02 12:13:45.552 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:47 compute-0 podman[229751]: 2025-10-02 12:13:47.1488748 +0000 UTC m=+0.060051176 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, container_name=ceilometer_agent_compute, org.label-schema.build-date=20251001, 
org.label-schema.license=GPLv2, config_id=edpm, managed_by=edpm_ansible, org.label-schema.schema-version=1.0)
Oct 02 12:13:47 compute-0 nova_compute[192079]: 2025-10-02 12:13:47.467 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:50 compute-0 nova_compute[192079]: 2025-10-02 12:13:50.553 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:52 compute-0 podman[229774]: 2025-10-02 12:13:52.157467641 +0000 UTC m=+0.055828762 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, config_id=multipathd, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.vendor=CentOS, container_name=multipathd, io.buildah.version=1.41.3, managed_by=edpm_ansible)
Oct 02 12:13:52 compute-0 podman[229773]: 2025-10-02 12:13:52.158503939 +0000 UTC m=+0.062337289 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, distribution-scope=public, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, vcs-type=git, com.redhat.component=ubi9-minimal-container, container_name=openstack_network_exporter, io.openshift.expose-services=, config_id=edpm, version=9.6, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, maintainer=Red Hat, Inc., vendor=Red Hat, Inc., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, name=ubi9-minimal, url=https://catalog.redhat.com/en/search?searchType=containers, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., architecture=x86_64, io.openshift.tags=minimal rhel9, 
build-date=2025-08-20T13:12:41, io.buildah.version=1.33.7, release=1755695350, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly.)
Oct 02 12:13:52 compute-0 nova_compute[192079]: 2025-10-02 12:13:52.469 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:55 compute-0 nova_compute[192079]: 2025-10-02 12:13:55.555 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:57 compute-0 nova_compute[192079]: 2025-10-02 12:13:57.472 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.119 2 DEBUG oslo_concurrency.lockutils [None req-ecfc1702-a5d6-4506-a7cc-8091f7a0d4d1 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "3277cbd6-2706-4647-b0df-b789c49f80ea" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.120 2 DEBUG oslo_concurrency.lockutils [None req-ecfc1702-a5d6-4506-a7cc-8091f7a0d4d1 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "3277cbd6-2706-4647-b0df-b789c49f80ea" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.120 2 DEBUG oslo_concurrency.lockutils [None req-ecfc1702-a5d6-4506-a7cc-8091f7a0d4d1 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "3277cbd6-2706-4647-b0df-b789c49f80ea-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.120 2 DEBUG oslo_concurrency.lockutils [None req-ecfc1702-a5d6-4506-a7cc-8091f7a0d4d1 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "3277cbd6-2706-4647-b0df-b789c49f80ea-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.121 2 DEBUG oslo_concurrency.lockutils [None req-ecfc1702-a5d6-4506-a7cc-8091f7a0d4d1 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "3277cbd6-2706-4647-b0df-b789c49f80ea-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.131 2 INFO nova.compute.manager [None req-ecfc1702-a5d6-4506-a7cc-8091f7a0d4d1 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Terminating instance
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.141 2 DEBUG nova.compute.manager [None req-ecfc1702-a5d6-4506-a7cc-8091f7a0d4d1 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:13:58 compute-0 podman[229812]: 2025-10-02 12:13:58.143917207 +0000 UTC m=+0.058874354 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter)
Oct 02 12:13:58 compute-0 kernel: tap3f80aeff-24 (unregistering): left promiscuous mode
Oct 02 12:13:58 compute-0 NetworkManager[51160]: <info>  [1759407238.1597] device (tap3f80aeff-24): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:13:58 compute-0 ovn_controller[94336]: 2025-10-02T12:13:58Z|00217|binding|INFO|Releasing lport 3f80aeff-247c-4194-b243-35f8690fee57 from this chassis (sb_readonly=0)
Oct 02 12:13:58 compute-0 ovn_controller[94336]: 2025-10-02T12:13:58Z|00218|binding|INFO|Setting lport 3f80aeff-247c-4194-b243-35f8690fee57 down in Southbound
Oct 02 12:13:58 compute-0 ovn_controller[94336]: 2025-10-02T12:13:58Z|00219|binding|INFO|Removing iface tap3f80aeff-24 ovn-installed in OVS
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.170 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.172 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:58 compute-0 podman[229813]: 2025-10-02 12:13:58.180712489 +0000 UTC m=+0.086097526 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=iscsid, container_name=iscsid, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:13:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:58.180 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:9a:82:c5 10.100.0.7'], port_security=['fa:16:3e:9a:82:c5 10.100.0.7'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.7/28', 'neutron:device_id': '3277cbd6-2706-4647-b0df-b789c49f80ea', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'dcf78460093d411988a54040ea4c265a', 'neutron:revision_number': '4', 'neutron:security_group_ids': 'aacce687-8b76-4e90-b19c-0dd006394188', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=24ae9888-31f5-4083-b5ee-e7ed6a1eee13, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=3f80aeff-247c-4194-b243-35f8690fee57) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:13:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:58.182 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 3f80aeff-247c-4194-b243-35f8690fee57 in datapath 4f195445-fd43-4b92-89dd-a1b2fe9ea8c2 unbound from our chassis
Oct 02 12:13:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:58.183 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 4f195445-fd43-4b92-89dd-a1b2fe9ea8c2, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:13:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:58.185 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[436c5b8e-b5c4-4964-be72-ccf4c22f4482]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:58.186 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2 namespace which is not needed anymore
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.187 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:58 compute-0 systemd[1]: machine-qemu\x2d32\x2dinstance\x2d00000042.scope: Deactivated successfully.
Oct 02 12:13:58 compute-0 systemd[1]: machine-qemu\x2d32\x2dinstance\x2d00000042.scope: Consumed 15.024s CPU time.
Oct 02 12:13:58 compute-0 systemd-machined[152150]: Machine qemu-32-instance-00000042 terminated.
Oct 02 12:13:58 compute-0 neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2[229626]: [NOTICE]   (229630) : haproxy version is 2.8.14-c23fe91
Oct 02 12:13:58 compute-0 neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2[229626]: [NOTICE]   (229630) : path to executable is /usr/sbin/haproxy
Oct 02 12:13:58 compute-0 neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2[229626]: [WARNING]  (229630) : Exiting Master process...
Oct 02 12:13:58 compute-0 neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2[229626]: [ALERT]    (229630) : Current worker (229632) exited with code 143 (Terminated)
Oct 02 12:13:58 compute-0 neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2[229626]: [WARNING]  (229630) : All workers exited. Exiting... (0)
Oct 02 12:13:58 compute-0 systemd[1]: libpod-66f6ed517867365bda9971012fafcbe55842d30091e7163d9a4a7d896f47f026.scope: Deactivated successfully.
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.369 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:58 compute-0 podman[229880]: 2025-10-02 12:13:58.370193841 +0000 UTC m=+0.086880377 container died 66f6ed517867365bda9971012fafcbe55842d30091e7163d9a4a7d896f47f026 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001)
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.375 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.420 2 INFO nova.virt.libvirt.driver [-] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Instance destroyed successfully.
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.420 2 DEBUG nova.objects.instance [None req-ecfc1702-a5d6-4506-a7cc-8091f7a0d4d1 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lazy-loading 'resources' on Instance uuid 3277cbd6-2706-4647-b0df-b789c49f80ea obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.449 2 DEBUG nova.virt.libvirt.vif [None req-ecfc1702-a5d6-4506-a7cc-8091f7a0d4d1 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:13:18Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ImagesTestJSON-server-2081809098',display_name='tempest-ImagesTestJSON-server-2081809098',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-imagestestjson-server-2081809098',id=66,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:13:29Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='dcf78460093d411988a54040ea4c265a',ramdisk_id='',reservation_id='r-hd1dbp0p',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0
',owner_project_name='tempest-ImagesTestJSON-437970487',owner_user_name='tempest-ImagesTestJSON-437970487-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:13:39Z,user_data=None,user_id='dcdfc3c0f94e42cb931d27f2e3b5b12d',uuid=3277cbd6-2706-4647-b0df-b789c49f80ea,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "3f80aeff-247c-4194-b243-35f8690fee57", "address": "fa:16:3e:9a:82:c5", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap3f80aeff-24", "ovs_interfaceid": "3f80aeff-247c-4194-b243-35f8690fee57", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.450 2 DEBUG nova.network.os_vif_util [None req-ecfc1702-a5d6-4506-a7cc-8091f7a0d4d1 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Converting VIF {"id": "3f80aeff-247c-4194-b243-35f8690fee57", "address": "fa:16:3e:9a:82:c5", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap3f80aeff-24", "ovs_interfaceid": "3f80aeff-247c-4194-b243-35f8690fee57", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.451 2 DEBUG nova.network.os_vif_util [None req-ecfc1702-a5d6-4506-a7cc-8091f7a0d4d1 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:9a:82:c5,bridge_name='br-int',has_traffic_filtering=True,id=3f80aeff-247c-4194-b243-35f8690fee57,network=Network(4f195445-fd43-4b92-89dd-a1b2fe9ea8c2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap3f80aeff-24') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.451 2 DEBUG os_vif [None req-ecfc1702-a5d6-4506-a7cc-8091f7a0d4d1 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:9a:82:c5,bridge_name='br-int',has_traffic_filtering=True,id=3f80aeff-247c-4194-b243-35f8690fee57,network=Network(4f195445-fd43-4b92-89dd-a1b2fe9ea8c2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap3f80aeff-24') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.453 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.454 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap3f80aeff-24, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.455 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.458 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.461 2 INFO os_vif [None req-ecfc1702-a5d6-4506-a7cc-8091f7a0d4d1 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:9a:82:c5,bridge_name='br-int',has_traffic_filtering=True,id=3f80aeff-247c-4194-b243-35f8690fee57,network=Network(4f195445-fd43-4b92-89dd-a1b2fe9ea8c2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap3f80aeff-24')
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.462 2 INFO nova.virt.libvirt.driver [None req-ecfc1702-a5d6-4506-a7cc-8091f7a0d4d1 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Deleting instance files /var/lib/nova/instances/3277cbd6-2706-4647-b0df-b789c49f80ea_del
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.463 2 INFO nova.virt.libvirt.driver [None req-ecfc1702-a5d6-4506-a7cc-8091f7a0d4d1 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Deletion of /var/lib/nova/instances/3277cbd6-2706-4647-b0df-b789c49f80ea_del complete
Oct 02 12:13:58 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-66f6ed517867365bda9971012fafcbe55842d30091e7163d9a4a7d896f47f026-userdata-shm.mount: Deactivated successfully.
Oct 02 12:13:58 compute-0 systemd[1]: var-lib-containers-storage-overlay-e396b660c75596730a767aba29d884f0f50aafa547ae0c00a8b0974991fb639f-merged.mount: Deactivated successfully.
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.551 2 INFO nova.compute.manager [None req-ecfc1702-a5d6-4506-a7cc-8091f7a0d4d1 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Took 0.41 seconds to destroy the instance on the hypervisor.
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.551 2 DEBUG oslo.service.loopingcall [None req-ecfc1702-a5d6-4506-a7cc-8091f7a0d4d1 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.552 2 DEBUG nova.compute.manager [-] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.552 2 DEBUG nova.network.neutron [-] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:13:58 compute-0 podman[229880]: 2025-10-02 12:13:58.574367543 +0000 UTC m=+0.291054079 container cleanup 66f6ed517867365bda9971012fafcbe55842d30091e7163d9a4a7d896f47f026 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0)
Oct 02 12:13:58 compute-0 systemd[1]: libpod-conmon-66f6ed517867365bda9971012fafcbe55842d30091e7163d9a4a7d896f47f026.scope: Deactivated successfully.
Oct 02 12:13:58 compute-0 podman[229928]: 2025-10-02 12:13:58.75529505 +0000 UTC m=+0.153437570 container remove 66f6ed517867365bda9971012fafcbe55842d30091e7163d9a4a7d896f47f026 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2)
Oct 02 12:13:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:58.761 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b84dd9e0-dc05-498b-917e-5580d042cf89]: (4, ('Thu Oct  2 12:13:58 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2 (66f6ed517867365bda9971012fafcbe55842d30091e7163d9a4a7d896f47f026)\n66f6ed517867365bda9971012fafcbe55842d30091e7163d9a4a7d896f47f026\nThu Oct  2 12:13:58 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2 (66f6ed517867365bda9971012fafcbe55842d30091e7163d9a4a7d896f47f026)\n66f6ed517867365bda9971012fafcbe55842d30091e7163d9a4a7d896f47f026\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:58.763 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[64db4e20-3767-4cb9-89d9-a19a080db853]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:58.764 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap4f195445-f0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.766 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:58 compute-0 kernel: tap4f195445-f0: left promiscuous mode
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.768 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:58.771 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d3742b9d-4554-4af0-9334-1d9faeb05104]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:58 compute-0 nova_compute[192079]: 2025-10-02 12:13:58.780 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:13:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:58.798 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7938414c-fe52-4e6c-ae0d-42d4f3461356]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:58.800 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e7d9c700-ea55-4fe1-a55d-c6ae022bd4fa]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:58.817 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[960c3111-388f-46da-9618-7810254a5a8f]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 520613, 'reachable_time': 37305, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 229944, 'error': None, 'target': 'ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:58 compute-0 systemd[1]: run-netns-ovnmeta\x2d4f195445\x2dfd43\x2d4b92\x2d89dd\x2da1b2fe9ea8c2.mount: Deactivated successfully.
Oct 02 12:13:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:58.820 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:13:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:13:58.820 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[1214273e-2d05-4ecf-912c-fc0e9fe08cb6]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:13:59 compute-0 nova_compute[192079]: 2025-10-02 12:13:59.439 2 DEBUG nova.network.neutron [-] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:13:59 compute-0 nova_compute[192079]: 2025-10-02 12:13:59.464 2 INFO nova.compute.manager [-] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Took 0.91 seconds to deallocate network for instance.
Oct 02 12:13:59 compute-0 nova_compute[192079]: 2025-10-02 12:13:59.584 2 DEBUG oslo_concurrency.lockutils [None req-ecfc1702-a5d6-4506-a7cc-8091f7a0d4d1 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:13:59 compute-0 nova_compute[192079]: 2025-10-02 12:13:59.585 2 DEBUG oslo_concurrency.lockutils [None req-ecfc1702-a5d6-4506-a7cc-8091f7a0d4d1 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:13:59 compute-0 nova_compute[192079]: 2025-10-02 12:13:59.601 2 DEBUG nova.compute.manager [req-711f3ffc-c355-4529-8528-39deed15c888 req-329c5f98-f6b1-48e3-aa52-0f416fc7562c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Received event network-vif-deleted-3f80aeff-247c-4194-b243-35f8690fee57 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:13:59 compute-0 nova_compute[192079]: 2025-10-02 12:13:59.679 2 DEBUG nova.compute.provider_tree [None req-ecfc1702-a5d6-4506-a7cc-8091f7a0d4d1 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:13:59 compute-0 nova_compute[192079]: 2025-10-02 12:13:59.732 2 DEBUG nova.scheduler.client.report [None req-ecfc1702-a5d6-4506-a7cc-8091f7a0d4d1 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:13:59 compute-0 nova_compute[192079]: 2025-10-02 12:13:59.770 2 DEBUG oslo_concurrency.lockutils [None req-ecfc1702-a5d6-4506-a7cc-8091f7a0d4d1 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.186s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:13:59 compute-0 nova_compute[192079]: 2025-10-02 12:13:59.814 2 INFO nova.scheduler.client.report [None req-ecfc1702-a5d6-4506-a7cc-8091f7a0d4d1 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Deleted allocations for instance 3277cbd6-2706-4647-b0df-b789c49f80ea
Oct 02 12:13:59 compute-0 nova_compute[192079]: 2025-10-02 12:13:59.980 2 DEBUG oslo_concurrency.lockutils [None req-ecfc1702-a5d6-4506-a7cc-8091f7a0d4d1 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "3277cbd6-2706-4647-b0df-b789c49f80ea" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.860s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:00 compute-0 nova_compute[192079]: 2025-10-02 12:14:00.557 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:01 compute-0 nova_compute[192079]: 2025-10-02 12:14:01.904 2 DEBUG oslo_concurrency.lockutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "6db1ac7f-726d-4ad6-8992-86f0c23d4d79" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:01 compute-0 nova_compute[192079]: 2025-10-02 12:14:01.904 2 DEBUG oslo_concurrency.lockutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "6db1ac7f-726d-4ad6-8992-86f0c23d4d79" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:01 compute-0 nova_compute[192079]: 2025-10-02 12:14:01.918 2 DEBUG nova.compute.manager [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.014 2 DEBUG oslo_concurrency.lockutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.015 2 DEBUG oslo_concurrency.lockutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.022 2 DEBUG nova.virt.hardware [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.023 2 INFO nova.compute.claims [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.208 2 DEBUG nova.compute.provider_tree [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:14:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:02.214 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:02.215 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:02.215 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.250 2 DEBUG nova.scheduler.client.report [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.314 2 DEBUG oslo_concurrency.lockutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.299s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.315 2 DEBUG nova.compute.manager [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.456 2 DEBUG nova.compute.manager [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.458 2 DEBUG nova.network.neutron [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.488 2 INFO nova.virt.libvirt.driver [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.530 2 DEBUG nova.compute.manager [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.685 2 DEBUG nova.compute.manager [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.687 2 DEBUG nova.virt.libvirt.driver [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.688 2 INFO nova.virt.libvirt.driver [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Creating image(s)
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.688 2 DEBUG oslo_concurrency.lockutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "/var/lib/nova/instances/6db1ac7f-726d-4ad6-8992-86f0c23d4d79/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.689 2 DEBUG oslo_concurrency.lockutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "/var/lib/nova/instances/6db1ac7f-726d-4ad6-8992-86f0c23d4d79/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.689 2 DEBUG oslo_concurrency.lockutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "/var/lib/nova/instances/6db1ac7f-726d-4ad6-8992-86f0c23d4d79/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.701 2 DEBUG oslo_concurrency.processutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.726 2 DEBUG oslo_concurrency.lockutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Acquiring lock "0d926f1d-a2a1-4e3d-b0d0-072c744cd745" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.726 2 DEBUG oslo_concurrency.lockutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Lock "0d926f1d-a2a1-4e3d-b0d0-072c744cd745" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.765 2 DEBUG nova.compute.manager [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.771 2 DEBUG oslo_concurrency.processutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.070s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.772 2 DEBUG oslo_concurrency.lockutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.773 2 DEBUG oslo_concurrency.lockutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.790 2 DEBUG oslo_concurrency.processutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.850 2 DEBUG oslo_concurrency.processutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.060s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.851 2 DEBUG oslo_concurrency.processutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/6db1ac7f-726d-4ad6-8992-86f0c23d4d79/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.887 2 DEBUG oslo_concurrency.lockutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.888 2 DEBUG oslo_concurrency.lockutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.891 2 DEBUG oslo_concurrency.processutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/6db1ac7f-726d-4ad6-8992-86f0c23d4d79/disk 1073741824" returned: 0 in 0.040s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.892 2 DEBUG oslo_concurrency.lockutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.119s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.892 2 DEBUG oslo_concurrency.processutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.918 2 DEBUG nova.virt.hardware [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.919 2 INFO nova.compute.claims [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.961 2 DEBUG oslo_concurrency.processutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.069s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.961 2 DEBUG nova.virt.disk.api [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Checking if we can resize image /var/lib/nova/instances/6db1ac7f-726d-4ad6-8992-86f0c23d4d79/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:14:02 compute-0 nova_compute[192079]: 2025-10-02 12:14:02.962 2 DEBUG oslo_concurrency.processutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/6db1ac7f-726d-4ad6-8992-86f0c23d4d79/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.031 2 DEBUG oslo_concurrency.processutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/6db1ac7f-726d-4ad6-8992-86f0c23d4d79/disk --force-share --output=json" returned: 0 in 0.069s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.032 2 DEBUG nova.virt.disk.api [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Cannot resize image /var/lib/nova/instances/6db1ac7f-726d-4ad6-8992-86f0c23d4d79/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.032 2 DEBUG nova.objects.instance [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lazy-loading 'migration_context' on Instance uuid 6db1ac7f-726d-4ad6-8992-86f0c23d4d79 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.044 2 DEBUG nova.virt.libvirt.driver [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.044 2 DEBUG nova.virt.libvirt.driver [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Ensure instance console log exists: /var/lib/nova/instances/6db1ac7f-726d-4ad6-8992-86f0c23d4d79/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.044 2 DEBUG oslo_concurrency.lockutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.045 2 DEBUG oslo_concurrency.lockutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.045 2 DEBUG oslo_concurrency.lockutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.067 2 DEBUG nova.policy [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dcdfc3c0f94e42cb931d27f2e3b5b12d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dcf78460093d411988a54040ea4c265a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.125 2 DEBUG nova.compute.provider_tree [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.141 2 DEBUG nova.scheduler.client.report [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.163 2 DEBUG oslo_concurrency.lockutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.275s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.164 2 DEBUG nova.compute.manager [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.247 2 DEBUG nova.compute.manager [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.247 2 DEBUG nova.network.neutron [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.288 2 INFO nova.virt.libvirt.driver [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.329 2 DEBUG nova.compute.manager [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.457 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.514 2 DEBUG nova.policy [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fae0e5ee734643f6a2642e748e51d97f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '389df3c9188c4d8194eb17d703c957db', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.875 2 DEBUG nova.compute.manager [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.876 2 DEBUG nova.virt.libvirt.driver [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.877 2 INFO nova.virt.libvirt.driver [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Creating image(s)
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.877 2 DEBUG oslo_concurrency.lockutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Acquiring lock "/var/lib/nova/instances/0d926f1d-a2a1-4e3d-b0d0-072c744cd745/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.878 2 DEBUG oslo_concurrency.lockutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Lock "/var/lib/nova/instances/0d926f1d-a2a1-4e3d-b0d0-072c744cd745/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.879 2 DEBUG oslo_concurrency.lockutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Lock "/var/lib/nova/instances/0d926f1d-a2a1-4e3d-b0d0-072c744cd745/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.002s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.890 2 DEBUG oslo_concurrency.processutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.944 2 DEBUG oslo_concurrency.processutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.945 2 DEBUG oslo_concurrency.lockutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.946 2 DEBUG oslo_concurrency.lockutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:03 compute-0 nova_compute[192079]: 2025-10-02 12:14:03.957 2 DEBUG oslo_concurrency.processutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:14:04 compute-0 nova_compute[192079]: 2025-10-02 12:14:04.022 2 DEBUG oslo_concurrency.processutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.065s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:14:04 compute-0 nova_compute[192079]: 2025-10-02 12:14:04.023 2 DEBUG oslo_concurrency.processutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/0d926f1d-a2a1-4e3d-b0d0-072c744cd745/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:14:04 compute-0 nova_compute[192079]: 2025-10-02 12:14:04.081 2 DEBUG oslo_concurrency.processutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/0d926f1d-a2a1-4e3d-b0d0-072c744cd745/disk 1073741824" returned: 0 in 0.058s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:14:04 compute-0 nova_compute[192079]: 2025-10-02 12:14:04.082 2 DEBUG oslo_concurrency.lockutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.136s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:04 compute-0 nova_compute[192079]: 2025-10-02 12:14:04.082 2 DEBUG oslo_concurrency.processutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:14:04 compute-0 nova_compute[192079]: 2025-10-02 12:14:04.145 2 DEBUG oslo_concurrency.processutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.062s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:14:04 compute-0 nova_compute[192079]: 2025-10-02 12:14:04.146 2 DEBUG nova.virt.disk.api [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Checking if we can resize image /var/lib/nova/instances/0d926f1d-a2a1-4e3d-b0d0-072c744cd745/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:14:04 compute-0 nova_compute[192079]: 2025-10-02 12:14:04.146 2 DEBUG oslo_concurrency.processutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/0d926f1d-a2a1-4e3d-b0d0-072c744cd745/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:14:04 compute-0 nova_compute[192079]: 2025-10-02 12:14:04.209 2 DEBUG oslo_concurrency.processutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/0d926f1d-a2a1-4e3d-b0d0-072c744cd745/disk --force-share --output=json" returned: 0 in 0.062s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:14:04 compute-0 nova_compute[192079]: 2025-10-02 12:14:04.210 2 DEBUG nova.virt.disk.api [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Cannot resize image /var/lib/nova/instances/0d926f1d-a2a1-4e3d-b0d0-072c744cd745/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:14:04 compute-0 nova_compute[192079]: 2025-10-02 12:14:04.210 2 DEBUG nova.objects.instance [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Lazy-loading 'migration_context' on Instance uuid 0d926f1d-a2a1-4e3d-b0d0-072c744cd745 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:14:04 compute-0 nova_compute[192079]: 2025-10-02 12:14:04.226 2 DEBUG nova.virt.libvirt.driver [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:14:04 compute-0 nova_compute[192079]: 2025-10-02 12:14:04.227 2 DEBUG nova.virt.libvirt.driver [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Ensure instance console log exists: /var/lib/nova/instances/0d926f1d-a2a1-4e3d-b0d0-072c744cd745/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:14:04 compute-0 nova_compute[192079]: 2025-10-02 12:14:04.227 2 DEBUG oslo_concurrency.lockutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:04 compute-0 nova_compute[192079]: 2025-10-02 12:14:04.227 2 DEBUG oslo_concurrency.lockutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:04 compute-0 nova_compute[192079]: 2025-10-02 12:14:04.228 2 DEBUG oslo_concurrency.lockutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:04 compute-0 nova_compute[192079]: 2025-10-02 12:14:04.266 2 DEBUG nova.network.neutron [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Successfully created port: c1d6d6c7-23c7-45e9-b50d-f589c7908b63 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:14:04 compute-0 nova_compute[192079]: 2025-10-02 12:14:04.522 2 DEBUG nova.network.neutron [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Successfully created port: 8aed8ea7-afed-4dca-9a39-26b03675eec8 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:14:04 compute-0 nova_compute[192079]: 2025-10-02 12:14:04.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_incomplete_migrations run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:14:04 compute-0 nova_compute[192079]: 2025-10-02 12:14:04.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Cleaning up deleted instances with incomplete migration  _cleanup_incomplete_migrations /usr/lib/python3.9/site-packages/nova/compute/manager.py:11183
Oct 02 12:14:05 compute-0 nova_compute[192079]: 2025-10-02 12:14:05.384 2 DEBUG nova.network.neutron [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Successfully updated port: c1d6d6c7-23c7-45e9-b50d-f589c7908b63 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:14:05 compute-0 nova_compute[192079]: 2025-10-02 12:14:05.404 2 DEBUG oslo_concurrency.lockutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "refresh_cache-6db1ac7f-726d-4ad6-8992-86f0c23d4d79" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:14:05 compute-0 nova_compute[192079]: 2025-10-02 12:14:05.404 2 DEBUG oslo_concurrency.lockutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquired lock "refresh_cache-6db1ac7f-726d-4ad6-8992-86f0c23d4d79" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:14:05 compute-0 nova_compute[192079]: 2025-10-02 12:14:05.404 2 DEBUG nova.network.neutron [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:14:05 compute-0 nova_compute[192079]: 2025-10-02 12:14:05.561 2 DEBUG nova.compute.manager [req-e64cd353-6ad6-4750-93b2-77bdd7a277f8 req-8b333329-dcb8-4180-a6d4-74bcd3d1b865 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Received event network-changed-c1d6d6c7-23c7-45e9-b50d-f589c7908b63 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:14:05 compute-0 nova_compute[192079]: 2025-10-02 12:14:05.561 2 DEBUG nova.compute.manager [req-e64cd353-6ad6-4750-93b2-77bdd7a277f8 req-8b333329-dcb8-4180-a6d4-74bcd3d1b865 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Refreshing instance network info cache due to event network-changed-c1d6d6c7-23c7-45e9-b50d-f589c7908b63. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:14:05 compute-0 nova_compute[192079]: 2025-10-02 12:14:05.561 2 DEBUG oslo_concurrency.lockutils [req-e64cd353-6ad6-4750-93b2-77bdd7a277f8 req-8b333329-dcb8-4180-a6d4-74bcd3d1b865 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-6db1ac7f-726d-4ad6-8992-86f0c23d4d79" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:14:05 compute-0 nova_compute[192079]: 2025-10-02 12:14:05.561 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:05 compute-0 nova_compute[192079]: 2025-10-02 12:14:05.736 2 DEBUG nova.network.neutron [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:14:05 compute-0 nova_compute[192079]: 2025-10-02 12:14:05.893 2 DEBUG nova.network.neutron [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Successfully updated port: 8aed8ea7-afed-4dca-9a39-26b03675eec8 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:14:05 compute-0 nova_compute[192079]: 2025-10-02 12:14:05.912 2 DEBUG oslo_concurrency.lockutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Acquiring lock "refresh_cache-0d926f1d-a2a1-4e3d-b0d0-072c744cd745" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:14:05 compute-0 nova_compute[192079]: 2025-10-02 12:14:05.913 2 DEBUG oslo_concurrency.lockutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Acquired lock "refresh_cache-0d926f1d-a2a1-4e3d-b0d0-072c744cd745" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:14:05 compute-0 nova_compute[192079]: 2025-10-02 12:14:05.913 2 DEBUG nova.network.neutron [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:14:06 compute-0 nova_compute[192079]: 2025-10-02 12:14:06.033 2 DEBUG nova.compute.manager [req-a14f51c1-796a-458c-8d5f-05b2a9f08578 req-c9e2fd1e-80fb-4f01-9126-1f664dcadb73 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Received event network-changed-8aed8ea7-afed-4dca-9a39-26b03675eec8 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:14:06 compute-0 nova_compute[192079]: 2025-10-02 12:14:06.033 2 DEBUG nova.compute.manager [req-a14f51c1-796a-458c-8d5f-05b2a9f08578 req-c9e2fd1e-80fb-4f01-9126-1f664dcadb73 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Refreshing instance network info cache due to event network-changed-8aed8ea7-afed-4dca-9a39-26b03675eec8. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:14:06 compute-0 nova_compute[192079]: 2025-10-02 12:14:06.034 2 DEBUG oslo_concurrency.lockutils [req-a14f51c1-796a-458c-8d5f-05b2a9f08578 req-c9e2fd1e-80fb-4f01-9126-1f664dcadb73 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-0d926f1d-a2a1-4e3d-b0d0-072c744cd745" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:14:06 compute-0 nova_compute[192079]: 2025-10-02 12:14:06.156 2 DEBUG nova.network.neutron [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.102 2 DEBUG nova.network.neutron [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Updating instance_info_cache with network_info: [{"id": "c1d6d6c7-23c7-45e9-b50d-f589c7908b63", "address": "fa:16:3e:38:15:28", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc1d6d6c7-23", "ovs_interfaceid": "c1d6d6c7-23c7-45e9-b50d-f589c7908b63", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.138 2 DEBUG oslo_concurrency.lockutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Releasing lock "refresh_cache-6db1ac7f-726d-4ad6-8992-86f0c23d4d79" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.138 2 DEBUG nova.compute.manager [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Instance network_info: |[{"id": "c1d6d6c7-23c7-45e9-b50d-f589c7908b63", "address": "fa:16:3e:38:15:28", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc1d6d6c7-23", "ovs_interfaceid": "c1d6d6c7-23c7-45e9-b50d-f589c7908b63", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.139 2 DEBUG oslo_concurrency.lockutils [req-e64cd353-6ad6-4750-93b2-77bdd7a277f8 req-8b333329-dcb8-4180-a6d4-74bcd3d1b865 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-6db1ac7f-726d-4ad6-8992-86f0c23d4d79" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.139 2 DEBUG nova.network.neutron [req-e64cd353-6ad6-4750-93b2-77bdd7a277f8 req-8b333329-dcb8-4180-a6d4-74bcd3d1b865 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Refreshing network info cache for port c1d6d6c7-23c7-45e9-b50d-f589c7908b63 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.142 2 DEBUG nova.virt.libvirt.driver [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Start _get_guest_xml network_info=[{"id": "c1d6d6c7-23c7-45e9-b50d-f589c7908b63", "address": "fa:16:3e:38:15:28", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc1d6d6c7-23", "ovs_interfaceid": "c1d6d6c7-23c7-45e9-b50d-f589c7908b63", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:14:07 compute-0 podman[229976]: 2025-10-02 12:14:07.143170988 +0000 UTC m=+0.055338038 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, org.label-schema.vendor=CentOS, 
tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_managed=true)
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.146 2 WARNING nova.virt.libvirt.driver [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:14:07 compute-0 podman[229978]: 2025-10-02 12:14:07.151925327 +0000 UTC m=+0.057847697 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.152 2 DEBUG nova.virt.libvirt.host [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.153 2 DEBUG nova.virt.libvirt.host [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.159 2 DEBUG nova.virt.libvirt.host [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.160 2 DEBUG nova.virt.libvirt.host [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.161 2 DEBUG nova.virt.libvirt.driver [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.161 2 DEBUG nova.virt.hardware [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.162 2 DEBUG nova.virt.hardware [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.162 2 DEBUG nova.virt.hardware [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.162 2 DEBUG nova.virt.hardware [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.162 2 DEBUG nova.virt.hardware [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.163 2 DEBUG nova.virt.hardware [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.163 2 DEBUG nova.virt.hardware [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.163 2 DEBUG nova.virt.hardware [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.163 2 DEBUG nova.virt.hardware [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.163 2 DEBUG nova.virt.hardware [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.163 2 DEBUG nova.virt.hardware [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.167 2 DEBUG nova.virt.libvirt.vif [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:14:00Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ImagesTestJSON-server-225816649',display_name='tempest-ImagesTestJSON-server-225816649',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-imagestestjson-server-225816649',id=70,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='dcf78460093d411988a54040ea4c265a',ramdisk_id='',reservation_id='r-x30nkn60',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ImagesTestJSON-437970487',owner_user_name='tempest-ImagesTestJSON-437970487-project-member'},tags=TagList,task
_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:14:02Z,user_data=None,user_id='dcdfc3c0f94e42cb931d27f2e3b5b12d',uuid=6db1ac7f-726d-4ad6-8992-86f0c23d4d79,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "c1d6d6c7-23c7-45e9-b50d-f589c7908b63", "address": "fa:16:3e:38:15:28", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc1d6d6c7-23", "ovs_interfaceid": "c1d6d6c7-23c7-45e9-b50d-f589c7908b63", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.167 2 DEBUG nova.network.os_vif_util [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Converting VIF {"id": "c1d6d6c7-23c7-45e9-b50d-f589c7908b63", "address": "fa:16:3e:38:15:28", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc1d6d6c7-23", "ovs_interfaceid": "c1d6d6c7-23c7-45e9-b50d-f589c7908b63", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.168 2 DEBUG nova.network.os_vif_util [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:38:15:28,bridge_name='br-int',has_traffic_filtering=True,id=c1d6d6c7-23c7-45e9-b50d-f589c7908b63,network=Network(4f195445-fd43-4b92-89dd-a1b2fe9ea8c2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapc1d6d6c7-23') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.169 2 DEBUG nova.objects.instance [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lazy-loading 'pci_devices' on Instance uuid 6db1ac7f-726d-4ad6-8992-86f0c23d4d79 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:14:07 compute-0 podman[229977]: 2025-10-02 12:14:07.175954521 +0000 UTC m=+0.085504589 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, container_name=ovn_controller, org.label-schema.schema-version=1.0, config_id=ovn_controller, managed_by=edpm_ansible, org.label-schema.build-date=20251001, tcib_managed=true, io.buildah.version=1.41.3)
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.188 2 DEBUG nova.virt.libvirt.driver [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:14:07 compute-0 nova_compute[192079]:   <uuid>6db1ac7f-726d-4ad6-8992-86f0c23d4d79</uuid>
Oct 02 12:14:07 compute-0 nova_compute[192079]:   <name>instance-00000046</name>
Oct 02 12:14:07 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:14:07 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:14:07 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:14:07 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:       <nova:name>tempest-ImagesTestJSON-server-225816649</nova:name>
Oct 02 12:14:07 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:14:07</nova:creationTime>
Oct 02 12:14:07 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:14:07 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:14:07 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:14:07 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:14:07 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:14:07 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:14:07 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:14:07 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:14:07 compute-0 nova_compute[192079]:         <nova:user uuid="dcdfc3c0f94e42cb931d27f2e3b5b12d">tempest-ImagesTestJSON-437970487-project-member</nova:user>
Oct 02 12:14:07 compute-0 nova_compute[192079]:         <nova:project uuid="dcf78460093d411988a54040ea4c265a">tempest-ImagesTestJSON-437970487</nova:project>
Oct 02 12:14:07 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:14:07 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:14:07 compute-0 nova_compute[192079]:         <nova:port uuid="c1d6d6c7-23c7-45e9-b50d-f589c7908b63">
Oct 02 12:14:07 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.4" ipVersion="4"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:14:07 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:14:07 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:14:07 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <system>
Oct 02 12:14:07 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:14:07 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:14:07 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:14:07 compute-0 nova_compute[192079]:       <entry name="serial">6db1ac7f-726d-4ad6-8992-86f0c23d4d79</entry>
Oct 02 12:14:07 compute-0 nova_compute[192079]:       <entry name="uuid">6db1ac7f-726d-4ad6-8992-86f0c23d4d79</entry>
Oct 02 12:14:07 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     </system>
Oct 02 12:14:07 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:14:07 compute-0 nova_compute[192079]:   <os>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:   </os>
Oct 02 12:14:07 compute-0 nova_compute[192079]:   <features>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:   </features>
Oct 02 12:14:07 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:14:07 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:14:07 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:14:07 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/6db1ac7f-726d-4ad6-8992-86f0c23d4d79/disk"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:14:07 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/6db1ac7f-726d-4ad6-8992-86f0c23d4d79/disk.config"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:14:07 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:38:15:28"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:       <target dev="tapc1d6d6c7-23"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:14:07 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/6db1ac7f-726d-4ad6-8992-86f0c23d4d79/console.log" append="off"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <video>
Oct 02 12:14:07 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     </video>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:14:07 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:14:07 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:14:07 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:14:07 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:14:07 compute-0 nova_compute[192079]: </domain>
Oct 02 12:14:07 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.190 2 DEBUG nova.compute.manager [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Preparing to wait for external event network-vif-plugged-c1d6d6c7-23c7-45e9-b50d-f589c7908b63 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.190 2 DEBUG oslo_concurrency.lockutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "6db1ac7f-726d-4ad6-8992-86f0c23d4d79-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.190 2 DEBUG oslo_concurrency.lockutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "6db1ac7f-726d-4ad6-8992-86f0c23d4d79-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.191 2 DEBUG oslo_concurrency.lockutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "6db1ac7f-726d-4ad6-8992-86f0c23d4d79-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.191 2 DEBUG nova.virt.libvirt.vif [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:14:00Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ImagesTestJSON-server-225816649',display_name='tempest-ImagesTestJSON-server-225816649',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-imagestestjson-server-225816649',id=70,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='dcf78460093d411988a54040ea4c265a',ramdisk_id='',reservation_id='r-x30nkn60',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ImagesTestJSON-437970487',owner_user_name='tempest-ImagesTestJSON-437970487-project-member'},tags=Ta
gList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:14:02Z,user_data=None,user_id='dcdfc3c0f94e42cb931d27f2e3b5b12d',uuid=6db1ac7f-726d-4ad6-8992-86f0c23d4d79,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "c1d6d6c7-23c7-45e9-b50d-f589c7908b63", "address": "fa:16:3e:38:15:28", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc1d6d6c7-23", "ovs_interfaceid": "c1d6d6c7-23c7-45e9-b50d-f589c7908b63", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.192 2 DEBUG nova.network.os_vif_util [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Converting VIF {"id": "c1d6d6c7-23c7-45e9-b50d-f589c7908b63", "address": "fa:16:3e:38:15:28", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc1d6d6c7-23", "ovs_interfaceid": "c1d6d6c7-23c7-45e9-b50d-f589c7908b63", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.192 2 DEBUG nova.network.os_vif_util [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:38:15:28,bridge_name='br-int',has_traffic_filtering=True,id=c1d6d6c7-23c7-45e9-b50d-f589c7908b63,network=Network(4f195445-fd43-4b92-89dd-a1b2fe9ea8c2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapc1d6d6c7-23') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.192 2 DEBUG os_vif [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:38:15:28,bridge_name='br-int',has_traffic_filtering=True,id=c1d6d6c7-23c7-45e9-b50d-f589c7908b63,network=Network(4f195445-fd43-4b92-89dd-a1b2fe9ea8c2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapc1d6d6c7-23') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.193 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.193 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.194 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.197 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.197 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapc1d6d6c7-23, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.198 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapc1d6d6c7-23, col_values=(('external_ids', {'iface-id': 'c1d6d6c7-23c7-45e9-b50d-f589c7908b63', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:38:15:28', 'vm-uuid': '6db1ac7f-726d-4ad6-8992-86f0c23d4d79'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.199 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:07 compute-0 NetworkManager[51160]: <info>  [1759407247.2006] manager: (tapc1d6d6c7-23): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/111)
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.202 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.206 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.207 2 INFO os_vif [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:38:15:28,bridge_name='br-int',has_traffic_filtering=True,id=c1d6d6c7-23c7-45e9-b50d-f589c7908b63,network=Network(4f195445-fd43-4b92-89dd-a1b2fe9ea8c2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapc1d6d6c7-23')
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.290 2 DEBUG nova.virt.libvirt.driver [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.291 2 DEBUG nova.virt.libvirt.driver [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.291 2 DEBUG nova.virt.libvirt.driver [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] No VIF found with MAC fa:16:3e:38:15:28, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.291 2 INFO nova.virt.libvirt.driver [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Using config drive
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.750 2 INFO nova.virt.libvirt.driver [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Creating config drive at /var/lib/nova/instances/6db1ac7f-726d-4ad6-8992-86f0c23d4d79/disk.config
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.755 2 DEBUG oslo_concurrency.processutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/6db1ac7f-726d-4ad6-8992-86f0c23d4d79/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpgnqudkhg execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.881 2 DEBUG oslo_concurrency.processutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/6db1ac7f-726d-4ad6-8992-86f0c23d4d79/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpgnqudkhg" returned: 0 in 0.126s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:14:07 compute-0 NetworkManager[51160]: <info>  [1759407247.9461] manager: (tapc1d6d6c7-23): new Tun device (/org/freedesktop/NetworkManager/Devices/112)
Oct 02 12:14:07 compute-0 kernel: tapc1d6d6c7-23: entered promiscuous mode
Oct 02 12:14:07 compute-0 ovn_controller[94336]: 2025-10-02T12:14:07Z|00220|binding|INFO|Claiming lport c1d6d6c7-23c7-45e9-b50d-f589c7908b63 for this chassis.
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.951 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:07 compute-0 ovn_controller[94336]: 2025-10-02T12:14:07Z|00221|binding|INFO|c1d6d6c7-23c7-45e9-b50d-f589c7908b63: Claiming fa:16:3e:38:15:28 10.100.0.4
Oct 02 12:14:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:07.958 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:38:15:28 10.100.0.4'], port_security=['fa:16:3e:38:15:28 10.100.0.4'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.4/28', 'neutron:device_id': '6db1ac7f-726d-4ad6-8992-86f0c23d4d79', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'dcf78460093d411988a54040ea4c265a', 'neutron:revision_number': '2', 'neutron:security_group_ids': 'aacce687-8b76-4e90-b19c-0dd006394188', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=24ae9888-31f5-4083-b5ee-e7ed6a1eee13, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=c1d6d6c7-23c7-45e9-b50d-f589c7908b63) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:14:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:07.960 103294 INFO neutron.agent.ovn.metadata.agent [-] Port c1d6d6c7-23c7-45e9-b50d-f589c7908b63 in datapath 4f195445-fd43-4b92-89dd-a1b2fe9ea8c2 bound to our chassis
Oct 02 12:14:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:07.961 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 4f195445-fd43-4b92-89dd-a1b2fe9ea8c2
Oct 02 12:14:07 compute-0 ovn_controller[94336]: 2025-10-02T12:14:07Z|00222|binding|INFO|Setting lport c1d6d6c7-23c7-45e9-b50d-f589c7908b63 ovn-installed in OVS
Oct 02 12:14:07 compute-0 ovn_controller[94336]: 2025-10-02T12:14:07Z|00223|binding|INFO|Setting lport c1d6d6c7-23c7-45e9-b50d-f589c7908b63 up in Southbound
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.967 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.973 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:07.975 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[466d587f-6c94-421c-b0a6-69f1bb098cb8]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:07.976 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap4f195445-f1 in ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:14:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:07.978 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap4f195445-f0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:14:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:07.978 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b414110b-e5c7-4837-8b65-6524e2b8abab]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:07 compute-0 systemd-udevd[230062]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:14:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:07.979 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e3d4c718-cfca-48ea-8c74-f3e8b28f1491]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:07.992 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[8845ed01-34c8-4ba1-86d8-a86744c4157c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:07 compute-0 NetworkManager[51160]: <info>  [1759407247.9946] device (tapc1d6d6c7-23): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:14:07 compute-0 NetworkManager[51160]: <info>  [1759407247.9954] device (tapc1d6d6c7-23): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:14:07 compute-0 nova_compute[192079]: 2025-10-02 12:14:07.995 2 DEBUG nova.network.neutron [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Updating instance_info_cache with network_info: [{"id": "8aed8ea7-afed-4dca-9a39-26b03675eec8", "address": "fa:16:3e:87:36:d3", "network": {"id": "80aeccdc-b23c-43b5-ada7-eabdfc0b0b19", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-421813777-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "389df3c9188c4d8194eb17d703c957db", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap8aed8ea7-af", "ovs_interfaceid": "8aed8ea7-afed-4dca-9a39-26b03675eec8", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:14:07 compute-0 systemd-machined[152150]: New machine qemu-33-instance-00000046.
Oct 02 12:14:08 compute-0 systemd[1]: Started Virtual Machine qemu-33-instance-00000046.
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:08.016 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[70afe0bf-00e5-466c-9598-e6f3eb137b88]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.041 2 DEBUG oslo_concurrency.lockutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Releasing lock "refresh_cache-0d926f1d-a2a1-4e3d-b0d0-072c744cd745" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.041 2 DEBUG nova.compute.manager [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Instance network_info: |[{"id": "8aed8ea7-afed-4dca-9a39-26b03675eec8", "address": "fa:16:3e:87:36:d3", "network": {"id": "80aeccdc-b23c-43b5-ada7-eabdfc0b0b19", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-421813777-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "389df3c9188c4d8194eb17d703c957db", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap8aed8ea7-af", "ovs_interfaceid": "8aed8ea7-afed-4dca-9a39-26b03675eec8", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.041 2 DEBUG oslo_concurrency.lockutils [req-a14f51c1-796a-458c-8d5f-05b2a9f08578 req-c9e2fd1e-80fb-4f01-9126-1f664dcadb73 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-0d926f1d-a2a1-4e3d-b0d0-072c744cd745" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.042 2 DEBUG nova.network.neutron [req-a14f51c1-796a-458c-8d5f-05b2a9f08578 req-c9e2fd1e-80fb-4f01-9126-1f664dcadb73 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Refreshing network info cache for port 8aed8ea7-afed-4dca-9a39-26b03675eec8 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.044 2 DEBUG nova.virt.libvirt.driver [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Start _get_guest_xml network_info=[{"id": "8aed8ea7-afed-4dca-9a39-26b03675eec8", "address": "fa:16:3e:87:36:d3", "network": {"id": "80aeccdc-b23c-43b5-ada7-eabdfc0b0b19", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-421813777-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "389df3c9188c4d8194eb17d703c957db", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap8aed8ea7-af", "ovs_interfaceid": "8aed8ea7-afed-4dca-9a39-26b03675eec8", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:08.047 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[4e630bd3-7190-4bd7-baf3-49c60295a91f]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.049 2 WARNING nova.virt.libvirt.driver [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.053 2 DEBUG nova.virt.libvirt.host [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.054 2 DEBUG nova.virt.libvirt.host [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:14:08 compute-0 systemd-udevd[230068]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:08.054 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[aa4e56d6-48a1-497f-b73d-bbff1d691c0b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:08 compute-0 NetworkManager[51160]: <info>  [1759407248.0575] manager: (tap4f195445-f0): new Veth device (/org/freedesktop/NetworkManager/Devices/113)
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.064 2 DEBUG nova.virt.libvirt.host [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.065 2 DEBUG nova.virt.libvirt.host [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.067 2 DEBUG nova.virt.libvirt.driver [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.067 2 DEBUG nova.virt.hardware [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.068 2 DEBUG nova.virt.hardware [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.068 2 DEBUG nova.virt.hardware [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.068 2 DEBUG nova.virt.hardware [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.068 2 DEBUG nova.virt.hardware [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.069 2 DEBUG nova.virt.hardware [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.069 2 DEBUG nova.virt.hardware [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.069 2 DEBUG nova.virt.hardware [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.069 2 DEBUG nova.virt.hardware [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.069 2 DEBUG nova.virt.hardware [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.070 2 DEBUG nova.virt.hardware [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.075 2 DEBUG nova.virt.libvirt.vif [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:14:01Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ServerMetadataTestJSON-server-1682429524',display_name='tempest-ServerMetadataTestJSON-server-1682429524',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-servermetadatatestjson-server-1682429524',id=71,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='389df3c9188c4d8194eb17d703c957db',ramdisk_id='',reservation_id='r-qkkjqj0t',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServerMetadataTestJSON-1070274954',owner_user_name='tempest-ServerMetadataTestJSON-
1070274954-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:14:03Z,user_data=None,user_id='fae0e5ee734643f6a2642e748e51d97f',uuid=0d926f1d-a2a1-4e3d-b0d0-072c744cd745,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "8aed8ea7-afed-4dca-9a39-26b03675eec8", "address": "fa:16:3e:87:36:d3", "network": {"id": "80aeccdc-b23c-43b5-ada7-eabdfc0b0b19", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-421813777-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "389df3c9188c4d8194eb17d703c957db", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap8aed8ea7-af", "ovs_interfaceid": "8aed8ea7-afed-4dca-9a39-26b03675eec8", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.076 2 DEBUG nova.network.os_vif_util [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Converting VIF {"id": "8aed8ea7-afed-4dca-9a39-26b03675eec8", "address": "fa:16:3e:87:36:d3", "network": {"id": "80aeccdc-b23c-43b5-ada7-eabdfc0b0b19", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-421813777-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "389df3c9188c4d8194eb17d703c957db", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap8aed8ea7-af", "ovs_interfaceid": "8aed8ea7-afed-4dca-9a39-26b03675eec8", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.077 2 DEBUG nova.network.os_vif_util [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:87:36:d3,bridge_name='br-int',has_traffic_filtering=True,id=8aed8ea7-afed-4dca-9a39-26b03675eec8,network=Network(80aeccdc-b23c-43b5-ada7-eabdfc0b0b19),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap8aed8ea7-af') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.078 2 DEBUG nova.objects.instance [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Lazy-loading 'pci_devices' on Instance uuid 0d926f1d-a2a1-4e3d-b0d0-072c744cd745 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:08.090 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[e81c9be0-60f5-49e3-97c8-e243b28d4397]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:08.094 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[e99fab3e-ebc7-4ad8-8a72-8515ad63cc90]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:08 compute-0 NetworkManager[51160]: <info>  [1759407248.1172] device (tap4f195445-f0): carrier: link connected
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.124 2 DEBUG nova.virt.libvirt.driver [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:14:08 compute-0 nova_compute[192079]:   <uuid>0d926f1d-a2a1-4e3d-b0d0-072c744cd745</uuid>
Oct 02 12:14:08 compute-0 nova_compute[192079]:   <name>instance-00000047</name>
Oct 02 12:14:08 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:14:08 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:14:08 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:14:08 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:       <nova:name>tempest-ServerMetadataTestJSON-server-1682429524</nova:name>
Oct 02 12:14:08 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:14:08</nova:creationTime>
Oct 02 12:14:08 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:14:08 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:14:08 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:14:08 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:14:08 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:14:08 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:14:08 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:14:08 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:14:08 compute-0 nova_compute[192079]:         <nova:user uuid="fae0e5ee734643f6a2642e748e51d97f">tempest-ServerMetadataTestJSON-1070274954-project-member</nova:user>
Oct 02 12:14:08 compute-0 nova_compute[192079]:         <nova:project uuid="389df3c9188c4d8194eb17d703c957db">tempest-ServerMetadataTestJSON-1070274954</nova:project>
Oct 02 12:14:08 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:14:08 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:14:08 compute-0 nova_compute[192079]:         <nova:port uuid="8aed8ea7-afed-4dca-9a39-26b03675eec8">
Oct 02 12:14:08 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.11" ipVersion="4"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:14:08 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:14:08 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:14:08 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <system>
Oct 02 12:14:08 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:14:08 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:14:08 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:14:08 compute-0 nova_compute[192079]:       <entry name="serial">0d926f1d-a2a1-4e3d-b0d0-072c744cd745</entry>
Oct 02 12:14:08 compute-0 nova_compute[192079]:       <entry name="uuid">0d926f1d-a2a1-4e3d-b0d0-072c744cd745</entry>
Oct 02 12:14:08 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     </system>
Oct 02 12:14:08 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:14:08 compute-0 nova_compute[192079]:   <os>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:   </os>
Oct 02 12:14:08 compute-0 nova_compute[192079]:   <features>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:   </features>
Oct 02 12:14:08 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:14:08 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:14:08 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:14:08 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/0d926f1d-a2a1-4e3d-b0d0-072c744cd745/disk"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:14:08 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/0d926f1d-a2a1-4e3d-b0d0-072c744cd745/disk.config"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:14:08 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:87:36:d3"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:       <target dev="tap8aed8ea7-af"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:14:08 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/0d926f1d-a2a1-4e3d-b0d0-072c744cd745/console.log" append="off"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <video>
Oct 02 12:14:08 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     </video>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:14:08 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:14:08 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:14:08 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:14:08 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:14:08 compute-0 nova_compute[192079]: </domain>
Oct 02 12:14:08 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.126 2 DEBUG nova.compute.manager [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Preparing to wait for external event network-vif-plugged-8aed8ea7-afed-4dca-9a39-26b03675eec8 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.127 2 DEBUG oslo_concurrency.lockutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Acquiring lock "0d926f1d-a2a1-4e3d-b0d0-072c744cd745-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.127 2 DEBUG oslo_concurrency.lockutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Lock "0d926f1d-a2a1-4e3d-b0d0-072c744cd745-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.127 2 DEBUG oslo_concurrency.lockutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Lock "0d926f1d-a2a1-4e3d-b0d0-072c744cd745-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.128 2 DEBUG nova.virt.libvirt.vif [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:14:01Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ServerMetadataTestJSON-server-1682429524',display_name='tempest-ServerMetadataTestJSON-server-1682429524',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-servermetadatatestjson-server-1682429524',id=71,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='389df3c9188c4d8194eb17d703c957db',ramdisk_id='',reservation_id='r-qkkjqj0t',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServerMetadataTestJSON-1070274954',owner_user_name='tempest-ServerMetadat
aTestJSON-1070274954-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:14:03Z,user_data=None,user_id='fae0e5ee734643f6a2642e748e51d97f',uuid=0d926f1d-a2a1-4e3d-b0d0-072c744cd745,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "8aed8ea7-afed-4dca-9a39-26b03675eec8", "address": "fa:16:3e:87:36:d3", "network": {"id": "80aeccdc-b23c-43b5-ada7-eabdfc0b0b19", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-421813777-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "389df3c9188c4d8194eb17d703c957db", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap8aed8ea7-af", "ovs_interfaceid": "8aed8ea7-afed-4dca-9a39-26b03675eec8", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.129 2 DEBUG nova.network.os_vif_util [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Converting VIF {"id": "8aed8ea7-afed-4dca-9a39-26b03675eec8", "address": "fa:16:3e:87:36:d3", "network": {"id": "80aeccdc-b23c-43b5-ada7-eabdfc0b0b19", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-421813777-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "389df3c9188c4d8194eb17d703c957db", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap8aed8ea7-af", "ovs_interfaceid": "8aed8ea7-afed-4dca-9a39-26b03675eec8", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.129 2 DEBUG nova.network.os_vif_util [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:87:36:d3,bridge_name='br-int',has_traffic_filtering=True,id=8aed8ea7-afed-4dca-9a39-26b03675eec8,network=Network(80aeccdc-b23c-43b5-ada7-eabdfc0b0b19),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap8aed8ea7-af') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.130 2 DEBUG os_vif [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:87:36:d3,bridge_name='br-int',has_traffic_filtering=True,id=8aed8ea7-afed-4dca-9a39-26b03675eec8,network=Network(80aeccdc-b23c-43b5-ada7-eabdfc0b0b19),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap8aed8ea7-af') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.131 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.131 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.132 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:08.133 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[4d3a5436-c4d3-481f-a8dd-b20fe6d4f3f3]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.135 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.135 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap8aed8ea7-af, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.136 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap8aed8ea7-af, col_values=(('external_ids', {'iface-id': '8aed8ea7-afed-4dca-9a39-26b03675eec8', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:87:36:d3', 'vm-uuid': '0d926f1d-a2a1-4e3d-b0d0-072c744cd745'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:14:08 compute-0 NetworkManager[51160]: <info>  [1759407248.1393] manager: (tap8aed8ea7-af): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/114)
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.141 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.146 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.147 2 INFO os_vif [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:87:36:d3,bridge_name='br-int',has_traffic_filtering=True,id=8aed8ea7-afed-4dca-9a39-26b03675eec8,network=Network(80aeccdc-b23c-43b5-ada7-eabdfc0b0b19),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap8aed8ea7-af')
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:08.155 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6ecd6ae5-99ee-4b03-a8dd-15e0c530531e]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap4f195445-f1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:65:93:03'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 68], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 524574, 'reachable_time': 22867, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 230098, 'error': None, 'target': 'ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:08.176 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[000f59bc-f557-48cc-9290-b147895cfd2e]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe65:9303'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 524574, 'tstamp': 524574}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 230099, 'error': None, 'target': 'ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:08.191 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[88e27c8f-320e-4861-ab66-f92c95741f87]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap4f195445-f1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:65:93:03'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 68], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 524574, 'reachable_time': 22867, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 230100, 'error': None, 'target': 'ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:08.221 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b1c82ebf-8428-42af-a616-a43df962eafa]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.223 2 DEBUG nova.virt.libvirt.driver [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.224 2 DEBUG nova.virt.libvirt.driver [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.224 2 DEBUG nova.virt.libvirt.driver [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] No VIF found with MAC fa:16:3e:87:36:d3, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.225 2 INFO nova.virt.libvirt.driver [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Using config drive
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:08.278 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a592c193-bd16-440f-97f8-240306dd5812]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:08.280 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap4f195445-f0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:08.280 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:08.280 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap4f195445-f0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:14:08 compute-0 NetworkManager[51160]: <info>  [1759407248.2831] manager: (tap4f195445-f0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/115)
Oct 02 12:14:08 compute-0 kernel: tap4f195445-f0: entered promiscuous mode
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.284 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:08.289 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap4f195445-f0, col_values=(('external_ids', {'iface-id': 'd65a1bd0-87e2-4bbf-9945-dacace78444f'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:14:08 compute-0 ovn_controller[94336]: 2025-10-02T12:14:08Z|00224|binding|INFO|Releasing lport d65a1bd0-87e2-4bbf-9945-dacace78444f from this chassis (sb_readonly=0)
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:08.316 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/4f195445-fd43-4b92-89dd-a1b2fe9ea8c2.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/4f195445-fd43-4b92-89dd-a1b2fe9ea8c2.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.316 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:08.317 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e70aa4b6-179e-4197-af09-70f7aa21d6d1]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:08.318 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/4f195445-fd43-4b92-89dd-a1b2fe9ea8c2.pid.haproxy
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 4f195445-fd43-4b92-89dd-a1b2fe9ea8c2
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:14:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:08.319 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'env', 'PROCESS_TAG=haproxy-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/4f195445-fd43-4b92-89dd-a1b2fe9ea8c2.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.633 2 DEBUG nova.compute.manager [req-68fce8d0-013f-4ecb-b2a7-08c56032a3df req-19d9ff81-0ed5-447c-b6ff-cd37924a8fe4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Received event network-vif-plugged-c1d6d6c7-23c7-45e9-b50d-f589c7908b63 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.636 2 DEBUG oslo_concurrency.lockutils [req-68fce8d0-013f-4ecb-b2a7-08c56032a3df req-19d9ff81-0ed5-447c-b6ff-cd37924a8fe4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "6db1ac7f-726d-4ad6-8992-86f0c23d4d79-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.636 2 DEBUG oslo_concurrency.lockutils [req-68fce8d0-013f-4ecb-b2a7-08c56032a3df req-19d9ff81-0ed5-447c-b6ff-cd37924a8fe4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6db1ac7f-726d-4ad6-8992-86f0c23d4d79-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.637 2 DEBUG oslo_concurrency.lockutils [req-68fce8d0-013f-4ecb-b2a7-08c56032a3df req-19d9ff81-0ed5-447c-b6ff-cd37924a8fe4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6db1ac7f-726d-4ad6-8992-86f0c23d4d79-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.637 2 DEBUG nova.compute.manager [req-68fce8d0-013f-4ecb-b2a7-08c56032a3df req-19d9ff81-0ed5-447c-b6ff-cd37924a8fe4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Processing event network-vif-plugged-c1d6d6c7-23c7-45e9-b50d-f589c7908b63 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:14:08 compute-0 podman[230144]: 2025-10-02 12:14:08.658933876 +0000 UTC m=+0.025398913 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:14:08 compute-0 podman[230144]: 2025-10-02 12:14:08.865534353 +0000 UTC m=+0.231999380 container create e35fcdd54cf4a0ce2499ba5531d0b2fd8f95b306b337f99523f5084f46746a54 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.902 2 DEBUG nova.network.neutron [req-e64cd353-6ad6-4750-93b2-77bdd7a277f8 req-8b333329-dcb8-4180-a6d4-74bcd3d1b865 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Updated VIF entry in instance network info cache for port c1d6d6c7-23c7-45e9-b50d-f589c7908b63. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.903 2 DEBUG nova.network.neutron [req-e64cd353-6ad6-4750-93b2-77bdd7a277f8 req-8b333329-dcb8-4180-a6d4-74bcd3d1b865 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Updating instance_info_cache with network_info: [{"id": "c1d6d6c7-23c7-45e9-b50d-f589c7908b63", "address": "fa:16:3e:38:15:28", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc1d6d6c7-23", "ovs_interfaceid": "c1d6d6c7-23c7-45e9-b50d-f589c7908b63", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:14:08 compute-0 systemd[1]: Started libpod-conmon-e35fcdd54cf4a0ce2499ba5531d0b2fd8f95b306b337f99523f5084f46746a54.scope.
Oct 02 12:14:08 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.921 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407248.9207425, 6db1ac7f-726d-4ad6-8992-86f0c23d4d79 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.921 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] VM Started (Lifecycle Event)
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.923 2 DEBUG nova.compute.manager [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.924 2 DEBUG oslo_concurrency.lockutils [req-e64cd353-6ad6-4750-93b2-77bdd7a277f8 req-8b333329-dcb8-4180-a6d4-74bcd3d1b865 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-6db1ac7f-726d-4ad6-8992-86f0c23d4d79" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.926 2 DEBUG nova.virt.libvirt.driver [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:14:08 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/61fe78b7eb6321d27215f73b146be54eb1c1c0231c6f707859a38972a8a97058/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.929 2 INFO nova.virt.libvirt.driver [-] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Instance spawned successfully.
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.929 2 DEBUG nova.virt.libvirt.driver [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.945 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.950 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.952 2 DEBUG nova.virt.libvirt.driver [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.953 2 DEBUG nova.virt.libvirt.driver [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.953 2 DEBUG nova.virt.libvirt.driver [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.953 2 DEBUG nova.virt.libvirt.driver [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.954 2 DEBUG nova.virt.libvirt.driver [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.954 2 DEBUG nova.virt.libvirt.driver [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.977 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.977 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407248.9209518, 6db1ac7f-726d-4ad6-8992-86f0c23d4d79 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.978 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] VM Paused (Lifecycle Event)
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.981 2 INFO nova.virt.libvirt.driver [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Creating config drive at /var/lib/nova/instances/0d926f1d-a2a1-4e3d-b0d0-072c744cd745/disk.config
Oct 02 12:14:08 compute-0 podman[230144]: 2025-10-02 12:14:08.981635246 +0000 UTC m=+0.348100303 container init e35fcdd54cf4a0ce2499ba5531d0b2fd8f95b306b337f99523f5084f46746a54 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001)
Oct 02 12:14:08 compute-0 nova_compute[192079]: 2025-10-02 12:14:08.986 2 DEBUG oslo_concurrency.processutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/0d926f1d-a2a1-4e3d-b0d0-072c744cd745/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpy7eepp9i execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:14:08 compute-0 podman[230144]: 2025-10-02 12:14:08.988283167 +0000 UTC m=+0.354748194 container start e35fcdd54cf4a0ce2499ba5531d0b2fd8f95b306b337f99523f5084f46746a54 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0)
Oct 02 12:14:09 compute-0 neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2[230159]: [NOTICE]   (230163) : New worker (230166) forked
Oct 02 12:14:09 compute-0 neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2[230159]: [NOTICE]   (230163) : Loading success.
Oct 02 12:14:09 compute-0 nova_compute[192079]: 2025-10-02 12:14:09.032 2 INFO nova.compute.manager [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Took 6.35 seconds to spawn the instance on the hypervisor.
Oct 02 12:14:09 compute-0 nova_compute[192079]: 2025-10-02 12:14:09.033 2 DEBUG nova.compute.manager [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:14:09 compute-0 nova_compute[192079]: 2025-10-02 12:14:09.034 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:14:09 compute-0 nova_compute[192079]: 2025-10-02 12:14:09.043 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407248.925631, 6db1ac7f-726d-4ad6-8992-86f0c23d4d79 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:14:09 compute-0 nova_compute[192079]: 2025-10-02 12:14:09.044 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] VM Resumed (Lifecycle Event)
Oct 02 12:14:09 compute-0 nova_compute[192079]: 2025-10-02 12:14:09.083 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:14:09 compute-0 nova_compute[192079]: 2025-10-02 12:14:09.086 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:14:09 compute-0 nova_compute[192079]: 2025-10-02 12:14:09.123 2 DEBUG oslo_concurrency.processutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/0d926f1d-a2a1-4e3d-b0d0-072c744cd745/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpy7eepp9i" returned: 0 in 0.137s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:14:09 compute-0 nova_compute[192079]: 2025-10-02 12:14:09.140 2 INFO nova.compute.manager [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Took 7.15 seconds to build instance.
Oct 02 12:14:09 compute-0 nova_compute[192079]: 2025-10-02 12:14:09.162 2 DEBUG oslo_concurrency.lockutils [None req-bd99cccb-869b-430e-941d-2f79aab7f4ff dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "6db1ac7f-726d-4ad6-8992-86f0c23d4d79" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 7.257s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:09 compute-0 NetworkManager[51160]: <info>  [1759407249.1967] manager: (tap8aed8ea7-af): new Tun device (/org/freedesktop/NetworkManager/Devices/116)
Oct 02 12:14:09 compute-0 systemd-udevd[230091]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:14:09 compute-0 kernel: tap8aed8ea7-af: entered promiscuous mode
Oct 02 12:14:09 compute-0 ovn_controller[94336]: 2025-10-02T12:14:09Z|00225|binding|INFO|Claiming lport 8aed8ea7-afed-4dca-9a39-26b03675eec8 for this chassis.
Oct 02 12:14:09 compute-0 ovn_controller[94336]: 2025-10-02T12:14:09Z|00226|binding|INFO|8aed8ea7-afed-4dca-9a39-26b03675eec8: Claiming fa:16:3e:87:36:d3 10.100.0.11
Oct 02 12:14:09 compute-0 nova_compute[192079]: 2025-10-02 12:14:09.206 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:09 compute-0 NetworkManager[51160]: <info>  [1759407249.2114] device (tap8aed8ea7-af): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:14:09 compute-0 NetworkManager[51160]: <info>  [1759407249.2123] device (tap8aed8ea7-af): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:09.222 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:87:36:d3 10.100.0.11'], port_security=['fa:16:3e:87:36:d3 10.100.0.11'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.11/28', 'neutron:device_id': '0d926f1d-a2a1-4e3d-b0d0-072c744cd745', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-80aeccdc-b23c-43b5-ada7-eabdfc0b0b19', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '389df3c9188c4d8194eb17d703c957db', 'neutron:revision_number': '2', 'neutron:security_group_ids': 'e2269b20-1ca6-4ad2-b43d-9aec139eedb0', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=3cbada61-0eee-4c57-92ca-456b863dedea, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=8aed8ea7-afed-4dca-9a39-26b03675eec8) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:09.224 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 8aed8ea7-afed-4dca-9a39-26b03675eec8 in datapath 80aeccdc-b23c-43b5-ada7-eabdfc0b0b19 bound to our chassis
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:09.226 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 80aeccdc-b23c-43b5-ada7-eabdfc0b0b19
Oct 02 12:14:09 compute-0 systemd-machined[152150]: New machine qemu-34-instance-00000047.
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:09.239 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[aed63fcd-4805-4978-8ffd-de5a30774d5b]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:09.241 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap80aeccdc-b1 in ovnmeta-80aeccdc-b23c-43b5-ada7-eabdfc0b0b19 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:09.250 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap80aeccdc-b0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:09.250 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2e314d04-282b-46df-9f9b-3213aad5ad60]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:09.251 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[74d821df-4cbf-4e27-9761-4389d61c355a]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:09.263 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[b7833e1f-f32e-4c4d-82f3-10417599c0ad]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:09 compute-0 nova_compute[192079]: 2025-10-02 12:14:09.270 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:09 compute-0 systemd[1]: Started Virtual Machine qemu-34-instance-00000047.
Oct 02 12:14:09 compute-0 ovn_controller[94336]: 2025-10-02T12:14:09Z|00227|binding|INFO|Setting lport 8aed8ea7-afed-4dca-9a39-26b03675eec8 ovn-installed in OVS
Oct 02 12:14:09 compute-0 ovn_controller[94336]: 2025-10-02T12:14:09Z|00228|binding|INFO|Setting lport 8aed8ea7-afed-4dca-9a39-26b03675eec8 up in Southbound
Oct 02 12:14:09 compute-0 nova_compute[192079]: 2025-10-02 12:14:09.274 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:09.277 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[475e4d8c-f06d-456c-aa51-2673e81b568e]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:09.306 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[47380e6d-3c9b-4f6b-99f5-d181410f5002]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:09.312 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[bf8d107b-61d3-4939-bb8f-cd5c07f8d034]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:09 compute-0 NetworkManager[51160]: <info>  [1759407249.3139] manager: (tap80aeccdc-b0): new Veth device (/org/freedesktop/NetworkManager/Devices/117)
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:09.354 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[18d347e8-f00c-4734-bbf5-d0839562b1b6]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:09.361 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[5a06ecfa-a581-4bdd-9dd9-ca08aac449f8]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:09 compute-0 NetworkManager[51160]: <info>  [1759407249.3843] device (tap80aeccdc-b0): carrier: link connected
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:09.390 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[2ffa2e7f-754b-4122-addd-e881e9c442a1]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:09.413 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b05b3e6d-4ac4-4f29-9a5d-33959f92b0bd]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap80aeccdc-b1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:ba:ee:24'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 70], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 524701, 'reachable_time': 26124, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 230209, 'error': None, 'target': 'ovnmeta-80aeccdc-b23c-43b5-ada7-eabdfc0b0b19', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:09.436 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[64755603-ff50-451e-b4ff-de61ac19432e]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:feba:ee24'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 524701, 'tstamp': 524701}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 230210, 'error': None, 'target': 'ovnmeta-80aeccdc-b23c-43b5-ada7-eabdfc0b0b19', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:09.457 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[702d62cc-00bf-4701-8029-5d41959f186a]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap80aeccdc-b1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:ba:ee:24'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 70], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 524701, 'reachable_time': 26124, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 230211, 'error': None, 'target': 'ovnmeta-80aeccdc-b23c-43b5-ada7-eabdfc0b0b19', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:09.491 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9249bf06-7850-4b69-90d1-e79360e24c43]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:09.550 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4d119bf4-f315-4bbc-b993-78116e745d20]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:09.552 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap80aeccdc-b0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:09.552 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:09.552 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap80aeccdc-b0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:14:09 compute-0 NetworkManager[51160]: <info>  [1759407249.5554] manager: (tap80aeccdc-b0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/118)
Oct 02 12:14:09 compute-0 kernel: tap80aeccdc-b0: entered promiscuous mode
Oct 02 12:14:09 compute-0 nova_compute[192079]: 2025-10-02 12:14:09.554 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:09.558 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap80aeccdc-b0, col_values=(('external_ids', {'iface-id': '34a70587-8c1c-41f7-aa30-55322f88bf50'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:14:09 compute-0 nova_compute[192079]: 2025-10-02 12:14:09.559 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:09 compute-0 ovn_controller[94336]: 2025-10-02T12:14:09Z|00229|binding|INFO|Releasing lport 34a70587-8c1c-41f7-aa30-55322f88bf50 from this chassis (sb_readonly=0)
Oct 02 12:14:09 compute-0 nova_compute[192079]: 2025-10-02 12:14:09.572 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:09.574 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/80aeccdc-b23c-43b5-ada7-eabdfc0b0b19.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/80aeccdc-b23c-43b5-ada7-eabdfc0b0b19.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:09.575 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9884b832-81c9-4f82-8ddd-6eef4f86fc9c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:09.575 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-80aeccdc-b23c-43b5-ada7-eabdfc0b0b19
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/80aeccdc-b23c-43b5-ada7-eabdfc0b0b19.pid.haproxy
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 80aeccdc-b23c-43b5-ada7-eabdfc0b0b19
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:14:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:09.576 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-80aeccdc-b23c-43b5-ada7-eabdfc0b0b19', 'env', 'PROCESS_TAG=haproxy-80aeccdc-b23c-43b5-ada7-eabdfc0b0b19', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/80aeccdc-b23c-43b5-ada7-eabdfc0b0b19.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:14:10 compute-0 podman[230250]: 2025-10-02 12:14:09.969359381 +0000 UTC m=+0.025980089 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.076 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407250.0764158, 0d926f1d-a2a1-4e3d-b0d0-072c744cd745 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.077 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] VM Started (Lifecycle Event)
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.152 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.157 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407250.0765948, 0d926f1d-a2a1-4e3d-b0d0-072c744cd745 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.157 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] VM Paused (Lifecycle Event)
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.202 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.205 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:14:10 compute-0 podman[230250]: 2025-10-02 12:14:10.224263124 +0000 UTC m=+0.280883812 container create 01b6a0d24d946def6ac4e01a0c7dd5032048d1c2915d9dc76516b892a74a446a (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-80aeccdc-b23c-43b5-ada7-eabdfc0b0b19, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001)
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.229 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:14:10 compute-0 systemd[1]: Started libpod-conmon-01b6a0d24d946def6ac4e01a0c7dd5032048d1c2915d9dc76516b892a74a446a.scope.
Oct 02 12:14:10 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:14:10 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/0b99a64160e3ce2cb000ee7fa28d4b41187d837554662ca9123305d922ec4d97/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.376 2 DEBUG nova.network.neutron [req-a14f51c1-796a-458c-8d5f-05b2a9f08578 req-c9e2fd1e-80fb-4f01-9126-1f664dcadb73 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Updated VIF entry in instance network info cache for port 8aed8ea7-afed-4dca-9a39-26b03675eec8. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.378 2 DEBUG nova.network.neutron [req-a14f51c1-796a-458c-8d5f-05b2a9f08578 req-c9e2fd1e-80fb-4f01-9126-1f664dcadb73 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Updating instance_info_cache with network_info: [{"id": "8aed8ea7-afed-4dca-9a39-26b03675eec8", "address": "fa:16:3e:87:36:d3", "network": {"id": "80aeccdc-b23c-43b5-ada7-eabdfc0b0b19", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-421813777-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "389df3c9188c4d8194eb17d703c957db", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap8aed8ea7-af", "ovs_interfaceid": "8aed8ea7-afed-4dca-9a39-26b03675eec8", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:14:10 compute-0 podman[230250]: 2025-10-02 12:14:10.390980756 +0000 UTC m=+0.447601464 container init 01b6a0d24d946def6ac4e01a0c7dd5032048d1c2915d9dc76516b892a74a446a (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-80aeccdc-b23c-43b5-ada7-eabdfc0b0b19, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001)
Oct 02 12:14:10 compute-0 podman[230250]: 2025-10-02 12:14:10.39701794 +0000 UTC m=+0.453638658 container start 01b6a0d24d946def6ac4e01a0c7dd5032048d1c2915d9dc76516b892a74a446a (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-80aeccdc-b23c-43b5-ada7-eabdfc0b0b19, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.400 2 DEBUG oslo_concurrency.lockutils [req-a14f51c1-796a-458c-8d5f-05b2a9f08578 req-c9e2fd1e-80fb-4f01-9126-1f664dcadb73 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-0d926f1d-a2a1-4e3d-b0d0-072c744cd745" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:14:10 compute-0 neutron-haproxy-ovnmeta-80aeccdc-b23c-43b5-ada7-eabdfc0b0b19[230264]: [NOTICE]   (230268) : New worker (230270) forked
Oct 02 12:14:10 compute-0 neutron-haproxy-ovnmeta-80aeccdc-b23c-43b5-ada7-eabdfc0b0b19[230264]: [NOTICE]   (230268) : Loading success.
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.622 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.691 2 DEBUG nova.compute.manager [req-463b716f-310f-486a-8eb0-298f520c246b req-d92dd083-bdd6-4d4a-98ef-bc53e08d3812 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Received event network-vif-plugged-8aed8ea7-afed-4dca-9a39-26b03675eec8 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.691 2 DEBUG oslo_concurrency.lockutils [req-463b716f-310f-486a-8eb0-298f520c246b req-d92dd083-bdd6-4d4a-98ef-bc53e08d3812 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "0d926f1d-a2a1-4e3d-b0d0-072c744cd745-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.691 2 DEBUG oslo_concurrency.lockutils [req-463b716f-310f-486a-8eb0-298f520c246b req-d92dd083-bdd6-4d4a-98ef-bc53e08d3812 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0d926f1d-a2a1-4e3d-b0d0-072c744cd745-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.692 2 DEBUG oslo_concurrency.lockutils [req-463b716f-310f-486a-8eb0-298f520c246b req-d92dd083-bdd6-4d4a-98ef-bc53e08d3812 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0d926f1d-a2a1-4e3d-b0d0-072c744cd745-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.692 2 DEBUG nova.compute.manager [req-463b716f-310f-486a-8eb0-298f520c246b req-d92dd083-bdd6-4d4a-98ef-bc53e08d3812 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Processing event network-vif-plugged-8aed8ea7-afed-4dca-9a39-26b03675eec8 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.692 2 DEBUG nova.compute.manager [req-463b716f-310f-486a-8eb0-298f520c246b req-d92dd083-bdd6-4d4a-98ef-bc53e08d3812 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Received event network-vif-plugged-8aed8ea7-afed-4dca-9a39-26b03675eec8 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.692 2 DEBUG oslo_concurrency.lockutils [req-463b716f-310f-486a-8eb0-298f520c246b req-d92dd083-bdd6-4d4a-98ef-bc53e08d3812 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "0d926f1d-a2a1-4e3d-b0d0-072c744cd745-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.692 2 DEBUG oslo_concurrency.lockutils [req-463b716f-310f-486a-8eb0-298f520c246b req-d92dd083-bdd6-4d4a-98ef-bc53e08d3812 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0d926f1d-a2a1-4e3d-b0d0-072c744cd745-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.693 2 DEBUG oslo_concurrency.lockutils [req-463b716f-310f-486a-8eb0-298f520c246b req-d92dd083-bdd6-4d4a-98ef-bc53e08d3812 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0d926f1d-a2a1-4e3d-b0d0-072c744cd745-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.693 2 DEBUG nova.compute.manager [req-463b716f-310f-486a-8eb0-298f520c246b req-d92dd083-bdd6-4d4a-98ef-bc53e08d3812 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] No waiting events found dispatching network-vif-plugged-8aed8ea7-afed-4dca-9a39-26b03675eec8 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.693 2 WARNING nova.compute.manager [req-463b716f-310f-486a-8eb0-298f520c246b req-d92dd083-bdd6-4d4a-98ef-bc53e08d3812 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Received unexpected event network-vif-plugged-8aed8ea7-afed-4dca-9a39-26b03675eec8 for instance with vm_state building and task_state spawning.
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.694 2 DEBUG nova.compute.manager [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.698 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407250.6987205, 0d926f1d-a2a1-4e3d-b0d0-072c744cd745 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.699 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] VM Resumed (Lifecycle Event)
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.701 2 DEBUG nova.virt.libvirt.driver [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.704 2 INFO nova.virt.libvirt.driver [-] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Instance spawned successfully.
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.705 2 DEBUG nova.virt.libvirt.driver [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.731 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.735 2 DEBUG nova.virt.libvirt.driver [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.736 2 DEBUG nova.virt.libvirt.driver [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.737 2 DEBUG nova.virt.libvirt.driver [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.737 2 DEBUG nova.virt.libvirt.driver [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.737 2 DEBUG nova.virt.libvirt.driver [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.738 2 DEBUG nova.virt.libvirt.driver [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.743 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.775 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.837 2 INFO nova.compute.manager [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Took 6.96 seconds to spawn the instance on the hypervisor.
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.838 2 DEBUG nova.compute.manager [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.944 2 INFO nova.compute.manager [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Took 8.10 seconds to build instance.
Oct 02 12:14:10 compute-0 nova_compute[192079]: 2025-10-02 12:14:10.985 2 DEBUG oslo_concurrency.lockutils [None req-32c67eff-1cbf-491c-873f-b133a88599e4 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Lock "0d926f1d-a2a1-4e3d-b0d0-072c744cd745" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 8.259s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:11 compute-0 nova_compute[192079]: 2025-10-02 12:14:11.001 2 DEBUG nova.compute.manager [req-917e015b-8181-4ff4-a16f-1f568d11176e req-221d8b51-d720-436e-b500-12e3a541bcd1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Received event network-vif-plugged-c1d6d6c7-23c7-45e9-b50d-f589c7908b63 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:14:11 compute-0 nova_compute[192079]: 2025-10-02 12:14:11.002 2 DEBUG oslo_concurrency.lockutils [req-917e015b-8181-4ff4-a16f-1f568d11176e req-221d8b51-d720-436e-b500-12e3a541bcd1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "6db1ac7f-726d-4ad6-8992-86f0c23d4d79-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:11 compute-0 nova_compute[192079]: 2025-10-02 12:14:11.002 2 DEBUG oslo_concurrency.lockutils [req-917e015b-8181-4ff4-a16f-1f568d11176e req-221d8b51-d720-436e-b500-12e3a541bcd1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6db1ac7f-726d-4ad6-8992-86f0c23d4d79-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:11 compute-0 nova_compute[192079]: 2025-10-02 12:14:11.003 2 DEBUG oslo_concurrency.lockutils [req-917e015b-8181-4ff4-a16f-1f568d11176e req-221d8b51-d720-436e-b500-12e3a541bcd1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6db1ac7f-726d-4ad6-8992-86f0c23d4d79-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:11 compute-0 nova_compute[192079]: 2025-10-02 12:14:11.003 2 DEBUG nova.compute.manager [req-917e015b-8181-4ff4-a16f-1f568d11176e req-221d8b51-d720-436e-b500-12e3a541bcd1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] No waiting events found dispatching network-vif-plugged-c1d6d6c7-23c7-45e9-b50d-f589c7908b63 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:14:11 compute-0 nova_compute[192079]: 2025-10-02 12:14:11.003 2 WARNING nova.compute.manager [req-917e015b-8181-4ff4-a16f-1f568d11176e req-221d8b51-d720-436e-b500-12e3a541bcd1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Received unexpected event network-vif-plugged-c1d6d6c7-23c7-45e9-b50d-f589c7908b63 for instance with vm_state active and task_state None.
Oct 02 12:14:11 compute-0 nova_compute[192079]: 2025-10-02 12:14:11.719 2 DEBUG nova.compute.manager [None req-10374cba-cefc-4aaa-88f6-2b319a4d3fd9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:14:11 compute-0 nova_compute[192079]: 2025-10-02 12:14:11.821 2 INFO nova.compute.manager [None req-10374cba-cefc-4aaa-88f6-2b319a4d3fd9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] instance snapshotting
Oct 02 12:14:12 compute-0 nova_compute[192079]: 2025-10-02 12:14:12.150 2 INFO nova.virt.libvirt.driver [None req-10374cba-cefc-4aaa-88f6-2b319a4d3fd9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Beginning live snapshot process
Oct 02 12:14:12 compute-0 virtqemud[191807]: invalid argument: disk vda does not have an active block job
Oct 02 12:14:12 compute-0 nova_compute[192079]: 2025-10-02 12:14:12.443 2 DEBUG oslo_concurrency.processutils [None req-10374cba-cefc-4aaa-88f6-2b319a4d3fd9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/6db1ac7f-726d-4ad6-8992-86f0c23d4d79/disk --force-share --output=json -f qcow2 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:14:12 compute-0 nova_compute[192079]: 2025-10-02 12:14:12.501 2 DEBUG oslo_concurrency.processutils [None req-10374cba-cefc-4aaa-88f6-2b319a4d3fd9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/6db1ac7f-726d-4ad6-8992-86f0c23d4d79/disk --force-share --output=json -f qcow2" returned: 0 in 0.059s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:14:12 compute-0 nova_compute[192079]: 2025-10-02 12:14:12.503 2 DEBUG oslo_concurrency.processutils [None req-10374cba-cefc-4aaa-88f6-2b319a4d3fd9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/6db1ac7f-726d-4ad6-8992-86f0c23d4d79/disk --force-share --output=json -f qcow2 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:14:12 compute-0 nova_compute[192079]: 2025-10-02 12:14:12.581 2 DEBUG oslo_concurrency.processutils [None req-10374cba-cefc-4aaa-88f6-2b319a4d3fd9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/6db1ac7f-726d-4ad6-8992-86f0c23d4d79/disk --force-share --output=json -f qcow2" returned: 0 in 0.078s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:14:12 compute-0 nova_compute[192079]: 2025-10-02 12:14:12.598 2 DEBUG oslo_concurrency.processutils [None req-10374cba-cefc-4aaa-88f6-2b319a4d3fd9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:14:12 compute-0 nova_compute[192079]: 2025-10-02 12:14:12.667 2 DEBUG oslo_concurrency.processutils [None req-10374cba-cefc-4aaa-88f6-2b319a4d3fd9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.069s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:14:12 compute-0 nova_compute[192079]: 2025-10-02 12:14:12.668 2 DEBUG oslo_concurrency.processutils [None req-10374cba-cefc-4aaa-88f6-2b319a4d3fd9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/snapshots/tmpdea0cd37/ba27661ef3124bec9a45c3d131bb7102.delta 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:14:12 compute-0 nova_compute[192079]: 2025-10-02 12:14:12.847 2 DEBUG oslo_concurrency.processutils [None req-10374cba-cefc-4aaa-88f6-2b319a4d3fd9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/snapshots/tmpdea0cd37/ba27661ef3124bec9a45c3d131bb7102.delta 1073741824" returned: 0 in 0.178s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:14:12 compute-0 nova_compute[192079]: 2025-10-02 12:14:12.848 2 INFO nova.virt.libvirt.driver [None req-10374cba-cefc-4aaa-88f6-2b319a4d3fd9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Quiescing instance not available: QEMU guest agent is not enabled.
Oct 02 12:14:12 compute-0 nova_compute[192079]: 2025-10-02 12:14:12.899 2 DEBUG nova.virt.libvirt.guest [None req-10374cba-cefc-4aaa-88f6-2b319a4d3fd9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] COPY block job progress, current cursor: 0 final cursor: 1 is_job_complete /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:846
Oct 02 12:14:13 compute-0 nova_compute[192079]: 2025-10-02 12:14:13.139 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:13 compute-0 nova_compute[192079]: 2025-10-02 12:14:13.403 2 DEBUG nova.virt.libvirt.guest [None req-10374cba-cefc-4aaa-88f6-2b319a4d3fd9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] COPY block job progress, current cursor: 1 final cursor: 1 is_job_complete /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:846
Oct 02 12:14:13 compute-0 nova_compute[192079]: 2025-10-02 12:14:13.407 2 INFO nova.virt.libvirt.driver [None req-10374cba-cefc-4aaa-88f6-2b319a4d3fd9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Skipping quiescing instance: QEMU guest agent is not enabled.
Oct 02 12:14:13 compute-0 nova_compute[192079]: 2025-10-02 12:14:13.418 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759407238.4169288, 3277cbd6-2706-4647-b0df-b789c49f80ea => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:14:13 compute-0 nova_compute[192079]: 2025-10-02 12:14:13.419 2 INFO nova.compute.manager [-] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] VM Stopped (Lifecycle Event)
Oct 02 12:14:13 compute-0 nova_compute[192079]: 2025-10-02 12:14:13.444 2 DEBUG nova.compute.manager [None req-96e70fbb-c775-469e-8f1e-d6c9cea8f89b - - - - - -] [instance: 3277cbd6-2706-4647-b0df-b789c49f80ea] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:14:13 compute-0 nova_compute[192079]: 2025-10-02 12:14:13.577 2 DEBUG nova.privsep.utils [None req-10374cba-cefc-4aaa-88f6-2b319a4d3fd9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Path '/var/lib/nova/instances' supports direct I/O supports_direct_io /usr/lib/python3.9/site-packages/nova/privsep/utils.py:63
Oct 02 12:14:13 compute-0 nova_compute[192079]: 2025-10-02 12:14:13.579 2 DEBUG oslo_concurrency.processutils [None req-10374cba-cefc-4aaa-88f6-2b319a4d3fd9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Running cmd (subprocess): qemu-img convert -t none -O qcow2 -f qcow2 /var/lib/nova/instances/snapshots/tmpdea0cd37/ba27661ef3124bec9a45c3d131bb7102.delta /var/lib/nova/instances/snapshots/tmpdea0cd37/ba27661ef3124bec9a45c3d131bb7102 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:14:14 compute-0 nova_compute[192079]: 2025-10-02 12:14:14.839 2 DEBUG oslo_concurrency.processutils [None req-10374cba-cefc-4aaa-88f6-2b319a4d3fd9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] CMD "qemu-img convert -t none -O qcow2 -f qcow2 /var/lib/nova/instances/snapshots/tmpdea0cd37/ba27661ef3124bec9a45c3d131bb7102.delta /var/lib/nova/instances/snapshots/tmpdea0cd37/ba27661ef3124bec9a45c3d131bb7102" returned: 0 in 1.261s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:14:14 compute-0 nova_compute[192079]: 2025-10-02 12:14:14.840 2 INFO nova.virt.libvirt.driver [None req-10374cba-cefc-4aaa-88f6-2b319a4d3fd9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Snapshot extracted, beginning image upload
Oct 02 12:14:15 compute-0 nova_compute[192079]: 2025-10-02 12:14:15.200 2 WARNING nova.compute.manager [None req-10374cba-cefc-4aaa-88f6-2b319a4d3fd9 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Image not found during snapshot: nova.exception.ImageNotFound: Image b776e21b-2d7c-4cc5-ace5-de9d55541e75 could not be found.
Oct 02 12:14:15 compute-0 nova_compute[192079]: 2025-10-02 12:14:15.624 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:16 compute-0 nova_compute[192079]: 2025-10-02 12:14:16.694 2 DEBUG oslo_concurrency.lockutils [None req-f1fd605a-7112-4457-9bcb-3ed5ed9fe514 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "6db1ac7f-726d-4ad6-8992-86f0c23d4d79" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:16 compute-0 nova_compute[192079]: 2025-10-02 12:14:16.695 2 DEBUG oslo_concurrency.lockutils [None req-f1fd605a-7112-4457-9bcb-3ed5ed9fe514 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "6db1ac7f-726d-4ad6-8992-86f0c23d4d79" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:16 compute-0 nova_compute[192079]: 2025-10-02 12:14:16.696 2 DEBUG oslo_concurrency.lockutils [None req-f1fd605a-7112-4457-9bcb-3ed5ed9fe514 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "6db1ac7f-726d-4ad6-8992-86f0c23d4d79-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:16 compute-0 nova_compute[192079]: 2025-10-02 12:14:16.696 2 DEBUG oslo_concurrency.lockutils [None req-f1fd605a-7112-4457-9bcb-3ed5ed9fe514 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "6db1ac7f-726d-4ad6-8992-86f0c23d4d79-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:16 compute-0 nova_compute[192079]: 2025-10-02 12:14:16.696 2 DEBUG oslo_concurrency.lockutils [None req-f1fd605a-7112-4457-9bcb-3ed5ed9fe514 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "6db1ac7f-726d-4ad6-8992-86f0c23d4d79-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:16 compute-0 nova_compute[192079]: 2025-10-02 12:14:16.707 2 INFO nova.compute.manager [None req-f1fd605a-7112-4457-9bcb-3ed5ed9fe514 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Terminating instance
Oct 02 12:14:16 compute-0 nova_compute[192079]: 2025-10-02 12:14:16.717 2 DEBUG nova.compute.manager [None req-f1fd605a-7112-4457-9bcb-3ed5ed9fe514 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:14:16 compute-0 kernel: tapc1d6d6c7-23 (unregistering): left promiscuous mode
Oct 02 12:14:16 compute-0 NetworkManager[51160]: <info>  [1759407256.7431] device (tapc1d6d6c7-23): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:14:16 compute-0 ovn_controller[94336]: 2025-10-02T12:14:16Z|00230|binding|INFO|Releasing lport c1d6d6c7-23c7-45e9-b50d-f589c7908b63 from this chassis (sb_readonly=0)
Oct 02 12:14:16 compute-0 ovn_controller[94336]: 2025-10-02T12:14:16Z|00231|binding|INFO|Setting lport c1d6d6c7-23c7-45e9-b50d-f589c7908b63 down in Southbound
Oct 02 12:14:16 compute-0 ovn_controller[94336]: 2025-10-02T12:14:16Z|00232|binding|INFO|Removing iface tapc1d6d6c7-23 ovn-installed in OVS
Oct 02 12:14:16 compute-0 nova_compute[192079]: 2025-10-02 12:14:16.755 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:16.763 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:38:15:28 10.100.0.4'], port_security=['fa:16:3e:38:15:28 10.100.0.4'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.4/28', 'neutron:device_id': '6db1ac7f-726d-4ad6-8992-86f0c23d4d79', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'dcf78460093d411988a54040ea4c265a', 'neutron:revision_number': '4', 'neutron:security_group_ids': 'aacce687-8b76-4e90-b19c-0dd006394188', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=24ae9888-31f5-4083-b5ee-e7ed6a1eee13, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=c1d6d6c7-23c7-45e9-b50d-f589c7908b63) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:14:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:16.765 103294 INFO neutron.agent.ovn.metadata.agent [-] Port c1d6d6c7-23c7-45e9-b50d-f589c7908b63 in datapath 4f195445-fd43-4b92-89dd-a1b2fe9ea8c2 unbound from our chassis
Oct 02 12:14:16 compute-0 nova_compute[192079]: 2025-10-02 12:14:16.766 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:16.767 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 4f195445-fd43-4b92-89dd-a1b2fe9ea8c2, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:14:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:16.768 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[756171a0-f9ec-4abd-83ea-4e29faacab35]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:16.768 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2 namespace which is not needed anymore
Oct 02 12:14:16 compute-0 systemd[1]: machine-qemu\x2d33\x2dinstance\x2d00000046.scope: Deactivated successfully.
Oct 02 12:14:16 compute-0 systemd[1]: machine-qemu\x2d33\x2dinstance\x2d00000046.scope: Consumed 8.699s CPU time.
Oct 02 12:14:16 compute-0 systemd-machined[152150]: Machine qemu-33-instance-00000046 terminated.
Oct 02 12:14:16 compute-0 neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2[230159]: [NOTICE]   (230163) : haproxy version is 2.8.14-c23fe91
Oct 02 12:14:16 compute-0 neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2[230159]: [NOTICE]   (230163) : path to executable is /usr/sbin/haproxy
Oct 02 12:14:16 compute-0 neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2[230159]: [WARNING]  (230163) : Exiting Master process...
Oct 02 12:14:16 compute-0 neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2[230159]: [ALERT]    (230163) : Current worker (230166) exited with code 143 (Terminated)
Oct 02 12:14:16 compute-0 neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2[230159]: [WARNING]  (230163) : All workers exited. Exiting... (0)
Oct 02 12:14:16 compute-0 systemd[1]: libpod-e35fcdd54cf4a0ce2499ba5531d0b2fd8f95b306b337f99523f5084f46746a54.scope: Deactivated successfully.
Oct 02 12:14:16 compute-0 podman[230327]: 2025-10-02 12:14:16.97067794 +0000 UTC m=+0.123356832 container died e35fcdd54cf4a0ce2499ba5531d0b2fd8f95b306b337f99523f5084f46746a54 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_managed=true, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:14:16 compute-0 nova_compute[192079]: 2025-10-02 12:14:16.980 2 INFO nova.virt.libvirt.driver [-] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Instance destroyed successfully.
Oct 02 12:14:16 compute-0 nova_compute[192079]: 2025-10-02 12:14:16.980 2 DEBUG nova.objects.instance [None req-f1fd605a-7112-4457-9bcb-3ed5ed9fe514 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lazy-loading 'resources' on Instance uuid 6db1ac7f-726d-4ad6-8992-86f0c23d4d79 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:14:16 compute-0 nova_compute[192079]: 2025-10-02 12:14:16.998 2 DEBUG nova.virt.libvirt.vif [None req-f1fd605a-7112-4457-9bcb-3ed5ed9fe514 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:14:00Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ImagesTestJSON-server-225816649',display_name='tempest-ImagesTestJSON-server-225816649',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-imagestestjson-server-225816649',id=70,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:14:09Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='dcf78460093d411988a54040ea4c265a',ramdisk_id='',reservation_id='r-x30nkn60',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',o
wner_project_name='tempest-ImagesTestJSON-437970487',owner_user_name='tempest-ImagesTestJSON-437970487-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:14:15Z,user_data=None,user_id='dcdfc3c0f94e42cb931d27f2e3b5b12d',uuid=6db1ac7f-726d-4ad6-8992-86f0c23d4d79,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "c1d6d6c7-23c7-45e9-b50d-f589c7908b63", "address": "fa:16:3e:38:15:28", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc1d6d6c7-23", "ovs_interfaceid": "c1d6d6c7-23c7-45e9-b50d-f589c7908b63", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:14:16 compute-0 nova_compute[192079]: 2025-10-02 12:14:16.999 2 DEBUG nova.network.os_vif_util [None req-f1fd605a-7112-4457-9bcb-3ed5ed9fe514 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Converting VIF {"id": "c1d6d6c7-23c7-45e9-b50d-f589c7908b63", "address": "fa:16:3e:38:15:28", "network": {"id": "4f195445-fd43-4b92-89dd-a1b2fe9ea8c2", "bridge": "br-int", "label": "tempest-ImagesTestJSON-793597453-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "dcf78460093d411988a54040ea4c265a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc1d6d6c7-23", "ovs_interfaceid": "c1d6d6c7-23c7-45e9-b50d-f589c7908b63", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:14:17 compute-0 nova_compute[192079]: 2025-10-02 12:14:16.999 2 DEBUG nova.network.os_vif_util [None req-f1fd605a-7112-4457-9bcb-3ed5ed9fe514 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:38:15:28,bridge_name='br-int',has_traffic_filtering=True,id=c1d6d6c7-23c7-45e9-b50d-f589c7908b63,network=Network(4f195445-fd43-4b92-89dd-a1b2fe9ea8c2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapc1d6d6c7-23') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:14:17 compute-0 nova_compute[192079]: 2025-10-02 12:14:17.000 2 DEBUG os_vif [None req-f1fd605a-7112-4457-9bcb-3ed5ed9fe514 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:38:15:28,bridge_name='br-int',has_traffic_filtering=True,id=c1d6d6c7-23c7-45e9-b50d-f589c7908b63,network=Network(4f195445-fd43-4b92-89dd-a1b2fe9ea8c2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapc1d6d6c7-23') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:14:17 compute-0 nova_compute[192079]: 2025-10-02 12:14:17.001 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:17 compute-0 nova_compute[192079]: 2025-10-02 12:14:17.002 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapc1d6d6c7-23, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:14:17 compute-0 nova_compute[192079]: 2025-10-02 12:14:17.092 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:17 compute-0 nova_compute[192079]: 2025-10-02 12:14:17.096 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:14:17 compute-0 nova_compute[192079]: 2025-10-02 12:14:17.099 2 INFO os_vif [None req-f1fd605a-7112-4457-9bcb-3ed5ed9fe514 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:38:15:28,bridge_name='br-int',has_traffic_filtering=True,id=c1d6d6c7-23c7-45e9-b50d-f589c7908b63,network=Network(4f195445-fd43-4b92-89dd-a1b2fe9ea8c2),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapc1d6d6c7-23')
Oct 02 12:14:17 compute-0 nova_compute[192079]: 2025-10-02 12:14:17.100 2 INFO nova.virt.libvirt.driver [None req-f1fd605a-7112-4457-9bcb-3ed5ed9fe514 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Deleting instance files /var/lib/nova/instances/6db1ac7f-726d-4ad6-8992-86f0c23d4d79_del
Oct 02 12:14:17 compute-0 nova_compute[192079]: 2025-10-02 12:14:17.100 2 INFO nova.virt.libvirt.driver [None req-f1fd605a-7112-4457-9bcb-3ed5ed9fe514 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Deletion of /var/lib/nova/instances/6db1ac7f-726d-4ad6-8992-86f0c23d4d79_del complete
Oct 02 12:14:17 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-e35fcdd54cf4a0ce2499ba5531d0b2fd8f95b306b337f99523f5084f46746a54-userdata-shm.mount: Deactivated successfully.
Oct 02 12:14:17 compute-0 systemd[1]: var-lib-containers-storage-overlay-61fe78b7eb6321d27215f73b146be54eb1c1c0231c6f707859a38972a8a97058-merged.mount: Deactivated successfully.
Oct 02 12:14:17 compute-0 nova_compute[192079]: 2025-10-02 12:14:17.235 2 INFO nova.compute.manager [None req-f1fd605a-7112-4457-9bcb-3ed5ed9fe514 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Took 0.52 seconds to destroy the instance on the hypervisor.
Oct 02 12:14:17 compute-0 nova_compute[192079]: 2025-10-02 12:14:17.236 2 DEBUG oslo.service.loopingcall [None req-f1fd605a-7112-4457-9bcb-3ed5ed9fe514 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:14:17 compute-0 nova_compute[192079]: 2025-10-02 12:14:17.236 2 DEBUG nova.compute.manager [-] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:14:17 compute-0 nova_compute[192079]: 2025-10-02 12:14:17.236 2 DEBUG nova.network.neutron [-] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:14:17 compute-0 podman[230372]: 2025-10-02 12:14:17.312738018 +0000 UTC m=+0.052787959 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_id=edpm, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, container_name=ceilometer_agent_compute, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', 
'/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, io.buildah.version=1.41.3, managed_by=edpm_ansible)
Oct 02 12:14:17 compute-0 podman[230327]: 2025-10-02 12:14:17.384635245 +0000 UTC m=+0.537314137 container cleanup e35fcdd54cf4a0ce2499ba5531d0b2fd8f95b306b337f99523f5084f46746a54 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS)
Oct 02 12:14:17 compute-0 systemd[1]: libpod-conmon-e35fcdd54cf4a0ce2499ba5531d0b2fd8f95b306b337f99523f5084f46746a54.scope: Deactivated successfully.
Oct 02 12:14:17 compute-0 podman[230393]: 2025-10-02 12:14:17.682576332 +0000 UTC m=+0.257885226 container remove e35fcdd54cf4a0ce2499ba5531d0b2fd8f95b306b337f99523f5084f46746a54 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_managed=true)
Oct 02 12:14:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:17.688 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4ed04692-42d6-49c1-b87b-54b21f2d4e88]: (4, ('Thu Oct  2 12:14:16 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2 (e35fcdd54cf4a0ce2499ba5531d0b2fd8f95b306b337f99523f5084f46746a54)\ne35fcdd54cf4a0ce2499ba5531d0b2fd8f95b306b337f99523f5084f46746a54\nThu Oct  2 12:14:17 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2 (e35fcdd54cf4a0ce2499ba5531d0b2fd8f95b306b337f99523f5084f46746a54)\ne35fcdd54cf4a0ce2499ba5531d0b2fd8f95b306b337f99523f5084f46746a54\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:17.690 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6a548650-eca7-4e4c-8deb-000da2142ed0]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:17.690 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap4f195445-f0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:14:17 compute-0 nova_compute[192079]: 2025-10-02 12:14:17.692 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:17 compute-0 kernel: tap4f195445-f0: left promiscuous mode
Oct 02 12:14:17 compute-0 nova_compute[192079]: 2025-10-02 12:14:17.695 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:17.698 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[218dee2d-094d-4bf5-aa38-555820725b1d]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:17 compute-0 nova_compute[192079]: 2025-10-02 12:14:17.711 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:17.721 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9c846817-f75b-4e22-b4b6-72c771be6707]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:17.723 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f7f45129-43c7-42e3-95dc-1283c0dfeb01]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:17.737 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c918f172-338d-4eca-9942-73f6d7bf407a]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 524566, 'reachable_time': 16579, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 230408, 'error': None, 'target': 'ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:17 compute-0 systemd[1]: run-netns-ovnmeta\x2d4f195445\x2dfd43\x2d4b92\x2d89dd\x2da1b2fe9ea8c2.mount: Deactivated successfully.
Oct 02 12:14:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:17.742 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-4f195445-fd43-4b92-89dd-a1b2fe9ea8c2 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:14:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:17.742 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[7c9740be-cb09-4b5d-afd9-c1ff665bb0d7]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:18 compute-0 nova_compute[192079]: 2025-10-02 12:14:18.598 2 DEBUG nova.compute.manager [req-3bb29edc-7dcf-46ab-8500-633c0aa794cc req-241b3fe8-610c-4bc5-9ba6-e3589b7e44de 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Received event network-vif-unplugged-c1d6d6c7-23c7-45e9-b50d-f589c7908b63 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:14:18 compute-0 nova_compute[192079]: 2025-10-02 12:14:18.599 2 DEBUG oslo_concurrency.lockutils [req-3bb29edc-7dcf-46ab-8500-633c0aa794cc req-241b3fe8-610c-4bc5-9ba6-e3589b7e44de 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "6db1ac7f-726d-4ad6-8992-86f0c23d4d79-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:18 compute-0 nova_compute[192079]: 2025-10-02 12:14:18.600 2 DEBUG oslo_concurrency.lockutils [req-3bb29edc-7dcf-46ab-8500-633c0aa794cc req-241b3fe8-610c-4bc5-9ba6-e3589b7e44de 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6db1ac7f-726d-4ad6-8992-86f0c23d4d79-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:18 compute-0 nova_compute[192079]: 2025-10-02 12:14:18.600 2 DEBUG oslo_concurrency.lockutils [req-3bb29edc-7dcf-46ab-8500-633c0aa794cc req-241b3fe8-610c-4bc5-9ba6-e3589b7e44de 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6db1ac7f-726d-4ad6-8992-86f0c23d4d79-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:18 compute-0 nova_compute[192079]: 2025-10-02 12:14:18.601 2 DEBUG nova.compute.manager [req-3bb29edc-7dcf-46ab-8500-633c0aa794cc req-241b3fe8-610c-4bc5-9ba6-e3589b7e44de 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] No waiting events found dispatching network-vif-unplugged-c1d6d6c7-23c7-45e9-b50d-f589c7908b63 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:14:18 compute-0 nova_compute[192079]: 2025-10-02 12:14:18.602 2 DEBUG nova.compute.manager [req-3bb29edc-7dcf-46ab-8500-633c0aa794cc req-241b3fe8-610c-4bc5-9ba6-e3589b7e44de 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Received event network-vif-unplugged-c1d6d6c7-23c7-45e9-b50d-f589c7908b63 for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:14:18 compute-0 nova_compute[192079]: 2025-10-02 12:14:18.686 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:14:18 compute-0 nova_compute[192079]: 2025-10-02 12:14:18.687 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:14:18 compute-0 nova_compute[192079]: 2025-10-02 12:14:18.763 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:18 compute-0 nova_compute[192079]: 2025-10-02 12:14:18.764 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:18 compute-0 nova_compute[192079]: 2025-10-02 12:14:18.764 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:18 compute-0 nova_compute[192079]: 2025-10-02 12:14:18.765 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:14:18 compute-0 nova_compute[192079]: 2025-10-02 12:14:18.932 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/0d926f1d-a2a1-4e3d-b0d0-072c744cd745/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:14:18 compute-0 nova_compute[192079]: 2025-10-02 12:14:18.995 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/0d926f1d-a2a1-4e3d-b0d0-072c744cd745/disk --force-share --output=json" returned: 0 in 0.063s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:14:18 compute-0 nova_compute[192079]: 2025-10-02 12:14:18.996 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/0d926f1d-a2a1-4e3d-b0d0-072c744cd745/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.039 2 DEBUG nova.network.neutron [-] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.050 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/0d926f1d-a2a1-4e3d-b0d0-072c744cd745/disk --force-share --output=json" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.070 2 INFO nova.compute.manager [-] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Took 1.83 seconds to deallocate network for instance.
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.179 2 DEBUG oslo_concurrency.lockutils [None req-f1fd605a-7112-4457-9bcb-3ed5ed9fe514 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.180 2 DEBUG oslo_concurrency.lockutils [None req-f1fd605a-7112-4457-9bcb-3ed5ed9fe514 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.224 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.226 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5553MB free_disk=73.34872817993164GB free_vcpus=7 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.226 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.273 2 DEBUG nova.compute.provider_tree [None req-f1fd605a-7112-4457-9bcb-3ed5ed9fe514 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.302 2 DEBUG nova.scheduler.client.report [None req-f1fd605a-7112-4457-9bcb-3ed5ed9fe514 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.338 2 DEBUG oslo_concurrency.lockutils [None req-f1fd605a-7112-4457-9bcb-3ed5ed9fe514 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.158s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.340 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.114s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.428 2 INFO nova.scheduler.client.report [None req-f1fd605a-7112-4457-9bcb-3ed5ed9fe514 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Deleted allocations for instance 6db1ac7f-726d-4ad6-8992-86f0c23d4d79
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.449 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance 0d926f1d-a2a1-4e3d-b0d0-072c744cd745 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.450 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 1 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.450 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=640MB phys_disk=79GB used_disk=1GB total_vcpus=8 used_vcpus=1 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.506 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.541 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.559 2 DEBUG oslo_concurrency.lockutils [None req-f1fd605a-7112-4457-9bcb-3ed5ed9fe514 dcdfc3c0f94e42cb931d27f2e3b5b12d dcf78460093d411988a54040ea4c265a - - default default] Lock "6db1ac7f-726d-4ad6-8992-86f0c23d4d79" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 2.864s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.585 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.586 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.247s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.876 2 DEBUG oslo_concurrency.lockutils [None req-6cd0b2bb-cdff-4c8e-9824-5bb44525bae0 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Acquiring lock "0d926f1d-a2a1-4e3d-b0d0-072c744cd745" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.877 2 DEBUG oslo_concurrency.lockutils [None req-6cd0b2bb-cdff-4c8e-9824-5bb44525bae0 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Lock "0d926f1d-a2a1-4e3d-b0d0-072c744cd745" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.878 2 DEBUG oslo_concurrency.lockutils [None req-6cd0b2bb-cdff-4c8e-9824-5bb44525bae0 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Acquiring lock "0d926f1d-a2a1-4e3d-b0d0-072c744cd745-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.878 2 DEBUG oslo_concurrency.lockutils [None req-6cd0b2bb-cdff-4c8e-9824-5bb44525bae0 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Lock "0d926f1d-a2a1-4e3d-b0d0-072c744cd745-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.879 2 DEBUG oslo_concurrency.lockutils [None req-6cd0b2bb-cdff-4c8e-9824-5bb44525bae0 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Lock "0d926f1d-a2a1-4e3d-b0d0-072c744cd745-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.910 2 INFO nova.compute.manager [None req-6cd0b2bb-cdff-4c8e-9824-5bb44525bae0 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Terminating instance
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.947 2 DEBUG nova.compute.manager [None req-6cd0b2bb-cdff-4c8e-9824-5bb44525bae0 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:14:19 compute-0 kernel: tap8aed8ea7-af (unregistering): left promiscuous mode
Oct 02 12:14:19 compute-0 NetworkManager[51160]: <info>  [1759407259.9711] device (tap8aed8ea7-af): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:14:19 compute-0 ovn_controller[94336]: 2025-10-02T12:14:19Z|00233|binding|INFO|Releasing lport 8aed8ea7-afed-4dca-9a39-26b03675eec8 from this chassis (sb_readonly=0)
Oct 02 12:14:19 compute-0 ovn_controller[94336]: 2025-10-02T12:14:19Z|00234|binding|INFO|Setting lport 8aed8ea7-afed-4dca-9a39-26b03675eec8 down in Southbound
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.978 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:19 compute-0 ovn_controller[94336]: 2025-10-02T12:14:19Z|00235|binding|INFO|Removing iface tap8aed8ea7-af ovn-installed in OVS
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.983 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:19 compute-0 nova_compute[192079]: 2025-10-02 12:14:19.995 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:19.993 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:87:36:d3 10.100.0.11'], port_security=['fa:16:3e:87:36:d3 10.100.0.11'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.11/28', 'neutron:device_id': '0d926f1d-a2a1-4e3d-b0d0-072c744cd745', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-80aeccdc-b23c-43b5-ada7-eabdfc0b0b19', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '389df3c9188c4d8194eb17d703c957db', 'neutron:revision_number': '4', 'neutron:security_group_ids': 'e2269b20-1ca6-4ad2-b43d-9aec139eedb0', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=3cbada61-0eee-4c57-92ca-456b863dedea, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=8aed8ea7-afed-4dca-9a39-26b03675eec8) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:14:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:19.995 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 8aed8ea7-afed-4dca-9a39-26b03675eec8 in datapath 80aeccdc-b23c-43b5-ada7-eabdfc0b0b19 unbound from our chassis
Oct 02 12:14:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:19.997 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 80aeccdc-b23c-43b5-ada7-eabdfc0b0b19, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:14:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:19.998 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6e0c37cd-69ea-4c3a-a5ce-c5d037f0d1d7]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:19.999 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-80aeccdc-b23c-43b5-ada7-eabdfc0b0b19 namespace which is not needed anymore
Oct 02 12:14:20 compute-0 systemd[1]: machine-qemu\x2d34\x2dinstance\x2d00000047.scope: Deactivated successfully.
Oct 02 12:14:20 compute-0 systemd[1]: machine-qemu\x2d34\x2dinstance\x2d00000047.scope: Consumed 9.958s CPU time.
Oct 02 12:14:20 compute-0 systemd-machined[152150]: Machine qemu-34-instance-00000047 terminated.
Oct 02 12:14:20 compute-0 neutron-haproxy-ovnmeta-80aeccdc-b23c-43b5-ada7-eabdfc0b0b19[230264]: [NOTICE]   (230268) : haproxy version is 2.8.14-c23fe91
Oct 02 12:14:20 compute-0 neutron-haproxy-ovnmeta-80aeccdc-b23c-43b5-ada7-eabdfc0b0b19[230264]: [NOTICE]   (230268) : path to executable is /usr/sbin/haproxy
Oct 02 12:14:20 compute-0 neutron-haproxy-ovnmeta-80aeccdc-b23c-43b5-ada7-eabdfc0b0b19[230264]: [WARNING]  (230268) : Exiting Master process...
Oct 02 12:14:20 compute-0 neutron-haproxy-ovnmeta-80aeccdc-b23c-43b5-ada7-eabdfc0b0b19[230264]: [ALERT]    (230268) : Current worker (230270) exited with code 143 (Terminated)
Oct 02 12:14:20 compute-0 neutron-haproxy-ovnmeta-80aeccdc-b23c-43b5-ada7-eabdfc0b0b19[230264]: [WARNING]  (230268) : All workers exited. Exiting... (0)
Oct 02 12:14:20 compute-0 systemd[1]: libpod-01b6a0d24d946def6ac4e01a0c7dd5032048d1c2915d9dc76516b892a74a446a.scope: Deactivated successfully.
Oct 02 12:14:20 compute-0 podman[230441]: 2025-10-02 12:14:20.162918054 +0000 UTC m=+0.073496013 container died 01b6a0d24d946def6ac4e01a0c7dd5032048d1c2915d9dc76516b892a74a446a (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-80aeccdc-b23c-43b5-ada7-eabdfc0b0b19, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, tcib_managed=true, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:14:20 compute-0 nova_compute[192079]: 2025-10-02 12:14:20.208 2 INFO nova.virt.libvirt.driver [-] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Instance destroyed successfully.
Oct 02 12:14:20 compute-0 nova_compute[192079]: 2025-10-02 12:14:20.210 2 DEBUG nova.objects.instance [None req-6cd0b2bb-cdff-4c8e-9824-5bb44525bae0 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Lazy-loading 'resources' on Instance uuid 0d926f1d-a2a1-4e3d-b0d0-072c744cd745 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:14:20 compute-0 nova_compute[192079]: 2025-10-02 12:14:20.232 2 DEBUG nova.virt.libvirt.vif [None req-6cd0b2bb-cdff-4c8e-9824-5bb44525bae0 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:14:01Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerMetadataTestJSON-server-1682429524',display_name='tempest-ServerMetadataTestJSON-server-1682429524',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-servermetadatatestjson-server-1682429524',id=71,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:14:10Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={key1='alt1',key2='value2',key3='value3'},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='389df3c9188c4d8194eb17d703c957db',ramdisk_id='',reservation_id='r-qkkjqj0t',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',
image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-ServerMetadataTestJSON-1070274954',owner_user_name='tempest-ServerMetadataTestJSON-1070274954-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:14:19Z,user_data=None,user_id='fae0e5ee734643f6a2642e748e51d97f',uuid=0d926f1d-a2a1-4e3d-b0d0-072c744cd745,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "8aed8ea7-afed-4dca-9a39-26b03675eec8", "address": "fa:16:3e:87:36:d3", "network": {"id": "80aeccdc-b23c-43b5-ada7-eabdfc0b0b19", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-421813777-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "389df3c9188c4d8194eb17d703c957db", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap8aed8ea7-af", "ovs_interfaceid": "8aed8ea7-afed-4dca-9a39-26b03675eec8", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:14:20 compute-0 nova_compute[192079]: 2025-10-02 12:14:20.233 2 DEBUG nova.network.os_vif_util [None req-6cd0b2bb-cdff-4c8e-9824-5bb44525bae0 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Converting VIF {"id": "8aed8ea7-afed-4dca-9a39-26b03675eec8", "address": "fa:16:3e:87:36:d3", "network": {"id": "80aeccdc-b23c-43b5-ada7-eabdfc0b0b19", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-421813777-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "389df3c9188c4d8194eb17d703c957db", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap8aed8ea7-af", "ovs_interfaceid": "8aed8ea7-afed-4dca-9a39-26b03675eec8", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:14:20 compute-0 nova_compute[192079]: 2025-10-02 12:14:20.234 2 DEBUG nova.network.os_vif_util [None req-6cd0b2bb-cdff-4c8e-9824-5bb44525bae0 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:87:36:d3,bridge_name='br-int',has_traffic_filtering=True,id=8aed8ea7-afed-4dca-9a39-26b03675eec8,network=Network(80aeccdc-b23c-43b5-ada7-eabdfc0b0b19),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap8aed8ea7-af') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:14:20 compute-0 nova_compute[192079]: 2025-10-02 12:14:20.234 2 DEBUG os_vif [None req-6cd0b2bb-cdff-4c8e-9824-5bb44525bae0 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:87:36:d3,bridge_name='br-int',has_traffic_filtering=True,id=8aed8ea7-afed-4dca-9a39-26b03675eec8,network=Network(80aeccdc-b23c-43b5-ada7-eabdfc0b0b19),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap8aed8ea7-af') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:14:20 compute-0 nova_compute[192079]: 2025-10-02 12:14:20.236 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:20 compute-0 nova_compute[192079]: 2025-10-02 12:14:20.236 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap8aed8ea7-af, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:14:20 compute-0 nova_compute[192079]: 2025-10-02 12:14:20.239 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:20 compute-0 nova_compute[192079]: 2025-10-02 12:14:20.241 2 INFO os_vif [None req-6cd0b2bb-cdff-4c8e-9824-5bb44525bae0 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:87:36:d3,bridge_name='br-int',has_traffic_filtering=True,id=8aed8ea7-afed-4dca-9a39-26b03675eec8,network=Network(80aeccdc-b23c-43b5-ada7-eabdfc0b0b19),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap8aed8ea7-af')
Oct 02 12:14:20 compute-0 nova_compute[192079]: 2025-10-02 12:14:20.241 2 INFO nova.virt.libvirt.driver [None req-6cd0b2bb-cdff-4c8e-9824-5bb44525bae0 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Deleting instance files /var/lib/nova/instances/0d926f1d-a2a1-4e3d-b0d0-072c744cd745_del
Oct 02 12:14:20 compute-0 nova_compute[192079]: 2025-10-02 12:14:20.242 2 INFO nova.virt.libvirt.driver [None req-6cd0b2bb-cdff-4c8e-9824-5bb44525bae0 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Deletion of /var/lib/nova/instances/0d926f1d-a2a1-4e3d-b0d0-072c744cd745_del complete
Oct 02 12:14:20 compute-0 nova_compute[192079]: 2025-10-02 12:14:20.350 2 INFO nova.compute.manager [None req-6cd0b2bb-cdff-4c8e-9824-5bb44525bae0 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Took 0.40 seconds to destroy the instance on the hypervisor.
Oct 02 12:14:20 compute-0 nova_compute[192079]: 2025-10-02 12:14:20.351 2 DEBUG oslo.service.loopingcall [None req-6cd0b2bb-cdff-4c8e-9824-5bb44525bae0 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:14:20 compute-0 nova_compute[192079]: 2025-10-02 12:14:20.351 2 DEBUG nova.compute.manager [-] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:14:20 compute-0 nova_compute[192079]: 2025-10-02 12:14:20.352 2 DEBUG nova.network.neutron [-] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:14:20 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-01b6a0d24d946def6ac4e01a0c7dd5032048d1c2915d9dc76516b892a74a446a-userdata-shm.mount: Deactivated successfully.
Oct 02 12:14:20 compute-0 systemd[1]: var-lib-containers-storage-overlay-0b99a64160e3ce2cb000ee7fa28d4b41187d837554662ca9123305d922ec4d97-merged.mount: Deactivated successfully.
Oct 02 12:14:20 compute-0 nova_compute[192079]: 2025-10-02 12:14:20.561 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:14:20 compute-0 nova_compute[192079]: 2025-10-02 12:14:20.562 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:14:20 compute-0 podman[230441]: 2025-10-02 12:14:20.57875186 +0000 UTC m=+0.489329819 container cleanup 01b6a0d24d946def6ac4e01a0c7dd5032048d1c2915d9dc76516b892a74a446a (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-80aeccdc-b23c-43b5-ada7-eabdfc0b0b19, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0)
Oct 02 12:14:20 compute-0 systemd[1]: libpod-conmon-01b6a0d24d946def6ac4e01a0c7dd5032048d1c2915d9dc76516b892a74a446a.scope: Deactivated successfully.
Oct 02 12:14:20 compute-0 nova_compute[192079]: 2025-10-02 12:14:20.668 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:14:20 compute-0 nova_compute[192079]: 2025-10-02 12:14:20.670 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:20 compute-0 nova_compute[192079]: 2025-10-02 12:14:20.703 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:14:20 compute-0 nova_compute[192079]: 2025-10-02 12:14:20.704 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:14:20 compute-0 nova_compute[192079]: 2025-10-02 12:14:20.762 2 DEBUG nova.compute.manager [req-63e72ad4-b8cb-4664-b8f6-034696c39f79 req-3181d0d2-455c-4070-8cdf-a7ec271c4d5e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Received event network-vif-plugged-c1d6d6c7-23c7-45e9-b50d-f589c7908b63 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:14:20 compute-0 nova_compute[192079]: 2025-10-02 12:14:20.762 2 DEBUG oslo_concurrency.lockutils [req-63e72ad4-b8cb-4664-b8f6-034696c39f79 req-3181d0d2-455c-4070-8cdf-a7ec271c4d5e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "6db1ac7f-726d-4ad6-8992-86f0c23d4d79-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:20 compute-0 nova_compute[192079]: 2025-10-02 12:14:20.762 2 DEBUG oslo_concurrency.lockutils [req-63e72ad4-b8cb-4664-b8f6-034696c39f79 req-3181d0d2-455c-4070-8cdf-a7ec271c4d5e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6db1ac7f-726d-4ad6-8992-86f0c23d4d79-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:20 compute-0 nova_compute[192079]: 2025-10-02 12:14:20.763 2 DEBUG oslo_concurrency.lockutils [req-63e72ad4-b8cb-4664-b8f6-034696c39f79 req-3181d0d2-455c-4070-8cdf-a7ec271c4d5e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6db1ac7f-726d-4ad6-8992-86f0c23d4d79-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:20 compute-0 nova_compute[192079]: 2025-10-02 12:14:20.763 2 DEBUG nova.compute.manager [req-63e72ad4-b8cb-4664-b8f6-034696c39f79 req-3181d0d2-455c-4070-8cdf-a7ec271c4d5e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] No waiting events found dispatching network-vif-plugged-c1d6d6c7-23c7-45e9-b50d-f589c7908b63 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:14:20 compute-0 nova_compute[192079]: 2025-10-02 12:14:20.763 2 WARNING nova.compute.manager [req-63e72ad4-b8cb-4664-b8f6-034696c39f79 req-3181d0d2-455c-4070-8cdf-a7ec271c4d5e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Received unexpected event network-vif-plugged-c1d6d6c7-23c7-45e9-b50d-f589c7908b63 for instance with vm_state deleted and task_state None.
Oct 02 12:14:20 compute-0 nova_compute[192079]: 2025-10-02 12:14:20.763 2 DEBUG nova.compute.manager [req-63e72ad4-b8cb-4664-b8f6-034696c39f79 req-3181d0d2-455c-4070-8cdf-a7ec271c4d5e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Received event network-vif-deleted-c1d6d6c7-23c7-45e9-b50d-f589c7908b63 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:14:21 compute-0 podman[230486]: 2025-10-02 12:14:21.046348858 +0000 UTC m=+0.443492972 container remove 01b6a0d24d946def6ac4e01a0c7dd5032048d1c2915d9dc76516b892a74a446a (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-80aeccdc-b23c-43b5-ada7-eabdfc0b0b19, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:14:21 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:21.051 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d7848767-6404-4c31-9fde-2a4710e42a5f]: (4, ('Thu Oct  2 12:14:20 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-80aeccdc-b23c-43b5-ada7-eabdfc0b0b19 (01b6a0d24d946def6ac4e01a0c7dd5032048d1c2915d9dc76516b892a74a446a)\n01b6a0d24d946def6ac4e01a0c7dd5032048d1c2915d9dc76516b892a74a446a\nThu Oct  2 12:14:20 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-80aeccdc-b23c-43b5-ada7-eabdfc0b0b19 (01b6a0d24d946def6ac4e01a0c7dd5032048d1c2915d9dc76516b892a74a446a)\n01b6a0d24d946def6ac4e01a0c7dd5032048d1c2915d9dc76516b892a74a446a\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:21 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:21.054 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[771f6e88-1747-4d3c-a6e4-9f78601660d2]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:21 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:21.055 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap80aeccdc-b0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:14:21 compute-0 nova_compute[192079]: 2025-10-02 12:14:21.057 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:21 compute-0 kernel: tap80aeccdc-b0: left promiscuous mode
Oct 02 12:14:21 compute-0 nova_compute[192079]: 2025-10-02 12:14:21.060 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:21 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:21.062 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8bc16c5b-e3f6-49f8-bfdd-f96aacf09084]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:21 compute-0 nova_compute[192079]: 2025-10-02 12:14:21.073 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:21 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:21.105 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[044a9af3-b279-4352-aa50-0bf093a35a06]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:21 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:21.107 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[86f219e2-b222-4992-9cd4-55b0236ae9a5]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:21 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:21.132 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a2ecaf47-f112-4e01-a52e-3779b814ca58]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 524693, 'reachable_time': 19311, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 230501, 'error': None, 'target': 'ovnmeta-80aeccdc-b23c-43b5-ada7-eabdfc0b0b19', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:21 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:21.135 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-80aeccdc-b23c-43b5-ada7-eabdfc0b0b19 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:14:21 compute-0 systemd[1]: run-netns-ovnmeta\x2d80aeccdc\x2db23c\x2d43b5\x2dada7\x2deabdfc0b0b19.mount: Deactivated successfully.
Oct 02 12:14:21 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:21.135 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[a40da255-75ea-4878-86e7-6dbebd2213f3]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:21 compute-0 nova_compute[192079]: 2025-10-02 12:14:21.235 2 DEBUG nova.compute.manager [req-81ca9725-5025-4a3f-a40d-49d11dee6a70 req-8a79bd19-0de9-4bbb-b07f-f0cc29612e61 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Received event network-vif-unplugged-8aed8ea7-afed-4dca-9a39-26b03675eec8 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:14:21 compute-0 nova_compute[192079]: 2025-10-02 12:14:21.236 2 DEBUG oslo_concurrency.lockutils [req-81ca9725-5025-4a3f-a40d-49d11dee6a70 req-8a79bd19-0de9-4bbb-b07f-f0cc29612e61 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "0d926f1d-a2a1-4e3d-b0d0-072c744cd745-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:21 compute-0 nova_compute[192079]: 2025-10-02 12:14:21.236 2 DEBUG oslo_concurrency.lockutils [req-81ca9725-5025-4a3f-a40d-49d11dee6a70 req-8a79bd19-0de9-4bbb-b07f-f0cc29612e61 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0d926f1d-a2a1-4e3d-b0d0-072c744cd745-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:21 compute-0 nova_compute[192079]: 2025-10-02 12:14:21.236 2 DEBUG oslo_concurrency.lockutils [req-81ca9725-5025-4a3f-a40d-49d11dee6a70 req-8a79bd19-0de9-4bbb-b07f-f0cc29612e61 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0d926f1d-a2a1-4e3d-b0d0-072c744cd745-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:21 compute-0 nova_compute[192079]: 2025-10-02 12:14:21.236 2 DEBUG nova.compute.manager [req-81ca9725-5025-4a3f-a40d-49d11dee6a70 req-8a79bd19-0de9-4bbb-b07f-f0cc29612e61 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] No waiting events found dispatching network-vif-unplugged-8aed8ea7-afed-4dca-9a39-26b03675eec8 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:14:21 compute-0 nova_compute[192079]: 2025-10-02 12:14:21.236 2 DEBUG nova.compute.manager [req-81ca9725-5025-4a3f-a40d-49d11dee6a70 req-8a79bd19-0de9-4bbb-b07f-f0cc29612e61 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Received event network-vif-unplugged-8aed8ea7-afed-4dca-9a39-26b03675eec8 for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:14:21 compute-0 nova_compute[192079]: 2025-10-02 12:14:21.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:14:21 compute-0 nova_compute[192079]: 2025-10-02 12:14:21.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:14:21 compute-0 nova_compute[192079]: 2025-10-02 12:14:21.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:14:21 compute-0 nova_compute[192079]: 2025-10-02 12:14:21.688 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Skipping network cache update for instance because it is being deleted. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9875
Oct 02 12:14:21 compute-0 nova_compute[192079]: 2025-10-02 12:14:21.689 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:14:21 compute-0 nova_compute[192079]: 2025-10-02 12:14:21.689 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:14:21 compute-0 nova_compute[192079]: 2025-10-02 12:14:21.986 2 DEBUG nova.network.neutron [-] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:14:22 compute-0 nova_compute[192079]: 2025-10-02 12:14:22.026 2 INFO nova.compute.manager [-] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Took 1.67 seconds to deallocate network for instance.
Oct 02 12:14:22 compute-0 nova_compute[192079]: 2025-10-02 12:14:22.174 2 DEBUG oslo_concurrency.lockutils [None req-6cd0b2bb-cdff-4c8e-9824-5bb44525bae0 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:22 compute-0 nova_compute[192079]: 2025-10-02 12:14:22.175 2 DEBUG oslo_concurrency.lockutils [None req-6cd0b2bb-cdff-4c8e-9824-5bb44525bae0 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:22 compute-0 nova_compute[192079]: 2025-10-02 12:14:22.309 2 DEBUG nova.compute.provider_tree [None req-6cd0b2bb-cdff-4c8e-9824-5bb44525bae0 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:14:22 compute-0 nova_compute[192079]: 2025-10-02 12:14:22.328 2 DEBUG nova.scheduler.client.report [None req-6cd0b2bb-cdff-4c8e-9824-5bb44525bae0 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:14:22 compute-0 nova_compute[192079]: 2025-10-02 12:14:22.362 2 DEBUG oslo_concurrency.lockutils [None req-6cd0b2bb-cdff-4c8e-9824-5bb44525bae0 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.187s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:22 compute-0 nova_compute[192079]: 2025-10-02 12:14:22.396 2 INFO nova.scheduler.client.report [None req-6cd0b2bb-cdff-4c8e-9824-5bb44525bae0 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Deleted allocations for instance 0d926f1d-a2a1-4e3d-b0d0-072c744cd745
Oct 02 12:14:22 compute-0 nova_compute[192079]: 2025-10-02 12:14:22.495 2 DEBUG oslo_concurrency.lockutils [None req-6cd0b2bb-cdff-4c8e-9824-5bb44525bae0 fae0e5ee734643f6a2642e748e51d97f 389df3c9188c4d8194eb17d703c957db - - default default] Lock "0d926f1d-a2a1-4e3d-b0d0-072c744cd745" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 2.617s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:22 compute-0 nova_compute[192079]: 2025-10-02 12:14:22.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:14:22 compute-0 nova_compute[192079]: 2025-10-02 12:14:22.824 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:22.825 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=18, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=17) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:14:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:22.826 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 0 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:14:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:22.826 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '18'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:14:22 compute-0 nova_compute[192079]: 2025-10-02 12:14:22.900 2 DEBUG nova.compute.manager [req-daf35072-67b3-4f1c-829f-7a16dbd66dcc req-1b4eb8d2-d08f-4d6f-83c7-f4da598851b4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Received event network-vif-deleted-8aed8ea7-afed-4dca-9a39-26b03675eec8 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:14:23 compute-0 podman[230503]: 2025-10-02 12:14:23.155682024 +0000 UTC m=+0.066599814 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, config_id=multipathd, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, io.buildah.version=1.41.3, tcib_managed=true, org.label-schema.license=GPLv2, container_name=multipathd)
Oct 02 12:14:23 compute-0 podman[230502]: 2025-10-02 12:14:23.155706395 +0000 UTC m=+0.066685757 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, url=https://catalog.redhat.com/en/search?searchType=containers, vendor=Red Hat, Inc., version=9.6, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, config_id=edpm, distribution-scope=public, managed_by=edpm_ansible, build-date=2025-08-20T13:12:41, vcs-type=git, io.openshift.expose-services=, architecture=x86_64, maintainer=Red Hat, Inc., com.redhat.component=ubi9-minimal-container, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, container_name=openstack_network_exporter, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.openshift.tags=minimal rhel9, io.buildah.version=1.33.7, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., release=1755695350, name=ubi9-minimal)
Oct 02 12:14:23 compute-0 nova_compute[192079]: 2025-10-02 12:14:23.402 2 DEBUG nova.compute.manager [req-59c9a983-c5e7-46f3-b983-f24b556cd30c req-6d8088b6-a54e-4e6a-a44c-16b0b3f40a5b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Received event network-vif-plugged-8aed8ea7-afed-4dca-9a39-26b03675eec8 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:14:23 compute-0 nova_compute[192079]: 2025-10-02 12:14:23.402 2 DEBUG oslo_concurrency.lockutils [req-59c9a983-c5e7-46f3-b983-f24b556cd30c req-6d8088b6-a54e-4e6a-a44c-16b0b3f40a5b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "0d926f1d-a2a1-4e3d-b0d0-072c744cd745-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:23 compute-0 nova_compute[192079]: 2025-10-02 12:14:23.403 2 DEBUG oslo_concurrency.lockutils [req-59c9a983-c5e7-46f3-b983-f24b556cd30c req-6d8088b6-a54e-4e6a-a44c-16b0b3f40a5b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0d926f1d-a2a1-4e3d-b0d0-072c744cd745-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:23 compute-0 nova_compute[192079]: 2025-10-02 12:14:23.403 2 DEBUG oslo_concurrency.lockutils [req-59c9a983-c5e7-46f3-b983-f24b556cd30c req-6d8088b6-a54e-4e6a-a44c-16b0b3f40a5b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0d926f1d-a2a1-4e3d-b0d0-072c744cd745-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:23 compute-0 nova_compute[192079]: 2025-10-02 12:14:23.403 2 DEBUG nova.compute.manager [req-59c9a983-c5e7-46f3-b983-f24b556cd30c req-6d8088b6-a54e-4e6a-a44c-16b0b3f40a5b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] No waiting events found dispatching network-vif-plugged-8aed8ea7-afed-4dca-9a39-26b03675eec8 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:14:23 compute-0 nova_compute[192079]: 2025-10-02 12:14:23.404 2 WARNING nova.compute.manager [req-59c9a983-c5e7-46f3-b983-f24b556cd30c req-6d8088b6-a54e-4e6a-a44c-16b0b3f40a5b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Received unexpected event network-vif-plugged-8aed8ea7-afed-4dca-9a39-26b03675eec8 for instance with vm_state deleted and task_state None.
Oct 02 12:14:24 compute-0 nova_compute[192079]: 2025-10-02 12:14:24.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:14:25 compute-0 nova_compute[192079]: 2025-10-02 12:14:25.239 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:25 compute-0 nova_compute[192079]: 2025-10-02 12:14:25.671 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:28 compute-0 nova_compute[192079]: 2025-10-02 12:14:28.600 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:28 compute-0 nova_compute[192079]: 2025-10-02 12:14:28.781 2 DEBUG nova.compute.manager [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Stashing vm_state: active _prep_resize /usr/lib/python3.9/site-packages/nova/compute/manager.py:5560
Oct 02 12:14:28 compute-0 nova_compute[192079]: 2025-10-02 12:14:28.898 2 DEBUG oslo_concurrency.lockutils [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:28 compute-0 nova_compute[192079]: 2025-10-02 12:14:28.898 2 DEBUG oslo_concurrency.lockutils [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:28 compute-0 nova_compute[192079]: 2025-10-02 12:14:28.920 2 DEBUG nova.objects.instance [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lazy-loading 'pci_requests' on Instance uuid 6e45ea08-64c1-4434-9d80-94d4b7cec844 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:14:28 compute-0 nova_compute[192079]: 2025-10-02 12:14:28.935 2 DEBUG nova.virt.hardware [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:14:28 compute-0 nova_compute[192079]: 2025-10-02 12:14:28.935 2 INFO nova.compute.claims [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:14:28 compute-0 nova_compute[192079]: 2025-10-02 12:14:28.936 2 DEBUG nova.objects.instance [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lazy-loading 'resources' on Instance uuid 6e45ea08-64c1-4434-9d80-94d4b7cec844 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:14:28 compute-0 nova_compute[192079]: 2025-10-02 12:14:28.949 2 DEBUG nova.objects.instance [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lazy-loading 'pci_devices' on Instance uuid 6e45ea08-64c1-4434-9d80-94d4b7cec844 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:14:28 compute-0 nova_compute[192079]: 2025-10-02 12:14:28.997 2 INFO nova.compute.resource_tracker [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Updating resource usage from migration 72a59e8d-287b-4dea-bb81-1c51eb265546
Oct 02 12:14:28 compute-0 nova_compute[192079]: 2025-10-02 12:14:28.998 2 DEBUG nova.compute.resource_tracker [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Starting to track incoming migration 72a59e8d-287b-4dea-bb81-1c51eb265546 with flavor 9949d9da-6314-4ede-8797-6f2f0a6a64fc _update_usage_from_migration /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1431
Oct 02 12:14:29 compute-0 nova_compute[192079]: 2025-10-02 12:14:29.057 2 DEBUG nova.compute.provider_tree [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:14:29 compute-0 nova_compute[192079]: 2025-10-02 12:14:29.069 2 DEBUG nova.scheduler.client.report [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:14:29 compute-0 nova_compute[192079]: 2025-10-02 12:14:29.091 2 DEBUG oslo_concurrency.lockutils [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 0.192s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:29 compute-0 nova_compute[192079]: 2025-10-02 12:14:29.091 2 INFO nova.compute.manager [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Migrating
Oct 02 12:14:29 compute-0 podman[230543]: 2025-10-02 12:14:29.136903167 +0000 UTC m=+0.053694664 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:14:29 compute-0 podman[230544]: 2025-10-02 12:14:29.14433372 +0000 UTC m=+0.057762845 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, container_name=iscsid, managed_by=edpm_ansible, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=iscsid, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:14:30 compute-0 nova_compute[192079]: 2025-10-02 12:14:30.242 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:30 compute-0 nova_compute[192079]: 2025-10-02 12:14:30.674 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:31 compute-0 sshd-session[230586]: Accepted publickey for nova from 192.168.122.101 port 46574 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:14:31 compute-0 systemd[1]: Created slice User Slice of UID 42436.
Oct 02 12:14:31 compute-0 systemd[1]: Starting User Runtime Directory /run/user/42436...
Oct 02 12:14:31 compute-0 systemd-logind[827]: New session 44 of user nova.
Oct 02 12:14:31 compute-0 systemd[1]: Finished User Runtime Directory /run/user/42436.
Oct 02 12:14:31 compute-0 systemd[1]: Starting User Manager for UID 42436...
Oct 02 12:14:31 compute-0 systemd[230590]: pam_unix(systemd-user:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:14:31 compute-0 systemd[230590]: Queued start job for default target Main User Target.
Oct 02 12:14:31 compute-0 systemd[230590]: Created slice User Application Slice.
Oct 02 12:14:31 compute-0 systemd[230590]: Started Mark boot as successful after the user session has run 2 minutes.
Oct 02 12:14:31 compute-0 systemd[230590]: Started Daily Cleanup of User's Temporary Directories.
Oct 02 12:14:31 compute-0 systemd[230590]: Reached target Paths.
Oct 02 12:14:31 compute-0 systemd[230590]: Reached target Timers.
Oct 02 12:14:31 compute-0 systemd[230590]: Starting D-Bus User Message Bus Socket...
Oct 02 12:14:31 compute-0 systemd[230590]: Starting Create User's Volatile Files and Directories...
Oct 02 12:14:31 compute-0 systemd[230590]: Finished Create User's Volatile Files and Directories.
Oct 02 12:14:31 compute-0 systemd[230590]: Listening on D-Bus User Message Bus Socket.
Oct 02 12:14:31 compute-0 systemd[230590]: Reached target Sockets.
Oct 02 12:14:31 compute-0 systemd[230590]: Reached target Basic System.
Oct 02 12:14:31 compute-0 systemd[230590]: Reached target Main User Target.
Oct 02 12:14:31 compute-0 systemd[230590]: Startup finished in 142ms.
Oct 02 12:14:31 compute-0 systemd[1]: Started User Manager for UID 42436.
Oct 02 12:14:31 compute-0 systemd[1]: Started Session 44 of User nova.
Oct 02 12:14:31 compute-0 sshd-session[230586]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:14:31 compute-0 sshd-session[230605]: Received disconnect from 192.168.122.101 port 46574:11: disconnected by user
Oct 02 12:14:31 compute-0 sshd-session[230605]: Disconnected from user nova 192.168.122.101 port 46574
Oct 02 12:14:31 compute-0 sshd-session[230586]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:14:31 compute-0 systemd[1]: session-44.scope: Deactivated successfully.
Oct 02 12:14:31 compute-0 systemd-logind[827]: Session 44 logged out. Waiting for processes to exit.
Oct 02 12:14:31 compute-0 systemd-logind[827]: Removed session 44.
Oct 02 12:14:31 compute-0 sshd-session[230607]: Accepted publickey for nova from 192.168.122.101 port 46576 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:14:31 compute-0 nova_compute[192079]: 2025-10-02 12:14:31.978 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759407256.9778614, 6db1ac7f-726d-4ad6-8992-86f0c23d4d79 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:14:31 compute-0 nova_compute[192079]: 2025-10-02 12:14:31.979 2 INFO nova.compute.manager [-] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] VM Stopped (Lifecycle Event)
Oct 02 12:14:31 compute-0 systemd-logind[827]: New session 46 of user nova.
Oct 02 12:14:32 compute-0 systemd[1]: Started Session 46 of User nova.
Oct 02 12:14:32 compute-0 nova_compute[192079]: 2025-10-02 12:14:32.006 2 DEBUG nova.compute.manager [None req-c2b00c34-cd3f-4792-8f0a-7180f365deef - - - - - -] [instance: 6db1ac7f-726d-4ad6-8992-86f0c23d4d79] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:14:32 compute-0 sshd-session[230607]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:14:32 compute-0 sshd-session[230610]: Received disconnect from 192.168.122.101 port 46576:11: disconnected by user
Oct 02 12:14:32 compute-0 sshd-session[230610]: Disconnected from user nova 192.168.122.101 port 46576
Oct 02 12:14:32 compute-0 sshd-session[230607]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:14:32 compute-0 systemd[1]: session-46.scope: Deactivated successfully.
Oct 02 12:14:32 compute-0 systemd-logind[827]: Session 46 logged out. Waiting for processes to exit.
Oct 02 12:14:32 compute-0 systemd-logind[827]: Removed session 46.
Oct 02 12:14:32 compute-0 nova_compute[192079]: 2025-10-02 12:14:32.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:14:33 compute-0 nova_compute[192079]: 2025-10-02 12:14:33.679 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._run_pending_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:14:33 compute-0 nova_compute[192079]: 2025-10-02 12:14:33.679 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Cleaning up deleted instances _run_pending_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:11145
Oct 02 12:14:33 compute-0 nova_compute[192079]: 2025-10-02 12:14:33.694 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] There are 0 instances to clean _run_pending_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:11154
Oct 02 12:14:35 compute-0 nova_compute[192079]: 2025-10-02 12:14:35.208 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759407260.2064028, 0d926f1d-a2a1-4e3d-b0d0-072c744cd745 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:14:35 compute-0 nova_compute[192079]: 2025-10-02 12:14:35.208 2 INFO nova.compute.manager [-] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] VM Stopped (Lifecycle Event)
Oct 02 12:14:35 compute-0 nova_compute[192079]: 2025-10-02 12:14:35.227 2 DEBUG nova.compute.manager [None req-fc5a4ada-0648-42b1-a0c4-c8fa3f2a4182 - - - - - -] [instance: 0d926f1d-a2a1-4e3d-b0d0-072c744cd745] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:14:35 compute-0 nova_compute[192079]: 2025-10-02 12:14:35.245 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:35 compute-0 nova_compute[192079]: 2025-10-02 12:14:35.925 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:38 compute-0 podman[230612]: 2025-10-02 12:14:38.152730361 +0000 UTC m=+0.068086546 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, tcib_managed=true, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, container_name=ovn_metadata_agent, 
org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_metadata_agent, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001)
Oct 02 12:14:38 compute-0 podman[230614]: 2025-10-02 12:14:38.156109103 +0000 UTC m=+0.066222375 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:14:38 compute-0 podman[230613]: 2025-10-02 12:14:38.224798025 +0000 UTC m=+0.131413801 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, container_name=ovn_controller, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:14:40 compute-0 nova_compute[192079]: 2025-10-02 12:14:40.248 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:40 compute-0 nova_compute[192079]: 2025-10-02 12:14:40.926 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:42 compute-0 systemd[1]: Stopping User Manager for UID 42436...
Oct 02 12:14:42 compute-0 systemd[230590]: Activating special unit Exit the Session...
Oct 02 12:14:42 compute-0 systemd[230590]: Stopped target Main User Target.
Oct 02 12:14:42 compute-0 systemd[230590]: Stopped target Basic System.
Oct 02 12:14:42 compute-0 systemd[230590]: Stopped target Paths.
Oct 02 12:14:42 compute-0 systemd[230590]: Stopped target Sockets.
Oct 02 12:14:42 compute-0 systemd[230590]: Stopped target Timers.
Oct 02 12:14:42 compute-0 systemd[230590]: Stopped Mark boot as successful after the user session has run 2 minutes.
Oct 02 12:14:42 compute-0 systemd[230590]: Stopped Daily Cleanup of User's Temporary Directories.
Oct 02 12:14:42 compute-0 systemd[230590]: Closed D-Bus User Message Bus Socket.
Oct 02 12:14:42 compute-0 systemd[230590]: Stopped Create User's Volatile Files and Directories.
Oct 02 12:14:42 compute-0 systemd[230590]: Removed slice User Application Slice.
Oct 02 12:14:42 compute-0 systemd[230590]: Reached target Shutdown.
Oct 02 12:14:42 compute-0 systemd[230590]: Finished Exit the Session.
Oct 02 12:14:42 compute-0 systemd[230590]: Reached target Exit the Session.
Oct 02 12:14:42 compute-0 systemd[1]: user@42436.service: Deactivated successfully.
Oct 02 12:14:42 compute-0 systemd[1]: Stopped User Manager for UID 42436.
Oct 02 12:14:42 compute-0 systemd[1]: Stopping User Runtime Directory /run/user/42436...
Oct 02 12:14:42 compute-0 systemd[1]: run-user-42436.mount: Deactivated successfully.
Oct 02 12:14:42 compute-0 systemd[1]: user-runtime-dir@42436.service: Deactivated successfully.
Oct 02 12:14:42 compute-0 systemd[1]: Stopped User Runtime Directory /run/user/42436.
Oct 02 12:14:42 compute-0 systemd[1]: Removed slice User Slice of UID 42436.
Oct 02 12:14:42 compute-0 nova_compute[192079]: 2025-10-02 12:14:42.490 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_power_states run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:14:45 compute-0 nova_compute[192079]: 2025-10-02 12:14:45.251 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:45 compute-0 nova_compute[192079]: 2025-10-02 12:14:45.927 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:47 compute-0 sshd-session[230683]: Accepted publickey for nova from 192.168.122.101 port 54660 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:14:47 compute-0 systemd[1]: Created slice User Slice of UID 42436.
Oct 02 12:14:47 compute-0 systemd[1]: Starting User Runtime Directory /run/user/42436...
Oct 02 12:14:47 compute-0 systemd-logind[827]: New session 47 of user nova.
Oct 02 12:14:47 compute-0 systemd[1]: Finished User Runtime Directory /run/user/42436.
Oct 02 12:14:47 compute-0 systemd[1]: Starting User Manager for UID 42436...
Oct 02 12:14:47 compute-0 systemd[230700]: pam_unix(systemd-user:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:14:47 compute-0 podman[230685]: 2025-10-02 12:14:47.764946409 +0000 UTC m=+0.079174837 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm, org.label-schema.license=GPLv2, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, 
tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ceilometer_agent_compute)
Oct 02 12:14:47 compute-0 systemd[230700]: Queued start job for default target Main User Target.
Oct 02 12:14:47 compute-0 systemd[230700]: Created slice User Application Slice.
Oct 02 12:14:47 compute-0 systemd[230700]: Started Mark boot as successful after the user session has run 2 minutes.
Oct 02 12:14:47 compute-0 systemd[230700]: Started Daily Cleanup of User's Temporary Directories.
Oct 02 12:14:47 compute-0 systemd[230700]: Reached target Paths.
Oct 02 12:14:47 compute-0 systemd[230700]: Reached target Timers.
Oct 02 12:14:47 compute-0 systemd[230700]: Starting D-Bus User Message Bus Socket...
Oct 02 12:14:47 compute-0 systemd[230700]: Starting Create User's Volatile Files and Directories...
Oct 02 12:14:47 compute-0 systemd[230700]: Finished Create User's Volatile Files and Directories.
Oct 02 12:14:47 compute-0 systemd[230700]: Listening on D-Bus User Message Bus Socket.
Oct 02 12:14:47 compute-0 systemd[230700]: Reached target Sockets.
Oct 02 12:14:47 compute-0 systemd[230700]: Reached target Basic System.
Oct 02 12:14:47 compute-0 systemd[230700]: Reached target Main User Target.
Oct 02 12:14:47 compute-0 systemd[230700]: Startup finished in 135ms.
Oct 02 12:14:47 compute-0 systemd[1]: Started User Manager for UID 42436.
Oct 02 12:14:47 compute-0 systemd[1]: Started Session 47 of User nova.
Oct 02 12:14:47 compute-0 sshd-session[230683]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:14:48 compute-0 sshd-session[230722]: Received disconnect from 192.168.122.101 port 54660:11: disconnected by user
Oct 02 12:14:48 compute-0 sshd-session[230722]: Disconnected from user nova 192.168.122.101 port 54660
Oct 02 12:14:48 compute-0 sshd-session[230683]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:14:48 compute-0 systemd[1]: session-47.scope: Deactivated successfully.
Oct 02 12:14:48 compute-0 systemd-logind[827]: Session 47 logged out. Waiting for processes to exit.
Oct 02 12:14:48 compute-0 systemd-logind[827]: Removed session 47.
Oct 02 12:14:48 compute-0 sshd-session[230724]: Accepted publickey for nova from 192.168.122.101 port 54672 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:14:48 compute-0 systemd-logind[827]: New session 49 of user nova.
Oct 02 12:14:48 compute-0 systemd[1]: Started Session 49 of User nova.
Oct 02 12:14:48 compute-0 sshd-session[230724]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:14:49 compute-0 sshd-session[230727]: Received disconnect from 192.168.122.101 port 54672:11: disconnected by user
Oct 02 12:14:49 compute-0 sshd-session[230727]: Disconnected from user nova 192.168.122.101 port 54672
Oct 02 12:14:49 compute-0 sshd-session[230724]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:14:49 compute-0 systemd[1]: session-49.scope: Deactivated successfully.
Oct 02 12:14:49 compute-0 systemd-logind[827]: Session 49 logged out. Waiting for processes to exit.
Oct 02 12:14:49 compute-0 systemd-logind[827]: Removed session 49.
Oct 02 12:14:49 compute-0 sshd-session[230729]: Accepted publickey for nova from 192.168.122.101 port 48306 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:14:49 compute-0 systemd-logind[827]: New session 50 of user nova.
Oct 02 12:14:49 compute-0 systemd[1]: Started Session 50 of User nova.
Oct 02 12:14:49 compute-0 sshd-session[230729]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:14:49 compute-0 sshd-session[230732]: Received disconnect from 192.168.122.101 port 48306:11: disconnected by user
Oct 02 12:14:49 compute-0 sshd-session[230732]: Disconnected from user nova 192.168.122.101 port 48306
Oct 02 12:14:49 compute-0 sshd-session[230729]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:14:49 compute-0 systemd[1]: session-50.scope: Deactivated successfully.
Oct 02 12:14:49 compute-0 systemd-logind[827]: Session 50 logged out. Waiting for processes to exit.
Oct 02 12:14:49 compute-0 systemd-logind[827]: Removed session 50.
Oct 02 12:14:50 compute-0 nova_compute[192079]: 2025-10-02 12:14:50.050 2 DEBUG nova.compute.manager [req-ddfb7129-54aa-4ce0-a679-195ff6ea142d req-03faada7-231b-43f3-8050-d2e1cb031e57 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Received event network-vif-unplugged-b1b379f4-7eb3-40e5-8edd-d903c05484af external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:14:50 compute-0 nova_compute[192079]: 2025-10-02 12:14:50.052 2 DEBUG oslo_concurrency.lockutils [req-ddfb7129-54aa-4ce0-a679-195ff6ea142d req-03faada7-231b-43f3-8050-d2e1cb031e57 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "6e45ea08-64c1-4434-9d80-94d4b7cec844-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:50 compute-0 nova_compute[192079]: 2025-10-02 12:14:50.052 2 DEBUG oslo_concurrency.lockutils [req-ddfb7129-54aa-4ce0-a679-195ff6ea142d req-03faada7-231b-43f3-8050-d2e1cb031e57 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6e45ea08-64c1-4434-9d80-94d4b7cec844-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:50 compute-0 nova_compute[192079]: 2025-10-02 12:14:50.052 2 DEBUG oslo_concurrency.lockutils [req-ddfb7129-54aa-4ce0-a679-195ff6ea142d req-03faada7-231b-43f3-8050-d2e1cb031e57 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6e45ea08-64c1-4434-9d80-94d4b7cec844-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:50 compute-0 nova_compute[192079]: 2025-10-02 12:14:50.052 2 DEBUG nova.compute.manager [req-ddfb7129-54aa-4ce0-a679-195ff6ea142d req-03faada7-231b-43f3-8050-d2e1cb031e57 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] No waiting events found dispatching network-vif-unplugged-b1b379f4-7eb3-40e5-8edd-d903c05484af pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:14:50 compute-0 nova_compute[192079]: 2025-10-02 12:14:50.052 2 WARNING nova.compute.manager [req-ddfb7129-54aa-4ce0-a679-195ff6ea142d req-03faada7-231b-43f3-8050-d2e1cb031e57 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Received unexpected event network-vif-unplugged-b1b379f4-7eb3-40e5-8edd-d903c05484af for instance with vm_state active and task_state resize_migrated.
Oct 02 12:14:50 compute-0 nova_compute[192079]: 2025-10-02 12:14:50.254 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:50 compute-0 nova_compute[192079]: 2025-10-02 12:14:50.427 2 INFO nova.network.neutron [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Updating port b1b379f4-7eb3-40e5-8edd-d903c05484af with attributes {'binding:host_id': 'compute-0.ctlplane.example.com', 'device_owner': 'compute:nova'}
Oct 02 12:14:50 compute-0 nova_compute[192079]: 2025-10-02 12:14:50.930 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:53 compute-0 nova_compute[192079]: 2025-10-02 12:14:53.078 2 DEBUG nova.compute.manager [req-caf6901b-6312-46b4-a2cb-f4564773e559 req-05f8774d-e807-448e-860f-65e4829b7f57 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Received event network-vif-plugged-b1b379f4-7eb3-40e5-8edd-d903c05484af external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:14:53 compute-0 nova_compute[192079]: 2025-10-02 12:14:53.078 2 DEBUG oslo_concurrency.lockutils [req-caf6901b-6312-46b4-a2cb-f4564773e559 req-05f8774d-e807-448e-860f-65e4829b7f57 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "6e45ea08-64c1-4434-9d80-94d4b7cec844-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:53 compute-0 nova_compute[192079]: 2025-10-02 12:14:53.079 2 DEBUG oslo_concurrency.lockutils [req-caf6901b-6312-46b4-a2cb-f4564773e559 req-05f8774d-e807-448e-860f-65e4829b7f57 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6e45ea08-64c1-4434-9d80-94d4b7cec844-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:53 compute-0 nova_compute[192079]: 2025-10-02 12:14:53.079 2 DEBUG oslo_concurrency.lockutils [req-caf6901b-6312-46b4-a2cb-f4564773e559 req-05f8774d-e807-448e-860f-65e4829b7f57 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6e45ea08-64c1-4434-9d80-94d4b7cec844-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:53 compute-0 nova_compute[192079]: 2025-10-02 12:14:53.080 2 DEBUG nova.compute.manager [req-caf6901b-6312-46b4-a2cb-f4564773e559 req-05f8774d-e807-448e-860f-65e4829b7f57 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] No waiting events found dispatching network-vif-plugged-b1b379f4-7eb3-40e5-8edd-d903c05484af pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:14:53 compute-0 nova_compute[192079]: 2025-10-02 12:14:53.080 2 WARNING nova.compute.manager [req-caf6901b-6312-46b4-a2cb-f4564773e559 req-05f8774d-e807-448e-860f-65e4829b7f57 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Received unexpected event network-vif-plugged-b1b379f4-7eb3-40e5-8edd-d903c05484af for instance with vm_state active and task_state resize_migrated.
Oct 02 12:14:54 compute-0 podman[230734]: 2025-10-02 12:14:54.154724392 +0000 UTC m=+0.061511878 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, url=https://catalog.redhat.com/en/search?searchType=containers, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, distribution-scope=public, maintainer=Red Hat, Inc., summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, container_name=openstack_network_exporter, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, managed_by=edpm_ansible, release=1755695350, com.redhat.component=ubi9-minimal-container, config_id=edpm, io.openshift.expose-services=, io.openshift.tags=minimal rhel9, name=ubi9-minimal, vcs-type=git, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.k8s.description=The Universal Base Image Minimal 
is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., version=9.6, io.buildah.version=1.33.7, vendor=Red Hat, Inc., build-date=2025-08-20T13:12:41, architecture=x86_64, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly.)
Oct 02 12:14:54 compute-0 podman[230735]: 2025-10-02 12:14:54.164921 +0000 UTC m=+0.074963715 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, container_name=multipathd, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:14:54 compute-0 nova_compute[192079]: 2025-10-02 12:14:54.256 2 DEBUG oslo_concurrency.lockutils [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Acquiring lock "refresh_cache-6e45ea08-64c1-4434-9d80-94d4b7cec844" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:14:54 compute-0 nova_compute[192079]: 2025-10-02 12:14:54.256 2 DEBUG oslo_concurrency.lockutils [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Acquired lock "refresh_cache-6e45ea08-64c1-4434-9d80-94d4b7cec844" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:14:54 compute-0 nova_compute[192079]: 2025-10-02 12:14:54.257 2 DEBUG nova.network.neutron [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:14:55 compute-0 nova_compute[192079]: 2025-10-02 12:14:55.257 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:55 compute-0 nova_compute[192079]: 2025-10-02 12:14:55.953 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:57 compute-0 nova_compute[192079]: 2025-10-02 12:14:57.200 2 DEBUG nova.compute.manager [req-e20dea4a-cc82-49f6-b8ee-d5b4c8a9a229 req-e0a85e33-cfbc-4f69-a8f9-27eb7113d897 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Received event network-changed-b1b379f4-7eb3-40e5-8edd-d903c05484af external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:14:57 compute-0 nova_compute[192079]: 2025-10-02 12:14:57.201 2 DEBUG nova.compute.manager [req-e20dea4a-cc82-49f6-b8ee-d5b4c8a9a229 req-e0a85e33-cfbc-4f69-a8f9-27eb7113d897 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Refreshing instance network info cache due to event network-changed-b1b379f4-7eb3-40e5-8edd-d903c05484af. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:14:57 compute-0 nova_compute[192079]: 2025-10-02 12:14:57.202 2 DEBUG oslo_concurrency.lockutils [req-e20dea4a-cc82-49f6-b8ee-d5b4c8a9a229 req-e0a85e33-cfbc-4f69-a8f9-27eb7113d897 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-6e45ea08-64c1-4434-9d80-94d4b7cec844" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.199 2 DEBUG nova.network.neutron [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Updating instance_info_cache with network_info: [{"id": "b1b379f4-7eb3-40e5-8edd-d903c05484af", "address": "fa:16:3e:5d:22:98", "network": {"id": "d6de4737-ca60-4c8d-bfd5-687f9366ec8b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1853814355-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffae703d68b24b9c89686c149113fc2b", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb1b379f4-7e", "ovs_interfaceid": "b1b379f4-7eb3-40e5-8edd-d903c05484af", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.225 2 DEBUG oslo_concurrency.lockutils [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Releasing lock "refresh_cache-6e45ea08-64c1-4434-9d80-94d4b7cec844" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.228 2 DEBUG oslo_concurrency.lockutils [req-e20dea4a-cc82-49f6-b8ee-d5b4c8a9a229 req-e0a85e33-cfbc-4f69-a8f9-27eb7113d897 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-6e45ea08-64c1-4434-9d80-94d4b7cec844" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.228 2 DEBUG nova.network.neutron [req-e20dea4a-cc82-49f6-b8ee-d5b4c8a9a229 req-e0a85e33-cfbc-4f69-a8f9-27eb7113d897 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Refreshing network info cache for port b1b379f4-7eb3-40e5-8edd-d903c05484af _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.348 2 DEBUG nova.virt.libvirt.driver [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Starting finish_migration finish_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:11698
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.349 2 DEBUG nova.virt.libvirt.driver [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Instance directory exists: not creating _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4719
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.350 2 INFO nova.virt.libvirt.driver [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Creating image(s)
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.351 2 DEBUG nova.objects.instance [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lazy-loading 'trusted_certs' on Instance uuid 6e45ea08-64c1-4434-9d80-94d4b7cec844 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.367 2 DEBUG oslo_concurrency.processutils [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.422 2 DEBUG oslo_concurrency.processutils [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.423 2 DEBUG nova.virt.disk.api [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Checking if we can resize image /var/lib/nova/instances/6e45ea08-64c1-4434-9d80-94d4b7cec844/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.423 2 DEBUG oslo_concurrency.processutils [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/6e45ea08-64c1-4434-9d80-94d4b7cec844/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.484 2 DEBUG oslo_concurrency.processutils [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/6e45ea08-64c1-4434-9d80-94d4b7cec844/disk --force-share --output=json" returned: 0 in 0.060s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.485 2 DEBUG nova.virt.disk.api [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Cannot resize image /var/lib/nova/instances/6e45ea08-64c1-4434-9d80-94d4b7cec844/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.497 2 DEBUG nova.virt.libvirt.driver [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Did not create local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4859
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.498 2 DEBUG nova.virt.libvirt.driver [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Ensure instance console log exists: /var/lib/nova/instances/6e45ea08-64c1-4434-9d80-94d4b7cec844/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.498 2 DEBUG oslo_concurrency.lockutils [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.498 2 DEBUG oslo_concurrency.lockutils [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.499 2 DEBUG oslo_concurrency.lockutils [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.501 2 DEBUG nova.virt.libvirt.driver [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Start _get_guest_xml network_info=[{"id": "b1b379f4-7eb3-40e5-8edd-d903c05484af", "address": "fa:16:3e:5d:22:98", "network": {"id": "d6de4737-ca60-4c8d-bfd5-687f9366ec8b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1853814355-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [], "label": "tempest-ServerDiskConfigTestJSON-1853814355-network", "vif_mac": "fa:16:3e:5d:22:98"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffae703d68b24b9c89686c149113fc2b", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb1b379f4-7e", "ovs_interfaceid": "b1b379f4-7eb3-40e5-8edd-d903c05484af", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.506 2 WARNING nova.virt.libvirt.driver [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.512 2 DEBUG nova.virt.libvirt.host [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.513 2 DEBUG nova.virt.libvirt.host [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.517 2 DEBUG nova.virt.libvirt.host [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.517 2 DEBUG nova.virt.libvirt.host [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.518 2 DEBUG nova.virt.libvirt.driver [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.518 2 DEBUG nova.virt.hardware [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:25Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9949d9da-6314-4ede-8797-6f2f0a6a64fc',id=2,is_public=True,memory_mb=192,name='m1.micro',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.518 2 DEBUG nova.virt.hardware [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.519 2 DEBUG nova.virt.hardware [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.519 2 DEBUG nova.virt.hardware [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.519 2 DEBUG nova.virt.hardware [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.519 2 DEBUG nova.virt.hardware [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.519 2 DEBUG nova.virt.hardware [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.520 2 DEBUG nova.virt.hardware [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.520 2 DEBUG nova.virt.hardware [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.520 2 DEBUG nova.virt.hardware [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.520 2 DEBUG nova.virt.hardware [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.520 2 DEBUG nova.objects.instance [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lazy-loading 'vcpu_model' on Instance uuid 6e45ea08-64c1-4434-9d80-94d4b7cec844 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.536 2 DEBUG oslo_concurrency.processutils [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/6e45ea08-64c1-4434-9d80-94d4b7cec844/disk.config --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.590 2 DEBUG oslo_concurrency.processutils [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/6e45ea08-64c1-4434-9d80-94d4b7cec844/disk.config --force-share --output=json" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.591 2 DEBUG oslo_concurrency.lockutils [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Acquiring lock "/var/lib/nova/instances/6e45ea08-64c1-4434-9d80-94d4b7cec844/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.591 2 DEBUG oslo_concurrency.lockutils [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lock "/var/lib/nova/instances/6e45ea08-64c1-4434-9d80-94d4b7cec844/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.592 2 DEBUG oslo_concurrency.lockutils [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lock "/var/lib/nova/instances/6e45ea08-64c1-4434-9d80-94d4b7cec844/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.593 2 DEBUG nova.virt.libvirt.vif [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:14:08Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerDiskConfigTestJSON-server-922274791',display_name='tempest-ServerDiskConfigTestJSON-server-922274791',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(2),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverdiskconfigtestjson-server-922274791',id=72,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=2,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:14:23Z,launched_on='compute-1.ctlplane.example.com',locked=False,locked_by=None,memory_mb=192,metadata={},migration_context=MigrationContext,new_flavor=Flavor(2),node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=Flavor(1),os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='ffae703d68b24b9c89686c149113fc2b',ramdisk_id='',reservation_id='r-7a20jfzw',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=ServiceList,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_
video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',old_vm_state='active',owner_project_name='tempest-ServerDiskConfigTestJSON-1763056137',owner_user_name='tempest-ServerDiskConfigTestJSON-1763056137-project-member'},tags=<?>,task_state='resize_finish',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:14:50Z,user_data=None,user_id='def48c13fd6a43ba88836b753986a731',uuid=6e45ea08-64c1-4434-9d80-94d4b7cec844,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "b1b379f4-7eb3-40e5-8edd-d903c05484af", "address": "fa:16:3e:5d:22:98", "network": {"id": "d6de4737-ca60-4c8d-bfd5-687f9366ec8b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1853814355-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [], "label": "tempest-ServerDiskConfigTestJSON-1853814355-network", "vif_mac": "fa:16:3e:5d:22:98"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffae703d68b24b9c89686c149113fc2b", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb1b379f4-7e", "ovs_interfaceid": "b1b379f4-7eb3-40e5-8edd-d903c05484af", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.593 2 DEBUG nova.network.os_vif_util [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Converting VIF {"id": "b1b379f4-7eb3-40e5-8edd-d903c05484af", "address": "fa:16:3e:5d:22:98", "network": {"id": "d6de4737-ca60-4c8d-bfd5-687f9366ec8b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1853814355-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [], "label": "tempest-ServerDiskConfigTestJSON-1853814355-network", "vif_mac": "fa:16:3e:5d:22:98"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffae703d68b24b9c89686c149113fc2b", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb1b379f4-7e", "ovs_interfaceid": "b1b379f4-7eb3-40e5-8edd-d903c05484af", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.594 2 DEBUG nova.network.os_vif_util [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:5d:22:98,bridge_name='br-int',has_traffic_filtering=True,id=b1b379f4-7eb3-40e5-8edd-d903c05484af,network=Network(d6de4737-ca60-4c8d-bfd5-687f9366ec8b),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapb1b379f4-7e') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.596 2 DEBUG nova.virt.libvirt.driver [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:14:58 compute-0 nova_compute[192079]:   <uuid>6e45ea08-64c1-4434-9d80-94d4b7cec844</uuid>
Oct 02 12:14:58 compute-0 nova_compute[192079]:   <name>instance-00000048</name>
Oct 02 12:14:58 compute-0 nova_compute[192079]:   <memory>196608</memory>
Oct 02 12:14:58 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:14:58 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:14:58 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:       <nova:name>tempest-ServerDiskConfigTestJSON-server-922274791</nova:name>
Oct 02 12:14:58 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:14:58</nova:creationTime>
Oct 02 12:14:58 compute-0 nova_compute[192079]:       <nova:flavor name="m1.micro">
Oct 02 12:14:58 compute-0 nova_compute[192079]:         <nova:memory>192</nova:memory>
Oct 02 12:14:58 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:14:58 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:14:58 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:14:58 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:14:58 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:14:58 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:14:58 compute-0 nova_compute[192079]:         <nova:user uuid="def48c13fd6a43ba88836b753986a731">tempest-ServerDiskConfigTestJSON-1763056137-project-member</nova:user>
Oct 02 12:14:58 compute-0 nova_compute[192079]:         <nova:project uuid="ffae703d68b24b9c89686c149113fc2b">tempest-ServerDiskConfigTestJSON-1763056137</nova:project>
Oct 02 12:14:58 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:14:58 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:14:58 compute-0 nova_compute[192079]:         <nova:port uuid="b1b379f4-7eb3-40e5-8edd-d903c05484af">
Oct 02 12:14:58 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.6" ipVersion="4"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:14:58 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:14:58 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:14:58 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <system>
Oct 02 12:14:58 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:14:58 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:14:58 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:14:58 compute-0 nova_compute[192079]:       <entry name="serial">6e45ea08-64c1-4434-9d80-94d4b7cec844</entry>
Oct 02 12:14:58 compute-0 nova_compute[192079]:       <entry name="uuid">6e45ea08-64c1-4434-9d80-94d4b7cec844</entry>
Oct 02 12:14:58 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     </system>
Oct 02 12:14:58 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:14:58 compute-0 nova_compute[192079]:   <os>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:   </os>
Oct 02 12:14:58 compute-0 nova_compute[192079]:   <features>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:   </features>
Oct 02 12:14:58 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:14:58 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:14:58 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:14:58 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/6e45ea08-64c1-4434-9d80-94d4b7cec844/disk"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:14:58 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/6e45ea08-64c1-4434-9d80-94d4b7cec844/disk.config"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:14:58 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:5d:22:98"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:       <target dev="tapb1b379f4-7e"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:14:58 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/6e45ea08-64c1-4434-9d80-94d4b7cec844/console.log" append="off"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <video>
Oct 02 12:14:58 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     </video>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:14:58 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:14:58 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:14:58 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:14:58 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:14:58 compute-0 nova_compute[192079]: </domain>
Oct 02 12:14:58 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.597 2 DEBUG nova.virt.libvirt.vif [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:14:08Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerDiskConfigTestJSON-server-922274791',display_name='tempest-ServerDiskConfigTestJSON-server-922274791',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(2),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverdiskconfigtestjson-server-922274791',id=72,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=2,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:14:23Z,launched_on='compute-1.ctlplane.example.com',locked=False,locked_by=None,memory_mb=192,metadata={},migration_context=MigrationContext,new_flavor=Flavor(2),node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=Flavor(1),os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='ffae703d68b24b9c89686c149113fc2b',ramdisk_id='',reservation_id='r-7a20jfzw',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=ServiceList,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_
video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',old_vm_state='active',owner_project_name='tempest-ServerDiskConfigTestJSON-1763056137',owner_user_name='tempest-ServerDiskConfigTestJSON-1763056137-project-member'},tags=<?>,task_state='resize_finish',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:14:50Z,user_data=None,user_id='def48c13fd6a43ba88836b753986a731',uuid=6e45ea08-64c1-4434-9d80-94d4b7cec844,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "b1b379f4-7eb3-40e5-8edd-d903c05484af", "address": "fa:16:3e:5d:22:98", "network": {"id": "d6de4737-ca60-4c8d-bfd5-687f9366ec8b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1853814355-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [], "label": "tempest-ServerDiskConfigTestJSON-1853814355-network", "vif_mac": "fa:16:3e:5d:22:98"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffae703d68b24b9c89686c149113fc2b", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb1b379f4-7e", "ovs_interfaceid": "b1b379f4-7eb3-40e5-8edd-d903c05484af", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.598 2 DEBUG nova.network.os_vif_util [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Converting VIF {"id": "b1b379f4-7eb3-40e5-8edd-d903c05484af", "address": "fa:16:3e:5d:22:98", "network": {"id": "d6de4737-ca60-4c8d-bfd5-687f9366ec8b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1853814355-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [], "label": "tempest-ServerDiskConfigTestJSON-1853814355-network", "vif_mac": "fa:16:3e:5d:22:98"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffae703d68b24b9c89686c149113fc2b", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb1b379f4-7e", "ovs_interfaceid": "b1b379f4-7eb3-40e5-8edd-d903c05484af", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.598 2 DEBUG nova.network.os_vif_util [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:5d:22:98,bridge_name='br-int',has_traffic_filtering=True,id=b1b379f4-7eb3-40e5-8edd-d903c05484af,network=Network(d6de4737-ca60-4c8d-bfd5-687f9366ec8b),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapb1b379f4-7e') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.598 2 DEBUG os_vif [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:5d:22:98,bridge_name='br-int',has_traffic_filtering=True,id=b1b379f4-7eb3-40e5-8edd-d903c05484af,network=Network(d6de4737-ca60-4c8d-bfd5-687f9366ec8b),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapb1b379f4-7e') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.599 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.599 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.600 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.602 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.602 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapb1b379f4-7e, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.602 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapb1b379f4-7e, col_values=(('external_ids', {'iface-id': 'b1b379f4-7eb3-40e5-8edd-d903c05484af', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:5d:22:98', 'vm-uuid': '6e45ea08-64c1-4434-9d80-94d4b7cec844'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.605 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:58 compute-0 NetworkManager[51160]: <info>  [1759407298.6066] manager: (tapb1b379f4-7e): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/119)
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.608 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.612 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.614 2 INFO os_vif [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:5d:22:98,bridge_name='br-int',has_traffic_filtering=True,id=b1b379f4-7eb3-40e5-8edd-d903c05484af,network=Network(d6de4737-ca60-4c8d-bfd5-687f9366ec8b),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapb1b379f4-7e')
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.809 2 DEBUG nova.virt.libvirt.driver [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.809 2 DEBUG nova.virt.libvirt.driver [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.809 2 DEBUG nova.virt.libvirt.driver [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] No VIF found with MAC fa:16:3e:5d:22:98, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.810 2 INFO nova.virt.libvirt.driver [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Using config drive
Oct 02 12:14:58 compute-0 kernel: tapb1b379f4-7e: entered promiscuous mode
Oct 02 12:14:58 compute-0 NetworkManager[51160]: <info>  [1759407298.8692] manager: (tapb1b379f4-7e): new Tun device (/org/freedesktop/NetworkManager/Devices/120)
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.870 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:58 compute-0 ovn_controller[94336]: 2025-10-02T12:14:58Z|00236|binding|INFO|Claiming lport b1b379f4-7eb3-40e5-8edd-d903c05484af for this chassis.
Oct 02 12:14:58 compute-0 ovn_controller[94336]: 2025-10-02T12:14:58Z|00237|binding|INFO|b1b379f4-7eb3-40e5-8edd-d903c05484af: Claiming fa:16:3e:5d:22:98 10.100.0.6
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.878 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:58.894 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:5d:22:98 10.100.0.6'], port_security=['fa:16:3e:5d:22:98 10.100.0.6'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.6/28', 'neutron:device_id': '6e45ea08-64c1-4434-9d80-94d4b7cec844', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-d6de4737-ca60-4c8d-bfd5-687f9366ec8b', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'ffae703d68b24b9c89686c149113fc2b', 'neutron:revision_number': '6', 'neutron:security_group_ids': '64970375-b20e-4c18-bfb5-2a0465f8be7d', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=9476db85-7514-407a-b55a-3d3c703e8f7b, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=b1b379f4-7eb3-40e5-8edd-d903c05484af) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:14:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:58.895 103294 INFO neutron.agent.ovn.metadata.agent [-] Port b1b379f4-7eb3-40e5-8edd-d903c05484af in datapath d6de4737-ca60-4c8d-bfd5-687f9366ec8b bound to our chassis
Oct 02 12:14:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:58.896 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network d6de4737-ca60-4c8d-bfd5-687f9366ec8b
Oct 02 12:14:58 compute-0 systemd-udevd[230802]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:14:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:58.910 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9c9889f6-c04e-42f8-a7fe-7b98f74fa0a2]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:58.910 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tapd6de4737-c1 in ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:14:58 compute-0 systemd-machined[152150]: New machine qemu-35-instance-00000048.
Oct 02 12:14:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:58.913 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tapd6de4737-c0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:14:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:58.913 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[551d2948-df08-43b0-82d2-f314b2fb75d0]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:58.913 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a4a96417-2fbb-44a3-a1d7-0daaa4830fdb]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:58 compute-0 NetworkManager[51160]: <info>  [1759407298.9265] device (tapb1b379f4-7e): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:14:58 compute-0 NetworkManager[51160]: <info>  [1759407298.9272] device (tapb1b379f4-7e): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:14:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:58.929 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[5564e3e0-a9a1-4c0a-8fe3-b8b47572484c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:58.951 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c16db636-8fb4-4f2b-b5c7-e4f4664f5040]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:58 compute-0 ovn_controller[94336]: 2025-10-02T12:14:58Z|00238|binding|INFO|Setting lport b1b379f4-7eb3-40e5-8edd-d903c05484af ovn-installed in OVS
Oct 02 12:14:58 compute-0 ovn_controller[94336]: 2025-10-02T12:14:58Z|00239|binding|INFO|Setting lport b1b379f4-7eb3-40e5-8edd-d903c05484af up in Southbound
Oct 02 12:14:58 compute-0 systemd[1]: Started Virtual Machine qemu-35-instance-00000048.
Oct 02 12:14:58 compute-0 nova_compute[192079]: 2025-10-02 12:14:58.955 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:58.982 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[2cbd87d9-8800-488f-970f-bac862454539]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:58.987 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[74a72861-5264-4ea1-b2cd-cd96ec2445fe]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:58 compute-0 NetworkManager[51160]: <info>  [1759407298.9885] manager: (tapd6de4737-c0): new Veth device (/org/freedesktop/NetworkManager/Devices/121)
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:59.026 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[2597fa1c-22cc-4835-848b-13bd54522119]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:59.030 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[e8141642-fead-4fdc-877d-3c39001c3aca]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:59 compute-0 NetworkManager[51160]: <info>  [1759407299.0530] device (tapd6de4737-c0): carrier: link connected
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:59.058 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[72db7138-3eb4-4edc-bb47-8c05a92fab73]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:59.073 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4813992e-9299-4204-84f7-34a4005f2fc8]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapd6de4737-c1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:bd:c9:1f'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 74], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 529668, 'reachable_time': 39507, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 230834, 'error': None, 'target': 'ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:59.095 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1d1f8a27-cf40-4535-8594-efdd33f0cabc]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:febd:c91f'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 529668, 'tstamp': 529668}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 230835, 'error': None, 'target': 'ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:59.109 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[93202e44-00fd-4135-aa08-0a66dcb2be0f]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapd6de4737-c1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:bd:c9:1f'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 74], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 529668, 'reachable_time': 39507, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 230836, 'error': None, 'target': 'ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:59.143 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f52b1584-9dce-4cb6-b8ac-41052ae10e95]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:59.201 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c2c0117e-5273-43f9-ac22-430a41c8409f]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:59.202 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapd6de4737-c0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:59.203 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:59.203 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapd6de4737-c0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:14:59 compute-0 NetworkManager[51160]: <info>  [1759407299.2056] manager: (tapd6de4737-c0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/122)
Oct 02 12:14:59 compute-0 kernel: tapd6de4737-c0: entered promiscuous mode
Oct 02 12:14:59 compute-0 nova_compute[192079]: 2025-10-02 12:14:59.205 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:59.208 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tapd6de4737-c0, col_values=(('external_ids', {'iface-id': 'cc451eb7-bf34-4b54-96d8-b834f11e06fb'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:14:59 compute-0 ovn_controller[94336]: 2025-10-02T12:14:59Z|00240|binding|INFO|Releasing lport cc451eb7-bf34-4b54-96d8-b834f11e06fb from this chassis (sb_readonly=0)
Oct 02 12:14:59 compute-0 nova_compute[192079]: 2025-10-02 12:14:59.209 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:59.211 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/d6de4737-ca60-4c8d-bfd5-687f9366ec8b.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/d6de4737-ca60-4c8d-bfd5-687f9366ec8b.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:59.212 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c800868b-e78f-47be-9c5d-5037f4334564]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:59.212 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-d6de4737-ca60-4c8d-bfd5-687f9366ec8b
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/d6de4737-ca60-4c8d-bfd5-687f9366ec8b.pid.haproxy
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID d6de4737-ca60-4c8d-bfd5-687f9366ec8b
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:14:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:14:59.213 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b', 'env', 'PROCESS_TAG=haproxy-d6de4737-ca60-4c8d-bfd5-687f9366ec8b', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/d6de4737-ca60-4c8d-bfd5-687f9366ec8b.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:14:59 compute-0 nova_compute[192079]: 2025-10-02 12:14:59.221 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:14:59 compute-0 systemd[1]: Stopping User Manager for UID 42436...
Oct 02 12:14:59 compute-0 systemd[230700]: Activating special unit Exit the Session...
Oct 02 12:14:59 compute-0 systemd[230700]: Stopped target Main User Target.
Oct 02 12:14:59 compute-0 systemd[230700]: Stopped target Basic System.
Oct 02 12:14:59 compute-0 systemd[230700]: Stopped target Paths.
Oct 02 12:14:59 compute-0 systemd[230700]: Stopped target Sockets.
Oct 02 12:14:59 compute-0 systemd[230700]: Stopped target Timers.
Oct 02 12:14:59 compute-0 systemd[230700]: Stopped Mark boot as successful after the user session has run 2 minutes.
Oct 02 12:14:59 compute-0 systemd[230700]: Stopped Daily Cleanup of User's Temporary Directories.
Oct 02 12:14:59 compute-0 systemd[230700]: Closed D-Bus User Message Bus Socket.
Oct 02 12:14:59 compute-0 systemd[230700]: Stopped Create User's Volatile Files and Directories.
Oct 02 12:14:59 compute-0 systemd[230700]: Removed slice User Application Slice.
Oct 02 12:14:59 compute-0 systemd[230700]: Reached target Shutdown.
Oct 02 12:14:59 compute-0 systemd[230700]: Finished Exit the Session.
Oct 02 12:14:59 compute-0 systemd[230700]: Reached target Exit the Session.
Oct 02 12:14:59 compute-0 systemd[1]: user@42436.service: Deactivated successfully.
Oct 02 12:14:59 compute-0 systemd[1]: Stopped User Manager for UID 42436.
Oct 02 12:14:59 compute-0 systemd[1]: Stopping User Runtime Directory /run/user/42436...
Oct 02 12:14:59 compute-0 systemd[1]: run-user-42436.mount: Deactivated successfully.
Oct 02 12:14:59 compute-0 systemd[1]: user-runtime-dir@42436.service: Deactivated successfully.
Oct 02 12:14:59 compute-0 systemd[1]: Stopped User Runtime Directory /run/user/42436.
Oct 02 12:14:59 compute-0 systemd[1]: Removed slice User Slice of UID 42436.
Oct 02 12:14:59 compute-0 podman[230875]: 2025-10-02 12:14:59.530308489 +0000 UTC m=+0.022026511 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:14:59 compute-0 podman[230875]: 2025-10-02 12:14:59.671282653 +0000 UTC m=+0.163000665 container create 0e46ce9fce55b83c973cc15acfcf2f69748da3e3b71fd94a19b97835b8002140 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3)
Oct 02 12:14:59 compute-0 podman[230889]: 2025-10-02 12:14:59.711309174 +0000 UTC m=+0.117592648 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid, managed_by=edpm_ansible, org.label-schema.license=GPLv2, container_name=iscsid, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001)
Oct 02 12:14:59 compute-0 nova_compute[192079]: 2025-10-02 12:14:59.716 2 DEBUG nova.compute.manager [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:14:59 compute-0 nova_compute[192079]: 2025-10-02 12:14:59.717 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407299.7174182, 6e45ea08-64c1-4434-9d80-94d4b7cec844 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:14:59 compute-0 systemd[1]: Started libpod-conmon-0e46ce9fce55b83c973cc15acfcf2f69748da3e3b71fd94a19b97835b8002140.scope.
Oct 02 12:14:59 compute-0 nova_compute[192079]: 2025-10-02 12:14:59.718 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] VM Resumed (Lifecycle Event)
Oct 02 12:14:59 compute-0 nova_compute[192079]: 2025-10-02 12:14:59.728 2 INFO nova.virt.libvirt.driver [-] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Instance running successfully.
Oct 02 12:14:59 compute-0 virtqemud[191807]: argument unsupported: QEMU guest agent is not configured
Oct 02 12:14:59 compute-0 nova_compute[192079]: 2025-10-02 12:14:59.731 2 DEBUG nova.virt.libvirt.guest [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Failed to set time: agent not configured sync_guest_time /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:200
Oct 02 12:14:59 compute-0 nova_compute[192079]: 2025-10-02 12:14:59.731 2 DEBUG nova.virt.libvirt.driver [None req-46b8b047-c493-43bc-8208-2102778793db def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] finish_migration finished successfully. finish_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:11793
Oct 02 12:14:59 compute-0 podman[230888]: 2025-10-02 12:14:59.732950924 +0000 UTC m=+0.137689555 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:14:59 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:14:59 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/ed423f7c9a242774a8adb1080291d0edb86cb1459bfe70d1bf79dc10ad8d3866/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:14:59 compute-0 nova_compute[192079]: 2025-10-02 12:14:59.753 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:14:59 compute-0 nova_compute[192079]: 2025-10-02 12:14:59.759 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: active, current task_state: resize_finish, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:14:59 compute-0 podman[230875]: 2025-10-02 12:14:59.764802913 +0000 UTC m=+0.256520935 container init 0e46ce9fce55b83c973cc15acfcf2f69748da3e3b71fd94a19b97835b8002140 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:14:59 compute-0 podman[230875]: 2025-10-02 12:14:59.76985269 +0000 UTC m=+0.261570702 container start 0e46ce9fce55b83c973cc15acfcf2f69748da3e3b71fd94a19b97835b8002140 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:14:59 compute-0 neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b[230927]: [NOTICE]   (230936) : New worker (230938) forked
Oct 02 12:14:59 compute-0 neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b[230927]: [NOTICE]   (230936) : Loading success.
Oct 02 12:14:59 compute-0 nova_compute[192079]: 2025-10-02 12:14:59.911 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] During sync_power_state the instance has a pending task (resize_finish). Skip.
Oct 02 12:14:59 compute-0 nova_compute[192079]: 2025-10-02 12:14:59.912 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407299.7179248, 6e45ea08-64c1-4434-9d80-94d4b7cec844 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:14:59 compute-0 nova_compute[192079]: 2025-10-02 12:14:59.912 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] VM Started (Lifecycle Event)
Oct 02 12:14:59 compute-0 nova_compute[192079]: 2025-10-02 12:14:59.952 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:14:59 compute-0 nova_compute[192079]: 2025-10-02 12:14:59.954 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Synchronizing instance power state after lifecycle event "Started"; current vm_state: active, current task_state: resize_finish, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:14:59 compute-0 nova_compute[192079]: 2025-10-02 12:14:59.984 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] During sync_power_state the instance has a pending task (resize_finish). Skip.
Oct 02 12:15:00 compute-0 nova_compute[192079]: 2025-10-02 12:15:00.097 2 DEBUG nova.compute.manager [req-5e56154b-4057-4357-a760-972e1445ed17 req-26eb508f-e8be-4fc8-80c0-f635fcde9213 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Received event network-vif-plugged-b1b379f4-7eb3-40e5-8edd-d903c05484af external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:15:00 compute-0 nova_compute[192079]: 2025-10-02 12:15:00.097 2 DEBUG oslo_concurrency.lockutils [req-5e56154b-4057-4357-a760-972e1445ed17 req-26eb508f-e8be-4fc8-80c0-f635fcde9213 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "6e45ea08-64c1-4434-9d80-94d4b7cec844-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:00 compute-0 nova_compute[192079]: 2025-10-02 12:15:00.098 2 DEBUG oslo_concurrency.lockutils [req-5e56154b-4057-4357-a760-972e1445ed17 req-26eb508f-e8be-4fc8-80c0-f635fcde9213 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6e45ea08-64c1-4434-9d80-94d4b7cec844-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:00 compute-0 nova_compute[192079]: 2025-10-02 12:15:00.098 2 DEBUG oslo_concurrency.lockutils [req-5e56154b-4057-4357-a760-972e1445ed17 req-26eb508f-e8be-4fc8-80c0-f635fcde9213 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6e45ea08-64c1-4434-9d80-94d4b7cec844-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:00 compute-0 nova_compute[192079]: 2025-10-02 12:15:00.098 2 DEBUG nova.compute.manager [req-5e56154b-4057-4357-a760-972e1445ed17 req-26eb508f-e8be-4fc8-80c0-f635fcde9213 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] No waiting events found dispatching network-vif-plugged-b1b379f4-7eb3-40e5-8edd-d903c05484af pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:15:00 compute-0 nova_compute[192079]: 2025-10-02 12:15:00.099 2 WARNING nova.compute.manager [req-5e56154b-4057-4357-a760-972e1445ed17 req-26eb508f-e8be-4fc8-80c0-f635fcde9213 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Received unexpected event network-vif-plugged-b1b379f4-7eb3-40e5-8edd-d903c05484af for instance with vm_state resized and task_state None.
Oct 02 12:15:00 compute-0 nova_compute[192079]: 2025-10-02 12:15:00.555 2 DEBUG oslo_concurrency.lockutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Acquiring lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:00 compute-0 nova_compute[192079]: 2025-10-02 12:15:00.555 2 DEBUG oslo_concurrency.lockutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:00 compute-0 nova_compute[192079]: 2025-10-02 12:15:00.608 2 DEBUG nova.compute.manager [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:15:00 compute-0 nova_compute[192079]: 2025-10-02 12:15:00.753 2 DEBUG oslo_concurrency.lockutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:00 compute-0 nova_compute[192079]: 2025-10-02 12:15:00.754 2 DEBUG oslo_concurrency.lockutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:00 compute-0 nova_compute[192079]: 2025-10-02 12:15:00.765 2 DEBUG nova.virt.hardware [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:15:00 compute-0 nova_compute[192079]: 2025-10-02 12:15:00.765 2 INFO nova.compute.claims [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:15:00 compute-0 nova_compute[192079]: 2025-10-02 12:15:00.954 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:00 compute-0 nova_compute[192079]: 2025-10-02 12:15:00.981 2 DEBUG nova.compute.provider_tree [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:15:00 compute-0 nova_compute[192079]: 2025-10-02 12:15:00.990 2 DEBUG nova.network.neutron [req-e20dea4a-cc82-49f6-b8ee-d5b4c8a9a229 req-e0a85e33-cfbc-4f69-a8f9-27eb7113d897 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Updated VIF entry in instance network info cache for port b1b379f4-7eb3-40e5-8edd-d903c05484af. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:15:00 compute-0 nova_compute[192079]: 2025-10-02 12:15:00.991 2 DEBUG nova.network.neutron [req-e20dea4a-cc82-49f6-b8ee-d5b4c8a9a229 req-e0a85e33-cfbc-4f69-a8f9-27eb7113d897 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Updating instance_info_cache with network_info: [{"id": "b1b379f4-7eb3-40e5-8edd-d903c05484af", "address": "fa:16:3e:5d:22:98", "network": {"id": "d6de4737-ca60-4c8d-bfd5-687f9366ec8b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1853814355-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffae703d68b24b9c89686c149113fc2b", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb1b379f4-7e", "ovs_interfaceid": "b1b379f4-7eb3-40e5-8edd-d903c05484af", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:15:00 compute-0 nova_compute[192079]: 2025-10-02 12:15:00.995 2 DEBUG nova.scheduler.client.report [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.012 2 DEBUG oslo_concurrency.lockutils [req-e20dea4a-cc82-49f6-b8ee-d5b4c8a9a229 req-e0a85e33-cfbc-4f69-a8f9-27eb7113d897 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-6e45ea08-64c1-4434-9d80-94d4b7cec844" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.016 2 DEBUG oslo_concurrency.lockutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.263s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.017 2 DEBUG nova.compute.manager [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.119 2 DEBUG nova.compute.manager [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.120 2 DEBUG nova.network.neutron [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.143 2 INFO nova.virt.libvirt.driver [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.162 2 DEBUG nova.compute.manager [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.286 2 DEBUG nova.compute.manager [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.287 2 DEBUG nova.virt.libvirt.driver [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.288 2 INFO nova.virt.libvirt.driver [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Creating image(s)
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.288 2 DEBUG oslo_concurrency.lockutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Acquiring lock "/var/lib/nova/instances/5fd0efb0-7a09-4760-8a2c-23ab235018f2/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.289 2 DEBUG oslo_concurrency.lockutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Lock "/var/lib/nova/instances/5fd0efb0-7a09-4760-8a2c-23ab235018f2/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.290 2 DEBUG oslo_concurrency.lockutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Lock "/var/lib/nova/instances/5fd0efb0-7a09-4760-8a2c-23ab235018f2/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.308 2 DEBUG oslo_concurrency.processutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.368 2 DEBUG oslo_concurrency.processutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.060s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.370 2 DEBUG oslo_concurrency.lockutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.370 2 DEBUG oslo_concurrency.lockutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.383 2 DEBUG oslo_concurrency.processutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.449 2 DEBUG oslo_concurrency.processutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.066s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.450 2 DEBUG oslo_concurrency.processutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/5fd0efb0-7a09-4760-8a2c-23ab235018f2/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.670 2 DEBUG oslo_concurrency.processutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/5fd0efb0-7a09-4760-8a2c-23ab235018f2/disk 1073741824" returned: 0 in 0.219s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.671 2 DEBUG oslo_concurrency.lockutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.301s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.672 2 DEBUG oslo_concurrency.processutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.732 2 DEBUG oslo_concurrency.processutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.060s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.733 2 DEBUG nova.virt.disk.api [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Checking if we can resize image /var/lib/nova/instances/5fd0efb0-7a09-4760-8a2c-23ab235018f2/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.733 2 DEBUG oslo_concurrency.processutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/5fd0efb0-7a09-4760-8a2c-23ab235018f2/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.795 2 DEBUG oslo_concurrency.processutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/5fd0efb0-7a09-4760-8a2c-23ab235018f2/disk --force-share --output=json" returned: 0 in 0.062s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.797 2 DEBUG nova.virt.disk.api [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Cannot resize image /var/lib/nova/instances/5fd0efb0-7a09-4760-8a2c-23ab235018f2/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.799 2 DEBUG nova.objects.instance [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Lazy-loading 'migration_context' on Instance uuid 5fd0efb0-7a09-4760-8a2c-23ab235018f2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.823 2 DEBUG nova.virt.libvirt.driver [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.824 2 DEBUG nova.virt.libvirt.driver [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Ensure instance console log exists: /var/lib/nova/instances/5fd0efb0-7a09-4760-8a2c-23ab235018f2/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.825 2 DEBUG oslo_concurrency.lockutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.825 2 DEBUG oslo_concurrency.lockutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:01 compute-0 nova_compute[192079]: 2025-10-02 12:15:01.825 2 DEBUG oslo_concurrency.lockutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:02 compute-0 nova_compute[192079]: 2025-10-02 12:15:02.028 2 DEBUG nova.policy [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '64ab4561f89846cc90cf0ab7f878cbd3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '11be1361f6f44b10a6efea8fccf616aa', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:15:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:02.215 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:02.216 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:02.216 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:02 compute-0 nova_compute[192079]: 2025-10-02 12:15:02.338 2 DEBUG nova.compute.manager [req-fdffc4a3-9b5f-4de7-874b-92df385e6a01 req-887e40f8-2d25-4199-8edb-fc337f753469 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Received event network-vif-plugged-b1b379f4-7eb3-40e5-8edd-d903c05484af external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:15:02 compute-0 nova_compute[192079]: 2025-10-02 12:15:02.338 2 DEBUG oslo_concurrency.lockutils [req-fdffc4a3-9b5f-4de7-874b-92df385e6a01 req-887e40f8-2d25-4199-8edb-fc337f753469 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "6e45ea08-64c1-4434-9d80-94d4b7cec844-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:02 compute-0 nova_compute[192079]: 2025-10-02 12:15:02.340 2 DEBUG oslo_concurrency.lockutils [req-fdffc4a3-9b5f-4de7-874b-92df385e6a01 req-887e40f8-2d25-4199-8edb-fc337f753469 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6e45ea08-64c1-4434-9d80-94d4b7cec844-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:02 compute-0 nova_compute[192079]: 2025-10-02 12:15:02.340 2 DEBUG oslo_concurrency.lockutils [req-fdffc4a3-9b5f-4de7-874b-92df385e6a01 req-887e40f8-2d25-4199-8edb-fc337f753469 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6e45ea08-64c1-4434-9d80-94d4b7cec844-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:02 compute-0 nova_compute[192079]: 2025-10-02 12:15:02.340 2 DEBUG nova.compute.manager [req-fdffc4a3-9b5f-4de7-874b-92df385e6a01 req-887e40f8-2d25-4199-8edb-fc337f753469 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] No waiting events found dispatching network-vif-plugged-b1b379f4-7eb3-40e5-8edd-d903c05484af pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:15:02 compute-0 nova_compute[192079]: 2025-10-02 12:15:02.340 2 WARNING nova.compute.manager [req-fdffc4a3-9b5f-4de7-874b-92df385e6a01 req-887e40f8-2d25-4199-8edb-fc337f753469 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Received unexpected event network-vif-plugged-b1b379f4-7eb3-40e5-8edd-d903c05484af for instance with vm_state resized and task_state None.
Oct 02 12:15:03 compute-0 nova_compute[192079]: 2025-10-02 12:15:03.604 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:04 compute-0 nova_compute[192079]: 2025-10-02 12:15:04.147 2 DEBUG nova.network.neutron [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Successfully created port: add21826-27b9-48e6-b6bd-da40856e1eb0 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:15:05 compute-0 nova_compute[192079]: 2025-10-02 12:15:05.051 2 DEBUG nova.network.neutron [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Successfully created port: c4044870-326c-4aa1-a6b5-c4bd8e48ea5c _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:15:05 compute-0 nova_compute[192079]: 2025-10-02 12:15:05.955 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:05 compute-0 nova_compute[192079]: 2025-10-02 12:15:05.980 2 DEBUG nova.network.neutron [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Successfully updated port: add21826-27b9-48e6-b6bd-da40856e1eb0 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:15:06 compute-0 nova_compute[192079]: 2025-10-02 12:15:06.070 2 DEBUG nova.compute.manager [req-a795c1c2-90fa-4861-a7c2-5ad9e24cd45c req-0bcca3a2-f72f-4d3b-9036-551f029efceb 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Received event network-changed-add21826-27b9-48e6-b6bd-da40856e1eb0 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:15:06 compute-0 nova_compute[192079]: 2025-10-02 12:15:06.071 2 DEBUG nova.compute.manager [req-a795c1c2-90fa-4861-a7c2-5ad9e24cd45c req-0bcca3a2-f72f-4d3b-9036-551f029efceb 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Refreshing instance network info cache due to event network-changed-add21826-27b9-48e6-b6bd-da40856e1eb0. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:15:06 compute-0 nova_compute[192079]: 2025-10-02 12:15:06.071 2 DEBUG oslo_concurrency.lockutils [req-a795c1c2-90fa-4861-a7c2-5ad9e24cd45c req-0bcca3a2-f72f-4d3b-9036-551f029efceb 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-5fd0efb0-7a09-4760-8a2c-23ab235018f2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:15:06 compute-0 nova_compute[192079]: 2025-10-02 12:15:06.071 2 DEBUG oslo_concurrency.lockutils [req-a795c1c2-90fa-4861-a7c2-5ad9e24cd45c req-0bcca3a2-f72f-4d3b-9036-551f029efceb 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-5fd0efb0-7a09-4760-8a2c-23ab235018f2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:15:06 compute-0 nova_compute[192079]: 2025-10-02 12:15:06.071 2 DEBUG nova.network.neutron [req-a795c1c2-90fa-4861-a7c2-5ad9e24cd45c req-0bcca3a2-f72f-4d3b-9036-551f029efceb 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Refreshing network info cache for port add21826-27b9-48e6-b6bd-da40856e1eb0 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:15:06 compute-0 nova_compute[192079]: 2025-10-02 12:15:06.231 2 DEBUG nova.network.neutron [req-a795c1c2-90fa-4861-a7c2-5ad9e24cd45c req-0bcca3a2-f72f-4d3b-9036-551f029efceb 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:15:06 compute-0 nova_compute[192079]: 2025-10-02 12:15:06.705 2 DEBUG nova.network.neutron [req-a795c1c2-90fa-4861-a7c2-5ad9e24cd45c req-0bcca3a2-f72f-4d3b-9036-551f029efceb 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:15:06 compute-0 nova_compute[192079]: 2025-10-02 12:15:06.732 2 DEBUG oslo_concurrency.lockutils [req-a795c1c2-90fa-4861-a7c2-5ad9e24cd45c req-0bcca3a2-f72f-4d3b-9036-551f029efceb 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-5fd0efb0-7a09-4760-8a2c-23ab235018f2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:15:06 compute-0 nova_compute[192079]: 2025-10-02 12:15:06.820 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:06.821 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=19, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=18) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:15:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:06.821 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 1 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:15:06 compute-0 nova_compute[192079]: 2025-10-02 12:15:06.954 2 DEBUG nova.network.neutron [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Successfully updated port: c4044870-326c-4aa1-a6b5-c4bd8e48ea5c _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:15:06 compute-0 nova_compute[192079]: 2025-10-02 12:15:06.969 2 DEBUG oslo_concurrency.lockutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Acquiring lock "refresh_cache-5fd0efb0-7a09-4760-8a2c-23ab235018f2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:15:06 compute-0 nova_compute[192079]: 2025-10-02 12:15:06.970 2 DEBUG oslo_concurrency.lockutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Acquired lock "refresh_cache-5fd0efb0-7a09-4760-8a2c-23ab235018f2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:15:06 compute-0 nova_compute[192079]: 2025-10-02 12:15:06.970 2 DEBUG nova.network.neutron [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:15:07 compute-0 nova_compute[192079]: 2025-10-02 12:15:07.198 2 DEBUG nova.network.neutron [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:15:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:07.823 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '19'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:08 compute-0 nova_compute[192079]: 2025-10-02 12:15:08.228 2 DEBUG nova.compute.manager [req-173bab79-d636-4aa5-89b5-d97201999c03 req-23771a2c-f704-4736-9a3f-ef0a41800c5e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Received event network-changed-c4044870-326c-4aa1-a6b5-c4bd8e48ea5c external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:15:08 compute-0 nova_compute[192079]: 2025-10-02 12:15:08.229 2 DEBUG nova.compute.manager [req-173bab79-d636-4aa5-89b5-d97201999c03 req-23771a2c-f704-4736-9a3f-ef0a41800c5e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Refreshing instance network info cache due to event network-changed-c4044870-326c-4aa1-a6b5-c4bd8e48ea5c. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:15:08 compute-0 nova_compute[192079]: 2025-10-02 12:15:08.229 2 DEBUG oslo_concurrency.lockutils [req-173bab79-d636-4aa5-89b5-d97201999c03 req-23771a2c-f704-4736-9a3f-ef0a41800c5e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-5fd0efb0-7a09-4760-8a2c-23ab235018f2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:15:08 compute-0 nova_compute[192079]: 2025-10-02 12:15:08.606 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:09 compute-0 podman[230963]: 2025-10-02 12:15:09.146235231 +0000 UTC m=+0.056572934 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, 
org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_metadata_agent)
Oct 02 12:15:09 compute-0 podman[230964]: 2025-10-02 12:15:09.205688862 +0000 UTC m=+0.101535280 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_controller, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, tcib_managed=true)
Oct 02 12:15:09 compute-0 podman[230965]: 2025-10-02 12:15:09.207226784 +0000 UTC m=+0.096961584 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.643 2 DEBUG oslo_concurrency.lockutils [None req-96a11536-b7ee-4b88-846c-5ff41ac9217c def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Acquiring lock "6e45ea08-64c1-4434-9d80-94d4b7cec844" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.643 2 DEBUG oslo_concurrency.lockutils [None req-96a11536-b7ee-4b88-846c-5ff41ac9217c def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lock "6e45ea08-64c1-4434-9d80-94d4b7cec844" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.643 2 DEBUG oslo_concurrency.lockutils [None req-96a11536-b7ee-4b88-846c-5ff41ac9217c def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Acquiring lock "6e45ea08-64c1-4434-9d80-94d4b7cec844-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.644 2 DEBUG oslo_concurrency.lockutils [None req-96a11536-b7ee-4b88-846c-5ff41ac9217c def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lock "6e45ea08-64c1-4434-9d80-94d4b7cec844-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.644 2 DEBUG oslo_concurrency.lockutils [None req-96a11536-b7ee-4b88-846c-5ff41ac9217c def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lock "6e45ea08-64c1-4434-9d80-94d4b7cec844-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.683 2 INFO nova.compute.manager [None req-96a11536-b7ee-4b88-846c-5ff41ac9217c def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Terminating instance
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.694 2 DEBUG nova.compute.manager [None req-96a11536-b7ee-4b88-846c-5ff41ac9217c def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:15:09 compute-0 kernel: tapb1b379f4-7e (unregistering): left promiscuous mode
Oct 02 12:15:09 compute-0 NetworkManager[51160]: <info>  [1759407309.7212] device (tapb1b379f4-7e): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:15:09 compute-0 ovn_controller[94336]: 2025-10-02T12:15:09Z|00241|binding|INFO|Releasing lport b1b379f4-7eb3-40e5-8edd-d903c05484af from this chassis (sb_readonly=0)
Oct 02 12:15:09 compute-0 ovn_controller[94336]: 2025-10-02T12:15:09Z|00242|binding|INFO|Setting lport b1b379f4-7eb3-40e5-8edd-d903c05484af down in Southbound
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.732 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:09 compute-0 ovn_controller[94336]: 2025-10-02T12:15:09Z|00243|binding|INFO|Removing iface tapb1b379f4-7e ovn-installed in OVS
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.736 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:09.743 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:5d:22:98 10.100.0.6'], port_security=['fa:16:3e:5d:22:98 10.100.0.6'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.6/28', 'neutron:device_id': '6e45ea08-64c1-4434-9d80-94d4b7cec844', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-d6de4737-ca60-4c8d-bfd5-687f9366ec8b', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'ffae703d68b24b9c89686c149113fc2b', 'neutron:revision_number': '8', 'neutron:security_group_ids': '64970375-b20e-4c18-bfb5-2a0465f8be7d', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=9476db85-7514-407a-b55a-3d3c703e8f7b, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=b1b379f4-7eb3-40e5-8edd-d903c05484af) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:15:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:09.745 103294 INFO neutron.agent.ovn.metadata.agent [-] Port b1b379f4-7eb3-40e5-8edd-d903c05484af in datapath d6de4737-ca60-4c8d-bfd5-687f9366ec8b unbound from our chassis
Oct 02 12:15:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:09.746 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network d6de4737-ca60-4c8d-bfd5-687f9366ec8b, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:15:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:09.747 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[fbdc52a5-0250-47ef-958a-9d705e98c37e]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:09.747 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b namespace which is not needed anymore
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.749 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:09 compute-0 systemd[1]: machine-qemu\x2d35\x2dinstance\x2d00000048.scope: Deactivated successfully.
Oct 02 12:15:09 compute-0 systemd[1]: machine-qemu\x2d35\x2dinstance\x2d00000048.scope: Consumed 10.760s CPU time.
Oct 02 12:15:09 compute-0 systemd-machined[152150]: Machine qemu-35-instance-00000048 terminated.
Oct 02 12:15:09 compute-0 neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b[230927]: [NOTICE]   (230936) : haproxy version is 2.8.14-c23fe91
Oct 02 12:15:09 compute-0 neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b[230927]: [NOTICE]   (230936) : path to executable is /usr/sbin/haproxy
Oct 02 12:15:09 compute-0 neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b[230927]: [WARNING]  (230936) : Exiting Master process...
Oct 02 12:15:09 compute-0 neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b[230927]: [WARNING]  (230936) : Exiting Master process...
Oct 02 12:15:09 compute-0 neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b[230927]: [ALERT]    (230936) : Current worker (230938) exited with code 143 (Terminated)
Oct 02 12:15:09 compute-0 neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b[230927]: [WARNING]  (230936) : All workers exited. Exiting... (0)
Oct 02 12:15:09 compute-0 systemd[1]: libpod-0e46ce9fce55b83c973cc15acfcf2f69748da3e3b71fd94a19b97835b8002140.scope: Deactivated successfully.
Oct 02 12:15:09 compute-0 podman[231054]: 2025-10-02 12:15:09.886508335 +0000 UTC m=+0.052032030 container died 0e46ce9fce55b83c973cc15acfcf2f69748da3e3b71fd94a19b97835b8002140 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:15:09 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-0e46ce9fce55b83c973cc15acfcf2f69748da3e3b71fd94a19b97835b8002140-userdata-shm.mount: Deactivated successfully.
Oct 02 12:15:09 compute-0 systemd[1]: var-lib-containers-storage-overlay-ed423f7c9a242774a8adb1080291d0edb86cb1459bfe70d1bf79dc10ad8d3866-merged.mount: Deactivated successfully.
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.924 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:09 compute-0 podman[231054]: 2025-10-02 12:15:09.933540757 +0000 UTC m=+0.099064452 container cleanup 0e46ce9fce55b83c973cc15acfcf2f69748da3e3b71fd94a19b97835b8002140 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.build-date=20251001)
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.933 2 DEBUG nova.network.neutron [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Updating instance_info_cache with network_info: [{"id": "add21826-27b9-48e6-b6bd-da40856e1eb0", "address": "fa:16:3e:76:5d:e6", "network": {"id": "b6540487-e583-4697-ba62-6db6c44a9c42", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1408659145", "subnets": [{"cidr": "10.100.0.0/24", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.197", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "11be1361f6f44b10a6efea8fccf616aa", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapadd21826-27", "ovs_interfaceid": "add21826-27b9-48e6-b6bd-da40856e1eb0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c4044870-326c-4aa1-a6b5-c4bd8e48ea5c", "address": "fa:16:3e:a7:91:0f", "network": {"id": "44157705-b81b-4ce5-a7fb-27ef102009e9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1640137022", "subnets": [{"cidr": "10.100.1.0/24", "dns": [], "gateway": {"address": "10.100.1.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.1.33", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "11be1361f6f44b10a6efea8fccf616aa", "mtu": 1442, "physical_network": null, 
"tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4044870-32", "ovs_interfaceid": "c4044870-326c-4aa1-a6b5-c4bd8e48ea5c", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.935 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:09 compute-0 systemd[1]: libpod-conmon-0e46ce9fce55b83c973cc15acfcf2f69748da3e3b71fd94a19b97835b8002140.scope: Deactivated successfully.
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.952 2 DEBUG oslo_concurrency.lockutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Releasing lock "refresh_cache-5fd0efb0-7a09-4760-8a2c-23ab235018f2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.952 2 DEBUG nova.compute.manager [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Instance network_info: |[{"id": "add21826-27b9-48e6-b6bd-da40856e1eb0", "address": "fa:16:3e:76:5d:e6", "network": {"id": "b6540487-e583-4697-ba62-6db6c44a9c42", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1408659145", "subnets": [{"cidr": "10.100.0.0/24", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.197", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "11be1361f6f44b10a6efea8fccf616aa", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapadd21826-27", "ovs_interfaceid": "add21826-27b9-48e6-b6bd-da40856e1eb0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c4044870-326c-4aa1-a6b5-c4bd8e48ea5c", "address": "fa:16:3e:a7:91:0f", "network": {"id": "44157705-b81b-4ce5-a7fb-27ef102009e9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1640137022", "subnets": [{"cidr": "10.100.1.0/24", "dns": [], "gateway": {"address": "10.100.1.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.1.33", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "11be1361f6f44b10a6efea8fccf616aa", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": 
"ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4044870-32", "ovs_interfaceid": "c4044870-326c-4aa1-a6b5-c4bd8e48ea5c", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.953 2 DEBUG oslo_concurrency.lockutils [req-173bab79-d636-4aa5-89b5-d97201999c03 req-23771a2c-f704-4736-9a3f-ef0a41800c5e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-5fd0efb0-7a09-4760-8a2c-23ab235018f2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.953 2 DEBUG nova.network.neutron [req-173bab79-d636-4aa5-89b5-d97201999c03 req-23771a2c-f704-4736-9a3f-ef0a41800c5e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Refreshing network info cache for port c4044870-326c-4aa1-a6b5-c4bd8e48ea5c _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.957 2 DEBUG nova.virt.libvirt.driver [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Start _get_guest_xml network_info=[{"id": "add21826-27b9-48e6-b6bd-da40856e1eb0", "address": "fa:16:3e:76:5d:e6", "network": {"id": "b6540487-e583-4697-ba62-6db6c44a9c42", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1408659145", "subnets": [{"cidr": "10.100.0.0/24", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.197", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "11be1361f6f44b10a6efea8fccf616aa", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapadd21826-27", "ovs_interfaceid": "add21826-27b9-48e6-b6bd-da40856e1eb0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c4044870-326c-4aa1-a6b5-c4bd8e48ea5c", "address": "fa:16:3e:a7:91:0f", "network": {"id": "44157705-b81b-4ce5-a7fb-27ef102009e9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1640137022", "subnets": [{"cidr": "10.100.1.0/24", "dns": [], "gateway": {"address": "10.100.1.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.1.33", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "11be1361f6f44b10a6efea8fccf616aa", "mtu": 1442, "physical_network": null, "tunneled": 
true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4044870-32", "ovs_interfaceid": "c4044870-326c-4aa1-a6b5-c4bd8e48ea5c", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.964 2 WARNING nova.virt.libvirt.driver [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.968 2 INFO nova.virt.libvirt.driver [-] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Instance destroyed successfully.
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.968 2 DEBUG nova.objects.instance [None req-96a11536-b7ee-4b88-846c-5ff41ac9217c def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lazy-loading 'resources' on Instance uuid 6e45ea08-64c1-4434-9d80-94d4b7cec844 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.971 2 DEBUG nova.virt.libvirt.host [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.971 2 DEBUG nova.virt.libvirt.host [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.977 2 DEBUG nova.virt.libvirt.host [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.978 2 DEBUG nova.virt.libvirt.host [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.979 2 DEBUG nova.virt.libvirt.driver [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.979 2 DEBUG nova.virt.hardware [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.980 2 DEBUG nova.virt.hardware [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.980 2 DEBUG nova.virt.hardware [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.980 2 DEBUG nova.virt.hardware [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.980 2 DEBUG nova.virt.hardware [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.981 2 DEBUG nova.virt.hardware [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.981 2 DEBUG nova.virt.hardware [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.981 2 DEBUG nova.virt.hardware [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.981 2 DEBUG nova.virt.hardware [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.982 2 DEBUG nova.virt.hardware [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.982 2 DEBUG nova.virt.hardware [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.986 2 DEBUG nova.virt.libvirt.vif [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:14:58Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ServersTestMultiNic-server-1824088787',display_name='tempest-ServersTestMultiNic-server-1824088787',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverstestmultinic-server-1824088787',id=75,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='11be1361f6f44b10a6efea8fccf616aa',ramdisk_id='',reservation_id='r-4g3fhypv',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServersTestMultiNic-1305956602',owner_user_name='tempest-ServersTestMultiNic-1305956602-proj
ect-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:15:01Z,user_data=None,user_id='64ab4561f89846cc90cf0ab7f878cbd3',uuid=5fd0efb0-7a09-4760-8a2c-23ab235018f2,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "add21826-27b9-48e6-b6bd-da40856e1eb0", "address": "fa:16:3e:76:5d:e6", "network": {"id": "b6540487-e583-4697-ba62-6db6c44a9c42", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1408659145", "subnets": [{"cidr": "10.100.0.0/24", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.197", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "11be1361f6f44b10a6efea8fccf616aa", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapadd21826-27", "ovs_interfaceid": "add21826-27b9-48e6-b6bd-da40856e1eb0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.986 2 DEBUG nova.network.os_vif_util [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Converting VIF {"id": "add21826-27b9-48e6-b6bd-da40856e1eb0", "address": "fa:16:3e:76:5d:e6", "network": {"id": "b6540487-e583-4697-ba62-6db6c44a9c42", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1408659145", "subnets": [{"cidr": "10.100.0.0/24", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.197", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "11be1361f6f44b10a6efea8fccf616aa", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapadd21826-27", "ovs_interfaceid": "add21826-27b9-48e6-b6bd-da40856e1eb0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.987 2 DEBUG nova.network.os_vif_util [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:76:5d:e6,bridge_name='br-int',has_traffic_filtering=True,id=add21826-27b9-48e6-b6bd-da40856e1eb0,network=Network(b6540487-e583-4697-ba62-6db6c44a9c42),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapadd21826-27') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.988 2 DEBUG nova.virt.libvirt.vif [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:14:58Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ServersTestMultiNic-server-1824088787',display_name='tempest-ServersTestMultiNic-server-1824088787',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverstestmultinic-server-1824088787',id=75,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='11be1361f6f44b10a6efea8fccf616aa',ramdisk_id='',reservation_id='r-4g3fhypv',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServersTestMultiNic-1305956602',owner_user_name='tempest-ServersTestMultiNic-1305956602-proj
ect-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:15:01Z,user_data=None,user_id='64ab4561f89846cc90cf0ab7f878cbd3',uuid=5fd0efb0-7a09-4760-8a2c-23ab235018f2,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "c4044870-326c-4aa1-a6b5-c4bd8e48ea5c", "address": "fa:16:3e:a7:91:0f", "network": {"id": "44157705-b81b-4ce5-a7fb-27ef102009e9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1640137022", "subnets": [{"cidr": "10.100.1.0/24", "dns": [], "gateway": {"address": "10.100.1.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.1.33", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "11be1361f6f44b10a6efea8fccf616aa", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4044870-32", "ovs_interfaceid": "c4044870-326c-4aa1-a6b5-c4bd8e48ea5c", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.988 2 DEBUG nova.network.os_vif_util [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Converting VIF {"id": "c4044870-326c-4aa1-a6b5-c4bd8e48ea5c", "address": "fa:16:3e:a7:91:0f", "network": {"id": "44157705-b81b-4ce5-a7fb-27ef102009e9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1640137022", "subnets": [{"cidr": "10.100.1.0/24", "dns": [], "gateway": {"address": "10.100.1.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.1.33", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "11be1361f6f44b10a6efea8fccf616aa", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4044870-32", "ovs_interfaceid": "c4044870-326c-4aa1-a6b5-c4bd8e48ea5c", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.988 2 DEBUG nova.network.os_vif_util [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:a7:91:0f,bridge_name='br-int',has_traffic_filtering=True,id=c4044870-326c-4aa1-a6b5-c4bd8e48ea5c,network=Network(44157705-b81b-4ce5-a7fb-27ef102009e9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapc4044870-32') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.990 2 DEBUG nova.objects.instance [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Lazy-loading 'pci_devices' on Instance uuid 5fd0efb0-7a09-4760-8a2c-23ab235018f2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.992 2 DEBUG nova.virt.libvirt.vif [None req-96a11536-b7ee-4b88-846c-5ff41ac9217c def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:14:08Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerDiskConfigTestJSON-server-922274791',display_name='tempest-ServerDiskConfigTestJSON-server-922274791',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(2),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverdiskconfigtestjson-server-922274791',id=72,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=2,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:14:59Z,launched_on='compute-1.ctlplane.example.com',locked=False,locked_by=None,memory_mb=192,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='ffae703d68b24b9c89686c149113fc2b',ramdisk_id='',reservation_id='r-7a20jfzw',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_mi
n_disk='1',image_min_ram='0',owner_project_name='tempest-ServerDiskConfigTestJSON-1763056137',owner_user_name='tempest-ServerDiskConfigTestJSON-1763056137-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:15:05Z,user_data=None,user_id='def48c13fd6a43ba88836b753986a731',uuid=6e45ea08-64c1-4434-9d80-94d4b7cec844,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "b1b379f4-7eb3-40e5-8edd-d903c05484af", "address": "fa:16:3e:5d:22:98", "network": {"id": "d6de4737-ca60-4c8d-bfd5-687f9366ec8b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1853814355-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffae703d68b24b9c89686c149113fc2b", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb1b379f4-7e", "ovs_interfaceid": "b1b379f4-7eb3-40e5-8edd-d903c05484af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.992 2 DEBUG nova.network.os_vif_util [None req-96a11536-b7ee-4b88-846c-5ff41ac9217c def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Converting VIF {"id": "b1b379f4-7eb3-40e5-8edd-d903c05484af", "address": "fa:16:3e:5d:22:98", "network": {"id": "d6de4737-ca60-4c8d-bfd5-687f9366ec8b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1853814355-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffae703d68b24b9c89686c149113fc2b", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb1b379f4-7e", "ovs_interfaceid": "b1b379f4-7eb3-40e5-8edd-d903c05484af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.993 2 DEBUG nova.network.os_vif_util [None req-96a11536-b7ee-4b88-846c-5ff41ac9217c def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:5d:22:98,bridge_name='br-int',has_traffic_filtering=True,id=b1b379f4-7eb3-40e5-8edd-d903c05484af,network=Network(d6de4737-ca60-4c8d-bfd5-687f9366ec8b),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapb1b379f4-7e') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.993 2 DEBUG os_vif [None req-96a11536-b7ee-4b88-846c-5ff41ac9217c def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:5d:22:98,bridge_name='br-int',has_traffic_filtering=True,id=b1b379f4-7eb3-40e5-8edd-d903c05484af,network=Network(d6de4737-ca60-4c8d-bfd5-687f9366ec8b),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapb1b379f4-7e') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.995 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.995 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapb1b379f4-7e, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:09 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.996 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:09.999 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.001 2 INFO os_vif [None req-96a11536-b7ee-4b88-846c-5ff41ac9217c def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:5d:22:98,bridge_name='br-int',has_traffic_filtering=True,id=b1b379f4-7eb3-40e5-8edd-d903c05484af,network=Network(d6de4737-ca60-4c8d-bfd5-687f9366ec8b),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapb1b379f4-7e')
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.002 2 INFO nova.virt.libvirt.driver [None req-96a11536-b7ee-4b88-846c-5ff41ac9217c def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Deleting instance files /var/lib/nova/instances/6e45ea08-64c1-4434-9d80-94d4b7cec844_del
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.007 2 INFO nova.virt.libvirt.driver [None req-96a11536-b7ee-4b88-846c-5ff41ac9217c def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Deletion of /var/lib/nova/instances/6e45ea08-64c1-4434-9d80-94d4b7cec844_del complete
Oct 02 12:15:10 compute-0 podman[231095]: 2025-10-02 12:15:10.00956883 +0000 UTC m=+0.045480041 container remove 0e46ce9fce55b83c973cc15acfcf2f69748da3e3b71fd94a19b97835b8002140 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3)
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.011 2 DEBUG nova.virt.libvirt.driver [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:15:10 compute-0 nova_compute[192079]:   <uuid>5fd0efb0-7a09-4760-8a2c-23ab235018f2</uuid>
Oct 02 12:15:10 compute-0 nova_compute[192079]:   <name>instance-0000004b</name>
Oct 02 12:15:10 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:15:10 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:15:10 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <nova:name>tempest-ServersTestMultiNic-server-1824088787</nova:name>
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:15:09</nova:creationTime>
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:15:10 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:15:10 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:15:10 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:15:10 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:15:10 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:15:10 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:15:10 compute-0 nova_compute[192079]:         <nova:user uuid="64ab4561f89846cc90cf0ab7f878cbd3">tempest-ServersTestMultiNic-1305956602-project-member</nova:user>
Oct 02 12:15:10 compute-0 nova_compute[192079]:         <nova:project uuid="11be1361f6f44b10a6efea8fccf616aa">tempest-ServersTestMultiNic-1305956602</nova:project>
Oct 02 12:15:10 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:15:10 compute-0 nova_compute[192079]:         <nova:port uuid="add21826-27b9-48e6-b6bd-da40856e1eb0">
Oct 02 12:15:10 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.197" ipVersion="4"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:15:10 compute-0 nova_compute[192079]:         <nova:port uuid="c4044870-326c-4aa1-a6b5-c4bd8e48ea5c">
Oct 02 12:15:10 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.1.33" ipVersion="4"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:15:10 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:15:10 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:15:10 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <system>
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <entry name="serial">5fd0efb0-7a09-4760-8a2c-23ab235018f2</entry>
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <entry name="uuid">5fd0efb0-7a09-4760-8a2c-23ab235018f2</entry>
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     </system>
Oct 02 12:15:10 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:15:10 compute-0 nova_compute[192079]:   <os>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:   </os>
Oct 02 12:15:10 compute-0 nova_compute[192079]:   <features>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:   </features>
Oct 02 12:15:10 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:15:10 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:15:10 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/5fd0efb0-7a09-4760-8a2c-23ab235018f2/disk"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/5fd0efb0-7a09-4760-8a2c-23ab235018f2/disk.config"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:76:5d:e6"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <target dev="tapadd21826-27"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:a7:91:0f"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <target dev="tapc4044870-32"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/5fd0efb0-7a09-4760-8a2c-23ab235018f2/console.log" append="off"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <video>
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     </video>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:15:10 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:15:10 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:15:10 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:15:10 compute-0 nova_compute[192079]: </domain>
Oct 02 12:15:10 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.012 2 DEBUG nova.compute.manager [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Preparing to wait for external event network-vif-plugged-add21826-27b9-48e6-b6bd-da40856e1eb0 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.012 2 DEBUG oslo_concurrency.lockutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Acquiring lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.012 2 DEBUG oslo_concurrency.lockutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.013 2 DEBUG oslo_concurrency.lockutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.013 2 DEBUG nova.compute.manager [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Preparing to wait for external event network-vif-plugged-c4044870-326c-4aa1-a6b5-c4bd8e48ea5c prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.013 2 DEBUG oslo_concurrency.lockutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Acquiring lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.013 2 DEBUG oslo_concurrency.lockutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.013 2 DEBUG oslo_concurrency.lockutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.014 2 DEBUG nova.virt.libvirt.vif [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:14:58Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ServersTestMultiNic-server-1824088787',display_name='tempest-ServersTestMultiNic-server-1824088787',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverstestmultinic-server-1824088787',id=75,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='11be1361f6f44b10a6efea8fccf616aa',ramdisk_id='',reservation_id='r-4g3fhypv',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServersTestMultiNic-1305956602',owner_user_name='tempest-ServersTestMultiNic-13059
56602-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:15:01Z,user_data=None,user_id='64ab4561f89846cc90cf0ab7f878cbd3',uuid=5fd0efb0-7a09-4760-8a2c-23ab235018f2,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "add21826-27b9-48e6-b6bd-da40856e1eb0", "address": "fa:16:3e:76:5d:e6", "network": {"id": "b6540487-e583-4697-ba62-6db6c44a9c42", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1408659145", "subnets": [{"cidr": "10.100.0.0/24", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.197", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "11be1361f6f44b10a6efea8fccf616aa", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapadd21826-27", "ovs_interfaceid": "add21826-27b9-48e6-b6bd-da40856e1eb0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.014 2 DEBUG nova.network.os_vif_util [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Converting VIF {"id": "add21826-27b9-48e6-b6bd-da40856e1eb0", "address": "fa:16:3e:76:5d:e6", "network": {"id": "b6540487-e583-4697-ba62-6db6c44a9c42", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1408659145", "subnets": [{"cidr": "10.100.0.0/24", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.197", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "11be1361f6f44b10a6efea8fccf616aa", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapadd21826-27", "ovs_interfaceid": "add21826-27b9-48e6-b6bd-da40856e1eb0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:15:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:10.014 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8e08ef65-8302-4735-a453-b9a5f862feab]: (4, ('Thu Oct  2 12:15:09 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b (0e46ce9fce55b83c973cc15acfcf2f69748da3e3b71fd94a19b97835b8002140)\n0e46ce9fce55b83c973cc15acfcf2f69748da3e3b71fd94a19b97835b8002140\nThu Oct  2 12:15:09 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b (0e46ce9fce55b83c973cc15acfcf2f69748da3e3b71fd94a19b97835b8002140)\n0e46ce9fce55b83c973cc15acfcf2f69748da3e3b71fd94a19b97835b8002140\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:10.015 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[45f28cc7-28ff-4dca-a05e-6e0e2ae56831]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:10.016 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapd6de4737-c0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.017 2 DEBUG nova.network.os_vif_util [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:76:5d:e6,bridge_name='br-int',has_traffic_filtering=True,id=add21826-27b9-48e6-b6bd-da40856e1eb0,network=Network(b6540487-e583-4697-ba62-6db6c44a9c42),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapadd21826-27') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.018 2 DEBUG os_vif [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:76:5d:e6,bridge_name='br-int',has_traffic_filtering=True,id=add21826-27b9-48e6-b6bd-da40856e1eb0,network=Network(b6540487-e583-4697-ba62-6db6c44a9c42),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapadd21826-27') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:15:10 compute-0 kernel: tapd6de4737-c0: left promiscuous mode
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.019 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.020 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.020 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.020 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.022 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.022 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapadd21826-27, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.022 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapadd21826-27, col_values=(('external_ids', {'iface-id': 'add21826-27b9-48e6-b6bd-da40856e1eb0', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:76:5d:e6', 'vm-uuid': '5fd0efb0-7a09-4760-8a2c-23ab235018f2'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.023 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:10 compute-0 NetworkManager[51160]: <info>  [1759407310.0244] manager: (tapadd21826-27): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/123)
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.026 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.029 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:10.030 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a497ca2b-229e-463d-9512-45eadd509b43]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.040 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.040 2 INFO os_vif [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:76:5d:e6,bridge_name='br-int',has_traffic_filtering=True,id=add21826-27b9-48e6-b6bd-da40856e1eb0,network=Network(b6540487-e583-4697-ba62-6db6c44a9c42),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapadd21826-27')
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.041 2 DEBUG nova.virt.libvirt.vif [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:14:58Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ServersTestMultiNic-server-1824088787',display_name='tempest-ServersTestMultiNic-server-1824088787',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverstestmultinic-server-1824088787',id=75,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='11be1361f6f44b10a6efea8fccf616aa',ramdisk_id='',reservation_id='r-4g3fhypv',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServersTestMultiNic-1305956602',owner_user_name='tempest-ServersTestMultiNic-13059
56602-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:15:01Z,user_data=None,user_id='64ab4561f89846cc90cf0ab7f878cbd3',uuid=5fd0efb0-7a09-4760-8a2c-23ab235018f2,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "c4044870-326c-4aa1-a6b5-c4bd8e48ea5c", "address": "fa:16:3e:a7:91:0f", "network": {"id": "44157705-b81b-4ce5-a7fb-27ef102009e9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1640137022", "subnets": [{"cidr": "10.100.1.0/24", "dns": [], "gateway": {"address": "10.100.1.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.1.33", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "11be1361f6f44b10a6efea8fccf616aa", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4044870-32", "ovs_interfaceid": "c4044870-326c-4aa1-a6b5-c4bd8e48ea5c", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.041 2 DEBUG nova.network.os_vif_util [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Converting VIF {"id": "c4044870-326c-4aa1-a6b5-c4bd8e48ea5c", "address": "fa:16:3e:a7:91:0f", "network": {"id": "44157705-b81b-4ce5-a7fb-27ef102009e9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1640137022", "subnets": [{"cidr": "10.100.1.0/24", "dns": [], "gateway": {"address": "10.100.1.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.1.33", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "11be1361f6f44b10a6efea8fccf616aa", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4044870-32", "ovs_interfaceid": "c4044870-326c-4aa1-a6b5-c4bd8e48ea5c", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.042 2 DEBUG nova.network.os_vif_util [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:a7:91:0f,bridge_name='br-int',has_traffic_filtering=True,id=c4044870-326c-4aa1-a6b5-c4bd8e48ea5c,network=Network(44157705-b81b-4ce5-a7fb-27ef102009e9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapc4044870-32') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.042 2 DEBUG os_vif [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:a7:91:0f,bridge_name='br-int',has_traffic_filtering=True,id=c4044870-326c-4aa1-a6b5-c4bd8e48ea5c,network=Network(44157705-b81b-4ce5-a7fb-27ef102009e9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapc4044870-32') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.043 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.043 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.043 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.045 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.045 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapc4044870-32, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.045 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapc4044870-32, col_values=(('external_ids', {'iface-id': 'c4044870-326c-4aa1-a6b5-c4bd8e48ea5c', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:a7:91:0f', 'vm-uuid': '5fd0efb0-7a09-4760-8a2c-23ab235018f2'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.046 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:10 compute-0 NetworkManager[51160]: <info>  [1759407310.0478] manager: (tapc4044870-32): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/124)
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.050 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.054 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.055 2 INFO os_vif [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:a7:91:0f,bridge_name='br-int',has_traffic_filtering=True,id=c4044870-326c-4aa1-a6b5-c4bd8e48ea5c,network=Network(44157705-b81b-4ce5-a7fb-27ef102009e9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapc4044870-32')
Oct 02 12:15:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:10.063 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[981f8b5e-5a8e-46b7-9eff-17595c8c3601]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:10.065 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7555c12c-afc9-45ed-88e3-e8b3ac2483d8]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:10.078 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f0301b88-f8b4-4b3e-83ca-5262c69535d4]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 529660, 'reachable_time': 28124, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 231117, 'error': None, 'target': 'ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:10.083 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:15:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:10.083 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[6f6b1031-d137-402e-be88-5f977942f5ab]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:10 compute-0 systemd[1]: run-netns-ovnmeta\x2dd6de4737\x2dca60\x2d4c8d\x2dbfd5\x2d687f9366ec8b.mount: Deactivated successfully.
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.094 2 INFO nova.compute.manager [None req-96a11536-b7ee-4b88-846c-5ff41ac9217c def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Took 0.40 seconds to destroy the instance on the hypervisor.
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.095 2 DEBUG oslo.service.loopingcall [None req-96a11536-b7ee-4b88-846c-5ff41ac9217c def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.096 2 DEBUG nova.compute.manager [-] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.096 2 DEBUG nova.network.neutron [-] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.119 2 DEBUG nova.virt.libvirt.driver [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.119 2 DEBUG nova.virt.libvirt.driver [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.119 2 DEBUG nova.virt.libvirt.driver [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] No VIF found with MAC fa:16:3e:76:5d:e6, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.120 2 DEBUG nova.virt.libvirt.driver [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] No VIF found with MAC fa:16:3e:a7:91:0f, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.120 2 INFO nova.virt.libvirt.driver [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Using config drive
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.180 2 DEBUG nova.compute.manager [req-9f20d6cf-1631-4db9-afe9-a1d5c5f1ea1e req-5f855be3-1469-4271-9fca-407b7529e233 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Received event network-vif-unplugged-b1b379f4-7eb3-40e5-8edd-d903c05484af external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.181 2 DEBUG oslo_concurrency.lockutils [req-9f20d6cf-1631-4db9-afe9-a1d5c5f1ea1e req-5f855be3-1469-4271-9fca-407b7529e233 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "6e45ea08-64c1-4434-9d80-94d4b7cec844-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.182 2 DEBUG oslo_concurrency.lockutils [req-9f20d6cf-1631-4db9-afe9-a1d5c5f1ea1e req-5f855be3-1469-4271-9fca-407b7529e233 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6e45ea08-64c1-4434-9d80-94d4b7cec844-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.182 2 DEBUG oslo_concurrency.lockutils [req-9f20d6cf-1631-4db9-afe9-a1d5c5f1ea1e req-5f855be3-1469-4271-9fca-407b7529e233 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6e45ea08-64c1-4434-9d80-94d4b7cec844-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.182 2 DEBUG nova.compute.manager [req-9f20d6cf-1631-4db9-afe9-a1d5c5f1ea1e req-5f855be3-1469-4271-9fca-407b7529e233 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] No waiting events found dispatching network-vif-unplugged-b1b379f4-7eb3-40e5-8edd-d903c05484af pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.182 2 DEBUG nova.compute.manager [req-9f20d6cf-1631-4db9-afe9-a1d5c5f1ea1e req-5f855be3-1469-4271-9fca-407b7529e233 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Received event network-vif-unplugged-b1b379f4-7eb3-40e5-8edd-d903c05484af for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:15:10 compute-0 nova_compute[192079]: 2025-10-02 12:15:10.957 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:11 compute-0 nova_compute[192079]: 2025-10-02 12:15:11.586 2 INFO nova.virt.libvirt.driver [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Creating config drive at /var/lib/nova/instances/5fd0efb0-7a09-4760-8a2c-23ab235018f2/disk.config
Oct 02 12:15:11 compute-0 nova_compute[192079]: 2025-10-02 12:15:11.591 2 DEBUG oslo_concurrency.processutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/5fd0efb0-7a09-4760-8a2c-23ab235018f2/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpjuno3t_d execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:15:11 compute-0 nova_compute[192079]: 2025-10-02 12:15:11.735 2 DEBUG oslo_concurrency.processutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/5fd0efb0-7a09-4760-8a2c-23ab235018f2/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpjuno3t_d" returned: 0 in 0.144s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:15:11 compute-0 kernel: tapadd21826-27: entered promiscuous mode
Oct 02 12:15:11 compute-0 systemd-udevd[231033]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:15:11 compute-0 NetworkManager[51160]: <info>  [1759407311.8254] manager: (tapadd21826-27): new Tun device (/org/freedesktop/NetworkManager/Devices/125)
Oct 02 12:15:11 compute-0 nova_compute[192079]: 2025-10-02 12:15:11.832 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:11 compute-0 ovn_controller[94336]: 2025-10-02T12:15:11Z|00244|binding|INFO|Claiming lport add21826-27b9-48e6-b6bd-da40856e1eb0 for this chassis.
Oct 02 12:15:11 compute-0 ovn_controller[94336]: 2025-10-02T12:15:11Z|00245|binding|INFO|add21826-27b9-48e6-b6bd-da40856e1eb0: Claiming fa:16:3e:76:5d:e6 10.100.0.197
Oct 02 12:15:11 compute-0 NetworkManager[51160]: <info>  [1759407311.8411] manager: (tapc4044870-32): new Tun device (/org/freedesktop/NetworkManager/Devices/126)
Oct 02 12:15:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:11.841 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:76:5d:e6 10.100.0.197'], port_security=['fa:16:3e:76:5d:e6 10.100.0.197'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.197/24', 'neutron:device_id': '5fd0efb0-7a09-4760-8a2c-23ab235018f2', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-b6540487-e583-4697-ba62-6db6c44a9c42', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '11be1361f6f44b10a6efea8fccf616aa', 'neutron:revision_number': '2', 'neutron:security_group_ids': '8a0b041d-c4b2-499a-b557-418346b0314a', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=6109bd00-7f42-46ba-9a18-4f359f323b31, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=2, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=add21826-27b9-48e6-b6bd-da40856e1eb0) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:15:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:11.842 103294 INFO neutron.agent.ovn.metadata.agent [-] Port add21826-27b9-48e6-b6bd-da40856e1eb0 in datapath b6540487-e583-4697-ba62-6db6c44a9c42 bound to our chassis
Oct 02 12:15:11 compute-0 systemd-udevd[231034]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:15:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:11.844 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network b6540487-e583-4697-ba62-6db6c44a9c42
Oct 02 12:15:11 compute-0 NetworkManager[51160]: <info>  [1759407311.8457] device (tapadd21826-27): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:15:11 compute-0 NetworkManager[51160]: <info>  [1759407311.8467] device (tapadd21826-27): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:15:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:11.857 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5449a05f-fe06-4541-ac22-bce5a78cf146]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:11.858 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tapb6540487-e1 in ovnmeta-b6540487-e583-4697-ba62-6db6c44a9c42 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:15:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:11.864 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tapb6540487-e0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:15:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:11.864 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[836bafd8-7e80-47dd-8afb-ff35b9b1f981]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:11.865 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a2271145-65c0-43f3-86a2-00062a75fdb8]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:11 compute-0 kernel: tapc4044870-32: entered promiscuous mode
Oct 02 12:15:11 compute-0 NetworkManager[51160]: <info>  [1759407311.8680] device (tapc4044870-32): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:15:11 compute-0 NetworkManager[51160]: <info>  [1759407311.8696] device (tapc4044870-32): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:15:11 compute-0 ovn_controller[94336]: 2025-10-02T12:15:11Z|00246|binding|INFO|Claiming lport c4044870-326c-4aa1-a6b5-c4bd8e48ea5c for this chassis.
Oct 02 12:15:11 compute-0 nova_compute[192079]: 2025-10-02 12:15:11.870 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:11 compute-0 ovn_controller[94336]: 2025-10-02T12:15:11Z|00247|binding|INFO|c4044870-326c-4aa1-a6b5-c4bd8e48ea5c: Claiming fa:16:3e:a7:91:0f 10.100.1.33
Oct 02 12:15:11 compute-0 nova_compute[192079]: 2025-10-02 12:15:11.876 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:11.876 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[2353116b-5a3c-4fdb-9861-f0b32299f02c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:11 compute-0 nova_compute[192079]: 2025-10-02 12:15:11.878 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:11 compute-0 ovn_controller[94336]: 2025-10-02T12:15:11Z|00248|binding|INFO|Setting lport add21826-27b9-48e6-b6bd-da40856e1eb0 ovn-installed in OVS
Oct 02 12:15:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:11.884 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:a7:91:0f 10.100.1.33'], port_security=['fa:16:3e:a7:91:0f 10.100.1.33'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.1.33/24', 'neutron:device_id': '5fd0efb0-7a09-4760-8a2c-23ab235018f2', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-44157705-b81b-4ce5-a7fb-27ef102009e9', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '11be1361f6f44b10a6efea8fccf616aa', 'neutron:revision_number': '2', 'neutron:security_group_ids': '8a0b041d-c4b2-499a-b557-418346b0314a', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=f4ae280b-e04b-4f3d-bfe8-1556531970ba, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=2, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=c4044870-326c-4aa1-a6b5-c4bd8e48ea5c) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:15:11 compute-0 ovn_controller[94336]: 2025-10-02T12:15:11Z|00249|binding|INFO|Setting lport add21826-27b9-48e6-b6bd-da40856e1eb0 up in Southbound
Oct 02 12:15:11 compute-0 systemd-machined[152150]: New machine qemu-36-instance-0000004b.
Oct 02 12:15:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:11.907 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[791a02e1-c903-4523-8fac-62eeb2619f8e]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:11 compute-0 nova_compute[192079]: 2025-10-02 12:15:11.911 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:11 compute-0 nova_compute[192079]: 2025-10-02 12:15:11.921 2 DEBUG nova.network.neutron [-] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:15:11 compute-0 nova_compute[192079]: 2025-10-02 12:15:11.938 2 INFO nova.compute.manager [-] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Took 1.84 seconds to deallocate network for instance.
Oct 02 12:15:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:11.940 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[16b9c33f-f5c3-4f9d-9a14-564a3487ef3a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:11 compute-0 NetworkManager[51160]: <info>  [1759407311.9476] manager: (tapb6540487-e0): new Veth device (/org/freedesktop/NetworkManager/Devices/127)
Oct 02 12:15:11 compute-0 ovn_controller[94336]: 2025-10-02T12:15:11Z|00250|binding|INFO|Setting lport c4044870-326c-4aa1-a6b5-c4bd8e48ea5c ovn-installed in OVS
Oct 02 12:15:11 compute-0 ovn_controller[94336]: 2025-10-02T12:15:11Z|00251|binding|INFO|Setting lport c4044870-326c-4aa1-a6b5-c4bd8e48ea5c up in Southbound
Oct 02 12:15:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:11.946 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8bf9b7c4-5ba4-4835-8c83-bf3edb53152b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:11 compute-0 systemd[1]: Started Virtual Machine qemu-36-instance-0000004b.
Oct 02 12:15:11 compute-0 nova_compute[192079]: 2025-10-02 12:15:11.949 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:11.982 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[0ddf63c4-31e0-47a2-9369-2327aaaf16ac]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:11.985 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[5e74b914-e7e3-46e0-a533-807dabbdbc82]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:12 compute-0 NetworkManager[51160]: <info>  [1759407312.0052] device (tapb6540487-e0): carrier: link connected
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:12.010 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[c8ae77af-621d-4f21-8967-4b700504e2ee]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:12.025 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[02c4c384-eaac-49b8-833d-f00548abf975]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapb6540487-e1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:2e:b5:08'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 78], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 530963, 'reachable_time': 27926, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 231171, 'error': None, 'target': 'ovnmeta-b6540487-e583-4697-ba62-6db6c44a9c42', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.030 2 DEBUG oslo_concurrency.lockutils [None req-96a11536-b7ee-4b88-846c-5ff41ac9217c def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.030 2 DEBUG oslo_concurrency.lockutils [None req-96a11536-b7ee-4b88-846c-5ff41ac9217c def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.034 2 DEBUG oslo_concurrency.lockutils [None req-96a11536-b7ee-4b88-846c-5ff41ac9217c def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.004s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:12.043 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a62538e9-363a-4b80-b3e2-5a83450fde7e]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe2e:b508'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 530963, 'tstamp': 530963}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 231173, 'error': None, 'target': 'ovnmeta-b6540487-e583-4697-ba62-6db6c44a9c42', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:12.061 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ddde2ee8-bdb1-4d4a-9ffc-b0c5ded285cb]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapb6540487-e1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:2e:b5:08'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 78], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 530963, 'reachable_time': 27926, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 231174, 'error': None, 'target': 'ovnmeta-b6540487-e583-4697-ba62-6db6c44a9c42', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.067 2 INFO nova.scheduler.client.report [None req-96a11536-b7ee-4b88-846c-5ff41ac9217c def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Deleted allocations for instance 6e45ea08-64c1-4434-9d80-94d4b7cec844
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:12.092 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ac94f4d1-c9e6-441a-aa60-bcc026708e67]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:12.158 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[29d09697-93b8-40aa-b2f2-4593e3bc2479]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:12.160 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapb6540487-e0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:12.160 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:12.161 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapb6540487-e0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:12 compute-0 NetworkManager[51160]: <info>  [1759407312.1636] manager: (tapb6540487-e0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/128)
Oct 02 12:15:12 compute-0 kernel: tapb6540487-e0: entered promiscuous mode
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.162 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.165 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:12.166 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tapb6540487-e0, col_values=(('external_ids', {'iface-id': '662ffb7e-b55b-4ddd-afa0-31b551ecbca9'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:12 compute-0 ovn_controller[94336]: 2025-10-02T12:15:12Z|00252|binding|INFO|Releasing lport 662ffb7e-b55b-4ddd-afa0-31b551ecbca9 from this chassis (sb_readonly=0)
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.168 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.184 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:12.185 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/b6540487-e583-4697-ba62-6db6c44a9c42.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/b6540487-e583-4697-ba62-6db6c44a9c42.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:12.186 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[dcfbc077-5f62-437c-8a75-4f6dd3d0b2c3]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:12.187 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-b6540487-e583-4697-ba62-6db6c44a9c42
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/b6540487-e583-4697-ba62-6db6c44a9c42.pid.haproxy
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID b6540487-e583-4697-ba62-6db6c44a9c42
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:15:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:12.190 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-b6540487-e583-4697-ba62-6db6c44a9c42', 'env', 'PROCESS_TAG=haproxy-b6540487-e583-4697-ba62-6db6c44a9c42', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/b6540487-e583-4697-ba62-6db6c44a9c42.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.202 2 DEBUG oslo_concurrency.lockutils [None req-96a11536-b7ee-4b88-846c-5ff41ac9217c def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lock "6e45ea08-64c1-4434-9d80-94d4b7cec844" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 2.559s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.264 2 DEBUG nova.compute.manager [req-c8993fb0-9556-4171-ae43-d91fbd116219 req-14619707-b8f1-433d-8dd2-f60c87906912 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Received event network-vif-plugged-add21826-27b9-48e6-b6bd-da40856e1eb0 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.264 2 DEBUG oslo_concurrency.lockutils [req-c8993fb0-9556-4171-ae43-d91fbd116219 req-14619707-b8f1-433d-8dd2-f60c87906912 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.265 2 DEBUG oslo_concurrency.lockutils [req-c8993fb0-9556-4171-ae43-d91fbd116219 req-14619707-b8f1-433d-8dd2-f60c87906912 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.265 2 DEBUG oslo_concurrency.lockutils [req-c8993fb0-9556-4171-ae43-d91fbd116219 req-14619707-b8f1-433d-8dd2-f60c87906912 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.266 2 DEBUG nova.compute.manager [req-c8993fb0-9556-4171-ae43-d91fbd116219 req-14619707-b8f1-433d-8dd2-f60c87906912 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Processing event network-vif-plugged-add21826-27b9-48e6-b6bd-da40856e1eb0 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.271 2 DEBUG nova.compute.manager [req-d2a85d33-60fe-45ff-b789-6f2df6283263 req-e319be47-e736-46e5-b25c-29017ca1056b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Received event network-vif-plugged-c4044870-326c-4aa1-a6b5-c4bd8e48ea5c external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.271 2 DEBUG oslo_concurrency.lockutils [req-d2a85d33-60fe-45ff-b789-6f2df6283263 req-e319be47-e736-46e5-b25c-29017ca1056b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.272 2 DEBUG oslo_concurrency.lockutils [req-d2a85d33-60fe-45ff-b789-6f2df6283263 req-e319be47-e736-46e5-b25c-29017ca1056b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.272 2 DEBUG oslo_concurrency.lockutils [req-d2a85d33-60fe-45ff-b789-6f2df6283263 req-e319be47-e736-46e5-b25c-29017ca1056b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.272 2 DEBUG nova.compute.manager [req-d2a85d33-60fe-45ff-b789-6f2df6283263 req-e319be47-e736-46e5-b25c-29017ca1056b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Processing event network-vif-plugged-c4044870-326c-4aa1-a6b5-c4bd8e48ea5c _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.389 2 DEBUG nova.compute.manager [req-6564543e-59d9-426c-9807-e7f043e0dd58 req-449dc6e4-43e6-4d9c-a92a-3daa232fc822 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Received event network-vif-plugged-b1b379f4-7eb3-40e5-8edd-d903c05484af external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.390 2 DEBUG oslo_concurrency.lockutils [req-6564543e-59d9-426c-9807-e7f043e0dd58 req-449dc6e4-43e6-4d9c-a92a-3daa232fc822 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "6e45ea08-64c1-4434-9d80-94d4b7cec844-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.391 2 DEBUG oslo_concurrency.lockutils [req-6564543e-59d9-426c-9807-e7f043e0dd58 req-449dc6e4-43e6-4d9c-a92a-3daa232fc822 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6e45ea08-64c1-4434-9d80-94d4b7cec844-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.392 2 DEBUG oslo_concurrency.lockutils [req-6564543e-59d9-426c-9807-e7f043e0dd58 req-449dc6e4-43e6-4d9c-a92a-3daa232fc822 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6e45ea08-64c1-4434-9d80-94d4b7cec844-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.392 2 DEBUG nova.compute.manager [req-6564543e-59d9-426c-9807-e7f043e0dd58 req-449dc6e4-43e6-4d9c-a92a-3daa232fc822 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] No waiting events found dispatching network-vif-plugged-b1b379f4-7eb3-40e5-8edd-d903c05484af pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.393 2 WARNING nova.compute.manager [req-6564543e-59d9-426c-9807-e7f043e0dd58 req-449dc6e4-43e6-4d9c-a92a-3daa232fc822 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Received unexpected event network-vif-plugged-b1b379f4-7eb3-40e5-8edd-d903c05484af for instance with vm_state deleted and task_state None.
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.394 2 DEBUG nova.compute.manager [req-6564543e-59d9-426c-9807-e7f043e0dd58 req-449dc6e4-43e6-4d9c-a92a-3daa232fc822 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Received event network-vif-deleted-b1b379f4-7eb3-40e5-8edd-d903c05484af external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.637 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407312.6362116, 5fd0efb0-7a09-4760-8a2c-23ab235018f2 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.637 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] VM Started (Lifecycle Event)
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.639 2 DEBUG nova.compute.manager [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Instance event wait completed in 0 seconds for network-vif-plugged,network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.643 2 DEBUG nova.virt.libvirt.driver [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.646 2 INFO nova.virt.libvirt.driver [-] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Instance spawned successfully.
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.647 2 DEBUG nova.virt.libvirt.driver [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:15:12 compute-0 podman[231214]: 2025-10-02 12:15:12.55532782 +0000 UTC m=+0.023286645 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.679 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.687 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.691 2 DEBUG nova.virt.libvirt.driver [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.692 2 DEBUG nova.virt.libvirt.driver [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.692 2 DEBUG nova.virt.libvirt.driver [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.692 2 DEBUG nova.virt.libvirt.driver [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.693 2 DEBUG nova.virt.libvirt.driver [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.694 2 DEBUG nova.virt.libvirt.driver [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.714 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.714 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407312.6364732, 5fd0efb0-7a09-4760-8a2c-23ab235018f2 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.715 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] VM Paused (Lifecycle Event)
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.762 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.766 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407312.6425874, 5fd0efb0-7a09-4760-8a2c-23ab235018f2 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.767 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] VM Resumed (Lifecycle Event)
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.795 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.800 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.824 2 INFO nova.compute.manager [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Took 11.54 seconds to spawn the instance on the hypervisor.
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.824 2 DEBUG nova.compute.manager [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.825 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:15:12 compute-0 podman[231214]: 2025-10-02 12:15:12.876735534 +0000 UTC m=+0.344694349 container create a6fa93effd4fe39c69dc12fa7143fc30dd84de99cea18b6fb82237dd73549cd7 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-b6540487-e583-4697-ba62-6db6c44a9c42, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, tcib_managed=true, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.911 2 INFO nova.compute.manager [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Took 12.19 seconds to build instance.
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.935 2 DEBUG oslo_concurrency.lockutils [None req-39070ec3-80d2-4296-aea3-e6257cc600fb 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 12.380s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:12 compute-0 systemd[1]: Started libpod-conmon-a6fa93effd4fe39c69dc12fa7143fc30dd84de99cea18b6fb82237dd73549cd7.scope.
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.996 2 DEBUG nova.network.neutron [req-173bab79-d636-4aa5-89b5-d97201999c03 req-23771a2c-f704-4736-9a3f-ef0a41800c5e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Updated VIF entry in instance network info cache for port c4044870-326c-4aa1-a6b5-c4bd8e48ea5c. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:15:12 compute-0 nova_compute[192079]: 2025-10-02 12:15:12.998 2 DEBUG nova.network.neutron [req-173bab79-d636-4aa5-89b5-d97201999c03 req-23771a2c-f704-4736-9a3f-ef0a41800c5e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Updating instance_info_cache with network_info: [{"id": "add21826-27b9-48e6-b6bd-da40856e1eb0", "address": "fa:16:3e:76:5d:e6", "network": {"id": "b6540487-e583-4697-ba62-6db6c44a9c42", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1408659145", "subnets": [{"cidr": "10.100.0.0/24", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.197", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "11be1361f6f44b10a6efea8fccf616aa", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapadd21826-27", "ovs_interfaceid": "add21826-27b9-48e6-b6bd-da40856e1eb0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c4044870-326c-4aa1-a6b5-c4bd8e48ea5c", "address": "fa:16:3e:a7:91:0f", "network": {"id": "44157705-b81b-4ce5-a7fb-27ef102009e9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1640137022", "subnets": [{"cidr": "10.100.1.0/24", "dns": [], "gateway": {"address": "10.100.1.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.1.33", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "11be1361f6f44b10a6efea8fccf616aa", "mtu": 
1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4044870-32", "ovs_interfaceid": "c4044870-326c-4aa1-a6b5-c4bd8e48ea5c", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:15:12 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:15:13 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/dd5abf184291062bf27839ae79341836852656f33ca90da3a7fd83dd3c0802d5/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:15:13 compute-0 nova_compute[192079]: 2025-10-02 12:15:13.015 2 DEBUG oslo_concurrency.lockutils [req-173bab79-d636-4aa5-89b5-d97201999c03 req-23771a2c-f704-4736-9a3f-ef0a41800c5e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-5fd0efb0-7a09-4760-8a2c-23ab235018f2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:15:13 compute-0 podman[231214]: 2025-10-02 12:15:13.179562851 +0000 UTC m=+0.647521686 container init a6fa93effd4fe39c69dc12fa7143fc30dd84de99cea18b6fb82237dd73549cd7 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-b6540487-e583-4697-ba62-6db6c44a9c42, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:15:13 compute-0 podman[231214]: 2025-10-02 12:15:13.185383259 +0000 UTC m=+0.653342064 container start a6fa93effd4fe39c69dc12fa7143fc30dd84de99cea18b6fb82237dd73549cd7 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-b6540487-e583-4697-ba62-6db6c44a9c42, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.vendor=CentOS)
Oct 02 12:15:13 compute-0 neutron-haproxy-ovnmeta-b6540487-e583-4697-ba62-6db6c44a9c42[231229]: [NOTICE]   (231233) : New worker (231235) forked
Oct 02 12:15:13 compute-0 neutron-haproxy-ovnmeta-b6540487-e583-4697-ba62-6db6c44a9c42[231229]: [NOTICE]   (231233) : Loading success.
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:13.332 103294 INFO neutron.agent.ovn.metadata.agent [-] Port c4044870-326c-4aa1-a6b5-c4bd8e48ea5c in datapath 44157705-b81b-4ce5-a7fb-27ef102009e9 unbound from our chassis
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:13.333 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 44157705-b81b-4ce5-a7fb-27ef102009e9
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:13.342 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[525a2a13-90af-45a8-964c-df1128054746]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:13.343 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap44157705-b1 in ovnmeta-44157705-b81b-4ce5-a7fb-27ef102009e9 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:13.345 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap44157705-b0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:13.345 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[82855ba1-faa0-48d8-a738-f4e55795725c]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:13.346 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a746bf73-00f9-491d-aa72-61796239db9d]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:13.356 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[8725bf19-c050-471b-a7cc-29278db1b8c3]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:13.378 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3b0342ef-b458-44f7-b155-f20718380c62]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:13.414 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[bcc283a5-da41-42a2-8c61-1680d04bf0d8]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:13 compute-0 NetworkManager[51160]: <info>  [1759407313.4310] manager: (tap44157705-b0): new Veth device (/org/freedesktop/NetworkManager/Devices/129)
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:13.430 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[df3ded59-9af0-4d43-a8fd-203a5f00f2aa]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:13 compute-0 systemd-udevd[231251]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:13.477 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[3fdda400-7715-4334-a9ba-78ec262fd095]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:13.481 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[13d8dff4-1d2e-4948-a15c-e8d621867e86]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:13 compute-0 NetworkManager[51160]: <info>  [1759407313.5072] device (tap44157705-b0): carrier: link connected
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:13.513 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[235dbcaf-5110-4a69-885f-1838f7179ee4]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:13.535 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ea5af3f6-847d-4712-a8ba-a4c666477fe8]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap44157705-b1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:12:85:ad'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 2, 'rx_bytes': 110, 'tx_bytes': 176, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 2, 'rx_bytes': 110, 'tx_bytes': 176, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 79], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 531113, 'reachable_time': 40723, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 2, 'outoctets': 148, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 2, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 148, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 2, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 231270, 'error': None, 'target': 'ovnmeta-44157705-b81b-4ce5-a7fb-27ef102009e9', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:13.550 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5edeeca2-bff3-46de-8f74-d11f68650132]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe12:85ad'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 531113, 'tstamp': 531113}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 231271, 'error': None, 'target': 'ovnmeta-44157705-b81b-4ce5-a7fb-27ef102009e9', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:13.574 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0c0fdfdd-3b0a-4807-b9b4-32a3613eaab6]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap44157705-b1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:12:85:ad'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 2, 'rx_bytes': 110, 'tx_bytes': 176, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 2, 'rx_bytes': 110, 'tx_bytes': 176, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 79], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 531113, 'reachable_time': 40723, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 2, 'outoctets': 148, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 2, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 148, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 2, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 231272, 'error': None, 'target': 'ovnmeta-44157705-b81b-4ce5-a7fb-27ef102009e9', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:13.610 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[bab79309-5ea2-47e4-8731-4cb133f6e5ba]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:13.670 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b1d1aaea-6473-41ec-9413-783f4fbed5ab]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:13.672 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap44157705-b0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:13.672 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:13.672 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap44157705-b0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:13 compute-0 kernel: tap44157705-b0: entered promiscuous mode
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:13.677 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap44157705-b0, col_values=(('external_ids', {'iface-id': '0efdf547-de6b-49a5-a910-d4ddea0a77c3'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:13.679 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/44157705-b81b-4ce5-a7fb-27ef102009e9.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/44157705-b81b-4ce5-a7fb-27ef102009e9.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:15:13 compute-0 nova_compute[192079]: 2025-10-02 12:15:13.681 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:13 compute-0 NetworkManager[51160]: <info>  [1759407313.6915] manager: (tap44157705-b0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/130)
Oct 02 12:15:13 compute-0 ovn_controller[94336]: 2025-10-02T12:15:13Z|00253|binding|INFO|Releasing lport 0efdf547-de6b-49a5-a910-d4ddea0a77c3 from this chassis (sb_readonly=0)
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:13.693 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1a1a2898-22ef-4b45-afa5-a0b1caebdd57]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:13.694 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-44157705-b81b-4ce5-a7fb-27ef102009e9
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/44157705-b81b-4ce5-a7fb-27ef102009e9.pid.haproxy
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 44157705-b81b-4ce5-a7fb-27ef102009e9
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:15:13 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:13.695 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-44157705-b81b-4ce5-a7fb-27ef102009e9', 'env', 'PROCESS_TAG=haproxy-44157705-b81b-4ce5-a7fb-27ef102009e9', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/44157705-b81b-4ce5-a7fb-27ef102009e9.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:15:13 compute-0 nova_compute[192079]: 2025-10-02 12:15:13.710 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:14 compute-0 podman[231303]: 2025-10-02 12:15:14.044077202 +0000 UTC m=+0.029019963 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:15:14 compute-0 nova_compute[192079]: 2025-10-02 12:15:14.397 2 DEBUG nova.compute.manager [req-d8215de0-c8fd-4048-9d59-cdd5091bda16 req-a1fdc912-a8a1-486a-a16e-333986f39cb5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Received event network-vif-plugged-c4044870-326c-4aa1-a6b5-c4bd8e48ea5c external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:15:14 compute-0 nova_compute[192079]: 2025-10-02 12:15:14.398 2 DEBUG oslo_concurrency.lockutils [req-d8215de0-c8fd-4048-9d59-cdd5091bda16 req-a1fdc912-a8a1-486a-a16e-333986f39cb5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:14 compute-0 nova_compute[192079]: 2025-10-02 12:15:14.399 2 DEBUG oslo_concurrency.lockutils [req-d8215de0-c8fd-4048-9d59-cdd5091bda16 req-a1fdc912-a8a1-486a-a16e-333986f39cb5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:14 compute-0 nova_compute[192079]: 2025-10-02 12:15:14.399 2 DEBUG oslo_concurrency.lockutils [req-d8215de0-c8fd-4048-9d59-cdd5091bda16 req-a1fdc912-a8a1-486a-a16e-333986f39cb5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:14 compute-0 nova_compute[192079]: 2025-10-02 12:15:14.399 2 DEBUG nova.compute.manager [req-d8215de0-c8fd-4048-9d59-cdd5091bda16 req-a1fdc912-a8a1-486a-a16e-333986f39cb5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] No waiting events found dispatching network-vif-plugged-c4044870-326c-4aa1-a6b5-c4bd8e48ea5c pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:15:14 compute-0 nova_compute[192079]: 2025-10-02 12:15:14.400 2 WARNING nova.compute.manager [req-d8215de0-c8fd-4048-9d59-cdd5091bda16 req-a1fdc912-a8a1-486a-a16e-333986f39cb5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Received unexpected event network-vif-plugged-c4044870-326c-4aa1-a6b5-c4bd8e48ea5c for instance with vm_state active and task_state None.
Oct 02 12:15:14 compute-0 nova_compute[192079]: 2025-10-02 12:15:14.503 2 DEBUG nova.compute.manager [req-510b07d6-8640-48c7-ace6-b37641cb4951 req-0572e758-971f-4638-b002-ee30f0e5bc89 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Received event network-vif-plugged-add21826-27b9-48e6-b6bd-da40856e1eb0 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:15:14 compute-0 nova_compute[192079]: 2025-10-02 12:15:14.504 2 DEBUG oslo_concurrency.lockutils [req-510b07d6-8640-48c7-ace6-b37641cb4951 req-0572e758-971f-4638-b002-ee30f0e5bc89 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:14 compute-0 nova_compute[192079]: 2025-10-02 12:15:14.505 2 DEBUG oslo_concurrency.lockutils [req-510b07d6-8640-48c7-ace6-b37641cb4951 req-0572e758-971f-4638-b002-ee30f0e5bc89 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:14 compute-0 nova_compute[192079]: 2025-10-02 12:15:14.506 2 DEBUG oslo_concurrency.lockutils [req-510b07d6-8640-48c7-ace6-b37641cb4951 req-0572e758-971f-4638-b002-ee30f0e5bc89 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:14 compute-0 nova_compute[192079]: 2025-10-02 12:15:14.506 2 DEBUG nova.compute.manager [req-510b07d6-8640-48c7-ace6-b37641cb4951 req-0572e758-971f-4638-b002-ee30f0e5bc89 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] No waiting events found dispatching network-vif-plugged-add21826-27b9-48e6-b6bd-da40856e1eb0 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:15:14 compute-0 nova_compute[192079]: 2025-10-02 12:15:14.507 2 WARNING nova.compute.manager [req-510b07d6-8640-48c7-ace6-b37641cb4951 req-0572e758-971f-4638-b002-ee30f0e5bc89 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Received unexpected event network-vif-plugged-add21826-27b9-48e6-b6bd-da40856e1eb0 for instance with vm_state active and task_state None.
Oct 02 12:15:14 compute-0 podman[231303]: 2025-10-02 12:15:14.632558827 +0000 UTC m=+0.617501238 container create ff711c17f5509e7943010a7af964031a1016a129baec19884116ab8c2089a6dd (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-44157705-b81b-4ce5-a7fb-27ef102009e9, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0)
Oct 02 12:15:14 compute-0 systemd[1]: Started libpod-conmon-ff711c17f5509e7943010a7af964031a1016a129baec19884116ab8c2089a6dd.scope.
Oct 02 12:15:14 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:15:14 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/cbf66784d0bf0fb78084d9240304d39cf12573fe939296bece417b65bc817d69/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.048 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:15 compute-0 podman[231303]: 2025-10-02 12:15:15.05957382 +0000 UTC m=+1.044516251 container init ff711c17f5509e7943010a7af964031a1016a129baec19884116ab8c2089a6dd (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-44157705-b81b-4ce5-a7fb-27ef102009e9, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2)
Oct 02 12:15:15 compute-0 podman[231303]: 2025-10-02 12:15:15.065002768 +0000 UTC m=+1.049945189 container start ff711c17f5509e7943010a7af964031a1016a129baec19884116ab8c2089a6dd (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-44157705-b81b-4ce5-a7fb-27ef102009e9, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.vendor=CentOS)
Oct 02 12:15:15 compute-0 neutron-haproxy-ovnmeta-44157705-b81b-4ce5-a7fb-27ef102009e9[231318]: [NOTICE]   (231322) : New worker (231324) forked
Oct 02 12:15:15 compute-0 neutron-haproxy-ovnmeta-44157705-b81b-4ce5-a7fb-27ef102009e9[231318]: [NOTICE]   (231322) : Loading success.
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.356 2 DEBUG oslo_concurrency.lockutils [None req-0784480f-3a1b-4f58-953f-bbe2d648b364 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Acquiring lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.356 2 DEBUG oslo_concurrency.lockutils [None req-0784480f-3a1b-4f58-953f-bbe2d648b364 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.357 2 DEBUG oslo_concurrency.lockutils [None req-0784480f-3a1b-4f58-953f-bbe2d648b364 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Acquiring lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.357 2 DEBUG oslo_concurrency.lockutils [None req-0784480f-3a1b-4f58-953f-bbe2d648b364 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.357 2 DEBUG oslo_concurrency.lockutils [None req-0784480f-3a1b-4f58-953f-bbe2d648b364 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.371 2 INFO nova.compute.manager [None req-0784480f-3a1b-4f58-953f-bbe2d648b364 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Terminating instance
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.384 2 DEBUG nova.compute.manager [None req-0784480f-3a1b-4f58-953f-bbe2d648b364 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:15:15 compute-0 kernel: tapadd21826-27 (unregistering): left promiscuous mode
Oct 02 12:15:15 compute-0 NetworkManager[51160]: <info>  [1759407315.4132] device (tapadd21826-27): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.421 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:15 compute-0 ovn_controller[94336]: 2025-10-02T12:15:15Z|00254|binding|INFO|Releasing lport add21826-27b9-48e6-b6bd-da40856e1eb0 from this chassis (sb_readonly=0)
Oct 02 12:15:15 compute-0 ovn_controller[94336]: 2025-10-02T12:15:15Z|00255|binding|INFO|Setting lport add21826-27b9-48e6-b6bd-da40856e1eb0 down in Southbound
Oct 02 12:15:15 compute-0 ovn_controller[94336]: 2025-10-02T12:15:15Z|00256|binding|INFO|Removing iface tapadd21826-27 ovn-installed in OVS
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.433 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:15 compute-0 kernel: tapc4044870-32 (unregistering): left promiscuous mode
Oct 02 12:15:15 compute-0 NetworkManager[51160]: <info>  [1759407315.4619] device (tapc4044870-32): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.472 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:15 compute-0 ovn_controller[94336]: 2025-10-02T12:15:15Z|00257|binding|INFO|Releasing lport c4044870-326c-4aa1-a6b5-c4bd8e48ea5c from this chassis (sb_readonly=1)
Oct 02 12:15:15 compute-0 ovn_controller[94336]: 2025-10-02T12:15:15Z|00258|binding|INFO|Removing iface tapc4044870-32 ovn-installed in OVS
Oct 02 12:15:15 compute-0 ovn_controller[94336]: 2025-10-02T12:15:15Z|00259|if_status|INFO|Not setting lport c4044870-326c-4aa1-a6b5-c4bd8e48ea5c down as sb is readonly
Oct 02 12:15:15 compute-0 ovn_controller[94336]: 2025-10-02T12:15:15Z|00260|binding|INFO|Setting lport c4044870-326c-4aa1-a6b5-c4bd8e48ea5c down in Southbound
Oct 02 12:15:15 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:15.558 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:76:5d:e6 10.100.0.197'], port_security=['fa:16:3e:76:5d:e6 10.100.0.197'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.197/24', 'neutron:device_id': '5fd0efb0-7a09-4760-8a2c-23ab235018f2', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-b6540487-e583-4697-ba62-6db6c44a9c42', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '11be1361f6f44b10a6efea8fccf616aa', 'neutron:revision_number': '4', 'neutron:security_group_ids': '8a0b041d-c4b2-499a-b557-418346b0314a', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=6109bd00-7f42-46ba-9a18-4f359f323b31, chassis=[], tunnel_key=2, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=add21826-27b9-48e6-b6bd-da40856e1eb0) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:15:15 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:15.560 103294 INFO neutron.agent.ovn.metadata.agent [-] Port add21826-27b9-48e6-b6bd-da40856e1eb0 in datapath b6540487-e583-4697-ba62-6db6c44a9c42 unbound from our chassis
Oct 02 12:15:15 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:15.561 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network b6540487-e583-4697-ba62-6db6c44a9c42, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.562 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:15 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:15.562 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[15fb9988-1f98-4047-a243-b62e7f0366fe]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:15 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:15.563 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-b6540487-e583-4697-ba62-6db6c44a9c42 namespace which is not needed anymore
Oct 02 12:15:15 compute-0 systemd[1]: machine-qemu\x2d36\x2dinstance\x2d0000004b.scope: Deactivated successfully.
Oct 02 12:15:15 compute-0 systemd[1]: machine-qemu\x2d36\x2dinstance\x2d0000004b.scope: Consumed 3.370s CPU time.
Oct 02 12:15:15 compute-0 systemd-machined[152150]: Machine qemu-36-instance-0000004b terminated.
Oct 02 12:15:15 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:15.607 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:a7:91:0f 10.100.1.33'], port_security=['fa:16:3e:a7:91:0f 10.100.1.33'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.1.33/24', 'neutron:device_id': '5fd0efb0-7a09-4760-8a2c-23ab235018f2', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-44157705-b81b-4ce5-a7fb-27ef102009e9', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '11be1361f6f44b10a6efea8fccf616aa', 'neutron:revision_number': '4', 'neutron:security_group_ids': '8a0b041d-c4b2-499a-b557-418346b0314a', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=f4ae280b-e04b-4f3d-bfe8-1556531970ba, chassis=[], tunnel_key=2, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=c4044870-326c-4aa1-a6b5-c4bd8e48ea5c) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:15:15 compute-0 neutron-haproxy-ovnmeta-b6540487-e583-4697-ba62-6db6c44a9c42[231229]: [NOTICE]   (231233) : haproxy version is 2.8.14-c23fe91
Oct 02 12:15:15 compute-0 neutron-haproxy-ovnmeta-b6540487-e583-4697-ba62-6db6c44a9c42[231229]: [NOTICE]   (231233) : path to executable is /usr/sbin/haproxy
Oct 02 12:15:15 compute-0 neutron-haproxy-ovnmeta-b6540487-e583-4697-ba62-6db6c44a9c42[231229]: [WARNING]  (231233) : Exiting Master process...
Oct 02 12:15:15 compute-0 neutron-haproxy-ovnmeta-b6540487-e583-4697-ba62-6db6c44a9c42[231229]: [WARNING]  (231233) : Exiting Master process...
Oct 02 12:15:15 compute-0 neutron-haproxy-ovnmeta-b6540487-e583-4697-ba62-6db6c44a9c42[231229]: [ALERT]    (231233) : Current worker (231235) exited with code 143 (Terminated)
Oct 02 12:15:15 compute-0 neutron-haproxy-ovnmeta-b6540487-e583-4697-ba62-6db6c44a9c42[231229]: [WARNING]  (231233) : All workers exited. Exiting... (0)
Oct 02 12:15:15 compute-0 systemd[1]: libpod-a6fa93effd4fe39c69dc12fa7143fc30dd84de99cea18b6fb82237dd73549cd7.scope: Deactivated successfully.
Oct 02 12:15:15 compute-0 podman[231360]: 2025-10-02 12:15:15.719064031 +0000 UTC m=+0.072850627 container died a6fa93effd4fe39c69dc12fa7143fc30dd84de99cea18b6fb82237dd73549cd7 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-b6540487-e583-4697-ba62-6db6c44a9c42, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:15:15 compute-0 NetworkManager[51160]: <info>  [1759407315.8012] manager: (tapadd21826-27): new Tun device (/org/freedesktop/NetworkManager/Devices/131)
Oct 02 12:15:15 compute-0 NetworkManager[51160]: <info>  [1759407315.8148] manager: (tapc4044870-32): new Tun device (/org/freedesktop/NetworkManager/Devices/132)
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.855 2 INFO nova.virt.libvirt.driver [-] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Instance destroyed successfully.
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.855 2 DEBUG nova.objects.instance [None req-0784480f-3a1b-4f58-953f-bbe2d648b364 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Lazy-loading 'resources' on Instance uuid 5fd0efb0-7a09-4760-8a2c-23ab235018f2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.876 2 DEBUG nova.virt.libvirt.vif [None req-0784480f-3a1b-4f58-953f-bbe2d648b364 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:14:58Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServersTestMultiNic-server-1824088787',display_name='tempest-ServersTestMultiNic-server-1824088787',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverstestmultinic-server-1824088787',id=75,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:15:12Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='11be1361f6f44b10a6efea8fccf616aa',ramdisk_id='',reservation_id='r-4g3fhypv',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',i
mage_min_ram='0',owner_project_name='tempest-ServersTestMultiNic-1305956602',owner_user_name='tempest-ServersTestMultiNic-1305956602-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:15:12Z,user_data=None,user_id='64ab4561f89846cc90cf0ab7f878cbd3',uuid=5fd0efb0-7a09-4760-8a2c-23ab235018f2,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "add21826-27b9-48e6-b6bd-da40856e1eb0", "address": "fa:16:3e:76:5d:e6", "network": {"id": "b6540487-e583-4697-ba62-6db6c44a9c42", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1408659145", "subnets": [{"cidr": "10.100.0.0/24", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.197", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "11be1361f6f44b10a6efea8fccf616aa", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapadd21826-27", "ovs_interfaceid": "add21826-27b9-48e6-b6bd-da40856e1eb0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.877 2 DEBUG nova.network.os_vif_util [None req-0784480f-3a1b-4f58-953f-bbe2d648b364 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Converting VIF {"id": "add21826-27b9-48e6-b6bd-da40856e1eb0", "address": "fa:16:3e:76:5d:e6", "network": {"id": "b6540487-e583-4697-ba62-6db6c44a9c42", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1408659145", "subnets": [{"cidr": "10.100.0.0/24", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.197", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "11be1361f6f44b10a6efea8fccf616aa", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapadd21826-27", "ovs_interfaceid": "add21826-27b9-48e6-b6bd-da40856e1eb0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.878 2 DEBUG nova.network.os_vif_util [None req-0784480f-3a1b-4f58-953f-bbe2d648b364 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:76:5d:e6,bridge_name='br-int',has_traffic_filtering=True,id=add21826-27b9-48e6-b6bd-da40856e1eb0,network=Network(b6540487-e583-4697-ba62-6db6c44a9c42),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapadd21826-27') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.878 2 DEBUG os_vif [None req-0784480f-3a1b-4f58-953f-bbe2d648b364 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:76:5d:e6,bridge_name='br-int',has_traffic_filtering=True,id=add21826-27b9-48e6-b6bd-da40856e1eb0,network=Network(b6540487-e583-4697-ba62-6db6c44a9c42),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapadd21826-27') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.881 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.881 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapadd21826-27, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.883 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.886 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.886 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.888 2 INFO os_vif [None req-0784480f-3a1b-4f58-953f-bbe2d648b364 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:76:5d:e6,bridge_name='br-int',has_traffic_filtering=True,id=add21826-27b9-48e6-b6bd-da40856e1eb0,network=Network(b6540487-e583-4697-ba62-6db6c44a9c42),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapadd21826-27')
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.889 2 DEBUG nova.virt.libvirt.vif [None req-0784480f-3a1b-4f58-953f-bbe2d648b364 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:14:58Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServersTestMultiNic-server-1824088787',display_name='tempest-ServersTestMultiNic-server-1824088787',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverstestmultinic-server-1824088787',id=75,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:15:12Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='11be1361f6f44b10a6efea8fccf616aa',ramdisk_id='',reservation_id='r-4g3fhypv',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',i
mage_min_ram='0',owner_project_name='tempest-ServersTestMultiNic-1305956602',owner_user_name='tempest-ServersTestMultiNic-1305956602-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:15:12Z,user_data=None,user_id='64ab4561f89846cc90cf0ab7f878cbd3',uuid=5fd0efb0-7a09-4760-8a2c-23ab235018f2,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "c4044870-326c-4aa1-a6b5-c4bd8e48ea5c", "address": "fa:16:3e:a7:91:0f", "network": {"id": "44157705-b81b-4ce5-a7fb-27ef102009e9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1640137022", "subnets": [{"cidr": "10.100.1.0/24", "dns": [], "gateway": {"address": "10.100.1.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.1.33", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "11be1361f6f44b10a6efea8fccf616aa", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4044870-32", "ovs_interfaceid": "c4044870-326c-4aa1-a6b5-c4bd8e48ea5c", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.890 2 DEBUG nova.network.os_vif_util [None req-0784480f-3a1b-4f58-953f-bbe2d648b364 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Converting VIF {"id": "c4044870-326c-4aa1-a6b5-c4bd8e48ea5c", "address": "fa:16:3e:a7:91:0f", "network": {"id": "44157705-b81b-4ce5-a7fb-27ef102009e9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1640137022", "subnets": [{"cidr": "10.100.1.0/24", "dns": [], "gateway": {"address": "10.100.1.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.1.33", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "11be1361f6f44b10a6efea8fccf616aa", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4044870-32", "ovs_interfaceid": "c4044870-326c-4aa1-a6b5-c4bd8e48ea5c", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.890 2 DEBUG nova.network.os_vif_util [None req-0784480f-3a1b-4f58-953f-bbe2d648b364 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:a7:91:0f,bridge_name='br-int',has_traffic_filtering=True,id=c4044870-326c-4aa1-a6b5-c4bd8e48ea5c,network=Network(44157705-b81b-4ce5-a7fb-27ef102009e9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapc4044870-32') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.891 2 DEBUG os_vif [None req-0784480f-3a1b-4f58-953f-bbe2d648b364 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:a7:91:0f,bridge_name='br-int',has_traffic_filtering=True,id=c4044870-326c-4aa1-a6b5-c4bd8e48ea5c,network=Network(44157705-b81b-4ce5-a7fb-27ef102009e9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapc4044870-32') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.892 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.892 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapc4044870-32, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.893 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.895 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.896 2 INFO os_vif [None req-0784480f-3a1b-4f58-953f-bbe2d648b364 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:a7:91:0f,bridge_name='br-int',has_traffic_filtering=True,id=c4044870-326c-4aa1-a6b5-c4bd8e48ea5c,network=Network(44157705-b81b-4ce5-a7fb-27ef102009e9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapc4044870-32')
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.897 2 INFO nova.virt.libvirt.driver [None req-0784480f-3a1b-4f58-953f-bbe2d648b364 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Deleting instance files /var/lib/nova/instances/5fd0efb0-7a09-4760-8a2c-23ab235018f2_del
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.898 2 INFO nova.virt.libvirt.driver [None req-0784480f-3a1b-4f58-953f-bbe2d648b364 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Deletion of /var/lib/nova/instances/5fd0efb0-7a09-4760-8a2c-23ab235018f2_del complete
Oct 02 12:15:15 compute-0 nova_compute[192079]: 2025-10-02 12:15:15.959 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.007 2 INFO nova.compute.manager [None req-0784480f-3a1b-4f58-953f-bbe2d648b364 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Took 0.62 seconds to destroy the instance on the hypervisor.
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.008 2 DEBUG oslo.service.loopingcall [None req-0784480f-3a1b-4f58-953f-bbe2d648b364 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.008 2 DEBUG nova.compute.manager [-] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.008 2 DEBUG nova.network.neutron [-] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:15:16 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-a6fa93effd4fe39c69dc12fa7143fc30dd84de99cea18b6fb82237dd73549cd7-userdata-shm.mount: Deactivated successfully.
Oct 02 12:15:16 compute-0 systemd[1]: var-lib-containers-storage-overlay-dd5abf184291062bf27839ae79341836852656f33ca90da3a7fd83dd3c0802d5-merged.mount: Deactivated successfully.
Oct 02 12:15:16 compute-0 podman[231360]: 2025-10-02 12:15:16.298372295 +0000 UTC m=+0.652158871 container cleanup a6fa93effd4fe39c69dc12fa7143fc30dd84de99cea18b6fb82237dd73549cd7 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-b6540487-e583-4697-ba62-6db6c44a9c42, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0)
Oct 02 12:15:16 compute-0 systemd[1]: libpod-conmon-a6fa93effd4fe39c69dc12fa7143fc30dd84de99cea18b6fb82237dd73549cd7.scope: Deactivated successfully.
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.571 2 DEBUG nova.compute.manager [req-b59fa934-25ea-46fd-ab01-9a00ce36e581 req-96d1403b-cc16-4a92-9e11-85a8cb3f2e4d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Received event network-vif-unplugged-c4044870-326c-4aa1-a6b5-c4bd8e48ea5c external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.572 2 DEBUG oslo_concurrency.lockutils [req-b59fa934-25ea-46fd-ab01-9a00ce36e581 req-96d1403b-cc16-4a92-9e11-85a8cb3f2e4d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.572 2 DEBUG oslo_concurrency.lockutils [req-b59fa934-25ea-46fd-ab01-9a00ce36e581 req-96d1403b-cc16-4a92-9e11-85a8cb3f2e4d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.573 2 DEBUG oslo_concurrency.lockutils [req-b59fa934-25ea-46fd-ab01-9a00ce36e581 req-96d1403b-cc16-4a92-9e11-85a8cb3f2e4d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.573 2 DEBUG nova.compute.manager [req-b59fa934-25ea-46fd-ab01-9a00ce36e581 req-96d1403b-cc16-4a92-9e11-85a8cb3f2e4d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] No waiting events found dispatching network-vif-unplugged-c4044870-326c-4aa1-a6b5-c4bd8e48ea5c pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.574 2 DEBUG nova.compute.manager [req-b59fa934-25ea-46fd-ab01-9a00ce36e581 req-96d1403b-cc16-4a92-9e11-85a8cb3f2e4d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Received event network-vif-unplugged-c4044870-326c-4aa1-a6b5-c4bd8e48ea5c for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.575 2 DEBUG nova.compute.manager [req-b59fa934-25ea-46fd-ab01-9a00ce36e581 req-96d1403b-cc16-4a92-9e11-85a8cb3f2e4d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Received event network-vif-plugged-c4044870-326c-4aa1-a6b5-c4bd8e48ea5c external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.575 2 DEBUG oslo_concurrency.lockutils [req-b59fa934-25ea-46fd-ab01-9a00ce36e581 req-96d1403b-cc16-4a92-9e11-85a8cb3f2e4d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.576 2 DEBUG oslo_concurrency.lockutils [req-b59fa934-25ea-46fd-ab01-9a00ce36e581 req-96d1403b-cc16-4a92-9e11-85a8cb3f2e4d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.576 2 DEBUG oslo_concurrency.lockutils [req-b59fa934-25ea-46fd-ab01-9a00ce36e581 req-96d1403b-cc16-4a92-9e11-85a8cb3f2e4d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.577 2 DEBUG nova.compute.manager [req-b59fa934-25ea-46fd-ab01-9a00ce36e581 req-96d1403b-cc16-4a92-9e11-85a8cb3f2e4d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] No waiting events found dispatching network-vif-plugged-c4044870-326c-4aa1-a6b5-c4bd8e48ea5c pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.577 2 WARNING nova.compute.manager [req-b59fa934-25ea-46fd-ab01-9a00ce36e581 req-96d1403b-cc16-4a92-9e11-85a8cb3f2e4d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Received unexpected event network-vif-plugged-c4044870-326c-4aa1-a6b5-c4bd8e48ea5c for instance with vm_state active and task_state deleting.
Oct 02 12:15:16 compute-0 podman[231421]: 2025-10-02 12:15:16.595322002 +0000 UTC m=+0.268152862 container remove a6fa93effd4fe39c69dc12fa7143fc30dd84de99cea18b6fb82237dd73549cd7 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-b6540487-e583-4697-ba62-6db6c44a9c42, org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:15:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:16.601 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8ad3ed4f-ea7d-41a3-89f6-a6c4edef144d]: (4, ('Thu Oct  2 12:15:15 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-b6540487-e583-4697-ba62-6db6c44a9c42 (a6fa93effd4fe39c69dc12fa7143fc30dd84de99cea18b6fb82237dd73549cd7)\na6fa93effd4fe39c69dc12fa7143fc30dd84de99cea18b6fb82237dd73549cd7\nThu Oct  2 12:15:16 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-b6540487-e583-4697-ba62-6db6c44a9c42 (a6fa93effd4fe39c69dc12fa7143fc30dd84de99cea18b6fb82237dd73549cd7)\na6fa93effd4fe39c69dc12fa7143fc30dd84de99cea18b6fb82237dd73549cd7\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:16.602 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[21d0cbb3-71f7-4cbd-acf2-4423d893fcb0]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:16.603 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapb6540487-e0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.619 2 DEBUG nova.compute.manager [req-d2c1f2da-97ae-4325-b397-33999589d009 req-64a9d93c-447d-45e8-988c-a8d3b1c50f93 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Received event network-vif-unplugged-add21826-27b9-48e6-b6bd-da40856e1eb0 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.620 2 DEBUG oslo_concurrency.lockutils [req-d2c1f2da-97ae-4325-b397-33999589d009 req-64a9d93c-447d-45e8-988c-a8d3b1c50f93 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.620 2 DEBUG oslo_concurrency.lockutils [req-d2c1f2da-97ae-4325-b397-33999589d009 req-64a9d93c-447d-45e8-988c-a8d3b1c50f93 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.621 2 DEBUG oslo_concurrency.lockutils [req-d2c1f2da-97ae-4325-b397-33999589d009 req-64a9d93c-447d-45e8-988c-a8d3b1c50f93 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.621 2 DEBUG nova.compute.manager [req-d2c1f2da-97ae-4325-b397-33999589d009 req-64a9d93c-447d-45e8-988c-a8d3b1c50f93 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] No waiting events found dispatching network-vif-unplugged-add21826-27b9-48e6-b6bd-da40856e1eb0 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.622 2 DEBUG nova.compute.manager [req-d2c1f2da-97ae-4325-b397-33999589d009 req-64a9d93c-447d-45e8-988c-a8d3b1c50f93 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Received event network-vif-unplugged-add21826-27b9-48e6-b6bd-da40856e1eb0 for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.622 2 DEBUG nova.compute.manager [req-d2c1f2da-97ae-4325-b397-33999589d009 req-64a9d93c-447d-45e8-988c-a8d3b1c50f93 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Received event network-vif-plugged-add21826-27b9-48e6-b6bd-da40856e1eb0 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.623 2 DEBUG oslo_concurrency.lockutils [req-d2c1f2da-97ae-4325-b397-33999589d009 req-64a9d93c-447d-45e8-988c-a8d3b1c50f93 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.623 2 DEBUG oslo_concurrency.lockutils [req-d2c1f2da-97ae-4325-b397-33999589d009 req-64a9d93c-447d-45e8-988c-a8d3b1c50f93 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.624 2 DEBUG oslo_concurrency.lockutils [req-d2c1f2da-97ae-4325-b397-33999589d009 req-64a9d93c-447d-45e8-988c-a8d3b1c50f93 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.624 2 DEBUG nova.compute.manager [req-d2c1f2da-97ae-4325-b397-33999589d009 req-64a9d93c-447d-45e8-988c-a8d3b1c50f93 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] No waiting events found dispatching network-vif-plugged-add21826-27b9-48e6-b6bd-da40856e1eb0 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.625 2 WARNING nova.compute.manager [req-d2c1f2da-97ae-4325-b397-33999589d009 req-64a9d93c-447d-45e8-988c-a8d3b1c50f93 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Received unexpected event network-vif-plugged-add21826-27b9-48e6-b6bd-da40856e1eb0 for instance with vm_state active and task_state deleting.
Oct 02 12:15:16 compute-0 kernel: tapb6540487-e0: left promiscuous mode
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.691 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:16 compute-0 nova_compute[192079]: 2025-10-02 12:15:16.702 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:16.704 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8cda20d0-38bd-4131-a670-7548c341673c]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:16.742 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[17f9898b-5676-4555-85e8-6c18a96e3795]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:16.743 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7ebd393a-b5f5-4b80-8686-d944867576fc]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:16.757 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ed13eea5-6c88-43f4-b87d-a6fe6f7c9b31]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 530956, 'reachable_time': 44909, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 231439, 'error': None, 'target': 'ovnmeta-b6540487-e583-4697-ba62-6db6c44a9c42', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:16 compute-0 systemd[1]: run-netns-ovnmeta\x2db6540487\x2de583\x2d4697\x2dba62\x2d6db6c44a9c42.mount: Deactivated successfully.
Oct 02 12:15:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:16.760 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-b6540487-e583-4697-ba62-6db6c44a9c42 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:15:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:16.760 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[1dd2ecb9-277b-4bf6-a1ce-4ee6116a108a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:16.761 103294 INFO neutron.agent.ovn.metadata.agent [-] Port c4044870-326c-4aa1-a6b5-c4bd8e48ea5c in datapath 44157705-b81b-4ce5-a7fb-27ef102009e9 unbound from our chassis
Oct 02 12:15:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:16.762 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 44157705-b81b-4ce5-a7fb-27ef102009e9, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:15:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:16.762 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a2b037a2-7b1e-44dc-8530-f376a13c59c4]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:16.763 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-44157705-b81b-4ce5-a7fb-27ef102009e9 namespace which is not needed anymore
Oct 02 12:15:17 compute-0 neutron-haproxy-ovnmeta-44157705-b81b-4ce5-a7fb-27ef102009e9[231318]: [NOTICE]   (231322) : haproxy version is 2.8.14-c23fe91
Oct 02 12:15:17 compute-0 neutron-haproxy-ovnmeta-44157705-b81b-4ce5-a7fb-27ef102009e9[231318]: [NOTICE]   (231322) : path to executable is /usr/sbin/haproxy
Oct 02 12:15:17 compute-0 neutron-haproxy-ovnmeta-44157705-b81b-4ce5-a7fb-27ef102009e9[231318]: [WARNING]  (231322) : Exiting Master process...
Oct 02 12:15:17 compute-0 neutron-haproxy-ovnmeta-44157705-b81b-4ce5-a7fb-27ef102009e9[231318]: [ALERT]    (231322) : Current worker (231324) exited with code 143 (Terminated)
Oct 02 12:15:17 compute-0 neutron-haproxy-ovnmeta-44157705-b81b-4ce5-a7fb-27ef102009e9[231318]: [WARNING]  (231322) : All workers exited. Exiting... (0)
Oct 02 12:15:17 compute-0 systemd[1]: libpod-ff711c17f5509e7943010a7af964031a1016a129baec19884116ab8c2089a6dd.scope: Deactivated successfully.
Oct 02 12:15:17 compute-0 conmon[231318]: conmon ff711c17f5509e794301 <nwarn>: Failed to open cgroups file: /sys/fs/cgroup/machine.slice/libpod-ff711c17f5509e7943010a7af964031a1016a129baec19884116ab8c2089a6dd.scope/container/memory.events
Oct 02 12:15:17 compute-0 podman[231457]: 2025-10-02 12:15:17.025515081 +0000 UTC m=+0.191350237 container died ff711c17f5509e7943010a7af964031a1016a129baec19884116ab8c2089a6dd (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-44157705-b81b-4ce5-a7fb-27ef102009e9, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.vendor=CentOS, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:15:17 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-ff711c17f5509e7943010a7af964031a1016a129baec19884116ab8c2089a6dd-userdata-shm.mount: Deactivated successfully.
Oct 02 12:15:17 compute-0 systemd[1]: var-lib-containers-storage-overlay-cbf66784d0bf0fb78084d9240304d39cf12573fe939296bece417b65bc817d69-merged.mount: Deactivated successfully.
Oct 02 12:15:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:15:17.101 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:15:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:15:17.102 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:15:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:15:17.102 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:15:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:15:17.102 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:15:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:15:17.102 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:15:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:15:17.103 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:15:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:15:17.103 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:15:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:15:17.103 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:15:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:15:17.103 12 DEBUG ceilometer.polling.manager [-] Skip pollster memory.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:15:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:15:17.103 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.allocation, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:15:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:15:17.103 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:15:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:15:17.103 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:15:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:15:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:15:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:15:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.iops, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:15:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:15:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster cpu, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:15:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:15:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:15:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:15:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:15:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:15:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:15:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:15:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:15:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:15:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:15:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:15:17.105 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:15:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:15:17.105 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:15:17 compute-0 podman[231457]: 2025-10-02 12:15:17.105291497 +0000 UTC m=+0.271126583 container cleanup ff711c17f5509e7943010a7af964031a1016a129baec19884116ab8c2089a6dd (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-44157705-b81b-4ce5-a7fb-27ef102009e9, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.license=GPLv2)
Oct 02 12:15:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:15:17.105 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.capacity, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:15:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:15:17.105 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:15:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:15:17.105 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:15:17 compute-0 systemd[1]: libpod-conmon-ff711c17f5509e7943010a7af964031a1016a129baec19884116ab8c2089a6dd.scope: Deactivated successfully.
Oct 02 12:15:17 compute-0 podman[231486]: 2025-10-02 12:15:17.228372672 +0000 UTC m=+0.101083817 container remove ff711c17f5509e7943010a7af964031a1016a129baec19884116ab8c2089a6dd (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-44157705-b81b-4ce5-a7fb-27ef102009e9, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, tcib_managed=true)
Oct 02 12:15:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:17.234 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[46eaff27-76a9-43e0-b3db-fb52dc112ee1]: (4, ('Thu Oct  2 12:15:16 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-44157705-b81b-4ce5-a7fb-27ef102009e9 (ff711c17f5509e7943010a7af964031a1016a129baec19884116ab8c2089a6dd)\nff711c17f5509e7943010a7af964031a1016a129baec19884116ab8c2089a6dd\nThu Oct  2 12:15:17 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-44157705-b81b-4ce5-a7fb-27ef102009e9 (ff711c17f5509e7943010a7af964031a1016a129baec19884116ab8c2089a6dd)\nff711c17f5509e7943010a7af964031a1016a129baec19884116ab8c2089a6dd\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:17.235 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3e6e55da-bbe4-4e4a-81a6-ceee93c4bf45]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:17.237 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap44157705-b0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:17 compute-0 nova_compute[192079]: 2025-10-02 12:15:17.239 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:17 compute-0 kernel: tap44157705-b0: left promiscuous mode
Oct 02 12:15:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:17.243 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c3b76886-0735-471c-ae1e-7928b8e0a502]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:17 compute-0 nova_compute[192079]: 2025-10-02 12:15:17.254 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:17.274 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d479b58b-a62b-445a-bd40-cff4f653ef50]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:17.279 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[506767b9-1084-4e91-9c1f-90a8369fda27]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:17.299 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9bf97e46-c902-485f-9479-b4b6f704b503]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 531103, 'reachable_time': 41345, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 231501, 'error': None, 'target': 'ovnmeta-44157705-b81b-4ce5-a7fb-27ef102009e9', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:17.301 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-44157705-b81b-4ce5-a7fb-27ef102009e9 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:15:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:17.301 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[83600e65-0ecd-4076-8b1b-2dfc94d92c2b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:17 compute-0 systemd[1]: run-netns-ovnmeta\x2d44157705\x2db81b\x2d4ce5\x2da7fb\x2d27ef102009e9.mount: Deactivated successfully.
Oct 02 12:15:17 compute-0 nova_compute[192079]: 2025-10-02 12:15:17.980 2 DEBUG nova.network.neutron [-] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:15:18 compute-0 nova_compute[192079]: 2025-10-02 12:15:18.004 2 INFO nova.compute.manager [-] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Took 2.00 seconds to deallocate network for instance.
Oct 02 12:15:18 compute-0 nova_compute[192079]: 2025-10-02 12:15:18.083 2 DEBUG oslo_concurrency.lockutils [None req-0784480f-3a1b-4f58-953f-bbe2d648b364 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:18 compute-0 nova_compute[192079]: 2025-10-02 12:15:18.083 2 DEBUG oslo_concurrency.lockutils [None req-0784480f-3a1b-4f58-953f-bbe2d648b364 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:18 compute-0 podman[231502]: 2025-10-02 12:15:18.14226622 +0000 UTC m=+0.058405773 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.license=GPLv2, config_id=edpm, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, 
org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:15:18 compute-0 nova_compute[192079]: 2025-10-02 12:15:18.239 2 DEBUG nova.compute.provider_tree [None req-0784480f-3a1b-4f58-953f-bbe2d648b364 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:15:18 compute-0 nova_compute[192079]: 2025-10-02 12:15:18.253 2 DEBUG nova.scheduler.client.report [None req-0784480f-3a1b-4f58-953f-bbe2d648b364 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:15:18 compute-0 nova_compute[192079]: 2025-10-02 12:15:18.271 2 DEBUG oslo_concurrency.lockutils [None req-0784480f-3a1b-4f58-953f-bbe2d648b364 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.188s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:18 compute-0 nova_compute[192079]: 2025-10-02 12:15:18.364 2 INFO nova.scheduler.client.report [None req-0784480f-3a1b-4f58-953f-bbe2d648b364 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Deleted allocations for instance 5fd0efb0-7a09-4760-8a2c-23ab235018f2
Oct 02 12:15:18 compute-0 nova_compute[192079]: 2025-10-02 12:15:18.449 2 DEBUG oslo_concurrency.lockutils [None req-0784480f-3a1b-4f58-953f-bbe2d648b364 64ab4561f89846cc90cf0ab7f878cbd3 11be1361f6f44b10a6efea8fccf616aa - - default default] Lock "5fd0efb0-7a09-4760-8a2c-23ab235018f2" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 3.093s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:18 compute-0 nova_compute[192079]: 2025-10-02 12:15:18.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:15:18 compute-0 nova_compute[192079]: 2025-10-02 12:15:18.695 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:18 compute-0 nova_compute[192079]: 2025-10-02 12:15:18.696 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:18 compute-0 nova_compute[192079]: 2025-10-02 12:15:18.696 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:18 compute-0 nova_compute[192079]: 2025-10-02 12:15:18.696 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:15:18 compute-0 nova_compute[192079]: 2025-10-02 12:15:18.859 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:15:18 compute-0 nova_compute[192079]: 2025-10-02 12:15:18.860 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5630MB free_disk=73.34943771362305GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:15:18 compute-0 nova_compute[192079]: 2025-10-02 12:15:18.860 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:18 compute-0 nova_compute[192079]: 2025-10-02 12:15:18.860 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:18 compute-0 nova_compute[192079]: 2025-10-02 12:15:18.903 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:15:18 compute-0 nova_compute[192079]: 2025-10-02 12:15:18.903 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:15:18 compute-0 nova_compute[192079]: 2025-10-02 12:15:18.924 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:15:18 compute-0 nova_compute[192079]: 2025-10-02 12:15:18.936 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:15:18 compute-0 nova_compute[192079]: 2025-10-02 12:15:18.960 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:15:18 compute-0 nova_compute[192079]: 2025-10-02 12:15:18.960 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.100s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:19 compute-0 nova_compute[192079]: 2025-10-02 12:15:19.567 2 DEBUG nova.compute.manager [req-ee4bdc29-3556-4900-bec7-0882a03d5176 req-05886460-f8c7-4f71-becc-221e4f231891 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Received event network-vif-deleted-c4044870-326c-4aa1-a6b5-c4bd8e48ea5c external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:15:19 compute-0 nova_compute[192079]: 2025-10-02 12:15:19.567 2 DEBUG nova.compute.manager [req-ee4bdc29-3556-4900-bec7-0882a03d5176 req-05886460-f8c7-4f71-becc-221e4f231891 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Received event network-vif-deleted-add21826-27b9-48e6-b6bd-da40856e1eb0 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:15:19 compute-0 nova_compute[192079]: 2025-10-02 12:15:19.955 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:15:20 compute-0 nova_compute[192079]: 2025-10-02 12:15:20.230 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:20 compute-0 nova_compute[192079]: 2025-10-02 12:15:20.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:15:20 compute-0 nova_compute[192079]: 2025-10-02 12:15:20.894 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:20 compute-0 nova_compute[192079]: 2025-10-02 12:15:20.961 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:21 compute-0 nova_compute[192079]: 2025-10-02 12:15:21.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:15:21 compute-0 nova_compute[192079]: 2025-10-02 12:15:21.664 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:15:21 compute-0 nova_compute[192079]: 2025-10-02 12:15:21.664 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:15:21 compute-0 nova_compute[192079]: 2025-10-02 12:15:21.699 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:15:21 compute-0 nova_compute[192079]: 2025-10-02 12:15:21.699 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:15:21 compute-0 nova_compute[192079]: 2025-10-02 12:15:21.700 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:15:21 compute-0 nova_compute[192079]: 2025-10-02 12:15:21.701 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:15:21 compute-0 nova_compute[192079]: 2025-10-02 12:15:21.701 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:15:24 compute-0 nova_compute[192079]: 2025-10-02 12:15:24.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:15:24 compute-0 nova_compute[192079]: 2025-10-02 12:15:24.966 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759407309.9646242, 6e45ea08-64c1-4434-9d80-94d4b7cec844 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:15:24 compute-0 nova_compute[192079]: 2025-10-02 12:15:24.967 2 INFO nova.compute.manager [-] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] VM Stopped (Lifecycle Event)
Oct 02 12:15:24 compute-0 nova_compute[192079]: 2025-10-02 12:15:24.995 2 DEBUG nova.compute.manager [None req-61140b0f-6719-4a3a-957c-9c9c29bd352b - - - - - -] [instance: 6e45ea08-64c1-4434-9d80-94d4b7cec844] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:15:25 compute-0 podman[231523]: 2025-10-02 12:15:25.148123448 +0000 UTC m=+0.051603768 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.openshift.tags=minimal rhel9, managed_by=edpm_ansible, io.buildah.version=1.33.7, io.openshift.expose-services=, name=ubi9-minimal, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, distribution-scope=public, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, container_name=openstack_network_exporter, release=1755695350, vcs-type=git, maintainer=Red Hat, Inc., com.redhat.component=ubi9-minimal-container, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, build-date=2025-08-20T13:12:41, config_id=edpm, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, architecture=x86_64, vendor=Red Hat, Inc., version=9.6, url=https://catalog.redhat.com/en/search?searchType=containers)
Oct 02 12:15:25 compute-0 podman[231524]: 2025-10-02 12:15:25.158763197 +0000 UTC m=+0.058814814 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, config_id=multipathd, container_name=multipathd, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:15:25 compute-0 nova_compute[192079]: 2025-10-02 12:15:25.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:15:25 compute-0 nova_compute[192079]: 2025-10-02 12:15:25.897 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:25 compute-0 nova_compute[192079]: 2025-10-02 12:15:25.964 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:29 compute-0 nova_compute[192079]: 2025-10-02 12:15:29.069 2 DEBUG oslo_concurrency.lockutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Acquiring lock "92f5a241-27d9-416b-a19f-da7560348296" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:29 compute-0 nova_compute[192079]: 2025-10-02 12:15:29.069 2 DEBUG oslo_concurrency.lockutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Lock "92f5a241-27d9-416b-a19f-da7560348296" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:29 compute-0 nova_compute[192079]: 2025-10-02 12:15:29.168 2 DEBUG nova.compute.manager [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:15:29 compute-0 nova_compute[192079]: 2025-10-02 12:15:29.762 2 DEBUG oslo_concurrency.lockutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:29 compute-0 nova_compute[192079]: 2025-10-02 12:15:29.763 2 DEBUG oslo_concurrency.lockutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:29 compute-0 nova_compute[192079]: 2025-10-02 12:15:29.770 2 DEBUG nova.virt.hardware [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:15:29 compute-0 nova_compute[192079]: 2025-10-02 12:15:29.771 2 INFO nova.compute.claims [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:15:30 compute-0 nova_compute[192079]: 2025-10-02 12:15:30.053 2 DEBUG nova.compute.provider_tree [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:15:30 compute-0 nova_compute[192079]: 2025-10-02 12:15:30.103 2 DEBUG nova.scheduler.client.report [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:15:30 compute-0 podman[231566]: 2025-10-02 12:15:30.133947897 +0000 UTC m=+0.049149400 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']})
Oct 02 12:15:30 compute-0 podman[231567]: 2025-10-02 12:15:30.168931712 +0000 UTC m=+0.084176156 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, config_id=iscsid, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:15:30 compute-0 nova_compute[192079]: 2025-10-02 12:15:30.716 2 DEBUG oslo_concurrency.lockutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.953s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:30 compute-0 nova_compute[192079]: 2025-10-02 12:15:30.716 2 DEBUG nova.compute.manager [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:15:30 compute-0 nova_compute[192079]: 2025-10-02 12:15:30.854 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759407315.8534148, 5fd0efb0-7a09-4760-8a2c-23ab235018f2 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:15:30 compute-0 nova_compute[192079]: 2025-10-02 12:15:30.854 2 INFO nova.compute.manager [-] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] VM Stopped (Lifecycle Event)
Oct 02 12:15:30 compute-0 nova_compute[192079]: 2025-10-02 12:15:30.942 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:30 compute-0 nova_compute[192079]: 2025-10-02 12:15:30.954 2 DEBUG nova.compute.manager [None req-9d8cbcdb-1747-474e-961c-380d60c257f2 - - - - - -] [instance: 5fd0efb0-7a09-4760-8a2c-23ab235018f2] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:15:30 compute-0 nova_compute[192079]: 2025-10-02 12:15:30.964 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.025 2 DEBUG nova.compute.manager [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.025 2 DEBUG nova.network.neutron [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.050 2 INFO nova.virt.libvirt.driver [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.094 2 DEBUG nova.compute.manager [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.320 2 DEBUG nova.compute.manager [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.321 2 DEBUG nova.virt.libvirt.driver [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.322 2 INFO nova.virt.libvirt.driver [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Creating image(s)
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.322 2 DEBUG oslo_concurrency.lockutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Acquiring lock "/var/lib/nova/instances/92f5a241-27d9-416b-a19f-da7560348296/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.323 2 DEBUG oslo_concurrency.lockutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Lock "/var/lib/nova/instances/92f5a241-27d9-416b-a19f-da7560348296/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.324 2 DEBUG oslo_concurrency.lockutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Lock "/var/lib/nova/instances/92f5a241-27d9-416b-a19f-da7560348296/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.340 2 DEBUG oslo_concurrency.processutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.404 2 DEBUG oslo_concurrency.processutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.064s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.405 2 DEBUG oslo_concurrency.lockutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.406 2 DEBUG oslo_concurrency.lockutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.419 2 DEBUG oslo_concurrency.processutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.472 2 DEBUG oslo_concurrency.processutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.053s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.473 2 DEBUG oslo_concurrency.processutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/92f5a241-27d9-416b-a19f-da7560348296/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.679 2 DEBUG oslo_concurrency.processutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/92f5a241-27d9-416b-a19f-da7560348296/disk 1073741824" returned: 0 in 0.206s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.680 2 DEBUG oslo_concurrency.lockutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.274s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.681 2 DEBUG oslo_concurrency.processutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.740 2 DEBUG oslo_concurrency.processutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.059s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.741 2 DEBUG nova.virt.disk.api [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Checking if we can resize image /var/lib/nova/instances/92f5a241-27d9-416b-a19f-da7560348296/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.741 2 DEBUG oslo_concurrency.processutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/92f5a241-27d9-416b-a19f-da7560348296/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.798 2 DEBUG oslo_concurrency.processutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/92f5a241-27d9-416b-a19f-da7560348296/disk --force-share --output=json" returned: 0 in 0.057s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.799 2 DEBUG nova.virt.disk.api [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Cannot resize image /var/lib/nova/instances/92f5a241-27d9-416b-a19f-da7560348296/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.800 2 DEBUG nova.objects.instance [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Lazy-loading 'migration_context' on Instance uuid 92f5a241-27d9-416b-a19f-da7560348296 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.820 2 DEBUG nova.virt.libvirt.driver [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.821 2 DEBUG nova.virt.libvirt.driver [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Ensure instance console log exists: /var/lib/nova/instances/92f5a241-27d9-416b-a19f-da7560348296/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.821 2 DEBUG oslo_concurrency.lockutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.821 2 DEBUG oslo_concurrency.lockutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:31 compute-0 nova_compute[192079]: 2025-10-02 12:15:31.822 2 DEBUG oslo_concurrency.lockutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:32 compute-0 nova_compute[192079]: 2025-10-02 12:15:32.108 2 DEBUG nova.policy [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '341760d37e2c44209429d234ca5f01ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ed7af923ad494ac5b7dbd3d8403dc33e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:15:33 compute-0 nova_compute[192079]: 2025-10-02 12:15:33.086 2 DEBUG nova.network.neutron [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Successfully created port: f289b804-29b2-4f3d-985c-e9cc226259ad _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:15:33 compute-0 nova_compute[192079]: 2025-10-02 12:15:33.191 2 DEBUG nova.compute.manager [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Stashing vm_state: active _prep_resize /usr/lib/python3.9/site-packages/nova/compute/manager.py:5560
Oct 02 12:15:33 compute-0 nova_compute[192079]: 2025-10-02 12:15:33.459 2 DEBUG oslo_concurrency.lockutils [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:33 compute-0 nova_compute[192079]: 2025-10-02 12:15:33.460 2 DEBUG oslo_concurrency.lockutils [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:33 compute-0 nova_compute[192079]: 2025-10-02 12:15:33.509 2 DEBUG nova.objects.instance [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lazy-loading 'pci_requests' on Instance uuid fa72d8b8-93c0-417b-9793-ccd611ffbb84 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:15:33 compute-0 nova_compute[192079]: 2025-10-02 12:15:33.572 2 DEBUG nova.virt.hardware [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:15:33 compute-0 nova_compute[192079]: 2025-10-02 12:15:33.573 2 INFO nova.compute.claims [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:15:33 compute-0 nova_compute[192079]: 2025-10-02 12:15:33.573 2 DEBUG nova.objects.instance [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lazy-loading 'resources' on Instance uuid fa72d8b8-93c0-417b-9793-ccd611ffbb84 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:15:33 compute-0 nova_compute[192079]: 2025-10-02 12:15:33.614 2 DEBUG nova.objects.instance [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lazy-loading 'pci_devices' on Instance uuid fa72d8b8-93c0-417b-9793-ccd611ffbb84 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:15:33 compute-0 nova_compute[192079]: 2025-10-02 12:15:33.795 2 INFO nova.compute.resource_tracker [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Updating resource usage from migration 23ba7d64-b31c-4bd0-8f82-77a95cd8e782
Oct 02 12:15:33 compute-0 nova_compute[192079]: 2025-10-02 12:15:33.795 2 DEBUG nova.compute.resource_tracker [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Starting to track incoming migration 23ba7d64-b31c-4bd0-8f82-77a95cd8e782 with flavor 9949d9da-6314-4ede-8797-6f2f0a6a64fc _update_usage_from_migration /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1431
Oct 02 12:15:33 compute-0 nova_compute[192079]: 2025-10-02 12:15:33.879 2 DEBUG nova.compute.provider_tree [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:15:33 compute-0 nova_compute[192079]: 2025-10-02 12:15:33.903 2 DEBUG nova.scheduler.client.report [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:15:33 compute-0 nova_compute[192079]: 2025-10-02 12:15:33.929 2 DEBUG oslo_concurrency.lockutils [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 0.469s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:33 compute-0 nova_compute[192079]: 2025-10-02 12:15:33.930 2 INFO nova.compute.manager [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Migrating
Oct 02 12:15:34 compute-0 nova_compute[192079]: 2025-10-02 12:15:34.077 2 DEBUG nova.network.neutron [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Successfully updated port: f289b804-29b2-4f3d-985c-e9cc226259ad _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:15:34 compute-0 nova_compute[192079]: 2025-10-02 12:15:34.097 2 DEBUG oslo_concurrency.lockutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Acquiring lock "refresh_cache-92f5a241-27d9-416b-a19f-da7560348296" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:15:34 compute-0 nova_compute[192079]: 2025-10-02 12:15:34.097 2 DEBUG oslo_concurrency.lockutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Acquired lock "refresh_cache-92f5a241-27d9-416b-a19f-da7560348296" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:15:34 compute-0 nova_compute[192079]: 2025-10-02 12:15:34.097 2 DEBUG nova.network.neutron [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:15:34 compute-0 nova_compute[192079]: 2025-10-02 12:15:34.173 2 DEBUG nova.compute.manager [req-b9311880-ab9f-4919-9914-5fedaa6e23a6 req-682e777d-c4b7-44ce-8af3-0b7b53d3b17d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Received event network-changed-f289b804-29b2-4f3d-985c-e9cc226259ad external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:15:34 compute-0 nova_compute[192079]: 2025-10-02 12:15:34.173 2 DEBUG nova.compute.manager [req-b9311880-ab9f-4919-9914-5fedaa6e23a6 req-682e777d-c4b7-44ce-8af3-0b7b53d3b17d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Refreshing instance network info cache due to event network-changed-f289b804-29b2-4f3d-985c-e9cc226259ad. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:15:34 compute-0 nova_compute[192079]: 2025-10-02 12:15:34.173 2 DEBUG oslo_concurrency.lockutils [req-b9311880-ab9f-4919-9914-5fedaa6e23a6 req-682e777d-c4b7-44ce-8af3-0b7b53d3b17d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-92f5a241-27d9-416b-a19f-da7560348296" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:15:34 compute-0 nova_compute[192079]: 2025-10-02 12:15:34.232 2 DEBUG nova.network.neutron [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.276 2 DEBUG nova.network.neutron [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Updating instance_info_cache with network_info: [{"id": "f289b804-29b2-4f3d-985c-e9cc226259ad", "address": "fa:16:3e:a7:ed:93", "network": {"id": "5716ac1c-acf7-48a7-8b93-dda3a5af31f6", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1571059342-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ed7af923ad494ac5b7dbd3d8403dc33e", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf289b804-29", "ovs_interfaceid": "f289b804-29b2-4f3d-985c-e9cc226259ad", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.308 2 DEBUG oslo_concurrency.lockutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Releasing lock "refresh_cache-92f5a241-27d9-416b-a19f-da7560348296" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.309 2 DEBUG nova.compute.manager [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Instance network_info: |[{"id": "f289b804-29b2-4f3d-985c-e9cc226259ad", "address": "fa:16:3e:a7:ed:93", "network": {"id": "5716ac1c-acf7-48a7-8b93-dda3a5af31f6", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1571059342-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ed7af923ad494ac5b7dbd3d8403dc33e", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf289b804-29", "ovs_interfaceid": "f289b804-29b2-4f3d-985c-e9cc226259ad", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.309 2 DEBUG oslo_concurrency.lockutils [req-b9311880-ab9f-4919-9914-5fedaa6e23a6 req-682e777d-c4b7-44ce-8af3-0b7b53d3b17d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-92f5a241-27d9-416b-a19f-da7560348296" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.310 2 DEBUG nova.network.neutron [req-b9311880-ab9f-4919-9914-5fedaa6e23a6 req-682e777d-c4b7-44ce-8af3-0b7b53d3b17d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Refreshing network info cache for port f289b804-29b2-4f3d-985c-e9cc226259ad _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.313 2 DEBUG nova.virt.libvirt.driver [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Start _get_guest_xml network_info=[{"id": "f289b804-29b2-4f3d-985c-e9cc226259ad", "address": "fa:16:3e:a7:ed:93", "network": {"id": "5716ac1c-acf7-48a7-8b93-dda3a5af31f6", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1571059342-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ed7af923ad494ac5b7dbd3d8403dc33e", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf289b804-29", "ovs_interfaceid": "f289b804-29b2-4f3d-985c-e9cc226259ad", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.317 2 WARNING nova.virt.libvirt.driver [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.323 2 DEBUG nova.virt.libvirt.host [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.323 2 DEBUG nova.virt.libvirt.host [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.327 2 DEBUG nova.virt.libvirt.host [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.327 2 DEBUG nova.virt.libvirt.host [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.329 2 DEBUG nova.virt.libvirt.driver [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.329 2 DEBUG nova.virt.hardware [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.329 2 DEBUG nova.virt.hardware [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.330 2 DEBUG nova.virt.hardware [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.330 2 DEBUG nova.virt.hardware [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.330 2 DEBUG nova.virt.hardware [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.330 2 DEBUG nova.virt.hardware [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.331 2 DEBUG nova.virt.hardware [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.331 2 DEBUG nova.virt.hardware [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.331 2 DEBUG nova.virt.hardware [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.332 2 DEBUG nova.virt.hardware [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.332 2 DEBUG nova.virt.hardware [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.336 2 DEBUG nova.virt.libvirt.vif [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:15:25Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-SecurityGroupsTestJSON-server-1096579654',display_name='tempest-SecurityGroupsTestJSON-server-1096579654',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-securitygroupstestjson-server-1096579654',id=78,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='ed7af923ad494ac5b7dbd3d8403dc33e',ramdisk_id='',reservation_id='r-0nbzpwe9',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-SecurityGroupsTestJSON-431508526',owner_user_name='tempest-SecurityGroupsTestJSON-4
31508526-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:15:31Z,user_data=None,user_id='341760d37e2c44209429d234ca5f01ae',uuid=92f5a241-27d9-416b-a19f-da7560348296,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "f289b804-29b2-4f3d-985c-e9cc226259ad", "address": "fa:16:3e:a7:ed:93", "network": {"id": "5716ac1c-acf7-48a7-8b93-dda3a5af31f6", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1571059342-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ed7af923ad494ac5b7dbd3d8403dc33e", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf289b804-29", "ovs_interfaceid": "f289b804-29b2-4f3d-985c-e9cc226259ad", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.336 2 DEBUG nova.network.os_vif_util [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Converting VIF {"id": "f289b804-29b2-4f3d-985c-e9cc226259ad", "address": "fa:16:3e:a7:ed:93", "network": {"id": "5716ac1c-acf7-48a7-8b93-dda3a5af31f6", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1571059342-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ed7af923ad494ac5b7dbd3d8403dc33e", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf289b804-29", "ovs_interfaceid": "f289b804-29b2-4f3d-985c-e9cc226259ad", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.337 2 DEBUG nova.network.os_vif_util [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:a7:ed:93,bridge_name='br-int',has_traffic_filtering=True,id=f289b804-29b2-4f3d-985c-e9cc226259ad,network=Network(5716ac1c-acf7-48a7-8b93-dda3a5af31f6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf289b804-29') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.338 2 DEBUG nova.objects.instance [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Lazy-loading 'pci_devices' on Instance uuid 92f5a241-27d9-416b-a19f-da7560348296 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.384 2 DEBUG nova.virt.libvirt.driver [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:15:35 compute-0 nova_compute[192079]:   <uuid>92f5a241-27d9-416b-a19f-da7560348296</uuid>
Oct 02 12:15:35 compute-0 nova_compute[192079]:   <name>instance-0000004e</name>
Oct 02 12:15:35 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:15:35 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:15:35 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:15:35 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:       <nova:name>tempest-SecurityGroupsTestJSON-server-1096579654</nova:name>
Oct 02 12:15:35 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:15:35</nova:creationTime>
Oct 02 12:15:35 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:15:35 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:15:35 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:15:35 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:15:35 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:15:35 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:15:35 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:15:35 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:15:35 compute-0 nova_compute[192079]:         <nova:user uuid="341760d37e2c44209429d234ca5f01ae">tempest-SecurityGroupsTestJSON-431508526-project-member</nova:user>
Oct 02 12:15:35 compute-0 nova_compute[192079]:         <nova:project uuid="ed7af923ad494ac5b7dbd3d8403dc33e">tempest-SecurityGroupsTestJSON-431508526</nova:project>
Oct 02 12:15:35 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:15:35 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:15:35 compute-0 nova_compute[192079]:         <nova:port uuid="f289b804-29b2-4f3d-985c-e9cc226259ad">
Oct 02 12:15:35 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.8" ipVersion="4"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:15:35 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:15:35 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:15:35 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <system>
Oct 02 12:15:35 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:15:35 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:15:35 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:15:35 compute-0 nova_compute[192079]:       <entry name="serial">92f5a241-27d9-416b-a19f-da7560348296</entry>
Oct 02 12:15:35 compute-0 nova_compute[192079]:       <entry name="uuid">92f5a241-27d9-416b-a19f-da7560348296</entry>
Oct 02 12:15:35 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     </system>
Oct 02 12:15:35 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:15:35 compute-0 nova_compute[192079]:   <os>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:   </os>
Oct 02 12:15:35 compute-0 nova_compute[192079]:   <features>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:   </features>
Oct 02 12:15:35 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:15:35 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:15:35 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:15:35 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/92f5a241-27d9-416b-a19f-da7560348296/disk"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:15:35 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/92f5a241-27d9-416b-a19f-da7560348296/disk.config"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:15:35 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:a7:ed:93"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:       <target dev="tapf289b804-29"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:15:35 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/92f5a241-27d9-416b-a19f-da7560348296/console.log" append="off"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <video>
Oct 02 12:15:35 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     </video>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:15:35 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:15:35 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:15:35 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:15:35 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:15:35 compute-0 nova_compute[192079]: </domain>
Oct 02 12:15:35 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.386 2 DEBUG nova.compute.manager [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Preparing to wait for external event network-vif-plugged-f289b804-29b2-4f3d-985c-e9cc226259ad prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.386 2 DEBUG oslo_concurrency.lockutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Acquiring lock "92f5a241-27d9-416b-a19f-da7560348296-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.386 2 DEBUG oslo_concurrency.lockutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Lock "92f5a241-27d9-416b-a19f-da7560348296-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.386 2 DEBUG oslo_concurrency.lockutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Lock "92f5a241-27d9-416b-a19f-da7560348296-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.387 2 DEBUG nova.virt.libvirt.vif [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:15:25Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-SecurityGroupsTestJSON-server-1096579654',display_name='tempest-SecurityGroupsTestJSON-server-1096579654',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-securitygroupstestjson-server-1096579654',id=78,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='ed7af923ad494ac5b7dbd3d8403dc33e',ramdisk_id='',reservation_id='r-0nbzpwe9',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-SecurityGroupsTestJSON-431508526',owner_user_name='tempest-SecurityGroups
TestJSON-431508526-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:15:31Z,user_data=None,user_id='341760d37e2c44209429d234ca5f01ae',uuid=92f5a241-27d9-416b-a19f-da7560348296,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "f289b804-29b2-4f3d-985c-e9cc226259ad", "address": "fa:16:3e:a7:ed:93", "network": {"id": "5716ac1c-acf7-48a7-8b93-dda3a5af31f6", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1571059342-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ed7af923ad494ac5b7dbd3d8403dc33e", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf289b804-29", "ovs_interfaceid": "f289b804-29b2-4f3d-985c-e9cc226259ad", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.387 2 DEBUG nova.network.os_vif_util [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Converting VIF {"id": "f289b804-29b2-4f3d-985c-e9cc226259ad", "address": "fa:16:3e:a7:ed:93", "network": {"id": "5716ac1c-acf7-48a7-8b93-dda3a5af31f6", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1571059342-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ed7af923ad494ac5b7dbd3d8403dc33e", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf289b804-29", "ovs_interfaceid": "f289b804-29b2-4f3d-985c-e9cc226259ad", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.388 2 DEBUG nova.network.os_vif_util [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:a7:ed:93,bridge_name='br-int',has_traffic_filtering=True,id=f289b804-29b2-4f3d-985c-e9cc226259ad,network=Network(5716ac1c-acf7-48a7-8b93-dda3a5af31f6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf289b804-29') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.388 2 DEBUG os_vif [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:a7:ed:93,bridge_name='br-int',has_traffic_filtering=True,id=f289b804-29b2-4f3d-985c-e9cc226259ad,network=Network(5716ac1c-acf7-48a7-8b93-dda3a5af31f6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf289b804-29') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.389 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.390 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.390 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.394 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.394 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapf289b804-29, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.394 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapf289b804-29, col_values=(('external_ids', {'iface-id': 'f289b804-29b2-4f3d-985c-e9cc226259ad', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:a7:ed:93', 'vm-uuid': '92f5a241-27d9-416b-a19f-da7560348296'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.396 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:35 compute-0 NetworkManager[51160]: <info>  [1759407335.3972] manager: (tapf289b804-29): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/133)
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.398 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.406 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.407 2 INFO os_vif [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:a7:ed:93,bridge_name='br-int',has_traffic_filtering=True,id=f289b804-29b2-4f3d-985c-e9cc226259ad,network=Network(5716ac1c-acf7-48a7-8b93-dda3a5af31f6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf289b804-29')
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.545 2 DEBUG nova.virt.libvirt.driver [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.545 2 DEBUG nova.virt.libvirt.driver [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.546 2 DEBUG nova.virt.libvirt.driver [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] No VIF found with MAC fa:16:3e:a7:ed:93, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.546 2 INFO nova.virt.libvirt.driver [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Using config drive
Oct 02 12:15:35 compute-0 sshd-session[231626]: Accepted publickey for nova from 192.168.122.101 port 58438 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:15:35 compute-0 systemd[1]: Created slice User Slice of UID 42436.
Oct 02 12:15:35 compute-0 systemd[1]: Starting User Runtime Directory /run/user/42436...
Oct 02 12:15:35 compute-0 systemd-logind[827]: New session 51 of user nova.
Oct 02 12:15:35 compute-0 systemd[1]: Finished User Runtime Directory /run/user/42436.
Oct 02 12:15:35 compute-0 systemd[1]: Starting User Manager for UID 42436...
Oct 02 12:15:35 compute-0 systemd[231630]: pam_unix(systemd-user:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:15:35 compute-0 systemd[231630]: Queued start job for default target Main User Target.
Oct 02 12:15:35 compute-0 nova_compute[192079]: 2025-10-02 12:15:35.966 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:35 compute-0 systemd[231630]: Created slice User Application Slice.
Oct 02 12:15:35 compute-0 systemd[231630]: Started Mark boot as successful after the user session has run 2 minutes.
Oct 02 12:15:35 compute-0 systemd[231630]: Started Daily Cleanup of User's Temporary Directories.
Oct 02 12:15:35 compute-0 systemd[231630]: Reached target Paths.
Oct 02 12:15:35 compute-0 systemd[231630]: Reached target Timers.
Oct 02 12:15:35 compute-0 systemd[231630]: Starting D-Bus User Message Bus Socket...
Oct 02 12:15:35 compute-0 systemd[231630]: Starting Create User's Volatile Files and Directories...
Oct 02 12:15:35 compute-0 systemd[231630]: Finished Create User's Volatile Files and Directories.
Oct 02 12:15:35 compute-0 systemd[231630]: Listening on D-Bus User Message Bus Socket.
Oct 02 12:15:35 compute-0 systemd[231630]: Reached target Sockets.
Oct 02 12:15:35 compute-0 systemd[231630]: Reached target Basic System.
Oct 02 12:15:35 compute-0 systemd[231630]: Reached target Main User Target.
Oct 02 12:15:35 compute-0 systemd[231630]: Startup finished in 146ms.
Oct 02 12:15:35 compute-0 systemd[1]: Started User Manager for UID 42436.
Oct 02 12:15:36 compute-0 systemd[1]: Started Session 51 of User nova.
Oct 02 12:15:36 compute-0 sshd-session[231626]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:15:36 compute-0 nova_compute[192079]: 2025-10-02 12:15:36.044 2 INFO nova.virt.libvirt.driver [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Creating config drive at /var/lib/nova/instances/92f5a241-27d9-416b-a19f-da7560348296/disk.config
Oct 02 12:15:36 compute-0 nova_compute[192079]: 2025-10-02 12:15:36.049 2 DEBUG oslo_concurrency.processutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/92f5a241-27d9-416b-a19f-da7560348296/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpsuyos_ep execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:15:36 compute-0 sshd-session[231644]: Received disconnect from 192.168.122.101 port 58438:11: disconnected by user
Oct 02 12:15:36 compute-0 sshd-session[231644]: Disconnected from user nova 192.168.122.101 port 58438
Oct 02 12:15:36 compute-0 sshd-session[231626]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:15:36 compute-0 systemd[1]: session-51.scope: Deactivated successfully.
Oct 02 12:15:36 compute-0 systemd-logind[827]: Session 51 logged out. Waiting for processes to exit.
Oct 02 12:15:36 compute-0 systemd-logind[827]: Removed session 51.
Oct 02 12:15:36 compute-0 nova_compute[192079]: 2025-10-02 12:15:36.178 2 DEBUG oslo_concurrency.processutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/92f5a241-27d9-416b-a19f-da7560348296/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpsuyos_ep" returned: 0 in 0.129s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:15:36 compute-0 sshd-session[231649]: Accepted publickey for nova from 192.168.122.101 port 58452 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:15:36 compute-0 systemd-logind[827]: New session 53 of user nova.
Oct 02 12:15:36 compute-0 systemd[1]: Started Session 53 of User nova.
Oct 02 12:15:36 compute-0 sshd-session[231649]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:15:36 compute-0 NetworkManager[51160]: <info>  [1759407336.2576] manager: (tapf289b804-29): new Tun device (/org/freedesktop/NetworkManager/Devices/134)
Oct 02 12:15:36 compute-0 kernel: tapf289b804-29: entered promiscuous mode
Oct 02 12:15:36 compute-0 nova_compute[192079]: 2025-10-02 12:15:36.259 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:36 compute-0 ovn_controller[94336]: 2025-10-02T12:15:36Z|00261|binding|INFO|Claiming lport f289b804-29b2-4f3d-985c-e9cc226259ad for this chassis.
Oct 02 12:15:36 compute-0 ovn_controller[94336]: 2025-10-02T12:15:36Z|00262|binding|INFO|f289b804-29b2-4f3d-985c-e9cc226259ad: Claiming fa:16:3e:a7:ed:93 10.100.0.8
Oct 02 12:15:36 compute-0 nova_compute[192079]: 2025-10-02 12:15:36.272 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:36.278 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:a7:ed:93 10.100.0.8'], port_security=['fa:16:3e:a7:ed:93 10.100.0.8'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.8/28', 'neutron:device_id': '92f5a241-27d9-416b-a19f-da7560348296', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-5716ac1c-acf7-48a7-8b93-dda3a5af31f6', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'ed7af923ad494ac5b7dbd3d8403dc33e', 'neutron:revision_number': '2', 'neutron:security_group_ids': 'f6f46a30-ca89-45c9-b4fd-d5c78d4ee0ae', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=08fc185f-7900-4a64-ba36-f229e6cb956d, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=f289b804-29b2-4f3d-985c-e9cc226259ad) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:36.280 103294 INFO neutron.agent.ovn.metadata.agent [-] Port f289b804-29b2-4f3d-985c-e9cc226259ad in datapath 5716ac1c-acf7-48a7-8b93-dda3a5af31f6 bound to our chassis
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:36.284 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 5716ac1c-acf7-48a7-8b93-dda3a5af31f6
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:36.296 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[fc2ad6c8-7cb0-4ec9-809e-92cfa751fbd4]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:36.297 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap5716ac1c-a1 in ovnmeta-5716ac1c-acf7-48a7-8b93-dda3a5af31f6 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:36.299 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap5716ac1c-a0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:36.299 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[487f67dd-7f18-49f5-bc80-e3aab1d524d5]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:36.300 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f7edf91c-2e5e-4321-9312-1f26593aef7b]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:36 compute-0 systemd-machined[152150]: New machine qemu-37-instance-0000004e.
Oct 02 12:15:36 compute-0 systemd-udevd[231669]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:15:36 compute-0 sshd-session[231660]: Received disconnect from 192.168.122.101 port 58452:11: disconnected by user
Oct 02 12:15:36 compute-0 sshd-session[231660]: Disconnected from user nova 192.168.122.101 port 58452
Oct 02 12:15:36 compute-0 sshd-session[231649]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:36.312 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[d7552a20-2109-4464-a4c1-f288f133d5ea]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:36 compute-0 NetworkManager[51160]: <info>  [1759407336.3196] device (tapf289b804-29): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:15:36 compute-0 systemd[1]: Started Virtual Machine qemu-37-instance-0000004e.
Oct 02 12:15:36 compute-0 NetworkManager[51160]: <info>  [1759407336.3219] device (tapf289b804-29): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:15:36 compute-0 systemd[1]: session-53.scope: Deactivated successfully.
Oct 02 12:15:36 compute-0 ovn_controller[94336]: 2025-10-02T12:15:36Z|00263|binding|INFO|Setting lport f289b804-29b2-4f3d-985c-e9cc226259ad ovn-installed in OVS
Oct 02 12:15:36 compute-0 ovn_controller[94336]: 2025-10-02T12:15:36Z|00264|binding|INFO|Setting lport f289b804-29b2-4f3d-985c-e9cc226259ad up in Southbound
Oct 02 12:15:36 compute-0 systemd-logind[827]: Session 53 logged out. Waiting for processes to exit.
Oct 02 12:15:36 compute-0 nova_compute[192079]: 2025-10-02 12:15:36.326 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:36 compute-0 systemd-logind[827]: Removed session 53.
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:36.329 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f42d42b6-ff9c-44ed-b09c-8fbf4b4b6483]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:36.363 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[a4afb5cf-f2bc-4959-a3cc-d4d175818ad7]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:36.368 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[fb68211c-7ad5-4b5e-b42f-885c7d70f723]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:36 compute-0 NetworkManager[51160]: <info>  [1759407336.3693] manager: (tap5716ac1c-a0): new Veth device (/org/freedesktop/NetworkManager/Devices/135)
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:36.404 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[cd6d036e-b191-4e85-aef6-45afa12f665d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:36.408 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[6a6563c6-1438-452a-a3d5-5155357bcebf]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:36 compute-0 NetworkManager[51160]: <info>  [1759407336.4286] device (tap5716ac1c-a0): carrier: link connected
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:36.435 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[1473c19c-ead4-4ee4-9f9d-8e9532b19e1c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:36.452 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8f8a4317-a24f-4cc4-81f7-7d37ffbd4432]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap5716ac1c-a1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:af:f8:3e'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 83], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 533405, 'reachable_time': 33029, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 231701, 'error': None, 'target': 'ovnmeta-5716ac1c-acf7-48a7-8b93-dda3a5af31f6', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:36.467 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[afef996b-d2a3-4244-aeaf-490ac30cbda4]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:feaf:f83e'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 533405, 'tstamp': 533405}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 231702, 'error': None, 'target': 'ovnmeta-5716ac1c-acf7-48a7-8b93-dda3a5af31f6', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:36.481 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[76204ee6-58eb-451f-a62d-fb6269d3ede1]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap5716ac1c-a1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:af:f8:3e'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 83], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 533405, 'reachable_time': 33029, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 231703, 'error': None, 'target': 'ovnmeta-5716ac1c-acf7-48a7-8b93-dda3a5af31f6', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:36.510 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b481c159-90ca-4ad6-b768-0244c7c8d027]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:36.566 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2cf5cf60-10fd-4f33-b035-867c90015568]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:36.567 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap5716ac1c-a0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:36.567 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:36.568 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap5716ac1c-a0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:36 compute-0 nova_compute[192079]: 2025-10-02 12:15:36.570 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:36 compute-0 kernel: tap5716ac1c-a0: entered promiscuous mode
Oct 02 12:15:36 compute-0 NetworkManager[51160]: <info>  [1759407336.5707] manager: (tap5716ac1c-a0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/136)
Oct 02 12:15:36 compute-0 nova_compute[192079]: 2025-10-02 12:15:36.572 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:36.574 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap5716ac1c-a0, col_values=(('external_ids', {'iface-id': 'cc8e73bf-6cd9-4487-9685-abdace89cf29'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:36 compute-0 nova_compute[192079]: 2025-10-02 12:15:36.575 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:36 compute-0 ovn_controller[94336]: 2025-10-02T12:15:36Z|00265|binding|INFO|Releasing lport cc8e73bf-6cd9-4487-9685-abdace89cf29 from this chassis (sb_readonly=0)
Oct 02 12:15:36 compute-0 nova_compute[192079]: 2025-10-02 12:15:36.576 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:36.576 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/5716ac1c-acf7-48a7-8b93-dda3a5af31f6.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/5716ac1c-acf7-48a7-8b93-dda3a5af31f6.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:36.577 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4eba6563-08dd-4087-a074-735f449923ad]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:36.578 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-5716ac1c-acf7-48a7-8b93-dda3a5af31f6
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/5716ac1c-acf7-48a7-8b93-dda3a5af31f6.pid.haproxy
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 5716ac1c-acf7-48a7-8b93-dda3a5af31f6
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:15:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:36.579 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-5716ac1c-acf7-48a7-8b93-dda3a5af31f6', 'env', 'PROCESS_TAG=haproxy-5716ac1c-acf7-48a7-8b93-dda3a5af31f6', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/5716ac1c-acf7-48a7-8b93-dda3a5af31f6.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:15:36 compute-0 nova_compute[192079]: 2025-10-02 12:15:36.587 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:36 compute-0 nova_compute[192079]: 2025-10-02 12:15:36.941 2 DEBUG nova.network.neutron [req-b9311880-ab9f-4919-9914-5fedaa6e23a6 req-682e777d-c4b7-44ce-8af3-0b7b53d3b17d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Updated VIF entry in instance network info cache for port f289b804-29b2-4f3d-985c-e9cc226259ad. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:15:36 compute-0 nova_compute[192079]: 2025-10-02 12:15:36.942 2 DEBUG nova.network.neutron [req-b9311880-ab9f-4919-9914-5fedaa6e23a6 req-682e777d-c4b7-44ce-8af3-0b7b53d3b17d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Updating instance_info_cache with network_info: [{"id": "f289b804-29b2-4f3d-985c-e9cc226259ad", "address": "fa:16:3e:a7:ed:93", "network": {"id": "5716ac1c-acf7-48a7-8b93-dda3a5af31f6", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1571059342-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ed7af923ad494ac5b7dbd3d8403dc33e", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf289b804-29", "ovs_interfaceid": "f289b804-29b2-4f3d-985c-e9cc226259ad", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:15:37 compute-0 podman[231742]: 2025-10-02 12:15:36.911696286 +0000 UTC m=+0.022834074 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:15:37 compute-0 nova_compute[192079]: 2025-10-02 12:15:37.148 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407337.1484332, 92f5a241-27d9-416b-a19f-da7560348296 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:15:37 compute-0 nova_compute[192079]: 2025-10-02 12:15:37.149 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 92f5a241-27d9-416b-a19f-da7560348296] VM Started (Lifecycle Event)
Oct 02 12:15:37 compute-0 podman[231742]: 2025-10-02 12:15:37.343498608 +0000 UTC m=+0.454636426 container create 5521ce23a20daf86f597b7a6134edb99d381efaa7192b898403c9ac11b6025e3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-5716ac1c-acf7-48a7-8b93-dda3a5af31f6, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, tcib_managed=true, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001)
Oct 02 12:15:37 compute-0 systemd[1]: Started libpod-conmon-5521ce23a20daf86f597b7a6134edb99d381efaa7192b898403c9ac11b6025e3.scope.
Oct 02 12:15:37 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:15:37 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/ea6d57d2aeb27b8fcf91fdd57dfa8a1f4128844b7033da84fbe8a2fa1951c93a/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:15:37 compute-0 podman[231742]: 2025-10-02 12:15:37.690379286 +0000 UTC m=+0.801517174 container init 5521ce23a20daf86f597b7a6134edb99d381efaa7192b898403c9ac11b6025e3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-5716ac1c-acf7-48a7-8b93-dda3a5af31f6, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:15:37 compute-0 podman[231742]: 2025-10-02 12:15:37.697836399 +0000 UTC m=+0.808974187 container start 5521ce23a20daf86f597b7a6134edb99d381efaa7192b898403c9ac11b6025e3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-5716ac1c-acf7-48a7-8b93-dda3a5af31f6, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2)
Oct 02 12:15:37 compute-0 neutron-haproxy-ovnmeta-5716ac1c-acf7-48a7-8b93-dda3a5af31f6[231757]: [NOTICE]   (231762) : New worker (231764) forked
Oct 02 12:15:37 compute-0 neutron-haproxy-ovnmeta-5716ac1c-acf7-48a7-8b93-dda3a5af31f6[231757]: [NOTICE]   (231762) : Loading success.
Oct 02 12:15:37 compute-0 nova_compute[192079]: 2025-10-02 12:15:37.825 2 DEBUG nova.compute.manager [req-86eaea7f-1832-4207-8bde-c6945028cd92 req-3c59db37-c7f8-493e-a5ec-5f7404d36bb7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Received event network-vif-plugged-f289b804-29b2-4f3d-985c-e9cc226259ad external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:15:37 compute-0 nova_compute[192079]: 2025-10-02 12:15:37.826 2 DEBUG oslo_concurrency.lockutils [req-86eaea7f-1832-4207-8bde-c6945028cd92 req-3c59db37-c7f8-493e-a5ec-5f7404d36bb7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "92f5a241-27d9-416b-a19f-da7560348296-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:37 compute-0 nova_compute[192079]: 2025-10-02 12:15:37.826 2 DEBUG oslo_concurrency.lockutils [req-86eaea7f-1832-4207-8bde-c6945028cd92 req-3c59db37-c7f8-493e-a5ec-5f7404d36bb7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "92f5a241-27d9-416b-a19f-da7560348296-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:37 compute-0 nova_compute[192079]: 2025-10-02 12:15:37.826 2 DEBUG oslo_concurrency.lockutils [req-86eaea7f-1832-4207-8bde-c6945028cd92 req-3c59db37-c7f8-493e-a5ec-5f7404d36bb7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "92f5a241-27d9-416b-a19f-da7560348296-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:37 compute-0 nova_compute[192079]: 2025-10-02 12:15:37.826 2 DEBUG nova.compute.manager [req-86eaea7f-1832-4207-8bde-c6945028cd92 req-3c59db37-c7f8-493e-a5ec-5f7404d36bb7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Processing event network-vif-plugged-f289b804-29b2-4f3d-985c-e9cc226259ad _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:15:37 compute-0 nova_compute[192079]: 2025-10-02 12:15:37.827 2 DEBUG nova.compute.manager [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:15:37 compute-0 nova_compute[192079]: 2025-10-02 12:15:37.832 2 DEBUG nova.virt.libvirt.driver [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:15:37 compute-0 nova_compute[192079]: 2025-10-02 12:15:37.836 2 INFO nova.virt.libvirt.driver [-] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Instance spawned successfully.
Oct 02 12:15:37 compute-0 nova_compute[192079]: 2025-10-02 12:15:37.836 2 DEBUG nova.virt.libvirt.driver [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:15:37 compute-0 nova_compute[192079]: 2025-10-02 12:15:37.864 2 DEBUG oslo_concurrency.lockutils [req-b9311880-ab9f-4919-9914-5fedaa6e23a6 req-682e777d-c4b7-44ce-8af3-0b7b53d3b17d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-92f5a241-27d9-416b-a19f-da7560348296" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:15:37 compute-0 nova_compute[192079]: 2025-10-02 12:15:37.868 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:15:37 compute-0 nova_compute[192079]: 2025-10-02 12:15:37.876 2 DEBUG nova.virt.libvirt.driver [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:15:37 compute-0 nova_compute[192079]: 2025-10-02 12:15:37.876 2 DEBUG nova.virt.libvirt.driver [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:15:37 compute-0 nova_compute[192079]: 2025-10-02 12:15:37.877 2 DEBUG nova.virt.libvirt.driver [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:15:37 compute-0 nova_compute[192079]: 2025-10-02 12:15:37.877 2 DEBUG nova.virt.libvirt.driver [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:15:37 compute-0 nova_compute[192079]: 2025-10-02 12:15:37.878 2 DEBUG nova.virt.libvirt.driver [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:15:37 compute-0 nova_compute[192079]: 2025-10-02 12:15:37.878 2 DEBUG nova.virt.libvirt.driver [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:15:37 compute-0 nova_compute[192079]: 2025-10-02 12:15:37.882 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:15:37 compute-0 nova_compute[192079]: 2025-10-02 12:15:37.954 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 92f5a241-27d9-416b-a19f-da7560348296] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:15:37 compute-0 nova_compute[192079]: 2025-10-02 12:15:37.961 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407337.1485543, 92f5a241-27d9-416b-a19f-da7560348296 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:15:37 compute-0 nova_compute[192079]: 2025-10-02 12:15:37.962 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 92f5a241-27d9-416b-a19f-da7560348296] VM Paused (Lifecycle Event)
Oct 02 12:15:37 compute-0 nova_compute[192079]: 2025-10-02 12:15:37.988 2 INFO nova.compute.manager [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Took 6.67 seconds to spawn the instance on the hypervisor.
Oct 02 12:15:37 compute-0 nova_compute[192079]: 2025-10-02 12:15:37.988 2 DEBUG nova.compute.manager [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:15:37 compute-0 nova_compute[192079]: 2025-10-02 12:15:37.994 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:15:37 compute-0 nova_compute[192079]: 2025-10-02 12:15:37.997 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407337.831059, 92f5a241-27d9-416b-a19f-da7560348296 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:15:37 compute-0 nova_compute[192079]: 2025-10-02 12:15:37.997 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 92f5a241-27d9-416b-a19f-da7560348296] VM Resumed (Lifecycle Event)
Oct 02 12:15:38 compute-0 nova_compute[192079]: 2025-10-02 12:15:38.036 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:15:38 compute-0 nova_compute[192079]: 2025-10-02 12:15:38.039 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:15:38 compute-0 nova_compute[192079]: 2025-10-02 12:15:38.069 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 92f5a241-27d9-416b-a19f-da7560348296] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:15:38 compute-0 nova_compute[192079]: 2025-10-02 12:15:38.119 2 INFO nova.compute.manager [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Took 8.47 seconds to build instance.
Oct 02 12:15:38 compute-0 nova_compute[192079]: 2025-10-02 12:15:38.138 2 DEBUG oslo_concurrency.lockutils [None req-d6c0509d-edfc-4eb8-b7db-3722583b359d 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Lock "92f5a241-27d9-416b-a19f-da7560348296" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 9.069s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:40 compute-0 nova_compute[192079]: 2025-10-02 12:15:40.070 2 DEBUG nova.compute.manager [req-fc9413ca-e34d-442c-9616-6542c02a6e74 req-fe2d57de-41e6-4f4d-a865-321946c8088f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Received event network-vif-plugged-f289b804-29b2-4f3d-985c-e9cc226259ad external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:15:40 compute-0 nova_compute[192079]: 2025-10-02 12:15:40.071 2 DEBUG oslo_concurrency.lockutils [req-fc9413ca-e34d-442c-9616-6542c02a6e74 req-fe2d57de-41e6-4f4d-a865-321946c8088f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "92f5a241-27d9-416b-a19f-da7560348296-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:40 compute-0 nova_compute[192079]: 2025-10-02 12:15:40.071 2 DEBUG oslo_concurrency.lockutils [req-fc9413ca-e34d-442c-9616-6542c02a6e74 req-fe2d57de-41e6-4f4d-a865-321946c8088f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "92f5a241-27d9-416b-a19f-da7560348296-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:40 compute-0 nova_compute[192079]: 2025-10-02 12:15:40.071 2 DEBUG oslo_concurrency.lockutils [req-fc9413ca-e34d-442c-9616-6542c02a6e74 req-fe2d57de-41e6-4f4d-a865-321946c8088f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "92f5a241-27d9-416b-a19f-da7560348296-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:40 compute-0 nova_compute[192079]: 2025-10-02 12:15:40.071 2 DEBUG nova.compute.manager [req-fc9413ca-e34d-442c-9616-6542c02a6e74 req-fe2d57de-41e6-4f4d-a865-321946c8088f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] No waiting events found dispatching network-vif-plugged-f289b804-29b2-4f3d-985c-e9cc226259ad pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:15:40 compute-0 nova_compute[192079]: 2025-10-02 12:15:40.072 2 WARNING nova.compute.manager [req-fc9413ca-e34d-442c-9616-6542c02a6e74 req-fe2d57de-41e6-4f4d-a865-321946c8088f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Received unexpected event network-vif-plugged-f289b804-29b2-4f3d-985c-e9cc226259ad for instance with vm_state active and task_state None.
Oct 02 12:15:40 compute-0 podman[231773]: 2025-10-02 12:15:40.131862214 +0000 UTC m=+0.049255543 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, 
org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, container_name=ovn_metadata_agent, config_id=ovn_metadata_agent, managed_by=edpm_ansible, io.buildah.version=1.41.3, org.label-schema.license=GPLv2)
Oct 02 12:15:40 compute-0 podman[231775]: 2025-10-02 12:15:40.142391011 +0000 UTC m=+0.054598529 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:15:40 compute-0 podman[231774]: 2025-10-02 12:15:40.162927061 +0000 UTC m=+0.078693107 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.license=GPLv2, config_id=ovn_controller, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:15:40 compute-0 nova_compute[192079]: 2025-10-02 12:15:40.396 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:40 compute-0 nova_compute[192079]: 2025-10-02 12:15:40.968 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:41 compute-0 nova_compute[192079]: 2025-10-02 12:15:41.167 2 DEBUG nova.compute.manager [req-d82dc82e-d744-4579-8eba-1f6fe877e594 req-13b8f0a0-5cb1-4463-9030-5f1a942aea1a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Received event network-changed-f289b804-29b2-4f3d-985c-e9cc226259ad external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:15:41 compute-0 nova_compute[192079]: 2025-10-02 12:15:41.167 2 DEBUG nova.compute.manager [req-d82dc82e-d744-4579-8eba-1f6fe877e594 req-13b8f0a0-5cb1-4463-9030-5f1a942aea1a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Refreshing instance network info cache due to event network-changed-f289b804-29b2-4f3d-985c-e9cc226259ad. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:15:41 compute-0 nova_compute[192079]: 2025-10-02 12:15:41.168 2 DEBUG oslo_concurrency.lockutils [req-d82dc82e-d744-4579-8eba-1f6fe877e594 req-13b8f0a0-5cb1-4463-9030-5f1a942aea1a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-92f5a241-27d9-416b-a19f-da7560348296" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:15:41 compute-0 nova_compute[192079]: 2025-10-02 12:15:41.168 2 DEBUG oslo_concurrency.lockutils [req-d82dc82e-d744-4579-8eba-1f6fe877e594 req-13b8f0a0-5cb1-4463-9030-5f1a942aea1a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-92f5a241-27d9-416b-a19f-da7560348296" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:15:41 compute-0 nova_compute[192079]: 2025-10-02 12:15:41.168 2 DEBUG nova.network.neutron [req-d82dc82e-d744-4579-8eba-1f6fe877e594 req-13b8f0a0-5cb1-4463-9030-5f1a942aea1a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Refreshing network info cache for port f289b804-29b2-4f3d-985c-e9cc226259ad _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:15:44 compute-0 nova_compute[192079]: 2025-10-02 12:15:44.739 2 DEBUG nova.network.neutron [req-d82dc82e-d744-4579-8eba-1f6fe877e594 req-13b8f0a0-5cb1-4463-9030-5f1a942aea1a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Updated VIF entry in instance network info cache for port f289b804-29b2-4f3d-985c-e9cc226259ad. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:15:44 compute-0 nova_compute[192079]: 2025-10-02 12:15:44.740 2 DEBUG nova.network.neutron [req-d82dc82e-d744-4579-8eba-1f6fe877e594 req-13b8f0a0-5cb1-4463-9030-5f1a942aea1a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Updating instance_info_cache with network_info: [{"id": "f289b804-29b2-4f3d-985c-e9cc226259ad", "address": "fa:16:3e:a7:ed:93", "network": {"id": "5716ac1c-acf7-48a7-8b93-dda3a5af31f6", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1571059342-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ed7af923ad494ac5b7dbd3d8403dc33e", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf289b804-29", "ovs_interfaceid": "f289b804-29b2-4f3d-985c-e9cc226259ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:15:44 compute-0 nova_compute[192079]: 2025-10-02 12:15:44.746 2 DEBUG nova.compute.manager [req-2d59020b-578d-424b-99e1-5900685c7de6 req-d7293fca-7433-495d-a229-2c5b7114d831 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Received event network-changed-f289b804-29b2-4f3d-985c-e9cc226259ad external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:15:44 compute-0 nova_compute[192079]: 2025-10-02 12:15:44.747 2 DEBUG nova.compute.manager [req-2d59020b-578d-424b-99e1-5900685c7de6 req-d7293fca-7433-495d-a229-2c5b7114d831 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Refreshing instance network info cache due to event network-changed-f289b804-29b2-4f3d-985c-e9cc226259ad. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:15:44 compute-0 nova_compute[192079]: 2025-10-02 12:15:44.747 2 DEBUG oslo_concurrency.lockutils [req-2d59020b-578d-424b-99e1-5900685c7de6 req-d7293fca-7433-495d-a229-2c5b7114d831 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-92f5a241-27d9-416b-a19f-da7560348296" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:15:44 compute-0 nova_compute[192079]: 2025-10-02 12:15:44.777 2 DEBUG oslo_concurrency.lockutils [req-d82dc82e-d744-4579-8eba-1f6fe877e594 req-13b8f0a0-5cb1-4463-9030-5f1a942aea1a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-92f5a241-27d9-416b-a19f-da7560348296" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:15:44 compute-0 nova_compute[192079]: 2025-10-02 12:15:44.778 2 DEBUG oslo_concurrency.lockutils [req-2d59020b-578d-424b-99e1-5900685c7de6 req-d7293fca-7433-495d-a229-2c5b7114d831 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-92f5a241-27d9-416b-a19f-da7560348296" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:15:44 compute-0 nova_compute[192079]: 2025-10-02 12:15:44.778 2 DEBUG nova.network.neutron [req-2d59020b-578d-424b-99e1-5900685c7de6 req-d7293fca-7433-495d-a229-2c5b7114d831 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Refreshing network info cache for port f289b804-29b2-4f3d-985c-e9cc226259ad _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:15:45 compute-0 nova_compute[192079]: 2025-10-02 12:15:45.434 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:45 compute-0 nova_compute[192079]: 2025-10-02 12:15:45.971 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:46 compute-0 systemd[1]: Stopping User Manager for UID 42436...
Oct 02 12:15:46 compute-0 systemd[231630]: Activating special unit Exit the Session...
Oct 02 12:15:46 compute-0 systemd[231630]: Stopped target Main User Target.
Oct 02 12:15:46 compute-0 systemd[231630]: Stopped target Basic System.
Oct 02 12:15:46 compute-0 systemd[231630]: Stopped target Paths.
Oct 02 12:15:46 compute-0 systemd[231630]: Stopped target Sockets.
Oct 02 12:15:46 compute-0 systemd[231630]: Stopped target Timers.
Oct 02 12:15:46 compute-0 systemd[231630]: Stopped Mark boot as successful after the user session has run 2 minutes.
Oct 02 12:15:46 compute-0 systemd[231630]: Stopped Daily Cleanup of User's Temporary Directories.
Oct 02 12:15:46 compute-0 systemd[231630]: Closed D-Bus User Message Bus Socket.
Oct 02 12:15:46 compute-0 systemd[231630]: Stopped Create User's Volatile Files and Directories.
Oct 02 12:15:46 compute-0 systemd[231630]: Removed slice User Application Slice.
Oct 02 12:15:46 compute-0 systemd[231630]: Reached target Shutdown.
Oct 02 12:15:46 compute-0 systemd[231630]: Finished Exit the Session.
Oct 02 12:15:46 compute-0 systemd[231630]: Reached target Exit the Session.
Oct 02 12:15:46 compute-0 systemd[1]: user@42436.service: Deactivated successfully.
Oct 02 12:15:46 compute-0 systemd[1]: Stopped User Manager for UID 42436.
Oct 02 12:15:46 compute-0 systemd[1]: Stopping User Runtime Directory /run/user/42436...
Oct 02 12:15:46 compute-0 systemd[1]: run-user-42436.mount: Deactivated successfully.
Oct 02 12:15:46 compute-0 systemd[1]: user-runtime-dir@42436.service: Deactivated successfully.
Oct 02 12:15:46 compute-0 systemd[1]: Stopped User Runtime Directory /run/user/42436.
Oct 02 12:15:46 compute-0 systemd[1]: Removed slice User Slice of UID 42436.
Oct 02 12:15:46 compute-0 nova_compute[192079]: 2025-10-02 12:15:46.695 2 DEBUG nova.network.neutron [req-2d59020b-578d-424b-99e1-5900685c7de6 req-d7293fca-7433-495d-a229-2c5b7114d831 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Updated VIF entry in instance network info cache for port f289b804-29b2-4f3d-985c-e9cc226259ad. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:15:46 compute-0 nova_compute[192079]: 2025-10-02 12:15:46.695 2 DEBUG nova.network.neutron [req-2d59020b-578d-424b-99e1-5900685c7de6 req-d7293fca-7433-495d-a229-2c5b7114d831 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Updating instance_info_cache with network_info: [{"id": "f289b804-29b2-4f3d-985c-e9cc226259ad", "address": "fa:16:3e:a7:ed:93", "network": {"id": "5716ac1c-acf7-48a7-8b93-dda3a5af31f6", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1571059342-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ed7af923ad494ac5b7dbd3d8403dc33e", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf289b804-29", "ovs_interfaceid": "f289b804-29b2-4f3d-985c-e9cc226259ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:15:46 compute-0 nova_compute[192079]: 2025-10-02 12:15:46.712 2 DEBUG oslo_concurrency.lockutils [req-2d59020b-578d-424b-99e1-5900685c7de6 req-d7293fca-7433-495d-a229-2c5b7114d831 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-92f5a241-27d9-416b-a19f-da7560348296" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:15:49 compute-0 podman[231836]: 2025-10-02 12:15:49.168407419 +0000 UTC m=+0.078717237 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_id=edpm, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=ceilometer_agent_compute, org.label-schema.schema-version=1.0, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', 
'/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']})
Oct 02 12:15:49 compute-0 sshd-session[231856]: Accepted publickey for nova from 192.168.122.101 port 35710 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:15:49 compute-0 systemd[1]: Created slice User Slice of UID 42436.
Oct 02 12:15:49 compute-0 systemd[1]: Starting User Runtime Directory /run/user/42436...
Oct 02 12:15:49 compute-0 systemd-logind[827]: New session 54 of user nova.
Oct 02 12:15:49 compute-0 systemd[1]: Finished User Runtime Directory /run/user/42436.
Oct 02 12:15:49 compute-0 systemd[1]: Starting User Manager for UID 42436...
Oct 02 12:15:49 compute-0 systemd[231860]: pam_unix(systemd-user:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:15:49 compute-0 systemd[231860]: Queued start job for default target Main User Target.
Oct 02 12:15:49 compute-0 systemd[231860]: Created slice User Application Slice.
Oct 02 12:15:49 compute-0 systemd[231860]: Started Mark boot as successful after the user session has run 2 minutes.
Oct 02 12:15:49 compute-0 systemd[231860]: Started Daily Cleanup of User's Temporary Directories.
Oct 02 12:15:49 compute-0 systemd[231860]: Reached target Paths.
Oct 02 12:15:49 compute-0 systemd[231860]: Reached target Timers.
Oct 02 12:15:49 compute-0 systemd[231860]: Starting D-Bus User Message Bus Socket...
Oct 02 12:15:49 compute-0 systemd[231860]: Starting Create User's Volatile Files and Directories...
Oct 02 12:15:49 compute-0 systemd[231860]: Listening on D-Bus User Message Bus Socket.
Oct 02 12:15:49 compute-0 systemd[231860]: Finished Create User's Volatile Files and Directories.
Oct 02 12:15:49 compute-0 systemd[231860]: Reached target Sockets.
Oct 02 12:15:49 compute-0 systemd[231860]: Reached target Basic System.
Oct 02 12:15:49 compute-0 systemd[231860]: Reached target Main User Target.
Oct 02 12:15:49 compute-0 systemd[231860]: Startup finished in 145ms.
Oct 02 12:15:49 compute-0 systemd[1]: Started User Manager for UID 42436.
Oct 02 12:15:49 compute-0 systemd[1]: Started Session 54 of User nova.
Oct 02 12:15:49 compute-0 sshd-session[231856]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:15:50 compute-0 nova_compute[192079]: 2025-10-02 12:15:50.438 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:50 compute-0 sshd-session[231875]: Received disconnect from 192.168.122.101 port 35710:11: disconnected by user
Oct 02 12:15:50 compute-0 sshd-session[231875]: Disconnected from user nova 192.168.122.101 port 35710
Oct 02 12:15:50 compute-0 sshd-session[231856]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:15:50 compute-0 systemd[1]: session-54.scope: Deactivated successfully.
Oct 02 12:15:50 compute-0 systemd-logind[827]: Session 54 logged out. Waiting for processes to exit.
Oct 02 12:15:50 compute-0 systemd-logind[827]: Removed session 54.
Oct 02 12:15:50 compute-0 sshd-session[231878]: Accepted publickey for nova from 192.168.122.101 port 35720 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:15:50 compute-0 systemd-logind[827]: New session 56 of user nova.
Oct 02 12:15:50 compute-0 systemd[1]: Started Session 56 of User nova.
Oct 02 12:15:50 compute-0 sshd-session[231878]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:15:50 compute-0 sshd-session[231881]: Received disconnect from 192.168.122.101 port 35720:11: disconnected by user
Oct 02 12:15:50 compute-0 sshd-session[231881]: Disconnected from user nova 192.168.122.101 port 35720
Oct 02 12:15:50 compute-0 sshd-session[231878]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:15:50 compute-0 systemd[1]: session-56.scope: Deactivated successfully.
Oct 02 12:15:50 compute-0 systemd-logind[827]: Session 56 logged out. Waiting for processes to exit.
Oct 02 12:15:50 compute-0 systemd-logind[827]: Removed session 56.
Oct 02 12:15:50 compute-0 sshd-session[231883]: Accepted publickey for nova from 192.168.122.101 port 35726 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:15:50 compute-0 systemd-logind[827]: New session 57 of user nova.
Oct 02 12:15:50 compute-0 systemd[1]: Started Session 57 of User nova.
Oct 02 12:15:50 compute-0 sshd-session[231883]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:15:50 compute-0 nova_compute[192079]: 2025-10-02 12:15:50.972 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:51 compute-0 sshd-session[231897]: Received disconnect from 192.168.122.101 port 35726:11: disconnected by user
Oct 02 12:15:51 compute-0 sshd-session[231897]: Disconnected from user nova 192.168.122.101 port 35726
Oct 02 12:15:51 compute-0 sshd-session[231883]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:15:51 compute-0 systemd[1]: session-57.scope: Deactivated successfully.
Oct 02 12:15:51 compute-0 systemd-logind[827]: Session 57 logged out. Waiting for processes to exit.
Oct 02 12:15:51 compute-0 systemd-logind[827]: Removed session 57.
Oct 02 12:15:51 compute-0 nova_compute[192079]: 2025-10-02 12:15:51.752 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:51.752 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=20, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=19) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:15:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:51.754 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 0 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:15:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:51.754 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '20'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:52 compute-0 nova_compute[192079]: 2025-10-02 12:15:52.873 2 INFO nova.network.neutron [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Updating port 1692479a-54ef-45ae-a6a3-39c68408e4f6 with attributes {'binding:host_id': 'compute-0.ctlplane.example.com', 'device_owner': 'compute:nova'}
Oct 02 12:15:53 compute-0 ovn_controller[94336]: 2025-10-02T12:15:53Z|00024|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:a7:ed:93 10.100.0.8
Oct 02 12:15:53 compute-0 ovn_controller[94336]: 2025-10-02T12:15:53Z|00025|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:a7:ed:93 10.100.0.8
Oct 02 12:15:54 compute-0 nova_compute[192079]: 2025-10-02 12:15:54.873 2 DEBUG nova.compute.manager [req-8af04a29-47f0-4c00-998b-64033fb058c1 req-2f9e2aa5-7255-44d6-8af7-ba6627a36de7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Received event network-vif-unplugged-1692479a-54ef-45ae-a6a3-39c68408e4f6 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:15:54 compute-0 nova_compute[192079]: 2025-10-02 12:15:54.874 2 DEBUG oslo_concurrency.lockutils [req-8af04a29-47f0-4c00-998b-64033fb058c1 req-2f9e2aa5-7255-44d6-8af7-ba6627a36de7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "fa72d8b8-93c0-417b-9793-ccd611ffbb84-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:54 compute-0 nova_compute[192079]: 2025-10-02 12:15:54.874 2 DEBUG oslo_concurrency.lockutils [req-8af04a29-47f0-4c00-998b-64033fb058c1 req-2f9e2aa5-7255-44d6-8af7-ba6627a36de7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "fa72d8b8-93c0-417b-9793-ccd611ffbb84-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:54 compute-0 nova_compute[192079]: 2025-10-02 12:15:54.874 2 DEBUG oslo_concurrency.lockutils [req-8af04a29-47f0-4c00-998b-64033fb058c1 req-2f9e2aa5-7255-44d6-8af7-ba6627a36de7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "fa72d8b8-93c0-417b-9793-ccd611ffbb84-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:54 compute-0 nova_compute[192079]: 2025-10-02 12:15:54.874 2 DEBUG nova.compute.manager [req-8af04a29-47f0-4c00-998b-64033fb058c1 req-2f9e2aa5-7255-44d6-8af7-ba6627a36de7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] No waiting events found dispatching network-vif-unplugged-1692479a-54ef-45ae-a6a3-39c68408e4f6 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:15:54 compute-0 nova_compute[192079]: 2025-10-02 12:15:54.875 2 WARNING nova.compute.manager [req-8af04a29-47f0-4c00-998b-64033fb058c1 req-2f9e2aa5-7255-44d6-8af7-ba6627a36de7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Received unexpected event network-vif-unplugged-1692479a-54ef-45ae-a6a3-39c68408e4f6 for instance with vm_state active and task_state resize_migrated.
Oct 02 12:15:54 compute-0 nova_compute[192079]: 2025-10-02 12:15:54.875 2 DEBUG nova.compute.manager [req-8af04a29-47f0-4c00-998b-64033fb058c1 req-2f9e2aa5-7255-44d6-8af7-ba6627a36de7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Received event network-vif-plugged-1692479a-54ef-45ae-a6a3-39c68408e4f6 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:15:54 compute-0 nova_compute[192079]: 2025-10-02 12:15:54.875 2 DEBUG oslo_concurrency.lockutils [req-8af04a29-47f0-4c00-998b-64033fb058c1 req-2f9e2aa5-7255-44d6-8af7-ba6627a36de7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "fa72d8b8-93c0-417b-9793-ccd611ffbb84-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:54 compute-0 nova_compute[192079]: 2025-10-02 12:15:54.875 2 DEBUG oslo_concurrency.lockutils [req-8af04a29-47f0-4c00-998b-64033fb058c1 req-2f9e2aa5-7255-44d6-8af7-ba6627a36de7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "fa72d8b8-93c0-417b-9793-ccd611ffbb84-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:54 compute-0 nova_compute[192079]: 2025-10-02 12:15:54.875 2 DEBUG oslo_concurrency.lockutils [req-8af04a29-47f0-4c00-998b-64033fb058c1 req-2f9e2aa5-7255-44d6-8af7-ba6627a36de7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "fa72d8b8-93c0-417b-9793-ccd611ffbb84-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:54 compute-0 nova_compute[192079]: 2025-10-02 12:15:54.876 2 DEBUG nova.compute.manager [req-8af04a29-47f0-4c00-998b-64033fb058c1 req-2f9e2aa5-7255-44d6-8af7-ba6627a36de7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] No waiting events found dispatching network-vif-plugged-1692479a-54ef-45ae-a6a3-39c68408e4f6 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:15:54 compute-0 nova_compute[192079]: 2025-10-02 12:15:54.876 2 WARNING nova.compute.manager [req-8af04a29-47f0-4c00-998b-64033fb058c1 req-2f9e2aa5-7255-44d6-8af7-ba6627a36de7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Received unexpected event network-vif-plugged-1692479a-54ef-45ae-a6a3-39c68408e4f6 for instance with vm_state active and task_state resize_migrated.
Oct 02 12:15:55 compute-0 nova_compute[192079]: 2025-10-02 12:15:55.441 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:55 compute-0 nova_compute[192079]: 2025-10-02 12:15:55.898 2 DEBUG oslo_concurrency.lockutils [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Acquiring lock "refresh_cache-fa72d8b8-93c0-417b-9793-ccd611ffbb84" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:15:55 compute-0 nova_compute[192079]: 2025-10-02 12:15:55.899 2 DEBUG oslo_concurrency.lockutils [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Acquired lock "refresh_cache-fa72d8b8-93c0-417b-9793-ccd611ffbb84" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:15:55 compute-0 nova_compute[192079]: 2025-10-02 12:15:55.899 2 DEBUG nova.network.neutron [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:15:55 compute-0 nova_compute[192079]: 2025-10-02 12:15:55.975 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:56 compute-0 podman[231905]: 2025-10-02 12:15:56.142572923 +0000 UTC m=+0.058815015 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, container_name=openstack_network_exporter, maintainer=Red Hat, Inc., summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, vcs-type=git, version=9.6, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., build-date=2025-08-20T13:12:41, name=ubi9-minimal, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, config_id=edpm, release=1755695350, io.openshift.tags=minimal rhel9, url=https://catalog.redhat.com/en/search?searchType=containers, io.openshift.expose-services=, architecture=x86_64, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, distribution-scope=public, vendor=Red Hat, Inc., com.redhat.component=ubi9-minimal-container, io.buildah.version=1.33.7)
Oct 02 12:15:56 compute-0 podman[231906]: 2025-10-02 12:15:56.147176448 +0000 UTC m=+0.059959976 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, container_name=multipathd, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:15:57 compute-0 nova_compute[192079]: 2025-10-02 12:15:57.016 2 DEBUG nova.compute.manager [req-63158324-85e7-4284-9f6b-ab266bbf6503 req-a8fd7899-7ae3-49e3-af36-289b61700af8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Received event network-changed-1692479a-54ef-45ae-a6a3-39c68408e4f6 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:15:57 compute-0 nova_compute[192079]: 2025-10-02 12:15:57.016 2 DEBUG nova.compute.manager [req-63158324-85e7-4284-9f6b-ab266bbf6503 req-a8fd7899-7ae3-49e3-af36-289b61700af8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Refreshing instance network info cache due to event network-changed-1692479a-54ef-45ae-a6a3-39c68408e4f6. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:15:57 compute-0 nova_compute[192079]: 2025-10-02 12:15:57.017 2 DEBUG oslo_concurrency.lockutils [req-63158324-85e7-4284-9f6b-ab266bbf6503 req-a8fd7899-7ae3-49e3-af36-289b61700af8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-fa72d8b8-93c0-417b-9793-ccd611ffbb84" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.077 2 DEBUG nova.network.neutron [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Updating instance_info_cache with network_info: [{"id": "1692479a-54ef-45ae-a6a3-39c68408e4f6", "address": "fa:16:3e:41:9a:b6", "network": {"id": "d6de4737-ca60-4c8d-bfd5-687f9366ec8b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1853814355-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffae703d68b24b9c89686c149113fc2b", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap1692479a-54", "ovs_interfaceid": "1692479a-54ef-45ae-a6a3-39c68408e4f6", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.107 2 DEBUG oslo_concurrency.lockutils [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Releasing lock "refresh_cache-fa72d8b8-93c0-417b-9793-ccd611ffbb84" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.110 2 DEBUG oslo_concurrency.lockutils [req-63158324-85e7-4284-9f6b-ab266bbf6503 req-a8fd7899-7ae3-49e3-af36-289b61700af8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-fa72d8b8-93c0-417b-9793-ccd611ffbb84" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.110 2 DEBUG nova.network.neutron [req-63158324-85e7-4284-9f6b-ab266bbf6503 req-a8fd7899-7ae3-49e3-af36-289b61700af8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Refreshing network info cache for port 1692479a-54ef-45ae-a6a3-39c68408e4f6 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.213 2 DEBUG nova.virt.libvirt.driver [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Starting finish_migration finish_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:11698
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.214 2 DEBUG nova.virt.libvirt.driver [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Instance directory exists: not creating _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4719
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.215 2 INFO nova.virt.libvirt.driver [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Creating image(s)
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.216 2 DEBUG nova.objects.instance [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lazy-loading 'trusted_certs' on Instance uuid fa72d8b8-93c0-417b-9793-ccd611ffbb84 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.229 2 DEBUG oslo_concurrency.processutils [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.290 2 DEBUG oslo_concurrency.processutils [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.060s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.291 2 DEBUG nova.virt.disk.api [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Checking if we can resize image /var/lib/nova/instances/fa72d8b8-93c0-417b-9793-ccd611ffbb84/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.291 2 DEBUG oslo_concurrency.processutils [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/fa72d8b8-93c0-417b-9793-ccd611ffbb84/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.361 2 DEBUG oslo_concurrency.processutils [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/fa72d8b8-93c0-417b-9793-ccd611ffbb84/disk --force-share --output=json" returned: 0 in 0.070s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.363 2 DEBUG nova.virt.disk.api [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Cannot resize image /var/lib/nova/instances/fa72d8b8-93c0-417b-9793-ccd611ffbb84/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.393 2 DEBUG nova.virt.libvirt.driver [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Did not create local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4859
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.393 2 DEBUG nova.virt.libvirt.driver [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Ensure instance console log exists: /var/lib/nova/instances/fa72d8b8-93c0-417b-9793-ccd611ffbb84/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.394 2 DEBUG oslo_concurrency.lockutils [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.394 2 DEBUG oslo_concurrency.lockutils [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.395 2 DEBUG oslo_concurrency.lockutils [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.397 2 DEBUG nova.virt.libvirt.driver [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Start _get_guest_xml network_info=[{"id": "1692479a-54ef-45ae-a6a3-39c68408e4f6", "address": "fa:16:3e:41:9a:b6", "network": {"id": "d6de4737-ca60-4c8d-bfd5-687f9366ec8b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1853814355-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [], "label": "tempest-ServerDiskConfigTestJSON-1853814355-network", "vif_mac": "fa:16:3e:41:9a:b6"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffae703d68b24b9c89686c149113fc2b", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap1692479a-54", "ovs_interfaceid": "1692479a-54ef-45ae-a6a3-39c68408e4f6", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.402 2 WARNING nova.virt.libvirt.driver [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.407 2 DEBUG nova.virt.libvirt.host [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.407 2 DEBUG nova.virt.libvirt.host [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.410 2 DEBUG nova.virt.libvirt.host [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.410 2 DEBUG nova.virt.libvirt.host [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.412 2 DEBUG nova.virt.libvirt.driver [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.412 2 DEBUG nova.virt.hardware [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:25Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9949d9da-6314-4ede-8797-6f2f0a6a64fc',id=2,is_public=True,memory_mb=192,name='m1.micro',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.412 2 DEBUG nova.virt.hardware [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.412 2 DEBUG nova.virt.hardware [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.413 2 DEBUG nova.virt.hardware [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.413 2 DEBUG nova.virt.hardware [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.413 2 DEBUG nova.virt.hardware [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.413 2 DEBUG nova.virt.hardware [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.414 2 DEBUG nova.virt.hardware [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.414 2 DEBUG nova.virt.hardware [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.414 2 DEBUG nova.virt.hardware [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.414 2 DEBUG nova.virt.hardware [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.414 2 DEBUG nova.objects.instance [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lazy-loading 'vcpu_model' on Instance uuid fa72d8b8-93c0-417b-9793-ccd611ffbb84 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.431 2 DEBUG oslo_concurrency.processutils [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/fa72d8b8-93c0-417b-9793-ccd611ffbb84/disk.config --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.484 2 DEBUG oslo_concurrency.processutils [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/fa72d8b8-93c0-417b-9793-ccd611ffbb84/disk.config --force-share --output=json" returned: 0 in 0.053s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.485 2 DEBUG oslo_concurrency.lockutils [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Acquiring lock "/var/lib/nova/instances/fa72d8b8-93c0-417b-9793-ccd611ffbb84/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.485 2 DEBUG oslo_concurrency.lockutils [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lock "/var/lib/nova/instances/fa72d8b8-93c0-417b-9793-ccd611ffbb84/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.487 2 DEBUG oslo_concurrency.lockutils [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lock "/var/lib/nova/instances/fa72d8b8-93c0-417b-9793-ccd611ffbb84/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.488 2 DEBUG nova.virt.libvirt.vif [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=True,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:15:12Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerDiskConfigTestJSON-server-480428625',display_name='tempest-ServerDiskConfigTestJSON-server-480428625',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(2),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverdiskconfigtestjson-server-480428625',id=76,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=2,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:15:25Z,launched_on='compute-1.ctlplane.example.com',locked=False,locked_by=None,memory_mb=192,metadata={},migration_context=MigrationContext,new_flavor=Flavor(2),node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=Flavor(1),os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='ffae703d68b24b9c89686c149113fc2b',ramdisk_id='',reservation_id='r-uec4q7qr',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=ServiceList,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_v
ideo_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',old_vm_state='active',owner_project_name='tempest-ServerDiskConfigTestJSON-1763056137',owner_user_name='tempest-ServerDiskConfigTestJSON-1763056137-project-member'},tags=<?>,task_state='resize_finish',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:15:51Z,user_data=None,user_id='def48c13fd6a43ba88836b753986a731',uuid=fa72d8b8-93c0-417b-9793-ccd611ffbb84,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "1692479a-54ef-45ae-a6a3-39c68408e4f6", "address": "fa:16:3e:41:9a:b6", "network": {"id": "d6de4737-ca60-4c8d-bfd5-687f9366ec8b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1853814355-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [], "label": "tempest-ServerDiskConfigTestJSON-1853814355-network", "vif_mac": "fa:16:3e:41:9a:b6"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffae703d68b24b9c89686c149113fc2b", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap1692479a-54", "ovs_interfaceid": "1692479a-54ef-45ae-a6a3-39c68408e4f6", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.489 2 DEBUG nova.network.os_vif_util [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Converting VIF {"id": "1692479a-54ef-45ae-a6a3-39c68408e4f6", "address": "fa:16:3e:41:9a:b6", "network": {"id": "d6de4737-ca60-4c8d-bfd5-687f9366ec8b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1853814355-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [], "label": "tempest-ServerDiskConfigTestJSON-1853814355-network", "vif_mac": "fa:16:3e:41:9a:b6"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffae703d68b24b9c89686c149113fc2b", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap1692479a-54", "ovs_interfaceid": "1692479a-54ef-45ae-a6a3-39c68408e4f6", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.490 2 DEBUG nova.network.os_vif_util [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:41:9a:b6,bridge_name='br-int',has_traffic_filtering=True,id=1692479a-54ef-45ae-a6a3-39c68408e4f6,network=Network(d6de4737-ca60-4c8d-bfd5-687f9366ec8b),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap1692479a-54') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.494 2 DEBUG nova.virt.libvirt.driver [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:15:58 compute-0 nova_compute[192079]:   <uuid>fa72d8b8-93c0-417b-9793-ccd611ffbb84</uuid>
Oct 02 12:15:58 compute-0 nova_compute[192079]:   <name>instance-0000004c</name>
Oct 02 12:15:58 compute-0 nova_compute[192079]:   <memory>196608</memory>
Oct 02 12:15:58 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:15:58 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:15:58 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:       <nova:name>tempest-ServerDiskConfigTestJSON-server-480428625</nova:name>
Oct 02 12:15:58 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:15:58</nova:creationTime>
Oct 02 12:15:58 compute-0 nova_compute[192079]:       <nova:flavor name="m1.micro">
Oct 02 12:15:58 compute-0 nova_compute[192079]:         <nova:memory>192</nova:memory>
Oct 02 12:15:58 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:15:58 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:15:58 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:15:58 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:15:58 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:15:58 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:15:58 compute-0 nova_compute[192079]:         <nova:user uuid="def48c13fd6a43ba88836b753986a731">tempest-ServerDiskConfigTestJSON-1763056137-project-member</nova:user>
Oct 02 12:15:58 compute-0 nova_compute[192079]:         <nova:project uuid="ffae703d68b24b9c89686c149113fc2b">tempest-ServerDiskConfigTestJSON-1763056137</nova:project>
Oct 02 12:15:58 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:15:58 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:15:58 compute-0 nova_compute[192079]:         <nova:port uuid="1692479a-54ef-45ae-a6a3-39c68408e4f6">
Oct 02 12:15:58 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.14" ipVersion="4"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:15:58 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:15:58 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:15:58 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <system>
Oct 02 12:15:58 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:15:58 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:15:58 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:15:58 compute-0 nova_compute[192079]:       <entry name="serial">fa72d8b8-93c0-417b-9793-ccd611ffbb84</entry>
Oct 02 12:15:58 compute-0 nova_compute[192079]:       <entry name="uuid">fa72d8b8-93c0-417b-9793-ccd611ffbb84</entry>
Oct 02 12:15:58 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     </system>
Oct 02 12:15:58 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:15:58 compute-0 nova_compute[192079]:   <os>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:   </os>
Oct 02 12:15:58 compute-0 nova_compute[192079]:   <features>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:   </features>
Oct 02 12:15:58 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:15:58 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:15:58 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:15:58 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/fa72d8b8-93c0-417b-9793-ccd611ffbb84/disk"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:15:58 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/fa72d8b8-93c0-417b-9793-ccd611ffbb84/disk.config"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:15:58 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:41:9a:b6"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:       <target dev="tap1692479a-54"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:15:58 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/fa72d8b8-93c0-417b-9793-ccd611ffbb84/console.log" append="off"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <video>
Oct 02 12:15:58 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     </video>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:15:58 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:15:58 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:15:58 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:15:58 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:15:58 compute-0 nova_compute[192079]: </domain>
Oct 02 12:15:58 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.496 2 DEBUG nova.virt.libvirt.vif [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=True,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:15:12Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerDiskConfigTestJSON-server-480428625',display_name='tempest-ServerDiskConfigTestJSON-server-480428625',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(2),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverdiskconfigtestjson-server-480428625',id=76,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=2,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:15:25Z,launched_on='compute-1.ctlplane.example.com',locked=False,locked_by=None,memory_mb=192,metadata={},migration_context=MigrationContext,new_flavor=Flavor(2),node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=Flavor(1),os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='ffae703d68b24b9c89686c149113fc2b',ramdisk_id='',reservation_id='r-uec4q7qr',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=ServiceList,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_v
ideo_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',old_vm_state='active',owner_project_name='tempest-ServerDiskConfigTestJSON-1763056137',owner_user_name='tempest-ServerDiskConfigTestJSON-1763056137-project-member'},tags=<?>,task_state='resize_finish',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:15:51Z,user_data=None,user_id='def48c13fd6a43ba88836b753986a731',uuid=fa72d8b8-93c0-417b-9793-ccd611ffbb84,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "1692479a-54ef-45ae-a6a3-39c68408e4f6", "address": "fa:16:3e:41:9a:b6", "network": {"id": "d6de4737-ca60-4c8d-bfd5-687f9366ec8b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1853814355-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [], "label": "tempest-ServerDiskConfigTestJSON-1853814355-network", "vif_mac": "fa:16:3e:41:9a:b6"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffae703d68b24b9c89686c149113fc2b", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap1692479a-54", "ovs_interfaceid": "1692479a-54ef-45ae-a6a3-39c68408e4f6", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.497 2 DEBUG nova.network.os_vif_util [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Converting VIF {"id": "1692479a-54ef-45ae-a6a3-39c68408e4f6", "address": "fa:16:3e:41:9a:b6", "network": {"id": "d6de4737-ca60-4c8d-bfd5-687f9366ec8b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1853814355-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [], "label": "tempest-ServerDiskConfigTestJSON-1853814355-network", "vif_mac": "fa:16:3e:41:9a:b6"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffae703d68b24b9c89686c149113fc2b", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap1692479a-54", "ovs_interfaceid": "1692479a-54ef-45ae-a6a3-39c68408e4f6", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.497 2 DEBUG nova.network.os_vif_util [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:41:9a:b6,bridge_name='br-int',has_traffic_filtering=True,id=1692479a-54ef-45ae-a6a3-39c68408e4f6,network=Network(d6de4737-ca60-4c8d-bfd5-687f9366ec8b),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap1692479a-54') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.498 2 DEBUG os_vif [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:41:9a:b6,bridge_name='br-int',has_traffic_filtering=True,id=1692479a-54ef-45ae-a6a3-39c68408e4f6,network=Network(d6de4737-ca60-4c8d-bfd5-687f9366ec8b),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap1692479a-54') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.498 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.499 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.499 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.501 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.501 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap1692479a-54, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.501 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap1692479a-54, col_values=(('external_ids', {'iface-id': '1692479a-54ef-45ae-a6a3-39c68408e4f6', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:41:9a:b6', 'vm-uuid': 'fa72d8b8-93c0-417b-9793-ccd611ffbb84'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.532 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:58 compute-0 NetworkManager[51160]: <info>  [1759407358.5332] manager: (tap1692479a-54): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/137)
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.535 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.539 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.540 2 INFO os_vif [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:41:9a:b6,bridge_name='br-int',has_traffic_filtering=True,id=1692479a-54ef-45ae-a6a3-39c68408e4f6,network=Network(d6de4737-ca60-4c8d-bfd5-687f9366ec8b),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap1692479a-54')
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.602 2 DEBUG nova.virt.libvirt.driver [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.603 2 DEBUG nova.virt.libvirt.driver [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.603 2 DEBUG nova.virt.libvirt.driver [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] No VIF found with MAC fa:16:3e:41:9a:b6, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.603 2 INFO nova.virt.libvirt.driver [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Using config drive
Oct 02 12:15:58 compute-0 kernel: tap1692479a-54: entered promiscuous mode
Oct 02 12:15:58 compute-0 ovn_controller[94336]: 2025-10-02T12:15:58Z|00266|binding|INFO|Claiming lport 1692479a-54ef-45ae-a6a3-39c68408e4f6 for this chassis.
Oct 02 12:15:58 compute-0 ovn_controller[94336]: 2025-10-02T12:15:58Z|00267|binding|INFO|1692479a-54ef-45ae-a6a3-39c68408e4f6: Claiming fa:16:3e:41:9a:b6 10.100.0.14
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.657 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:58 compute-0 NetworkManager[51160]: <info>  [1759407358.6613] manager: (tap1692479a-54): new Tun device (/org/freedesktop/NetworkManager/Devices/138)
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.661 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:58.686 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:41:9a:b6 10.100.0.14'], port_security=['fa:16:3e:41:9a:b6 10.100.0.14'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.14/28', 'neutron:device_id': 'fa72d8b8-93c0-417b-9793-ccd611ffbb84', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-d6de4737-ca60-4c8d-bfd5-687f9366ec8b', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'ffae703d68b24b9c89686c149113fc2b', 'neutron:revision_number': '6', 'neutron:security_group_ids': '64970375-b20e-4c18-bfb5-2a0465f8be7d', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=9476db85-7514-407a-b55a-3d3c703e8f7b, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=1692479a-54ef-45ae-a6a3-39c68408e4f6) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:15:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:58.688 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 1692479a-54ef-45ae-a6a3-39c68408e4f6 in datapath d6de4737-ca60-4c8d-bfd5-687f9366ec8b bound to our chassis
Oct 02 12:15:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:58.689 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network d6de4737-ca60-4c8d-bfd5-687f9366ec8b
Oct 02 12:15:58 compute-0 systemd-udevd[231967]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:15:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:58.705 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[61a71d49-58e5-4762-aace-15ece7e20fd8]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:58.706 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tapd6de4737-c1 in ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:15:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:58.708 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tapd6de4737-c0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:15:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:58.708 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8e34d957-c0c7-4dc2-8876-f84ce38f98fb]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:58.709 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[05156064-4553-46cd-a453-f657cf99458d]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:58 compute-0 NetworkManager[51160]: <info>  [1759407358.7167] device (tap1692479a-54): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:15:58 compute-0 NetworkManager[51160]: <info>  [1759407358.7176] device (tap1692479a-54): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:15:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:58.725 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[f0e7feda-bc74-4472-82f2-344b296e15d0]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:58 compute-0 systemd-machined[152150]: New machine qemu-38-instance-0000004c.
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.741 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:58 compute-0 ovn_controller[94336]: 2025-10-02T12:15:58Z|00268|binding|INFO|Setting lport 1692479a-54ef-45ae-a6a3-39c68408e4f6 ovn-installed in OVS
Oct 02 12:15:58 compute-0 ovn_controller[94336]: 2025-10-02T12:15:58Z|00269|binding|INFO|Setting lport 1692479a-54ef-45ae-a6a3-39c68408e4f6 up in Southbound
Oct 02 12:15:58 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.745 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:58 compute-0 systemd[1]: Started Virtual Machine qemu-38-instance-0000004c.
Oct 02 12:15:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:58.756 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[75789629-0a88-4ead-a914-b168e5d04835]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:58.785 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[659a944e-4be3-46c8-ba46-d426b1be738c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:58 compute-0 systemd-udevd[231972]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:15:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:58.793 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[552a5b69-fb08-4e27-95b7-d2fdd3574200]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:58 compute-0 NetworkManager[51160]: <info>  [1759407358.7943] manager: (tapd6de4737-c0): new Veth device (/org/freedesktop/NetworkManager/Devices/139)
Oct 02 12:15:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:58.824 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[00cbaa69-af8c-4084-8ecc-f161d3e165e4]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:58.830 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[8606245f-e748-4b84-af0f-eb33ce1c3b73]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:58 compute-0 NetworkManager[51160]: <info>  [1759407358.8552] device (tapd6de4737-c0): carrier: link connected
Oct 02 12:15:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:58.862 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[8532d6f7-ad93-4d78-b199-5bb07f39e6b6]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:58.879 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[892772d8-cf11-45d4-a6b1-8c21f575e331]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapd6de4737-c1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:bd:c9:1f'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 85], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 535648, 'reachable_time': 35950, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 232002, 'error': None, 'target': 'ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:58.892 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a04346a9-7767-4bb3-a33b-fbca3c6e831e]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:febd:c91f'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 535648, 'tstamp': 535648}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 232003, 'error': None, 'target': 'ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:58.906 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[afdd31c1-4303-4dbb-a75d-05b0bc5e20f4]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapd6de4737-c1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:bd:c9:1f'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 85], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 535648, 'reachable_time': 35950, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 232004, 'error': None, 'target': 'ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:58.934 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[bba10dcd-4ddc-4a6e-8a1c-55ce363e44dd]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:58.982 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0d9630c1-7802-4a73-a39d-ca3233651c63]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:58.983 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapd6de4737-c0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:58.983 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:15:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:58.984 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapd6de4737-c0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:58 compute-0 NetworkManager[51160]: <info>  [1759407358.9865] manager: (tapd6de4737-c0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/140)
Oct 02 12:15:58 compute-0 kernel: tapd6de4737-c0: entered promiscuous mode
Oct 02 12:15:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:58.988 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tapd6de4737-c0, col_values=(('external_ids', {'iface-id': 'cc451eb7-bf34-4b54-96d8-b834f11e06fb'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:15:58 compute-0 ovn_controller[94336]: 2025-10-02T12:15:58Z|00270|binding|INFO|Releasing lport cc451eb7-bf34-4b54-96d8-b834f11e06fb from this chassis (sb_readonly=0)
Oct 02 12:15:59 compute-0 nova_compute[192079]: 2025-10-02 12:15:58.999 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:59 compute-0 nova_compute[192079]: 2025-10-02 12:15:59.002 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:59.003 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/d6de4737-ca60-4c8d-bfd5-687f9366ec8b.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/d6de4737-ca60-4c8d-bfd5-687f9366ec8b.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:59.004 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[89b97f3a-da15-4b23-aa97-8c6298bf4311]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:59.004 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-d6de4737-ca60-4c8d-bfd5-687f9366ec8b
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/d6de4737-ca60-4c8d-bfd5-687f9366ec8b.pid.haproxy
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID d6de4737-ca60-4c8d-bfd5-687f9366ec8b
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:15:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:15:59.006 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b', 'env', 'PROCESS_TAG=haproxy-d6de4737-ca60-4c8d-bfd5-687f9366ec8b', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/d6de4737-ca60-4c8d-bfd5-687f9366ec8b.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:15:59 compute-0 podman[232043]: 2025-10-02 12:15:59.361088565 +0000 UTC m=+0.025555687 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:15:59 compute-0 nova_compute[192079]: 2025-10-02 12:15:59.559 2 DEBUG nova.compute.manager [req-2d7822b2-ec0b-413e-835f-92ce3fc6e3dc req-7eba754d-f65c-4405-bfd0-ded3a4d556fd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Received event network-vif-plugged-1692479a-54ef-45ae-a6a3-39c68408e4f6 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:15:59 compute-0 nova_compute[192079]: 2025-10-02 12:15:59.560 2 DEBUG oslo_concurrency.lockutils [req-2d7822b2-ec0b-413e-835f-92ce3fc6e3dc req-7eba754d-f65c-4405-bfd0-ded3a4d556fd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "fa72d8b8-93c0-417b-9793-ccd611ffbb84-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:15:59 compute-0 nova_compute[192079]: 2025-10-02 12:15:59.560 2 DEBUG oslo_concurrency.lockutils [req-2d7822b2-ec0b-413e-835f-92ce3fc6e3dc req-7eba754d-f65c-4405-bfd0-ded3a4d556fd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "fa72d8b8-93c0-417b-9793-ccd611ffbb84-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:15:59 compute-0 nova_compute[192079]: 2025-10-02 12:15:59.560 2 DEBUG oslo_concurrency.lockutils [req-2d7822b2-ec0b-413e-835f-92ce3fc6e3dc req-7eba754d-f65c-4405-bfd0-ded3a4d556fd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "fa72d8b8-93c0-417b-9793-ccd611ffbb84-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:15:59 compute-0 nova_compute[192079]: 2025-10-02 12:15:59.561 2 DEBUG nova.compute.manager [req-2d7822b2-ec0b-413e-835f-92ce3fc6e3dc req-7eba754d-f65c-4405-bfd0-ded3a4d556fd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] No waiting events found dispatching network-vif-plugged-1692479a-54ef-45ae-a6a3-39c68408e4f6 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:15:59 compute-0 nova_compute[192079]: 2025-10-02 12:15:59.561 2 WARNING nova.compute.manager [req-2d7822b2-ec0b-413e-835f-92ce3fc6e3dc req-7eba754d-f65c-4405-bfd0-ded3a4d556fd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Received unexpected event network-vif-plugged-1692479a-54ef-45ae-a6a3-39c68408e4f6 for instance with vm_state active and task_state resize_finish.
Oct 02 12:15:59 compute-0 nova_compute[192079]: 2025-10-02 12:15:59.575 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407359.5742877, fa72d8b8-93c0-417b-9793-ccd611ffbb84 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:15:59 compute-0 nova_compute[192079]: 2025-10-02 12:15:59.575 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] VM Resumed (Lifecycle Event)
Oct 02 12:15:59 compute-0 nova_compute[192079]: 2025-10-02 12:15:59.577 2 DEBUG nova.compute.manager [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:15:59 compute-0 nova_compute[192079]: 2025-10-02 12:15:59.580 2 INFO nova.virt.libvirt.driver [-] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Instance running successfully.
Oct 02 12:15:59 compute-0 virtqemud[191807]: argument unsupported: QEMU guest agent is not configured
Oct 02 12:15:59 compute-0 nova_compute[192079]: 2025-10-02 12:15:59.582 2 DEBUG nova.virt.libvirt.guest [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Failed to set time: agent not configured sync_guest_time /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:200
Oct 02 12:15:59 compute-0 nova_compute[192079]: 2025-10-02 12:15:59.582 2 DEBUG nova.virt.libvirt.driver [None req-36c1f53d-67b1-4e3f-9245-73827ca1dac5 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] finish_migration finished successfully. finish_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:11793
Oct 02 12:15:59 compute-0 nova_compute[192079]: 2025-10-02 12:15:59.595 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:15:59 compute-0 nova_compute[192079]: 2025-10-02 12:15:59.598 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: active, current task_state: resize_finish, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:15:59 compute-0 nova_compute[192079]: 2025-10-02 12:15:59.662 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] During sync_power_state the instance has a pending task (resize_finish). Skip.
Oct 02 12:15:59 compute-0 nova_compute[192079]: 2025-10-02 12:15:59.663 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407359.57667, fa72d8b8-93c0-417b-9793-ccd611ffbb84 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:15:59 compute-0 nova_compute[192079]: 2025-10-02 12:15:59.663 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] VM Started (Lifecycle Event)
Oct 02 12:15:59 compute-0 podman[232043]: 2025-10-02 12:15:59.671110639 +0000 UTC m=+0.335577741 container create f2f68eaae35a6e5b0a38962b5d10d8e322cfa2a3fe57c3e4e6a827cd48217555 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.license=GPLv2)
Oct 02 12:15:59 compute-0 systemd[1]: Started libpod-conmon-f2f68eaae35a6e5b0a38962b5d10d8e322cfa2a3fe57c3e4e6a827cd48217555.scope.
Oct 02 12:15:59 compute-0 nova_compute[192079]: 2025-10-02 12:15:59.714 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:15:59 compute-0 nova_compute[192079]: 2025-10-02 12:15:59.719 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Synchronizing instance power state after lifecycle event "Started"; current vm_state: active, current task_state: resize_finish, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:15:59 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:15:59 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/322d7ab9137326d5820a0c6af72262b5397cf985aea9f505e1757227e5a54843/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:15:59 compute-0 podman[232043]: 2025-10-02 12:15:59.759628242 +0000 UTC m=+0.424095374 container init f2f68eaae35a6e5b0a38962b5d10d8e322cfa2a3fe57c3e4e6a827cd48217555 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, tcib_managed=true, org.label-schema.vendor=CentOS)
Oct 02 12:15:59 compute-0 podman[232043]: 2025-10-02 12:15:59.764967208 +0000 UTC m=+0.429434320 container start f2f68eaae35a6e5b0a38962b5d10d8e322cfa2a3fe57c3e4e6a827cd48217555 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:15:59 compute-0 neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b[232058]: [NOTICE]   (232062) : New worker (232064) forked
Oct 02 12:15:59 compute-0 neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b[232058]: [NOTICE]   (232062) : Loading success.
Oct 02 12:16:00 compute-0 nova_compute[192079]: 2025-10-02 12:16:00.332 2 DEBUG nova.network.neutron [req-63158324-85e7-4284-9f6b-ab266bbf6503 req-a8fd7899-7ae3-49e3-af36-289b61700af8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Updated VIF entry in instance network info cache for port 1692479a-54ef-45ae-a6a3-39c68408e4f6. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:16:00 compute-0 nova_compute[192079]: 2025-10-02 12:16:00.333 2 DEBUG nova.network.neutron [req-63158324-85e7-4284-9f6b-ab266bbf6503 req-a8fd7899-7ae3-49e3-af36-289b61700af8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Updating instance_info_cache with network_info: [{"id": "1692479a-54ef-45ae-a6a3-39c68408e4f6", "address": "fa:16:3e:41:9a:b6", "network": {"id": "d6de4737-ca60-4c8d-bfd5-687f9366ec8b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1853814355-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffae703d68b24b9c89686c149113fc2b", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap1692479a-54", "ovs_interfaceid": "1692479a-54ef-45ae-a6a3-39c68408e4f6", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:16:00 compute-0 nova_compute[192079]: 2025-10-02 12:16:00.354 2 DEBUG oslo_concurrency.lockutils [req-63158324-85e7-4284-9f6b-ab266bbf6503 req-a8fd7899-7ae3-49e3-af36-289b61700af8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-fa72d8b8-93c0-417b-9793-ccd611ffbb84" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:16:01 compute-0 nova_compute[192079]: 2025-10-02 12:16:01.009 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:01 compute-0 systemd[1]: Stopping User Manager for UID 42436...
Oct 02 12:16:01 compute-0 systemd[231860]: Activating special unit Exit the Session...
Oct 02 12:16:01 compute-0 systemd[231860]: Stopped target Main User Target.
Oct 02 12:16:01 compute-0 systemd[231860]: Stopped target Basic System.
Oct 02 12:16:01 compute-0 systemd[231860]: Stopped target Paths.
Oct 02 12:16:01 compute-0 systemd[231860]: Stopped target Sockets.
Oct 02 12:16:01 compute-0 systemd[231860]: Stopped target Timers.
Oct 02 12:16:01 compute-0 systemd[231860]: Stopped Mark boot as successful after the user session has run 2 minutes.
Oct 02 12:16:01 compute-0 systemd[231860]: Stopped Daily Cleanup of User's Temporary Directories.
Oct 02 12:16:01 compute-0 systemd[231860]: Closed D-Bus User Message Bus Socket.
Oct 02 12:16:01 compute-0 systemd[231860]: Stopped Create User's Volatile Files and Directories.
Oct 02 12:16:01 compute-0 systemd[231860]: Removed slice User Application Slice.
Oct 02 12:16:01 compute-0 systemd[231860]: Reached target Shutdown.
Oct 02 12:16:01 compute-0 systemd[231860]: Finished Exit the Session.
Oct 02 12:16:01 compute-0 systemd[231860]: Reached target Exit the Session.
Oct 02 12:16:01 compute-0 systemd[1]: user@42436.service: Deactivated successfully.
Oct 02 12:16:01 compute-0 systemd[1]: Stopped User Manager for UID 42436.
Oct 02 12:16:01 compute-0 systemd[1]: Stopping User Runtime Directory /run/user/42436...
Oct 02 12:16:01 compute-0 systemd[1]: run-user-42436.mount: Deactivated successfully.
Oct 02 12:16:01 compute-0 systemd[1]: user-runtime-dir@42436.service: Deactivated successfully.
Oct 02 12:16:01 compute-0 systemd[1]: Stopped User Runtime Directory /run/user/42436.
Oct 02 12:16:01 compute-0 systemd[1]: Removed slice User Slice of UID 42436.
Oct 02 12:16:01 compute-0 podman[232074]: 2025-10-02 12:16:01.169910184 +0000 UTC m=+0.072986761 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:16:01 compute-0 podman[232075]: 2025-10-02 12:16:01.185666784 +0000 UTC m=+0.092217466 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, container_name=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=iscsid, managed_by=edpm_ansible)
Oct 02 12:16:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:02.216 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:16:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:02.216 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:16:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:02.217 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:16:02 compute-0 nova_compute[192079]: 2025-10-02 12:16:02.499 2 DEBUG nova.compute.manager [req-39917549-6d33-40d2-bd45-966c2cbcb3cd req-248af73a-839c-48f1-ae2d-306f44b1ddad 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Received event network-vif-plugged-1692479a-54ef-45ae-a6a3-39c68408e4f6 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:16:02 compute-0 nova_compute[192079]: 2025-10-02 12:16:02.500 2 DEBUG oslo_concurrency.lockutils [req-39917549-6d33-40d2-bd45-966c2cbcb3cd req-248af73a-839c-48f1-ae2d-306f44b1ddad 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "fa72d8b8-93c0-417b-9793-ccd611ffbb84-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:16:02 compute-0 nova_compute[192079]: 2025-10-02 12:16:02.500 2 DEBUG oslo_concurrency.lockutils [req-39917549-6d33-40d2-bd45-966c2cbcb3cd req-248af73a-839c-48f1-ae2d-306f44b1ddad 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "fa72d8b8-93c0-417b-9793-ccd611ffbb84-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:16:02 compute-0 nova_compute[192079]: 2025-10-02 12:16:02.500 2 DEBUG oslo_concurrency.lockutils [req-39917549-6d33-40d2-bd45-966c2cbcb3cd req-248af73a-839c-48f1-ae2d-306f44b1ddad 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "fa72d8b8-93c0-417b-9793-ccd611ffbb84-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:16:02 compute-0 nova_compute[192079]: 2025-10-02 12:16:02.501 2 DEBUG nova.compute.manager [req-39917549-6d33-40d2-bd45-966c2cbcb3cd req-248af73a-839c-48f1-ae2d-306f44b1ddad 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] No waiting events found dispatching network-vif-plugged-1692479a-54ef-45ae-a6a3-39c68408e4f6 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:16:02 compute-0 nova_compute[192079]: 2025-10-02 12:16:02.501 2 WARNING nova.compute.manager [req-39917549-6d33-40d2-bd45-966c2cbcb3cd req-248af73a-839c-48f1-ae2d-306f44b1ddad 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Received unexpected event network-vif-plugged-1692479a-54ef-45ae-a6a3-39c68408e4f6 for instance with vm_state resized and task_state None.
Oct 02 12:16:03 compute-0 nova_compute[192079]: 2025-10-02 12:16:03.580 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:06 compute-0 nova_compute[192079]: 2025-10-02 12:16:06.011 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.395 2 DEBUG oslo_concurrency.lockutils [None req-c82df3cb-f1c2-4ed0-8269-246197eb38a1 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Acquiring lock "fa72d8b8-93c0-417b-9793-ccd611ffbb84" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.396 2 DEBUG oslo_concurrency.lockutils [None req-c82df3cb-f1c2-4ed0-8269-246197eb38a1 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lock "fa72d8b8-93c0-417b-9793-ccd611ffbb84" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.396 2 DEBUG oslo_concurrency.lockutils [None req-c82df3cb-f1c2-4ed0-8269-246197eb38a1 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Acquiring lock "fa72d8b8-93c0-417b-9793-ccd611ffbb84-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.396 2 DEBUG oslo_concurrency.lockutils [None req-c82df3cb-f1c2-4ed0-8269-246197eb38a1 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lock "fa72d8b8-93c0-417b-9793-ccd611ffbb84-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.397 2 DEBUG oslo_concurrency.lockutils [None req-c82df3cb-f1c2-4ed0-8269-246197eb38a1 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lock "fa72d8b8-93c0-417b-9793-ccd611ffbb84-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.410 2 INFO nova.compute.manager [None req-c82df3cb-f1c2-4ed0-8269-246197eb38a1 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Terminating instance
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.430 2 DEBUG nova.compute.manager [None req-c82df3cb-f1c2-4ed0-8269-246197eb38a1 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:16:08 compute-0 kernel: tap1692479a-54 (unregistering): left promiscuous mode
Oct 02 12:16:08 compute-0 NetworkManager[51160]: <info>  [1759407368.4537] device (tap1692479a-54): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:16:08 compute-0 ovn_controller[94336]: 2025-10-02T12:16:08Z|00271|binding|INFO|Releasing lport 1692479a-54ef-45ae-a6a3-39c68408e4f6 from this chassis (sb_readonly=0)
Oct 02 12:16:08 compute-0 ovn_controller[94336]: 2025-10-02T12:16:08Z|00272|binding|INFO|Setting lport 1692479a-54ef-45ae-a6a3-39c68408e4f6 down in Southbound
Oct 02 12:16:08 compute-0 ovn_controller[94336]: 2025-10-02T12:16:08Z|00273|binding|INFO|Removing iface tap1692479a-54 ovn-installed in OVS
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.467 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.469 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:08.476 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:41:9a:b6 10.100.0.14'], port_security=['fa:16:3e:41:9a:b6 10.100.0.14'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.14/28', 'neutron:device_id': 'fa72d8b8-93c0-417b-9793-ccd611ffbb84', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-d6de4737-ca60-4c8d-bfd5-687f9366ec8b', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'ffae703d68b24b9c89686c149113fc2b', 'neutron:revision_number': '8', 'neutron:security_group_ids': '64970375-b20e-4c18-bfb5-2a0465f8be7d', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=9476db85-7514-407a-b55a-3d3c703e8f7b, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=1692479a-54ef-45ae-a6a3-39c68408e4f6) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:16:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:08.477 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 1692479a-54ef-45ae-a6a3-39c68408e4f6 in datapath d6de4737-ca60-4c8d-bfd5-687f9366ec8b unbound from our chassis
Oct 02 12:16:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:08.479 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network d6de4737-ca60-4c8d-bfd5-687f9366ec8b, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:16:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:08.480 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[077f0bb9-b719-45b2-8c95-272a47919ded]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:08.481 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b namespace which is not needed anymore
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.480 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:08 compute-0 systemd[1]: machine-qemu\x2d38\x2dinstance\x2d0000004c.scope: Deactivated successfully.
Oct 02 12:16:08 compute-0 systemd[1]: machine-qemu\x2d38\x2dinstance\x2d0000004c.scope: Consumed 9.713s CPU time.
Oct 02 12:16:08 compute-0 systemd-machined[152150]: Machine qemu-38-instance-0000004c terminated.
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.581 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:08 compute-0 neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b[232058]: [NOTICE]   (232062) : haproxy version is 2.8.14-c23fe91
Oct 02 12:16:08 compute-0 neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b[232058]: [NOTICE]   (232062) : path to executable is /usr/sbin/haproxy
Oct 02 12:16:08 compute-0 neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b[232058]: [WARNING]  (232062) : Exiting Master process...
Oct 02 12:16:08 compute-0 neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b[232058]: [WARNING]  (232062) : Exiting Master process...
Oct 02 12:16:08 compute-0 neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b[232058]: [ALERT]    (232062) : Current worker (232064) exited with code 143 (Terminated)
Oct 02 12:16:08 compute-0 neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b[232058]: [WARNING]  (232062) : All workers exited. Exiting... (0)
Oct 02 12:16:08 compute-0 systemd[1]: libpod-f2f68eaae35a6e5b0a38962b5d10d8e322cfa2a3fe57c3e4e6a827cd48217555.scope: Deactivated successfully.
Oct 02 12:16:08 compute-0 podman[232140]: 2025-10-02 12:16:08.613252708 +0000 UTC m=+0.041899593 container died f2f68eaae35a6e5b0a38962b5d10d8e322cfa2a3fe57c3e4e6a827cd48217555 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:16:08 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-f2f68eaae35a6e5b0a38962b5d10d8e322cfa2a3fe57c3e4e6a827cd48217555-userdata-shm.mount: Deactivated successfully.
Oct 02 12:16:08 compute-0 systemd[1]: var-lib-containers-storage-overlay-322d7ab9137326d5820a0c6af72262b5397cf985aea9f505e1757227e5a54843-merged.mount: Deactivated successfully.
Oct 02 12:16:08 compute-0 podman[232140]: 2025-10-02 12:16:08.650579416 +0000 UTC m=+0.079226301 container cleanup f2f68eaae35a6e5b0a38962b5d10d8e322cfa2a3fe57c3e4e6a827cd48217555 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b, org.label-schema.schema-version=1.0, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:16:08 compute-0 systemd[1]: libpod-conmon-f2f68eaae35a6e5b0a38962b5d10d8e322cfa2a3fe57c3e4e6a827cd48217555.scope: Deactivated successfully.
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.693 2 INFO nova.virt.libvirt.driver [-] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Instance destroyed successfully.
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.694 2 DEBUG nova.objects.instance [None req-c82df3cb-f1c2-4ed0-8269-246197eb38a1 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lazy-loading 'resources' on Instance uuid fa72d8b8-93c0-417b-9793-ccd611ffbb84 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:16:08 compute-0 podman[232179]: 2025-10-02 12:16:08.70537359 +0000 UTC m=+0.035170090 container remove f2f68eaae35a6e5b0a38962b5d10d8e322cfa2a3fe57c3e4e6a827cd48217555 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0)
Oct 02 12:16:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:08.709 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[88471217-082f-4c5e-b82d-f9e2608fdeb2]: (4, ('Thu Oct  2 12:16:08 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b (f2f68eaae35a6e5b0a38962b5d10d8e322cfa2a3fe57c3e4e6a827cd48217555)\nf2f68eaae35a6e5b0a38962b5d10d8e322cfa2a3fe57c3e4e6a827cd48217555\nThu Oct  2 12:16:08 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b (f2f68eaae35a6e5b0a38962b5d10d8e322cfa2a3fe57c3e4e6a827cd48217555)\nf2f68eaae35a6e5b0a38962b5d10d8e322cfa2a3fe57c3e4e6a827cd48217555\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:08.711 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[712a6e64-98bc-4895-b2dc-145f083c90ce]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:08.712 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapd6de4737-c0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.714 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:08 compute-0 kernel: tapd6de4737-c0: left promiscuous mode
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.731 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:08.733 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[82bab07b-0baf-45ee-8abe-2ca85600ac06]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.734 2 DEBUG nova.virt.libvirt.vif [None req-c82df3cb-f1c2-4ed0-8269-246197eb38a1 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=True,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:15:12Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerDiskConfigTestJSON-server-480428625',display_name='tempest-ServerDiskConfigTestJSON-server-480428625',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(2),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverdiskconfigtestjson-server-480428625',id=76,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=2,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:15:59Z,launched_on='compute-1.ctlplane.example.com',locked=False,locked_by=None,memory_mb=192,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='ffae703d68b24b9c89686c149113fc2b',ramdisk_id='',reservation_id='r-uec4q7qr',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min
_disk='1',image_min_ram='0',owner_project_name='tempest-ServerDiskConfigTestJSON-1763056137',owner_user_name='tempest-ServerDiskConfigTestJSON-1763056137-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:16:05Z,user_data=None,user_id='def48c13fd6a43ba88836b753986a731',uuid=fa72d8b8-93c0-417b-9793-ccd611ffbb84,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "1692479a-54ef-45ae-a6a3-39c68408e4f6", "address": "fa:16:3e:41:9a:b6", "network": {"id": "d6de4737-ca60-4c8d-bfd5-687f9366ec8b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1853814355-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffae703d68b24b9c89686c149113fc2b", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap1692479a-54", "ovs_interfaceid": "1692479a-54ef-45ae-a6a3-39c68408e4f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.734 2 DEBUG nova.network.os_vif_util [None req-c82df3cb-f1c2-4ed0-8269-246197eb38a1 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Converting VIF {"id": "1692479a-54ef-45ae-a6a3-39c68408e4f6", "address": "fa:16:3e:41:9a:b6", "network": {"id": "d6de4737-ca60-4c8d-bfd5-687f9366ec8b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1853814355-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffae703d68b24b9c89686c149113fc2b", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap1692479a-54", "ovs_interfaceid": "1692479a-54ef-45ae-a6a3-39c68408e4f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.735 2 DEBUG nova.network.os_vif_util [None req-c82df3cb-f1c2-4ed0-8269-246197eb38a1 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:41:9a:b6,bridge_name='br-int',has_traffic_filtering=True,id=1692479a-54ef-45ae-a6a3-39c68408e4f6,network=Network(d6de4737-ca60-4c8d-bfd5-687f9366ec8b),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap1692479a-54') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.736 2 DEBUG os_vif [None req-c82df3cb-f1c2-4ed0-8269-246197eb38a1 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:41:9a:b6,bridge_name='br-int',has_traffic_filtering=True,id=1692479a-54ef-45ae-a6a3-39c68408e4f6,network=Network(d6de4737-ca60-4c8d-bfd5-687f9366ec8b),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap1692479a-54') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.737 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.738 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap1692479a-54, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.740 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.742 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.742 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.744 2 INFO os_vif [None req-c82df3cb-f1c2-4ed0-8269-246197eb38a1 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:41:9a:b6,bridge_name='br-int',has_traffic_filtering=True,id=1692479a-54ef-45ae-a6a3-39c68408e4f6,network=Network(d6de4737-ca60-4c8d-bfd5-687f9366ec8b),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap1692479a-54')
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.745 2 INFO nova.virt.libvirt.driver [None req-c82df3cb-f1c2-4ed0-8269-246197eb38a1 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Deleting instance files /var/lib/nova/instances/fa72d8b8-93c0-417b-9793-ccd611ffbb84_del
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.750 2 INFO nova.virt.libvirt.driver [None req-c82df3cb-f1c2-4ed0-8269-246197eb38a1 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Deletion of /var/lib/nova/instances/fa72d8b8-93c0-417b-9793-ccd611ffbb84_del complete
Oct 02 12:16:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:08.752 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6d6c6175-08eb-4c69-a83b-7bd8aa114ad8]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:08.754 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3f040931-216a-4136-a012-686626bfab43]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:08.768 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[cf2e0b32-cece-4382-8bbf-80fd74e0027e]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 535640, 'reachable_time': 44754, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 232209, 'error': None, 'target': 'ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:08.772 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-d6de4737-ca60-4c8d-bfd5-687f9366ec8b deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:16:08 compute-0 systemd[1]: run-netns-ovnmeta\x2dd6de4737\x2dca60\x2d4c8d\x2dbfd5\x2d687f9366ec8b.mount: Deactivated successfully.
Oct 02 12:16:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:08.772 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[9ca0a3cf-2ecb-4328-9bb3-2a95eb765308]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.919 2 INFO nova.compute.manager [None req-c82df3cb-f1c2-4ed0-8269-246197eb38a1 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Took 0.49 seconds to destroy the instance on the hypervisor.
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.919 2 DEBUG oslo.service.loopingcall [None req-c82df3cb-f1c2-4ed0-8269-246197eb38a1 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.920 2 DEBUG nova.compute.manager [-] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:16:08 compute-0 nova_compute[192079]: 2025-10-02 12:16:08.920 2 DEBUG nova.network.neutron [-] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:16:11 compute-0 nova_compute[192079]: 2025-10-02 12:16:11.013 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:11 compute-0 podman[232212]: 2025-10-02 12:16:11.147120025 +0000 UTC m=+0.048904075 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']})
Oct 02 12:16:11 compute-0 podman[232210]: 2025-10-02 12:16:11.175837108 +0000 UTC m=+0.083424306 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, config_id=ovn_metadata_agent, io.buildah.version=1.41.3, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, container_name=ovn_metadata_agent, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS)
Oct 02 12:16:11 compute-0 podman[232211]: 2025-10-02 12:16:11.175857958 +0000 UTC m=+0.082720127 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0)
Oct 02 12:16:13 compute-0 nova_compute[192079]: 2025-10-02 12:16:13.623 2 DEBUG nova.compute.manager [req-877670f5-7a3b-4049-867c-332dd9620747 req-0e2018bd-8092-4935-80d8-c29e0db435d3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Received event network-vif-deleted-1692479a-54ef-45ae-a6a3-39c68408e4f6 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:16:13 compute-0 nova_compute[192079]: 2025-10-02 12:16:13.624 2 INFO nova.compute.manager [req-877670f5-7a3b-4049-867c-332dd9620747 req-0e2018bd-8092-4935-80d8-c29e0db435d3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Neutron deleted interface 1692479a-54ef-45ae-a6a3-39c68408e4f6; detaching it from the instance and deleting it from the info cache
Oct 02 12:16:13 compute-0 nova_compute[192079]: 2025-10-02 12:16:13.624 2 DEBUG nova.network.neutron [req-877670f5-7a3b-4049-867c-332dd9620747 req-0e2018bd-8092-4935-80d8-c29e0db435d3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:16:13 compute-0 nova_compute[192079]: 2025-10-02 12:16:13.628 2 DEBUG nova.compute.manager [req-5fcd6e0f-d53d-4e84-9e83-9553f9bb1544 req-fbcb3942-7363-487b-acd1-6fa4f0501d5e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Received event network-vif-unplugged-1692479a-54ef-45ae-a6a3-39c68408e4f6 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:16:13 compute-0 nova_compute[192079]: 2025-10-02 12:16:13.629 2 DEBUG oslo_concurrency.lockutils [req-5fcd6e0f-d53d-4e84-9e83-9553f9bb1544 req-fbcb3942-7363-487b-acd1-6fa4f0501d5e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "fa72d8b8-93c0-417b-9793-ccd611ffbb84-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:16:13 compute-0 nova_compute[192079]: 2025-10-02 12:16:13.630 2 DEBUG oslo_concurrency.lockutils [req-5fcd6e0f-d53d-4e84-9e83-9553f9bb1544 req-fbcb3942-7363-487b-acd1-6fa4f0501d5e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "fa72d8b8-93c0-417b-9793-ccd611ffbb84-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:16:13 compute-0 nova_compute[192079]: 2025-10-02 12:16:13.630 2 DEBUG oslo_concurrency.lockutils [req-5fcd6e0f-d53d-4e84-9e83-9553f9bb1544 req-fbcb3942-7363-487b-acd1-6fa4f0501d5e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "fa72d8b8-93c0-417b-9793-ccd611ffbb84-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:16:13 compute-0 nova_compute[192079]: 2025-10-02 12:16:13.630 2 DEBUG nova.compute.manager [req-5fcd6e0f-d53d-4e84-9e83-9553f9bb1544 req-fbcb3942-7363-487b-acd1-6fa4f0501d5e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] No waiting events found dispatching network-vif-unplugged-1692479a-54ef-45ae-a6a3-39c68408e4f6 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:16:13 compute-0 nova_compute[192079]: 2025-10-02 12:16:13.631 2 DEBUG nova.compute.manager [req-5fcd6e0f-d53d-4e84-9e83-9553f9bb1544 req-fbcb3942-7363-487b-acd1-6fa4f0501d5e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Received event network-vif-unplugged-1692479a-54ef-45ae-a6a3-39c68408e4f6 for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:16:13 compute-0 nova_compute[192079]: 2025-10-02 12:16:13.649 2 DEBUG nova.network.neutron [-] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:16:13 compute-0 nova_compute[192079]: 2025-10-02 12:16:13.740 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:13 compute-0 nova_compute[192079]: 2025-10-02 12:16:13.795 2 INFO nova.compute.manager [-] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Took 4.88 seconds to deallocate network for instance.
Oct 02 12:16:13 compute-0 nova_compute[192079]: 2025-10-02 12:16:13.799 2 DEBUG nova.compute.manager [req-877670f5-7a3b-4049-867c-332dd9620747 req-0e2018bd-8092-4935-80d8-c29e0db435d3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Detach interface failed, port_id=1692479a-54ef-45ae-a6a3-39c68408e4f6, reason: Instance fa72d8b8-93c0-417b-9793-ccd611ffbb84 could not be found. _process_instance_vif_deleted_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10882
Oct 02 12:16:13 compute-0 nova_compute[192079]: 2025-10-02 12:16:13.991 2 DEBUG oslo_concurrency.lockutils [None req-c82df3cb-f1c2-4ed0-8269-246197eb38a1 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:16:13 compute-0 nova_compute[192079]: 2025-10-02 12:16:13.991 2 DEBUG oslo_concurrency.lockutils [None req-c82df3cb-f1c2-4ed0-8269-246197eb38a1 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:16:13 compute-0 nova_compute[192079]: 2025-10-02 12:16:13.998 2 DEBUG oslo_concurrency.lockutils [None req-c82df3cb-f1c2-4ed0-8269-246197eb38a1 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.007s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:16:14 compute-0 nova_compute[192079]: 2025-10-02 12:16:14.041 2 INFO nova.scheduler.client.report [None req-c82df3cb-f1c2-4ed0-8269-246197eb38a1 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Deleted allocations for instance fa72d8b8-93c0-417b-9793-ccd611ffbb84
Oct 02 12:16:14 compute-0 nova_compute[192079]: 2025-10-02 12:16:14.141 2 DEBUG oslo_concurrency.lockutils [None req-c82df3cb-f1c2-4ed0-8269-246197eb38a1 def48c13fd6a43ba88836b753986a731 ffae703d68b24b9c89686c149113fc2b - - default default] Lock "fa72d8b8-93c0-417b-9793-ccd611ffbb84" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.745s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:16:16 compute-0 nova_compute[192079]: 2025-10-02 12:16:16.016 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:16 compute-0 nova_compute[192079]: 2025-10-02 12:16:16.782 2 DEBUG nova.compute.manager [req-2a43e954-ce9e-4f98-97aa-8c00dd62acf6 req-71218edd-c017-46ea-b949-43cbb1aefcc9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Received event network-vif-plugged-1692479a-54ef-45ae-a6a3-39c68408e4f6 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:16:16 compute-0 nova_compute[192079]: 2025-10-02 12:16:16.783 2 DEBUG oslo_concurrency.lockutils [req-2a43e954-ce9e-4f98-97aa-8c00dd62acf6 req-71218edd-c017-46ea-b949-43cbb1aefcc9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "fa72d8b8-93c0-417b-9793-ccd611ffbb84-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:16:16 compute-0 nova_compute[192079]: 2025-10-02 12:16:16.783 2 DEBUG oslo_concurrency.lockutils [req-2a43e954-ce9e-4f98-97aa-8c00dd62acf6 req-71218edd-c017-46ea-b949-43cbb1aefcc9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "fa72d8b8-93c0-417b-9793-ccd611ffbb84-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:16:16 compute-0 nova_compute[192079]: 2025-10-02 12:16:16.783 2 DEBUG oslo_concurrency.lockutils [req-2a43e954-ce9e-4f98-97aa-8c00dd62acf6 req-71218edd-c017-46ea-b949-43cbb1aefcc9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "fa72d8b8-93c0-417b-9793-ccd611ffbb84-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:16:16 compute-0 nova_compute[192079]: 2025-10-02 12:16:16.784 2 DEBUG nova.compute.manager [req-2a43e954-ce9e-4f98-97aa-8c00dd62acf6 req-71218edd-c017-46ea-b949-43cbb1aefcc9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] No waiting events found dispatching network-vif-plugged-1692479a-54ef-45ae-a6a3-39c68408e4f6 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:16:16 compute-0 nova_compute[192079]: 2025-10-02 12:16:16.784 2 WARNING nova.compute.manager [req-2a43e954-ce9e-4f98-97aa-8c00dd62acf6 req-71218edd-c017-46ea-b949-43cbb1aefcc9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Received unexpected event network-vif-plugged-1692479a-54ef-45ae-a6a3-39c68408e4f6 for instance with vm_state deleted and task_state None.
Oct 02 12:16:18 compute-0 nova_compute[192079]: 2025-10-02 12:16:18.742 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:19 compute-0 nova_compute[192079]: 2025-10-02 12:16:19.660 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:16:20 compute-0 podman[232274]: 2025-10-02 12:16:20.173451891 +0000 UTC m=+0.079701155 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, config_id=edpm, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 
9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 12:16:20 compute-0 nova_compute[192079]: 2025-10-02 12:16:20.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:16:20 compute-0 nova_compute[192079]: 2025-10-02 12:16:20.706 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:16:20 compute-0 nova_compute[192079]: 2025-10-02 12:16:20.707 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:16:20 compute-0 nova_compute[192079]: 2025-10-02 12:16:20.707 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:16:20 compute-0 nova_compute[192079]: 2025-10-02 12:16:20.707 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:16:20 compute-0 nova_compute[192079]: 2025-10-02 12:16:20.796 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/92f5a241-27d9-416b-a19f-da7560348296/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:16:20 compute-0 nova_compute[192079]: 2025-10-02 12:16:20.850 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/92f5a241-27d9-416b-a19f-da7560348296/disk --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:16:20 compute-0 nova_compute[192079]: 2025-10-02 12:16:20.851 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/92f5a241-27d9-416b-a19f-da7560348296/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:16:20 compute-0 nova_compute[192079]: 2025-10-02 12:16:20.905 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/92f5a241-27d9-416b-a19f-da7560348296/disk --force-share --output=json" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:16:21 compute-0 nova_compute[192079]: 2025-10-02 12:16:21.062 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:21 compute-0 nova_compute[192079]: 2025-10-02 12:16:21.093 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:16:21 compute-0 nova_compute[192079]: 2025-10-02 12:16:21.095 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5548MB free_disk=73.32043075561523GB free_vcpus=7 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:16:21 compute-0 nova_compute[192079]: 2025-10-02 12:16:21.095 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:16:21 compute-0 nova_compute[192079]: 2025-10-02 12:16:21.095 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:16:21 compute-0 nova_compute[192079]: 2025-10-02 12:16:21.199 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance 92f5a241-27d9-416b-a19f-da7560348296 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:16:21 compute-0 nova_compute[192079]: 2025-10-02 12:16:21.199 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 1 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:16:21 compute-0 nova_compute[192079]: 2025-10-02 12:16:21.200 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=640MB phys_disk=79GB used_disk=1GB total_vcpus=8 used_vcpus=1 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:16:21 compute-0 nova_compute[192079]: 2025-10-02 12:16:21.220 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing inventories for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708 _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:804
Oct 02 12:16:21 compute-0 nova_compute[192079]: 2025-10-02 12:16:21.343 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Updating ProviderTree inventory for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 from _refresh_and_get_inventory using data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} _refresh_and_get_inventory /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:768
Oct 02 12:16:21 compute-0 nova_compute[192079]: 2025-10-02 12:16:21.344 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Updating inventory in ProviderTree for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 with inventory: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:176
Oct 02 12:16:21 compute-0 nova_compute[192079]: 2025-10-02 12:16:21.367 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing aggregate associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, aggregates: None _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:813
Oct 02 12:16:21 compute-0 nova_compute[192079]: 2025-10-02 12:16:21.387 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing trait associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, traits: COMPUTE_SECURITY_UEFI_SECURE_BOOT,COMPUTE_VIOMMU_MODEL_VIRTIO,COMPUTE_VIOMMU_MODEL_AUTO,COMPUTE_IMAGE_TYPE_AKI,COMPUTE_GRAPHICS_MODEL_VIRTIO,COMPUTE_NET_VIF_MODEL_PCNET,HW_CPU_X86_SSE42,COMPUTE_RESCUE_BFV,COMPUTE_VOLUME_EXTEND,COMPUTE_IMAGE_TYPE_QCOW2,COMPUTE_TRUSTED_CERTS,COMPUTE_SOCKET_PCI_NUMA_AFFINITY,COMPUTE_GRAPHICS_MODEL_CIRRUS,HW_CPU_X86_MMX,COMPUTE_STORAGE_BUS_VIRTIO,COMPUTE_NET_ATTACH_INTERFACE_WITH_TAG,COMPUTE_STORAGE_BUS_FDC,COMPUTE_STORAGE_BUS_USB,COMPUTE_NODE,HW_CPU_X86_SSSE3,HW_CPU_X86_SSE2,COMPUTE_GRAPHICS_MODEL_BOCHS,COMPUTE_NET_VIF_MODEL_E1000E,COMPUTE_IMAGE_TYPE_RAW,COMPUTE_NET_VIF_MODEL_NE2K_PCI,COMPUTE_IMAGE_TYPE_AMI,COMPUTE_VIOMMU_MODEL_INTEL,COMPUTE_SECURITY_TPM_2_0,COMPUTE_STORAGE_BUS_SCSI,COMPUTE_IMAGE_TYPE_ARI,COMPUTE_NET_VIF_MODEL_VMXNET3,COMPUTE_SECURITY_TPM_1_2,COMPUTE_NET_VIF_MODEL_E1000,HW_CPU_X86_SSE,COMPUTE_VOLUME_MULTI_ATTACH,COMPUTE_STORAGE_BUS_IDE,COMPUTE_GRAPHICS_MODEL_NONE,COMPUTE_VOLUME_ATTACH_WITH_TAG,COMPUTE_NET_VIF_MODEL_VIRTIO,HW_CPU_X86_SSE41,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_DEVICE_TAGGING,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_ACCELERATORS,COMPUTE_NET_VIF_MODEL_RTL8139,COMPUTE_GRAPHICS_MODEL_VGA,COMPUTE_STORAGE_BUS_SATA,COMPUTE_NET_VIF_MODEL_SPAPR_VLAN _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:825
Oct 02 12:16:21 compute-0 nova_compute[192079]: 2025-10-02 12:16:21.445 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:16:21 compute-0 nova_compute[192079]: 2025-10-02 12:16:21.475 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:16:21 compute-0 nova_compute[192079]: 2025-10-02 12:16:21.517 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:16:21 compute-0 nova_compute[192079]: 2025-10-02 12:16:21.518 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.422s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:16:22 compute-0 nova_compute[192079]: 2025-10-02 12:16:22.518 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:16:22 compute-0 nova_compute[192079]: 2025-10-02 12:16:22.519 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:16:22 compute-0 nova_compute[192079]: 2025-10-02 12:16:22.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:16:22 compute-0 nova_compute[192079]: 2025-10-02 12:16:22.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:16:22 compute-0 nova_compute[192079]: 2025-10-02 12:16:22.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:16:23 compute-0 nova_compute[192079]: 2025-10-02 12:16:23.661 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:16:23 compute-0 nova_compute[192079]: 2025-10-02 12:16:23.691 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:16:23 compute-0 nova_compute[192079]: 2025-10-02 12:16:23.692 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:16:23 compute-0 nova_compute[192079]: 2025-10-02 12:16:23.692 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:16:23 compute-0 nova_compute[192079]: 2025-10-02 12:16:23.693 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759407368.6919053, fa72d8b8-93c0-417b-9793-ccd611ffbb84 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:16:23 compute-0 nova_compute[192079]: 2025-10-02 12:16:23.693 2 INFO nova.compute.manager [-] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] VM Stopped (Lifecycle Event)
Oct 02 12:16:23 compute-0 nova_compute[192079]: 2025-10-02 12:16:23.726 2 DEBUG nova.compute.manager [None req-34130fde-dd9b-4332-8ae1-b5dd65e35b72 - - - - - -] [instance: fa72d8b8-93c0-417b-9793-ccd611ffbb84] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:16:23 compute-0 nova_compute[192079]: 2025-10-02 12:16:23.745 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:23 compute-0 nova_compute[192079]: 2025-10-02 12:16:23.924 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "refresh_cache-92f5a241-27d9-416b-a19f-da7560348296" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:16:23 compute-0 nova_compute[192079]: 2025-10-02 12:16:23.925 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquired lock "refresh_cache-92f5a241-27d9-416b-a19f-da7560348296" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:16:23 compute-0 nova_compute[192079]: 2025-10-02 12:16:23.925 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Forcefully refreshing network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2004
Oct 02 12:16:23 compute-0 nova_compute[192079]: 2025-10-02 12:16:23.925 2 DEBUG nova.objects.instance [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lazy-loading 'info_cache' on Instance uuid 92f5a241-27d9-416b-a19f-da7560348296 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:16:26 compute-0 nova_compute[192079]: 2025-10-02 12:16:26.096 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:27 compute-0 podman[232301]: 2025-10-02 12:16:27.143135142 +0000 UTC m=+0.058381863 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.openshift.expose-services=, com.redhat.component=ubi9-minimal-container, release=1755695350, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., io.buildah.version=1.33.7, name=ubi9-minimal, url=https://catalog.redhat.com/en/search?searchType=containers, vcs-type=git, container_name=openstack_network_exporter, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vendor=Red Hat, Inc., managed_by=edpm_ansible, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.openshift.tags=minimal rhel9, build-date=2025-08-20T13:12:41, config_id=edpm, maintainer=Red Hat, Inc., architecture=x86_64, distribution-scope=public, version=9.6, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal)
Oct 02 12:16:27 compute-0 podman[232302]: 2025-10-02 12:16:27.17280935 +0000 UTC m=+0.086616693 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0)
Oct 02 12:16:27 compute-0 nova_compute[192079]: 2025-10-02 12:16:27.494 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Updating instance_info_cache with network_info: [{"id": "f289b804-29b2-4f3d-985c-e9cc226259ad", "address": "fa:16:3e:a7:ed:93", "network": {"id": "5716ac1c-acf7-48a7-8b93-dda3a5af31f6", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1571059342-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ed7af923ad494ac5b7dbd3d8403dc33e", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf289b804-29", "ovs_interfaceid": "f289b804-29b2-4f3d-985c-e9cc226259ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:16:27 compute-0 nova_compute[192079]: 2025-10-02 12:16:27.517 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Releasing lock "refresh_cache-92f5a241-27d9-416b-a19f-da7560348296" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:16:27 compute-0 nova_compute[192079]: 2025-10-02 12:16:27.517 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Updated the network info_cache for instance _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9929
Oct 02 12:16:27 compute-0 nova_compute[192079]: 2025-10-02 12:16:27.517 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:16:27 compute-0 nova_compute[192079]: 2025-10-02 12:16:27.517 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:16:28 compute-0 nova_compute[192079]: 2025-10-02 12:16:28.772 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:31 compute-0 nova_compute[192079]: 2025-10-02 12:16:31.143 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:32 compute-0 podman[232342]: 2025-10-02 12:16:32.130977336 +0000 UTC m=+0.049866440 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:16:32 compute-0 podman[232343]: 2025-10-02 12:16:32.135766667 +0000 UTC m=+0.050093057 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=iscsid)
Oct 02 12:16:33 compute-0 nova_compute[192079]: 2025-10-02 12:16:33.794 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:36 compute-0 nova_compute[192079]: 2025-10-02 12:16:36.183 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:38 compute-0 nova_compute[192079]: 2025-10-02 12:16:38.798 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:40.945 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=21, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=20) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:16:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:40.946 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 0 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:16:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:40.947 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '21'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:16:40 compute-0 nova_compute[192079]: 2025-10-02 12:16:40.947 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:41 compute-0 ovn_controller[94336]: 2025-10-02T12:16:41Z|00274|binding|INFO|Releasing lport cc8e73bf-6cd9-4487-9685-abdace89cf29 from this chassis (sb_readonly=0)
Oct 02 12:16:41 compute-0 nova_compute[192079]: 2025-10-02 12:16:41.237 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:41 compute-0 nova_compute[192079]: 2025-10-02 12:16:41.240 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:42 compute-0 podman[232386]: 2025-10-02 12:16:42.127790884 +0000 UTC m=+0.046055737 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_id=ovn_metadata_agent, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, container_name=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']})
Oct 02 12:16:42 compute-0 podman[232388]: 2025-10-02 12:16:42.144021466 +0000 UTC m=+0.051248878 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:16:42 compute-0 podman[232387]: 2025-10-02 12:16:42.15845309 +0000 UTC m=+0.071909802 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, config_id=ovn_controller, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, container_name=ovn_controller, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, managed_by=edpm_ansible, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2)
Oct 02 12:16:43 compute-0 nova_compute[192079]: 2025-10-02 12:16:43.818 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:46 compute-0 nova_compute[192079]: 2025-10-02 12:16:46.239 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:48 compute-0 nova_compute[192079]: 2025-10-02 12:16:48.449 2 DEBUG oslo_concurrency.lockutils [None req-933ecfbc-511e-476b-96d6-eff9e3bc8c75 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Acquiring lock "92f5a241-27d9-416b-a19f-da7560348296" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:16:48 compute-0 nova_compute[192079]: 2025-10-02 12:16:48.449 2 DEBUG oslo_concurrency.lockutils [None req-933ecfbc-511e-476b-96d6-eff9e3bc8c75 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Lock "92f5a241-27d9-416b-a19f-da7560348296" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:16:48 compute-0 nova_compute[192079]: 2025-10-02 12:16:48.449 2 DEBUG oslo_concurrency.lockutils [None req-933ecfbc-511e-476b-96d6-eff9e3bc8c75 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Acquiring lock "92f5a241-27d9-416b-a19f-da7560348296-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:16:48 compute-0 nova_compute[192079]: 2025-10-02 12:16:48.449 2 DEBUG oslo_concurrency.lockutils [None req-933ecfbc-511e-476b-96d6-eff9e3bc8c75 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Lock "92f5a241-27d9-416b-a19f-da7560348296-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:16:48 compute-0 nova_compute[192079]: 2025-10-02 12:16:48.450 2 DEBUG oslo_concurrency.lockutils [None req-933ecfbc-511e-476b-96d6-eff9e3bc8c75 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Lock "92f5a241-27d9-416b-a19f-da7560348296-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:16:48 compute-0 nova_compute[192079]: 2025-10-02 12:16:48.462 2 INFO nova.compute.manager [None req-933ecfbc-511e-476b-96d6-eff9e3bc8c75 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Terminating instance
Oct 02 12:16:48 compute-0 nova_compute[192079]: 2025-10-02 12:16:48.471 2 DEBUG nova.compute.manager [None req-933ecfbc-511e-476b-96d6-eff9e3bc8c75 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:16:48 compute-0 kernel: tapf289b804-29 (unregistering): left promiscuous mode
Oct 02 12:16:48 compute-0 NetworkManager[51160]: <info>  [1759407408.5075] device (tapf289b804-29): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:16:48 compute-0 ovn_controller[94336]: 2025-10-02T12:16:48Z|00275|binding|INFO|Releasing lport f289b804-29b2-4f3d-985c-e9cc226259ad from this chassis (sb_readonly=0)
Oct 02 12:16:48 compute-0 ovn_controller[94336]: 2025-10-02T12:16:48Z|00276|binding|INFO|Setting lport f289b804-29b2-4f3d-985c-e9cc226259ad down in Southbound
Oct 02 12:16:48 compute-0 nova_compute[192079]: 2025-10-02 12:16:48.515 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:48 compute-0 ovn_controller[94336]: 2025-10-02T12:16:48Z|00277|binding|INFO|Removing iface tapf289b804-29 ovn-installed in OVS
Oct 02 12:16:48 compute-0 nova_compute[192079]: 2025-10-02 12:16:48.517 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:48.533 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:a7:ed:93 10.100.0.8'], port_security=['fa:16:3e:a7:ed:93 10.100.0.8'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.8/28', 'neutron:device_id': '92f5a241-27d9-416b-a19f-da7560348296', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-5716ac1c-acf7-48a7-8b93-dda3a5af31f6', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'ed7af923ad494ac5b7dbd3d8403dc33e', 'neutron:revision_number': '6', 'neutron:security_group_ids': 'c2d5fc9c-779f-46ff-9303-43ea6115b06c ea9e5bf2-e32b-49e5-a686-bc4f5cbc2419 f6f46a30-ca89-45c9-b4fd-d5c78d4ee0ae', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=08fc185f-7900-4a64-ba36-f229e6cb956d, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=f289b804-29b2-4f3d-985c-e9cc226259ad) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:16:48 compute-0 nova_compute[192079]: 2025-10-02 12:16:48.534 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:48.535 103294 INFO neutron.agent.ovn.metadata.agent [-] Port f289b804-29b2-4f3d-985c-e9cc226259ad in datapath 5716ac1c-acf7-48a7-8b93-dda3a5af31f6 unbound from our chassis
Oct 02 12:16:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:48.536 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 5716ac1c-acf7-48a7-8b93-dda3a5af31f6, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:16:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:48.537 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[78bdf5d9-0d71-4e25-90bc-39129a565301]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:48.538 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-5716ac1c-acf7-48a7-8b93-dda3a5af31f6 namespace which is not needed anymore
Oct 02 12:16:48 compute-0 systemd[1]: machine-qemu\x2d37\x2dinstance\x2d0000004e.scope: Deactivated successfully.
Oct 02 12:16:48 compute-0 systemd[1]: machine-qemu\x2d37\x2dinstance\x2d0000004e.scope: Consumed 16.748s CPU time.
Oct 02 12:16:48 compute-0 systemd-machined[152150]: Machine qemu-37-instance-0000004e terminated.
Oct 02 12:16:48 compute-0 nova_compute[192079]: 2025-10-02 12:16:48.762 2 INFO nova.virt.libvirt.driver [-] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Instance destroyed successfully.
Oct 02 12:16:48 compute-0 nova_compute[192079]: 2025-10-02 12:16:48.763 2 DEBUG nova.objects.instance [None req-933ecfbc-511e-476b-96d6-eff9e3bc8c75 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Lazy-loading 'resources' on Instance uuid 92f5a241-27d9-416b-a19f-da7560348296 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:16:48 compute-0 nova_compute[192079]: 2025-10-02 12:16:48.794 2 DEBUG nova.compute.manager [req-86dc4bb8-d350-4e5c-8562-538116f68d66 req-766cfcfd-7885-4daf-95bd-dd7535d5bce4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Received event network-vif-unplugged-f289b804-29b2-4f3d-985c-e9cc226259ad external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:16:48 compute-0 nova_compute[192079]: 2025-10-02 12:16:48.794 2 DEBUG oslo_concurrency.lockutils [req-86dc4bb8-d350-4e5c-8562-538116f68d66 req-766cfcfd-7885-4daf-95bd-dd7535d5bce4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "92f5a241-27d9-416b-a19f-da7560348296-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:16:48 compute-0 nova_compute[192079]: 2025-10-02 12:16:48.794 2 DEBUG oslo_concurrency.lockutils [req-86dc4bb8-d350-4e5c-8562-538116f68d66 req-766cfcfd-7885-4daf-95bd-dd7535d5bce4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "92f5a241-27d9-416b-a19f-da7560348296-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:16:48 compute-0 nova_compute[192079]: 2025-10-02 12:16:48.794 2 DEBUG oslo_concurrency.lockutils [req-86dc4bb8-d350-4e5c-8562-538116f68d66 req-766cfcfd-7885-4daf-95bd-dd7535d5bce4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "92f5a241-27d9-416b-a19f-da7560348296-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:16:48 compute-0 nova_compute[192079]: 2025-10-02 12:16:48.795 2 DEBUG nova.compute.manager [req-86dc4bb8-d350-4e5c-8562-538116f68d66 req-766cfcfd-7885-4daf-95bd-dd7535d5bce4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] No waiting events found dispatching network-vif-unplugged-f289b804-29b2-4f3d-985c-e9cc226259ad pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:16:48 compute-0 nova_compute[192079]: 2025-10-02 12:16:48.795 2 DEBUG nova.compute.manager [req-86dc4bb8-d350-4e5c-8562-538116f68d66 req-766cfcfd-7885-4daf-95bd-dd7535d5bce4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Received event network-vif-unplugged-f289b804-29b2-4f3d-985c-e9cc226259ad for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:16:48 compute-0 nova_compute[192079]: 2025-10-02 12:16:48.820 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:48 compute-0 neutron-haproxy-ovnmeta-5716ac1c-acf7-48a7-8b93-dda3a5af31f6[231757]: [NOTICE]   (231762) : haproxy version is 2.8.14-c23fe91
Oct 02 12:16:48 compute-0 neutron-haproxy-ovnmeta-5716ac1c-acf7-48a7-8b93-dda3a5af31f6[231757]: [NOTICE]   (231762) : path to executable is /usr/sbin/haproxy
Oct 02 12:16:48 compute-0 neutron-haproxy-ovnmeta-5716ac1c-acf7-48a7-8b93-dda3a5af31f6[231757]: [WARNING]  (231762) : Exiting Master process...
Oct 02 12:16:48 compute-0 neutron-haproxy-ovnmeta-5716ac1c-acf7-48a7-8b93-dda3a5af31f6[231757]: [WARNING]  (231762) : Exiting Master process...
Oct 02 12:16:48 compute-0 neutron-haproxy-ovnmeta-5716ac1c-acf7-48a7-8b93-dda3a5af31f6[231757]: [ALERT]    (231762) : Current worker (231764) exited with code 143 (Terminated)
Oct 02 12:16:48 compute-0 neutron-haproxy-ovnmeta-5716ac1c-acf7-48a7-8b93-dda3a5af31f6[231757]: [WARNING]  (231762) : All workers exited. Exiting... (0)
Oct 02 12:16:48 compute-0 systemd[1]: libpod-5521ce23a20daf86f597b7a6134edb99d381efaa7192b898403c9ac11b6025e3.scope: Deactivated successfully.
Oct 02 12:16:48 compute-0 podman[232480]: 2025-10-02 12:16:48.84420475 +0000 UTC m=+0.226557389 container died 5521ce23a20daf86f597b7a6134edb99d381efaa7192b898403c9ac11b6025e3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-5716ac1c-acf7-48a7-8b93-dda3a5af31f6, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3)
Oct 02 12:16:48 compute-0 nova_compute[192079]: 2025-10-02 12:16:48.913 2 DEBUG nova.virt.libvirt.vif [None req-933ecfbc-511e-476b-96d6-eff9e3bc8c75 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:15:25Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-SecurityGroupsTestJSON-server-1096579654',display_name='tempest-SecurityGroupsTestJSON-server-1096579654',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-securitygroupstestjson-server-1096579654',id=78,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:15:37Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='ed7af923ad494ac5b7dbd3d8403dc33e',ramdisk_id='',reservation_id='r-0nbzpwe9',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_d
isk='1',image_min_ram='0',owner_project_name='tempest-SecurityGroupsTestJSON-431508526',owner_user_name='tempest-SecurityGroupsTestJSON-431508526-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:15:38Z,user_data=None,user_id='341760d37e2c44209429d234ca5f01ae',uuid=92f5a241-27d9-416b-a19f-da7560348296,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "f289b804-29b2-4f3d-985c-e9cc226259ad", "address": "fa:16:3e:a7:ed:93", "network": {"id": "5716ac1c-acf7-48a7-8b93-dda3a5af31f6", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1571059342-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ed7af923ad494ac5b7dbd3d8403dc33e", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf289b804-29", "ovs_interfaceid": "f289b804-29b2-4f3d-985c-e9cc226259ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:16:48 compute-0 nova_compute[192079]: 2025-10-02 12:16:48.913 2 DEBUG nova.network.os_vif_util [None req-933ecfbc-511e-476b-96d6-eff9e3bc8c75 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Converting VIF {"id": "f289b804-29b2-4f3d-985c-e9cc226259ad", "address": "fa:16:3e:a7:ed:93", "network": {"id": "5716ac1c-acf7-48a7-8b93-dda3a5af31f6", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1571059342-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ed7af923ad494ac5b7dbd3d8403dc33e", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf289b804-29", "ovs_interfaceid": "f289b804-29b2-4f3d-985c-e9cc226259ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:16:48 compute-0 nova_compute[192079]: 2025-10-02 12:16:48.914 2 DEBUG nova.network.os_vif_util [None req-933ecfbc-511e-476b-96d6-eff9e3bc8c75 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:a7:ed:93,bridge_name='br-int',has_traffic_filtering=True,id=f289b804-29b2-4f3d-985c-e9cc226259ad,network=Network(5716ac1c-acf7-48a7-8b93-dda3a5af31f6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf289b804-29') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:16:48 compute-0 nova_compute[192079]: 2025-10-02 12:16:48.914 2 DEBUG os_vif [None req-933ecfbc-511e-476b-96d6-eff9e3bc8c75 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:a7:ed:93,bridge_name='br-int',has_traffic_filtering=True,id=f289b804-29b2-4f3d-985c-e9cc226259ad,network=Network(5716ac1c-acf7-48a7-8b93-dda3a5af31f6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf289b804-29') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:16:48 compute-0 nova_compute[192079]: 2025-10-02 12:16:48.915 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:48 compute-0 nova_compute[192079]: 2025-10-02 12:16:48.916 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapf289b804-29, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:16:48 compute-0 nova_compute[192079]: 2025-10-02 12:16:48.919 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:16:48 compute-0 nova_compute[192079]: 2025-10-02 12:16:48.921 2 INFO os_vif [None req-933ecfbc-511e-476b-96d6-eff9e3bc8c75 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:a7:ed:93,bridge_name='br-int',has_traffic_filtering=True,id=f289b804-29b2-4f3d-985c-e9cc226259ad,network=Network(5716ac1c-acf7-48a7-8b93-dda3a5af31f6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf289b804-29')
Oct 02 12:16:48 compute-0 nova_compute[192079]: 2025-10-02 12:16:48.922 2 INFO nova.virt.libvirt.driver [None req-933ecfbc-511e-476b-96d6-eff9e3bc8c75 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Deleting instance files /var/lib/nova/instances/92f5a241-27d9-416b-a19f-da7560348296_del
Oct 02 12:16:48 compute-0 nova_compute[192079]: 2025-10-02 12:16:48.923 2 INFO nova.virt.libvirt.driver [None req-933ecfbc-511e-476b-96d6-eff9e3bc8c75 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Deletion of /var/lib/nova/instances/92f5a241-27d9-416b-a19f-da7560348296_del complete
Oct 02 12:16:49 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-5521ce23a20daf86f597b7a6134edb99d381efaa7192b898403c9ac11b6025e3-userdata-shm.mount: Deactivated successfully.
Oct 02 12:16:49 compute-0 systemd[1]: var-lib-containers-storage-overlay-ea6d57d2aeb27b8fcf91fdd57dfa8a1f4128844b7033da84fbe8a2fa1951c93a-merged.mount: Deactivated successfully.
Oct 02 12:16:49 compute-0 podman[232480]: 2025-10-02 12:16:49.04482413 +0000 UTC m=+0.427176779 container cleanup 5521ce23a20daf86f597b7a6134edb99d381efaa7192b898403c9ac11b6025e3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-5716ac1c-acf7-48a7-8b93-dda3a5af31f6, org.label-schema.schema-version=1.0, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001)
Oct 02 12:16:49 compute-0 systemd[1]: libpod-conmon-5521ce23a20daf86f597b7a6134edb99d381efaa7192b898403c9ac11b6025e3.scope: Deactivated successfully.
Oct 02 12:16:49 compute-0 nova_compute[192079]: 2025-10-02 12:16:49.122 2 INFO nova.compute.manager [None req-933ecfbc-511e-476b-96d6-eff9e3bc8c75 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Took 0.65 seconds to destroy the instance on the hypervisor.
Oct 02 12:16:49 compute-0 nova_compute[192079]: 2025-10-02 12:16:49.123 2 DEBUG oslo.service.loopingcall [None req-933ecfbc-511e-476b-96d6-eff9e3bc8c75 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:16:49 compute-0 nova_compute[192079]: 2025-10-02 12:16:49.123 2 DEBUG nova.compute.manager [-] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:16:49 compute-0 nova_compute[192079]: 2025-10-02 12:16:49.124 2 DEBUG nova.network.neutron [-] [instance: 92f5a241-27d9-416b-a19f-da7560348296] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:16:49 compute-0 podman[232527]: 2025-10-02 12:16:49.303332668 +0000 UTC m=+0.229132579 container remove 5521ce23a20daf86f597b7a6134edb99d381efaa7192b898403c9ac11b6025e3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-5716ac1c-acf7-48a7-8b93-dda3a5af31f6, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2)
Oct 02 12:16:49 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:49.309 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e9f62ea3-c781-4f49-bfbd-b662d8ba57b9]: (4, ('Thu Oct  2 12:16:48 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-5716ac1c-acf7-48a7-8b93-dda3a5af31f6 (5521ce23a20daf86f597b7a6134edb99d381efaa7192b898403c9ac11b6025e3)\n5521ce23a20daf86f597b7a6134edb99d381efaa7192b898403c9ac11b6025e3\nThu Oct  2 12:16:49 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-5716ac1c-acf7-48a7-8b93-dda3a5af31f6 (5521ce23a20daf86f597b7a6134edb99d381efaa7192b898403c9ac11b6025e3)\n5521ce23a20daf86f597b7a6134edb99d381efaa7192b898403c9ac11b6025e3\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:49 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:49.310 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[eca9ca89-202f-4632-bdfd-ac066b94f63d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:49 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:49.311 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap5716ac1c-a0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:16:49 compute-0 nova_compute[192079]: 2025-10-02 12:16:49.313 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:49 compute-0 kernel: tap5716ac1c-a0: left promiscuous mode
Oct 02 12:16:49 compute-0 nova_compute[192079]: 2025-10-02 12:16:49.315 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:49 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:49.317 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0c5ed3f0-b9c4-4726-87a9-f15a616d3a11]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:49 compute-0 nova_compute[192079]: 2025-10-02 12:16:49.327 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:49 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:49.353 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b26f9797-4b8c-475c-a477-011519f81204]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:49 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:49.355 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7740119c-d230-4465-9175-4472c735163b]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:49 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:49.373 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1e915957-94c2-4a71-be38-953ccbed3504]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 533398, 'reachable_time': 23848, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 232544, 'error': None, 'target': 'ovnmeta-5716ac1c-acf7-48a7-8b93-dda3a5af31f6', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:49 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:49.375 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-5716ac1c-acf7-48a7-8b93-dda3a5af31f6 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:16:49 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:49.375 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[48320a5b-f74f-4d39-aafb-1b642b461b88]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:49 compute-0 systemd[1]: run-netns-ovnmeta\x2d5716ac1c\x2dacf7\x2d48a7\x2d8b93\x2ddda3a5af31f6.mount: Deactivated successfully.
Oct 02 12:16:49 compute-0 nova_compute[192079]: 2025-10-02 12:16:49.816 2 DEBUG nova.network.neutron [-] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:16:49 compute-0 nova_compute[192079]: 2025-10-02 12:16:49.831 2 INFO nova.compute.manager [-] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Took 0.71 seconds to deallocate network for instance.
Oct 02 12:16:49 compute-0 nova_compute[192079]: 2025-10-02 12:16:49.895 2 DEBUG oslo_concurrency.lockutils [None req-933ecfbc-511e-476b-96d6-eff9e3bc8c75 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:16:49 compute-0 nova_compute[192079]: 2025-10-02 12:16:49.896 2 DEBUG oslo_concurrency.lockutils [None req-933ecfbc-511e-476b-96d6-eff9e3bc8c75 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:16:49 compute-0 nova_compute[192079]: 2025-10-02 12:16:49.956 2 DEBUG nova.compute.provider_tree [None req-933ecfbc-511e-476b-96d6-eff9e3bc8c75 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:16:49 compute-0 nova_compute[192079]: 2025-10-02 12:16:49.972 2 DEBUG nova.scheduler.client.report [None req-933ecfbc-511e-476b-96d6-eff9e3bc8c75 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:16:49 compute-0 nova_compute[192079]: 2025-10-02 12:16:49.994 2 DEBUG oslo_concurrency.lockutils [None req-933ecfbc-511e-476b-96d6-eff9e3bc8c75 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.098s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:16:50 compute-0 nova_compute[192079]: 2025-10-02 12:16:50.028 2 INFO nova.scheduler.client.report [None req-933ecfbc-511e-476b-96d6-eff9e3bc8c75 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Deleted allocations for instance 92f5a241-27d9-416b-a19f-da7560348296
Oct 02 12:16:50 compute-0 nova_compute[192079]: 2025-10-02 12:16:50.151 2 DEBUG nova.compute.manager [req-b5583204-b3e0-46f7-bae9-8f16c2a3980e req-00334b89-b381-438d-8061-0dd0aee3c987 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Received event network-vif-deleted-f289b804-29b2-4f3d-985c-e9cc226259ad external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:16:50 compute-0 nova_compute[192079]: 2025-10-02 12:16:50.153 2 DEBUG oslo_concurrency.lockutils [None req-933ecfbc-511e-476b-96d6-eff9e3bc8c75 341760d37e2c44209429d234ca5f01ae ed7af923ad494ac5b7dbd3d8403dc33e - - default default] Lock "92f5a241-27d9-416b-a19f-da7560348296" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.704s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:16:51 compute-0 podman[232545]: 2025-10-02 12:16:51.217477527 +0000 UTC m=+0.116377943 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=edpm, managed_by=edpm_ansible, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, 
tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2)
Oct 02 12:16:51 compute-0 nova_compute[192079]: 2025-10-02 12:16:51.263 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:51 compute-0 nova_compute[192079]: 2025-10-02 12:16:51.344 2 DEBUG nova.compute.manager [req-435f8d83-4391-4a0e-b056-f459fbb506af req-d4033cfd-64a9-42e0-8b77-4df2b6b95a35 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Received event network-vif-plugged-f289b804-29b2-4f3d-985c-e9cc226259ad external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:16:51 compute-0 nova_compute[192079]: 2025-10-02 12:16:51.344 2 DEBUG oslo_concurrency.lockutils [req-435f8d83-4391-4a0e-b056-f459fbb506af req-d4033cfd-64a9-42e0-8b77-4df2b6b95a35 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "92f5a241-27d9-416b-a19f-da7560348296-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:16:51 compute-0 nova_compute[192079]: 2025-10-02 12:16:51.345 2 DEBUG oslo_concurrency.lockutils [req-435f8d83-4391-4a0e-b056-f459fbb506af req-d4033cfd-64a9-42e0-8b77-4df2b6b95a35 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "92f5a241-27d9-416b-a19f-da7560348296-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:16:51 compute-0 nova_compute[192079]: 2025-10-02 12:16:51.345 2 DEBUG oslo_concurrency.lockutils [req-435f8d83-4391-4a0e-b056-f459fbb506af req-d4033cfd-64a9-42e0-8b77-4df2b6b95a35 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "92f5a241-27d9-416b-a19f-da7560348296-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:16:51 compute-0 nova_compute[192079]: 2025-10-02 12:16:51.347 2 DEBUG nova.compute.manager [req-435f8d83-4391-4a0e-b056-f459fbb506af req-d4033cfd-64a9-42e0-8b77-4df2b6b95a35 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] No waiting events found dispatching network-vif-plugged-f289b804-29b2-4f3d-985c-e9cc226259ad pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:16:51 compute-0 nova_compute[192079]: 2025-10-02 12:16:51.347 2 WARNING nova.compute.manager [req-435f8d83-4391-4a0e-b056-f459fbb506af req-d4033cfd-64a9-42e0-8b77-4df2b6b95a35 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Received unexpected event network-vif-plugged-f289b804-29b2-4f3d-985c-e9cc226259ad for instance with vm_state deleted and task_state None.
Oct 02 12:16:51 compute-0 nova_compute[192079]: 2025-10-02 12:16:51.390 2 DEBUG oslo_concurrency.lockutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Acquiring lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:16:51 compute-0 nova_compute[192079]: 2025-10-02 12:16:51.391 2 DEBUG oslo_concurrency.lockutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:16:51 compute-0 nova_compute[192079]: 2025-10-02 12:16:51.416 2 DEBUG nova.compute.manager [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:16:51 compute-0 nova_compute[192079]: 2025-10-02 12:16:51.532 2 DEBUG oslo_concurrency.lockutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:16:51 compute-0 nova_compute[192079]: 2025-10-02 12:16:51.532 2 DEBUG oslo_concurrency.lockutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:16:51 compute-0 nova_compute[192079]: 2025-10-02 12:16:51.541 2 DEBUG nova.virt.hardware [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:16:51 compute-0 nova_compute[192079]: 2025-10-02 12:16:51.542 2 INFO nova.compute.claims [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:16:51 compute-0 nova_compute[192079]: 2025-10-02 12:16:51.715 2 DEBUG nova.compute.provider_tree [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:16:51 compute-0 nova_compute[192079]: 2025-10-02 12:16:51.735 2 DEBUG nova.scheduler.client.report [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:16:51 compute-0 nova_compute[192079]: 2025-10-02 12:16:51.765 2 DEBUG oslo_concurrency.lockutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.233s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:16:51 compute-0 nova_compute[192079]: 2025-10-02 12:16:51.766 2 DEBUG nova.compute.manager [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:16:51 compute-0 nova_compute[192079]: 2025-10-02 12:16:51.826 2 DEBUG nova.compute.manager [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:16:51 compute-0 nova_compute[192079]: 2025-10-02 12:16:51.826 2 DEBUG nova.network.neutron [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:16:51 compute-0 nova_compute[192079]: 2025-10-02 12:16:51.846 2 INFO nova.virt.libvirt.driver [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:16:51 compute-0 nova_compute[192079]: 2025-10-02 12:16:51.864 2 DEBUG nova.compute.manager [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:16:51 compute-0 nova_compute[192079]: 2025-10-02 12:16:51.991 2 DEBUG nova.compute.manager [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:16:51 compute-0 nova_compute[192079]: 2025-10-02 12:16:51.993 2 DEBUG nova.virt.libvirt.driver [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:16:51 compute-0 nova_compute[192079]: 2025-10-02 12:16:51.994 2 INFO nova.virt.libvirt.driver [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Creating image(s)
Oct 02 12:16:51 compute-0 nova_compute[192079]: 2025-10-02 12:16:51.995 2 DEBUG oslo_concurrency.lockutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Acquiring lock "/var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:16:51 compute-0 nova_compute[192079]: 2025-10-02 12:16:51.996 2 DEBUG oslo_concurrency.lockutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lock "/var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:16:51 compute-0 nova_compute[192079]: 2025-10-02 12:16:51.997 2 DEBUG oslo_concurrency.lockutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lock "/var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:16:52 compute-0 nova_compute[192079]: 2025-10-02 12:16:52.022 2 DEBUG nova.policy [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:16:52 compute-0 nova_compute[192079]: 2025-10-02 12:16:52.026 2 DEBUG oslo_concurrency.processutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:16:52 compute-0 nova_compute[192079]: 2025-10-02 12:16:52.087 2 DEBUG oslo_concurrency.processutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.061s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:16:52 compute-0 nova_compute[192079]: 2025-10-02 12:16:52.089 2 DEBUG oslo_concurrency.lockutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:16:52 compute-0 nova_compute[192079]: 2025-10-02 12:16:52.090 2 DEBUG oslo_concurrency.lockutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:16:52 compute-0 nova_compute[192079]: 2025-10-02 12:16:52.114 2 DEBUG oslo_concurrency.processutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:16:52 compute-0 nova_compute[192079]: 2025-10-02 12:16:52.170 2 DEBUG oslo_concurrency.processutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:16:52 compute-0 nova_compute[192079]: 2025-10-02 12:16:52.171 2 DEBUG oslo_concurrency.processutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:16:52 compute-0 nova_compute[192079]: 2025-10-02 12:16:52.207 2 DEBUG oslo_concurrency.processutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk 1073741824" returned: 0 in 0.036s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:16:52 compute-0 nova_compute[192079]: 2025-10-02 12:16:52.208 2 DEBUG oslo_concurrency.lockutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.118s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:16:52 compute-0 nova_compute[192079]: 2025-10-02 12:16:52.208 2 DEBUG oslo_concurrency.processutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:16:52 compute-0 nova_compute[192079]: 2025-10-02 12:16:52.263 2 DEBUG oslo_concurrency.processutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:16:52 compute-0 nova_compute[192079]: 2025-10-02 12:16:52.264 2 DEBUG nova.virt.disk.api [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Checking if we can resize image /var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:16:52 compute-0 nova_compute[192079]: 2025-10-02 12:16:52.264 2 DEBUG oslo_concurrency.processutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:16:52 compute-0 nova_compute[192079]: 2025-10-02 12:16:52.326 2 DEBUG oslo_concurrency.processutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk --force-share --output=json" returned: 0 in 0.062s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:16:52 compute-0 nova_compute[192079]: 2025-10-02 12:16:52.327 2 DEBUG nova.virt.disk.api [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Cannot resize image /var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:16:52 compute-0 nova_compute[192079]: 2025-10-02 12:16:52.328 2 DEBUG nova.objects.instance [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lazy-loading 'migration_context' on Instance uuid ebc56e2c-d3a3-4ade-8849-7e23fc710e78 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:16:52 compute-0 nova_compute[192079]: 2025-10-02 12:16:52.356 2 DEBUG nova.virt.libvirt.driver [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:16:52 compute-0 nova_compute[192079]: 2025-10-02 12:16:52.356 2 DEBUG nova.virt.libvirt.driver [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Ensure instance console log exists: /var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:16:52 compute-0 nova_compute[192079]: 2025-10-02 12:16:52.357 2 DEBUG oslo_concurrency.lockutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:16:52 compute-0 nova_compute[192079]: 2025-10-02 12:16:52.357 2 DEBUG oslo_concurrency.lockutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:16:52 compute-0 nova_compute[192079]: 2025-10-02 12:16:52.358 2 DEBUG oslo_concurrency.lockutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:16:52 compute-0 nova_compute[192079]: 2025-10-02 12:16:52.704 2 DEBUG nova.network.neutron [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Successfully created port: 73f6f99f-8348-41c9-8194-e4cd3d448fd9 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:16:53 compute-0 nova_compute[192079]: 2025-10-02 12:16:53.691 2 DEBUG nova.network.neutron [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Successfully updated port: 73f6f99f-8348-41c9-8194-e4cd3d448fd9 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:16:53 compute-0 nova_compute[192079]: 2025-10-02 12:16:53.709 2 DEBUG oslo_concurrency.lockutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Acquiring lock "refresh_cache-ebc56e2c-d3a3-4ade-8849-7e23fc710e78" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:16:53 compute-0 nova_compute[192079]: 2025-10-02 12:16:53.709 2 DEBUG oslo_concurrency.lockutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Acquired lock "refresh_cache-ebc56e2c-d3a3-4ade-8849-7e23fc710e78" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:16:53 compute-0 nova_compute[192079]: 2025-10-02 12:16:53.710 2 DEBUG nova.network.neutron [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:16:53 compute-0 nova_compute[192079]: 2025-10-02 12:16:53.789 2 DEBUG nova.compute.manager [req-c4a55826-2478-48b3-93ac-f89d939b9f2c req-e9d79ffe-2b79-46e0-be5f-2130ad0d1b5c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Received event network-changed-73f6f99f-8348-41c9-8194-e4cd3d448fd9 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:16:53 compute-0 nova_compute[192079]: 2025-10-02 12:16:53.789 2 DEBUG nova.compute.manager [req-c4a55826-2478-48b3-93ac-f89d939b9f2c req-e9d79ffe-2b79-46e0-be5f-2130ad0d1b5c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Refreshing instance network info cache due to event network-changed-73f6f99f-8348-41c9-8194-e4cd3d448fd9. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:16:53 compute-0 nova_compute[192079]: 2025-10-02 12:16:53.789 2 DEBUG oslo_concurrency.lockutils [req-c4a55826-2478-48b3-93ac-f89d939b9f2c req-e9d79ffe-2b79-46e0-be5f-2130ad0d1b5c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-ebc56e2c-d3a3-4ade-8849-7e23fc710e78" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:16:53 compute-0 nova_compute[192079]: 2025-10-02 12:16:53.865 2 DEBUG nova.network.neutron [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:16:53 compute-0 nova_compute[192079]: 2025-10-02 12:16:53.918 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:54 compute-0 nova_compute[192079]: 2025-10-02 12:16:54.146 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:54 compute-0 nova_compute[192079]: 2025-10-02 12:16:54.996 2 DEBUG nova.network.neutron [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Updating instance_info_cache with network_info: [{"id": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "address": "fa:16:3e:b3:31:fe", "network": {"id": "bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-542543245-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e0277f0bb0f4a349e2e6d8ddfa24edf", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap73f6f99f-83", "ovs_interfaceid": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.027 2 DEBUG oslo_concurrency.lockutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Releasing lock "refresh_cache-ebc56e2c-d3a3-4ade-8849-7e23fc710e78" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.028 2 DEBUG nova.compute.manager [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Instance network_info: |[{"id": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "address": "fa:16:3e:b3:31:fe", "network": {"id": "bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-542543245-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e0277f0bb0f4a349e2e6d8ddfa24edf", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap73f6f99f-83", "ovs_interfaceid": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.028 2 DEBUG oslo_concurrency.lockutils [req-c4a55826-2478-48b3-93ac-f89d939b9f2c req-e9d79ffe-2b79-46e0-be5f-2130ad0d1b5c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-ebc56e2c-d3a3-4ade-8849-7e23fc710e78" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.028 2 DEBUG nova.network.neutron [req-c4a55826-2478-48b3-93ac-f89d939b9f2c req-e9d79ffe-2b79-46e0-be5f-2130ad0d1b5c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Refreshing network info cache for port 73f6f99f-8348-41c9-8194-e4cd3d448fd9 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.031 2 DEBUG nova.virt.libvirt.driver [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Start _get_guest_xml network_info=[{"id": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "address": "fa:16:3e:b3:31:fe", "network": {"id": "bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-542543245-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e0277f0bb0f4a349e2e6d8ddfa24edf", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap73f6f99f-83", "ovs_interfaceid": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.035 2 WARNING nova.virt.libvirt.driver [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.039 2 DEBUG nova.virt.libvirt.host [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.039 2 DEBUG nova.virt.libvirt.host [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.042 2 DEBUG nova.virt.libvirt.host [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.043 2 DEBUG nova.virt.libvirt.host [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.044 2 DEBUG nova.virt.libvirt.driver [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.044 2 DEBUG nova.virt.hardware [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.044 2 DEBUG nova.virt.hardware [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.045 2 DEBUG nova.virt.hardware [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.045 2 DEBUG nova.virt.hardware [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.045 2 DEBUG nova.virt.hardware [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.045 2 DEBUG nova.virt.hardware [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.045 2 DEBUG nova.virt.hardware [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.046 2 DEBUG nova.virt.hardware [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.046 2 DEBUG nova.virt.hardware [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.046 2 DEBUG nova.virt.hardware [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.046 2 DEBUG nova.virt.hardware [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.050 2 DEBUG nova.virt.libvirt.vif [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:16:50Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ListServerFiltersTestJSON-instance-1707027906',display_name='tempest-ListServerFiltersTestJSON-instance-1707027906',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-listserverfilterstestjson-instance-1707027906',id=85,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='6e0277f0bb0f4a349e2e6d8ddfa24edf',ramdisk_id='',reservation_id='r-wvwxsid2',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ListServerFiltersTestJSON-298715262',owner_user_name='tempest-ListSe
rverFiltersTestJSON-298715262-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:16:51Z,user_data=None,user_id='001d2d51902d4e299b775131f430a5db',uuid=ebc56e2c-d3a3-4ade-8849-7e23fc710e78,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "address": "fa:16:3e:b3:31:fe", "network": {"id": "bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-542543245-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e0277f0bb0f4a349e2e6d8ddfa24edf", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap73f6f99f-83", "ovs_interfaceid": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.050 2 DEBUG nova.network.os_vif_util [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Converting VIF {"id": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "address": "fa:16:3e:b3:31:fe", "network": {"id": "bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-542543245-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e0277f0bb0f4a349e2e6d8ddfa24edf", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap73f6f99f-83", "ovs_interfaceid": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.051 2 DEBUG nova.network.os_vif_util [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:b3:31:fe,bridge_name='br-int',has_traffic_filtering=True,id=73f6f99f-8348-41c9-8194-e4cd3d448fd9,network=Network(bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap73f6f99f-83') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.052 2 DEBUG nova.objects.instance [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lazy-loading 'pci_devices' on Instance uuid ebc56e2c-d3a3-4ade-8849-7e23fc710e78 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.068 2 DEBUG nova.virt.libvirt.driver [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:16:55 compute-0 nova_compute[192079]:   <uuid>ebc56e2c-d3a3-4ade-8849-7e23fc710e78</uuid>
Oct 02 12:16:55 compute-0 nova_compute[192079]:   <name>instance-00000055</name>
Oct 02 12:16:55 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:16:55 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:16:55 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:16:55 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:       <nova:name>tempest-ListServerFiltersTestJSON-instance-1707027906</nova:name>
Oct 02 12:16:55 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:16:55</nova:creationTime>
Oct 02 12:16:55 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:16:55 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:16:55 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:16:55 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:16:55 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:16:55 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:16:55 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:16:55 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:16:55 compute-0 nova_compute[192079]:         <nova:user uuid="001d2d51902d4e299b775131f430a5db">tempest-ListServerFiltersTestJSON-298715262-project-member</nova:user>
Oct 02 12:16:55 compute-0 nova_compute[192079]:         <nova:project uuid="6e0277f0bb0f4a349e2e6d8ddfa24edf">tempest-ListServerFiltersTestJSON-298715262</nova:project>
Oct 02 12:16:55 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:16:55 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:16:55 compute-0 nova_compute[192079]:         <nova:port uuid="73f6f99f-8348-41c9-8194-e4cd3d448fd9">
Oct 02 12:16:55 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.11" ipVersion="4"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:16:55 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:16:55 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:16:55 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <system>
Oct 02 12:16:55 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:16:55 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:16:55 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:16:55 compute-0 nova_compute[192079]:       <entry name="serial">ebc56e2c-d3a3-4ade-8849-7e23fc710e78</entry>
Oct 02 12:16:55 compute-0 nova_compute[192079]:       <entry name="uuid">ebc56e2c-d3a3-4ade-8849-7e23fc710e78</entry>
Oct 02 12:16:55 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     </system>
Oct 02 12:16:55 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:16:55 compute-0 nova_compute[192079]:   <os>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:   </os>
Oct 02 12:16:55 compute-0 nova_compute[192079]:   <features>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:   </features>
Oct 02 12:16:55 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:16:55 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:16:55 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:16:55 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:16:55 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.config"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:16:55 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:b3:31:fe"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:       <target dev="tap73f6f99f-83"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:16:55 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/console.log" append="off"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <video>
Oct 02 12:16:55 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     </video>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:16:55 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:16:55 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:16:55 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:16:55 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:16:55 compute-0 nova_compute[192079]: </domain>
Oct 02 12:16:55 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.069 2 DEBUG nova.compute.manager [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Preparing to wait for external event network-vif-plugged-73f6f99f-8348-41c9-8194-e4cd3d448fd9 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.069 2 DEBUG oslo_concurrency.lockutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Acquiring lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.070 2 DEBUG oslo_concurrency.lockutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.070 2 DEBUG oslo_concurrency.lockutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.070 2 DEBUG nova.virt.libvirt.vif [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:16:50Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ListServerFiltersTestJSON-instance-1707027906',display_name='tempest-ListServerFiltersTestJSON-instance-1707027906',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-listserverfilterstestjson-instance-1707027906',id=85,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='6e0277f0bb0f4a349e2e6d8ddfa24edf',ramdisk_id='',reservation_id='r-wvwxsid2',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ListServerFiltersTestJSON-298715262',owner_user_name='temp
est-ListServerFiltersTestJSON-298715262-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:16:51Z,user_data=None,user_id='001d2d51902d4e299b775131f430a5db',uuid=ebc56e2c-d3a3-4ade-8849-7e23fc710e78,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "address": "fa:16:3e:b3:31:fe", "network": {"id": "bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-542543245-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e0277f0bb0f4a349e2e6d8ddfa24edf", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap73f6f99f-83", "ovs_interfaceid": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.071 2 DEBUG nova.network.os_vif_util [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Converting VIF {"id": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "address": "fa:16:3e:b3:31:fe", "network": {"id": "bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-542543245-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e0277f0bb0f4a349e2e6d8ddfa24edf", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap73f6f99f-83", "ovs_interfaceid": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.071 2 DEBUG nova.network.os_vif_util [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:b3:31:fe,bridge_name='br-int',has_traffic_filtering=True,id=73f6f99f-8348-41c9-8194-e4cd3d448fd9,network=Network(bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap73f6f99f-83') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.071 2 DEBUG os_vif [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:b3:31:fe,bridge_name='br-int',has_traffic_filtering=True,id=73f6f99f-8348-41c9-8194-e4cd3d448fd9,network=Network(bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap73f6f99f-83') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.072 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.072 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.072 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.075 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.076 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap73f6f99f-83, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.077 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap73f6f99f-83, col_values=(('external_ids', {'iface-id': '73f6f99f-8348-41c9-8194-e4cd3d448fd9', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:b3:31:fe', 'vm-uuid': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:16:55 compute-0 NetworkManager[51160]: <info>  [1759407415.0796] manager: (tap73f6f99f-83): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/141)
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.079 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.084 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.087 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.088 2 INFO os_vif [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:b3:31:fe,bridge_name='br-int',has_traffic_filtering=True,id=73f6f99f-8348-41c9-8194-e4cd3d448fd9,network=Network(bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap73f6f99f-83')
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.226 2 DEBUG nova.virt.libvirt.driver [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.226 2 DEBUG nova.virt.libvirt.driver [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.227 2 DEBUG nova.virt.libvirt.driver [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] No VIF found with MAC fa:16:3e:b3:31:fe, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:16:55 compute-0 nova_compute[192079]: 2025-10-02 12:16:55.227 2 INFO nova.virt.libvirt.driver [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Using config drive
Oct 02 12:16:56 compute-0 nova_compute[192079]: 2025-10-02 12:16:56.295 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:57 compute-0 nova_compute[192079]: 2025-10-02 12:16:57.109 2 INFO nova.virt.libvirt.driver [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Creating config drive at /var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.config
Oct 02 12:16:57 compute-0 nova_compute[192079]: 2025-10-02 12:16:57.118 2 DEBUG oslo_concurrency.processutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpr2cblbtb execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:16:57 compute-0 nova_compute[192079]: 2025-10-02 12:16:57.263 2 DEBUG oslo_concurrency.processutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpr2cblbtb" returned: 0 in 0.145s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:16:57 compute-0 kernel: tap73f6f99f-83: entered promiscuous mode
Oct 02 12:16:57 compute-0 NetworkManager[51160]: <info>  [1759407417.3470] manager: (tap73f6f99f-83): new Tun device (/org/freedesktop/NetworkManager/Devices/142)
Oct 02 12:16:57 compute-0 nova_compute[192079]: 2025-10-02 12:16:57.346 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:57 compute-0 ovn_controller[94336]: 2025-10-02T12:16:57Z|00278|binding|INFO|Claiming lport 73f6f99f-8348-41c9-8194-e4cd3d448fd9 for this chassis.
Oct 02 12:16:57 compute-0 ovn_controller[94336]: 2025-10-02T12:16:57Z|00279|binding|INFO|73f6f99f-8348-41c9-8194-e4cd3d448fd9: Claiming fa:16:3e:b3:31:fe 10.100.0.11
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:57.360 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:b3:31:fe 10.100.0.11'], port_security=['fa:16:3e:b3:31:fe 10.100.0.11'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.11/28', 'neutron:device_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'neutron:revision_number': '2', 'neutron:security_group_ids': 'b4e0bc42-3cfd-4f42-a319-553606576b33', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=a043239b-039e-45fa-8277-43e361a8bae7, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=2, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=73f6f99f-8348-41c9-8194-e4cd3d448fd9) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:57.361 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 73f6f99f-8348-41c9-8194-e4cd3d448fd9 in datapath bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5 bound to our chassis
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:57.363 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:57.375 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[401f1f66-ee3e-4927-85e6-65895d40896d]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:57.376 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tapbd543a6a-b1 in ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:57.378 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tapbd543a6a-b0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:57.378 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[eb8e2a07-4a51-4861-8f90-e258c4d6d18f]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:57.379 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[725cf087-3b71-41b4-a8bd-f3b49eb7b074]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:57 compute-0 systemd-udevd[232629]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:57.390 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[1f92a144-4702-4b37-b5e7-aa843c15b3dc]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:57 compute-0 NetworkManager[51160]: <info>  [1759407417.4101] device (tap73f6f99f-83): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:16:57 compute-0 NetworkManager[51160]: <info>  [1759407417.4107] device (tap73f6f99f-83): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:16:57 compute-0 systemd-machined[152150]: New machine qemu-39-instance-00000055.
Oct 02 12:16:57 compute-0 nova_compute[192079]: 2025-10-02 12:16:57.414 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:57 compute-0 ovn_controller[94336]: 2025-10-02T12:16:57Z|00280|binding|INFO|Setting lport 73f6f99f-8348-41c9-8194-e4cd3d448fd9 ovn-installed in OVS
Oct 02 12:16:57 compute-0 ovn_controller[94336]: 2025-10-02T12:16:57Z|00281|binding|INFO|Setting lport 73f6f99f-8348-41c9-8194-e4cd3d448fd9 up in Southbound
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:57.414 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ebc30f73-7a8f-43cf-a735-0bf508166af0]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:57 compute-0 nova_compute[192079]: 2025-10-02 12:16:57.417 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:57 compute-0 systemd[1]: Started Virtual Machine qemu-39-instance-00000055.
Oct 02 12:16:57 compute-0 podman[232594]: 2025-10-02 12:16:57.448735835 +0000 UTC m=+0.110773302 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, vcs-type=git, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, com.redhat.component=ubi9-minimal-container, config_id=edpm, name=ubi9-minimal, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, release=1755695350, version=9.6, architecture=x86_64, build-date=2025-08-20T13:12:41, container_name=openstack_network_exporter, url=https://catalog.redhat.com/en/search?searchType=containers, io.buildah.version=1.33.7, vendor=Red Hat, Inc., description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. 
This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., maintainer=Red Hat, Inc., summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., distribution-scope=public, io.openshift.expose-services=, managed_by=edpm_ansible, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.openshift.tags=minimal rhel9)
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:57.449 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[517966e9-12bd-4e19-8f42-4b289528c661]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:57 compute-0 podman[232595]: 2025-10-02 12:16:57.453793234 +0000 UTC m=+0.113475226 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, container_name=multipathd, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001)
Oct 02 12:16:57 compute-0 NetworkManager[51160]: <info>  [1759407417.4559] manager: (tapbd543a6a-b0): new Veth device (/org/freedesktop/NetworkManager/Devices/143)
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:57.454 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3e21829e-446b-46f7-adaf-98ccc64b470e]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:57.485 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[13cb5422-4c1f-450c-9291-46bf75fd396f]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:57.489 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[64b001da-08cb-43ff-b811-a434b024e288]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:57 compute-0 NetworkManager[51160]: <info>  [1759407417.5070] device (tapbd543a6a-b0): carrier: link connected
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:57.511 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[8ff1e723-35b3-4739-a088-74a11b3ede8a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:57.528 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[887b279a-e0af-44e5-a72f-dd2e12594dad]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapbd543a6a-b1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:71:7a:4a'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 89], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 541513, 'reachable_time': 19510, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 232674, 'error': None, 'target': 'ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:57.541 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[35f5660e-5fb5-45de-8d8b-77afef807b2c]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe71:7a4a'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 541513, 'tstamp': 541513}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 232675, 'error': None, 'target': 'ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:57.558 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9a99c744-da0b-46a4-aa0c-a458aad7bca2]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapbd543a6a-b1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:71:7a:4a'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 89], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 541513, 'reachable_time': 19510, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 232676, 'error': None, 'target': 'ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:57.587 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b39bcb87-2118-4393-b83c-dd8991996ea1]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:57.653 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[681fe645-c72e-4188-a69d-4caea80e619e]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:57.654 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapbd543a6a-b0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:57.655 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:57.655 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapbd543a6a-b0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:16:57 compute-0 nova_compute[192079]: 2025-10-02 12:16:57.657 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:57 compute-0 NetworkManager[51160]: <info>  [1759407417.6583] manager: (tapbd543a6a-b0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/144)
Oct 02 12:16:57 compute-0 kernel: tapbd543a6a-b0: entered promiscuous mode
Oct 02 12:16:57 compute-0 nova_compute[192079]: 2025-10-02 12:16:57.659 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:57.660 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tapbd543a6a-b0, col_values=(('external_ids', {'iface-id': '1bd1cb43-f90b-4e8c-92cc-e89ec36a0b0f'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:16:57 compute-0 nova_compute[192079]: 2025-10-02 12:16:57.661 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:57 compute-0 ovn_controller[94336]: 2025-10-02T12:16:57Z|00282|binding|INFO|Releasing lport 1bd1cb43-f90b-4e8c-92cc-e89ec36a0b0f from this chassis (sb_readonly=0)
Oct 02 12:16:57 compute-0 nova_compute[192079]: 2025-10-02 12:16:57.673 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:57.674 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:57.675 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[903d38b6-f4e8-4fb0-ae10-90496d43f4cd]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:57.676 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5.pid.haproxy
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:16:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:16:57.677 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5', 'env', 'PROCESS_TAG=haproxy-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:16:58 compute-0 podman[232715]: 2025-10-02 12:16:58.100749022 +0000 UTC m=+0.059862693 container create 4326bc2128ef700472a26297ff9dc9fc1f5fba4abbb2dcc09d1fbff2652f95cf (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, tcib_managed=true, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001)
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.106 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407418.1061606, ebc56e2c-d3a3-4ade-8849-7e23fc710e78 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.107 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] VM Started (Lifecycle Event)
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.142 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:16:58 compute-0 systemd[1]: Started libpod-conmon-4326bc2128ef700472a26297ff9dc9fc1f5fba4abbb2dcc09d1fbff2652f95cf.scope.
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.147 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407418.1063805, ebc56e2c-d3a3-4ade-8849-7e23fc710e78 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.148 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] VM Paused (Lifecycle Event)
Oct 02 12:16:58 compute-0 podman[232715]: 2025-10-02 12:16:58.067520787 +0000 UTC m=+0.026634408 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.167 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:16:58 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.170 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:16:58 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/e378bec0e99bbca9b4f332ad335218074a9b447cf903915046b576100b413fb4/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:16:58 compute-0 podman[232715]: 2025-10-02 12:16:58.19087358 +0000 UTC m=+0.149987221 container init 4326bc2128ef700472a26297ff9dc9fc1f5fba4abbb2dcc09d1fbff2652f95cf (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2)
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.195 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:16:58 compute-0 podman[232715]: 2025-10-02 12:16:58.19638303 +0000 UTC m=+0.155496641 container start 4326bc2128ef700472a26297ff9dc9fc1f5fba4abbb2dcc09d1fbff2652f95cf (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS)
Oct 02 12:16:58 compute-0 neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5[232730]: [NOTICE]   (232734) : New worker (232736) forked
Oct 02 12:16:58 compute-0 neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5[232730]: [NOTICE]   (232734) : Loading success.
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.322 2 DEBUG nova.compute.manager [req-1f1acee5-331e-4d47-9bb8-9b213493b21b req-eaa9068b-098c-4791-b7a2-3a5e88fa969b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Received event network-vif-plugged-73f6f99f-8348-41c9-8194-e4cd3d448fd9 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.322 2 DEBUG oslo_concurrency.lockutils [req-1f1acee5-331e-4d47-9bb8-9b213493b21b req-eaa9068b-098c-4791-b7a2-3a5e88fa969b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.322 2 DEBUG oslo_concurrency.lockutils [req-1f1acee5-331e-4d47-9bb8-9b213493b21b req-eaa9068b-098c-4791-b7a2-3a5e88fa969b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.323 2 DEBUG oslo_concurrency.lockutils [req-1f1acee5-331e-4d47-9bb8-9b213493b21b req-eaa9068b-098c-4791-b7a2-3a5e88fa969b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.323 2 DEBUG nova.compute.manager [req-1f1acee5-331e-4d47-9bb8-9b213493b21b req-eaa9068b-098c-4791-b7a2-3a5e88fa969b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Processing event network-vif-plugged-73f6f99f-8348-41c9-8194-e4cd3d448fd9 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.323 2 DEBUG nova.compute.manager [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.327 2 DEBUG nova.virt.libvirt.driver [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.327 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407418.3270857, ebc56e2c-d3a3-4ade-8849-7e23fc710e78 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.328 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] VM Resumed (Lifecycle Event)
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.333 2 INFO nova.virt.libvirt.driver [-] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Instance spawned successfully.
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.334 2 DEBUG nova.virt.libvirt.driver [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.355 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.364 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.368 2 DEBUG nova.virt.libvirt.driver [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.369 2 DEBUG nova.virt.libvirt.driver [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.369 2 DEBUG nova.virt.libvirt.driver [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.370 2 DEBUG nova.virt.libvirt.driver [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.370 2 DEBUG nova.virt.libvirt.driver [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.371 2 DEBUG nova.virt.libvirt.driver [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.412 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.495 2 INFO nova.compute.manager [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Took 6.50 seconds to spawn the instance on the hypervisor.
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.496 2 DEBUG nova.compute.manager [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.609 2 INFO nova.compute.manager [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Took 7.11 seconds to build instance.
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.642 2 DEBUG oslo_concurrency.lockutils [None req-c25aedfa-2a01-423f-ba42-93cbb1d810ee 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 7.251s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.909 2 DEBUG nova.network.neutron [req-c4a55826-2478-48b3-93ac-f89d939b9f2c req-e9d79ffe-2b79-46e0-be5f-2130ad0d1b5c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Updated VIF entry in instance network info cache for port 73f6f99f-8348-41c9-8194-e4cd3d448fd9. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.910 2 DEBUG nova.network.neutron [req-c4a55826-2478-48b3-93ac-f89d939b9f2c req-e9d79ffe-2b79-46e0-be5f-2130ad0d1b5c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Updating instance_info_cache with network_info: [{"id": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "address": "fa:16:3e:b3:31:fe", "network": {"id": "bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-542543245-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e0277f0bb0f4a349e2e6d8ddfa24edf", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap73f6f99f-83", "ovs_interfaceid": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:16:58 compute-0 nova_compute[192079]: 2025-10-02 12:16:58.944 2 DEBUG oslo_concurrency.lockutils [req-c4a55826-2478-48b3-93ac-f89d939b9f2c req-e9d79ffe-2b79-46e0-be5f-2130ad0d1b5c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-ebc56e2c-d3a3-4ade-8849-7e23fc710e78" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:17:00 compute-0 nova_compute[192079]: 2025-10-02 12:17:00.080 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:01 compute-0 nova_compute[192079]: 2025-10-02 12:17:01.348 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:02 compute-0 nova_compute[192079]: 2025-10-02 12:17:02.094 2 DEBUG nova.compute.manager [req-3c57d94f-cf21-4738-bad4-4d96ea51d5ee req-89e636f1-9c3f-4890-a915-354100d505e7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Received event network-vif-plugged-73f6f99f-8348-41c9-8194-e4cd3d448fd9 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:17:02 compute-0 nova_compute[192079]: 2025-10-02 12:17:02.094 2 DEBUG oslo_concurrency.lockutils [req-3c57d94f-cf21-4738-bad4-4d96ea51d5ee req-89e636f1-9c3f-4890-a915-354100d505e7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:17:02 compute-0 nova_compute[192079]: 2025-10-02 12:17:02.094 2 DEBUG oslo_concurrency.lockutils [req-3c57d94f-cf21-4738-bad4-4d96ea51d5ee req-89e636f1-9c3f-4890-a915-354100d505e7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:17:02 compute-0 nova_compute[192079]: 2025-10-02 12:17:02.095 2 DEBUG oslo_concurrency.lockutils [req-3c57d94f-cf21-4738-bad4-4d96ea51d5ee req-89e636f1-9c3f-4890-a915-354100d505e7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:17:02 compute-0 nova_compute[192079]: 2025-10-02 12:17:02.095 2 DEBUG nova.compute.manager [req-3c57d94f-cf21-4738-bad4-4d96ea51d5ee req-89e636f1-9c3f-4890-a915-354100d505e7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] No waiting events found dispatching network-vif-plugged-73f6f99f-8348-41c9-8194-e4cd3d448fd9 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:17:02 compute-0 nova_compute[192079]: 2025-10-02 12:17:02.095 2 WARNING nova.compute.manager [req-3c57d94f-cf21-4738-bad4-4d96ea51d5ee req-89e636f1-9c3f-4890-a915-354100d505e7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Received unexpected event network-vif-plugged-73f6f99f-8348-41c9-8194-e4cd3d448fd9 for instance with vm_state active and task_state None.
Oct 02 12:17:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:02.216 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:17:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:02.217 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:17:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:02.218 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:17:03 compute-0 podman[232746]: 2025-10-02 12:17:03.165506874 +0000 UTC m=+0.062295829 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=iscsid, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, config_id=iscsid, org.label-schema.schema-version=1.0)
Oct 02 12:17:03 compute-0 podman[232745]: 2025-10-02 12:17:03.175250601 +0000 UTC m=+0.067535023 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:17:03 compute-0 nova_compute[192079]: 2025-10-02 12:17:03.762 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759407408.7604644, 92f5a241-27d9-416b-a19f-da7560348296 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:17:03 compute-0 nova_compute[192079]: 2025-10-02 12:17:03.762 2 INFO nova.compute.manager [-] [instance: 92f5a241-27d9-416b-a19f-da7560348296] VM Stopped (Lifecycle Event)
Oct 02 12:17:03 compute-0 nova_compute[192079]: 2025-10-02 12:17:03.791 2 DEBUG nova.compute.manager [None req-72b497f6-4c90-4f1a-af1a-991d8d7169cf - - - - - -] [instance: 92f5a241-27d9-416b-a19f-da7560348296] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:17:05 compute-0 nova_compute[192079]: 2025-10-02 12:17:05.082 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:06 compute-0 nova_compute[192079]: 2025-10-02 12:17:06.349 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:10 compute-0 nova_compute[192079]: 2025-10-02 12:17:10.085 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:11 compute-0 nova_compute[192079]: 2025-10-02 12:17:11.351 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:11 compute-0 ovn_controller[94336]: 2025-10-02T12:17:11Z|00026|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:b3:31:fe 10.100.0.11
Oct 02 12:17:11 compute-0 ovn_controller[94336]: 2025-10-02T12:17:11Z|00027|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:b3:31:fe 10.100.0.11
Oct 02 12:17:13 compute-0 podman[232803]: 2025-10-02 12:17:13.151640079 +0000 UTC m=+0.058848285 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 12:17:13 compute-0 podman[232801]: 2025-10-02 12:17:13.171298975 +0000 UTC m=+0.085810970 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, container_name=ovn_metadata_agent)
Oct 02 12:17:13 compute-0 podman[232802]: 2025-10-02 12:17:13.186770087 +0000 UTC m=+0.091621219 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, io.buildah.version=1.41.3, managed_by=edpm_ansible, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001)
Oct 02 12:17:15 compute-0 nova_compute[192079]: 2025-10-02 12:17:15.088 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:16 compute-0 nova_compute[192079]: 2025-10-02 12:17:16.354 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.105 12 DEBUG ceilometer.compute.discovery [-] instance data: {'id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'os_type': 'hvm', 'architecture': 'x86_64', 'OS-EXT-SRV-ATTR:instance_name': 'instance-00000055', 'OS-EXT-SRV-ATTR:host': 'compute-0.ctlplane.example.com', 'OS-EXT-STS:vm_state': 'running', 'tenant_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'user_id': '001d2d51902d4e299b775131f430a5db', 'hostId': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'status': 'active', 'metadata': {}} discover_libvirt_polling /usr/lib/python3.9/site-packages/ceilometer/compute/discovery.py:228
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.106 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets.drop in the context of pollsters
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.109 12 DEBUG ceilometer.compute.virt.libvirt.inspector [-] No delta meter predecessor for ebc56e2c-d3a3-4ade-8849-7e23fc710e78 / tap73f6f99f-83 inspect_vnics /usr/lib/python3.9/site-packages/ceilometer/compute/virt/libvirt/inspector.py:136
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.110 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/network.outgoing.packets.drop volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '980511fc-c2c4-48fe-9ec0-aa90886621bc', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets.drop', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'project_name': None, 'resource_id': 'instance-00000055-ebc56e2c-d3a3-4ade-8849-7e23fc710e78-tap73f6f99f-83', 'timestamp': '2025-10-02T12:17:17.106226', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'tap73f6f99f-83', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:b3:31:fe', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap73f6f99f-83'}, 'message_id': 'bc8be39a-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.793279059, 'message_signature': 'bf2a731c148e67de58771bccc9b7567436b4594c9257126d32813d123cca1146'}]}, 'timestamp': '2025-10-02 12:17:17.110775', '_unique_id': 'f24e7ff02cd5414cadc6070d03594bdb'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.112 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.latency in the context of pollsters
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.140 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.device.read.latency volume: 696875204 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.141 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.device.read.latency volume: 36034421 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '965d3b1b-8f71-413f-ad56-d61b1d156418', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 696875204, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'project_name': None, 'resource_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78-vda', 'timestamp': '2025-10-02T12:17:17.113777', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'instance-00000055', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'bc908954-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.800900577, 'message_signature': '01e7f0da48e7970264324cbba5452ec6632b9323acfde78bc2ae7f235915a49a'}, {'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 36034421, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 
'project_name': None, 'resource_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78-sda', 'timestamp': '2025-10-02T12:17:17.113777', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'instance-00000055', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'bc909a8e-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.800900577, 'message_signature': 'b5b2e3045532d330b47a7b24ab096694a238eee93f5d16951b30faa8fdc62e15'}]}, 'timestamp': '2025-10-02 12:17:17.141628', '_unique_id': 'eb3bb370a6fb42ffaad019818958b782'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.142 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.144 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.iops in the context of pollsters
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.144 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for PerDeviceDiskIOPSPollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.144 12 ERROR ceilometer.polling.manager [-] Prevent pollster disk.device.iops from polling [<NovaLikeServer: tempest-ListServerFiltersTestJSON-instance-1707027906>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-ListServerFiltersTestJSON-instance-1707027906>]
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.144 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes in the context of pollsters
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.144 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/network.outgoing.bytes volume: 1438 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'a1b7ec30-bc5d-4ede-9e03-101231eb28bd', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 1438, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'project_name': None, 'resource_id': 'instance-00000055-ebc56e2c-d3a3-4ade-8849-7e23fc710e78-tap73f6f99f-83', 'timestamp': '2025-10-02T12:17:17.144725', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'tap73f6f99f-83', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:b3:31:fe', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap73f6f99f-83'}, 'message_id': 'bc911fa4-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.793279059, 'message_signature': '13519f6fd992ae19c420d84714fb8dc09233bca2542770502ea9801d997bd711'}]}, 'timestamp': '2025-10-02 12:17:17.145035', '_unique_id': '10645a64de96405bb1db91bad23050e3'}: kombu.exceptions.OperationalError: [Errno 111] Connection 
refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.145 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.146 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.capacity in the context of pollsters
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.161 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.device.capacity volume: 1073741824 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.162 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.device.capacity volume: 485376 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '643a9d76-6a9d-4391-9d4f-2e222f1362fa', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 1073741824, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'project_name': None, 'resource_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78-vda', 'timestamp': '2025-10-02T12:17:17.146381', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'instance-00000055', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'bc93bbd8-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.833461024, 'message_signature': '4c26fc2a2e83b3bbfb446c81e793e0f75154e83cc06197f49282edbed87d9558'}, {'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 485376, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'project_name': None, 
'resource_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78-sda', 'timestamp': '2025-10-02T12:17:17.146381', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'instance-00000055', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'bc93c9a2-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.833461024, 'message_signature': '19a0a336ba34ee9b32fae7e084e8364898e32ef3b6bd3328938988c07793f126'}]}, 'timestamp': '2025-10-02 12:17:17.162482', '_unique_id': '93b2995a9cf14bbfb8915064f9fed6d6'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.163 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.164 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.requests in the context of pollsters
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.164 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.device.read.requests volume: 1102 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.164 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.device.read.requests volume: 108 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '0f624b00-a7cf-4b69-8617-faafc5fd75ab', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 1102, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'project_name': None, 'resource_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78-vda', 'timestamp': '2025-10-02T12:17:17.164601', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'instance-00000055', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'bc94287a-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.800900577, 'message_signature': '73bc1d41b465edee4f4b1d4fdbe6080a47940f0c491ab969697e784b75fa9b9b'}, {'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 108, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 
'project_name': None, 'resource_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78-sda', 'timestamp': '2025-10-02T12:17:17.164601', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'instance-00000055', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'bc943496-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.800900577, 'message_signature': '2f9754186cc1274d1274130984a44a565c899cf4db8481bd5fe842ddc5662df6'}]}, 'timestamp': '2025-10-02 12:17:17.165205', '_unique_id': '6f667d46dd804774bea2156098fae1c6'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.165 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.166 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes.rate in the context of pollsters
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.166 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for IncomingBytesRatePollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.166 12 ERROR ceilometer.polling.manager [-] Prevent pollster network.incoming.bytes.rate from polling [<NovaLikeServer: tempest-ListServerFiltersTestJSON-instance-1707027906>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-ListServerFiltersTestJSON-instance-1707027906>]
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.166 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes.delta in the context of pollsters
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/network.outgoing.bytes.delta volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '8f45466e-a4f6-43cc-bb0a-a701154f4984', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.bytes.delta', 'counter_type': 'delta', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'project_name': None, 'resource_id': 'instance-00000055-ebc56e2c-d3a3-4ade-8849-7e23fc710e78-tap73f6f99f-83', 'timestamp': '2025-10-02T12:17:17.167060', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'tap73f6f99f-83', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:b3:31:fe', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap73f6f99f-83'}, 'message_id': 'bc948842-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.793279059, 'message_signature': 'aa9ca66fbe0631a8f84dac3ab39b8d5d6e5e0e79b295fb6549e95c35226409b2'}]}, 'timestamp': '2025-10-02 12:17:17.167335', '_unique_id': '5bcb8d7f94574321a1060cc871977206'}: kombu.exceptions.OperationalError: [Errno 111] Connection 
refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.167 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.168 12 INFO ceilometer.polling.manager [-] Polling pollster memory.usage in the context of pollsters
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.194 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/memory.usage volume: 40.41015625 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'c679ee14-2748-431a-8041-2e236d8ec576', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'memory.usage', 'counter_type': 'gauge', 'counter_unit': 'MB', 'counter_volume': 40.41015625, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'project_name': None, 'resource_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'timestamp': '2025-10-02T12:17:17.168795', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'instance-00000055', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1}, 'message_id': 'bc98b96c-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.881156535, 'message_signature': 'bbf87e278f8c183422b0f38bbd96fe6403d4098202be77ea211fa97be4e08cb0'}]}, 'timestamp': '2025-10-02 12:17:17.195082', '_unique_id': '33cd334a23ac4402889ed4d6b999b70d'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.196 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.197 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.usage in the context of pollsters
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.198 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.device.usage volume: 29884416 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.198 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.device.usage volume: 485376 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'c74e2f8c-e8c0-40ba-b2b5-f33a9bb85632', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 29884416, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'project_name': None, 'resource_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78-vda', 'timestamp': '2025-10-02T12:17:17.198133', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'instance-00000055', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'bc994e86-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.833461024, 'message_signature': '05c7d87ab0aab9985bbcb41c5d955252258d03797b0aa4c04964654f41126e2d'}, {'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 485376, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'project_name': None, 'resource_id': 
'ebc56e2c-d3a3-4ade-8849-7e23fc710e78-sda', 'timestamp': '2025-10-02T12:17:17.198133', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'instance-00000055', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'bc996984-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.833461024, 'message_signature': '07a57fc28382e04e67a7d6b348cc00eea84fb3a8c15cf81512bdd84ad36b4e13'}]}, 'timestamp': '2025-10-02 12:17:17.199515', '_unique_id': '524aaf4e4d794d5d83f4157e9e20cfe7'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.200 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.203 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes.rate in the context of pollsters
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.203 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for OutgoingBytesRatePollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.203 12 ERROR ceilometer.polling.manager [-] Prevent pollster network.outgoing.bytes.rate from polling [<NovaLikeServer: tempest-ListServerFiltersTestJSON-instance-1707027906>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-ListServerFiltersTestJSON-instance-1707027906>]
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.204 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets.error in the context of pollsters
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.204 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/network.outgoing.packets.error volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '570cf0a4-d1fb-48ed-b3bd-cc0e4b54372d', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets.error', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'project_name': None, 'resource_id': 'instance-00000055-ebc56e2c-d3a3-4ade-8849-7e23fc710e78-tap73f6f99f-83', 'timestamp': '2025-10-02T12:17:17.204307', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'tap73f6f99f-83', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:b3:31:fe', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap73f6f99f-83'}, 'message_id': 'bc9a3fbc-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.793279059, 'message_signature': '14dac69e158441000be4da60e23b62155c883891bc38cc7d62c992548390589b'}]}, 'timestamp': '2025-10-02 12:17:17.205070', '_unique_id': '4daf5245af9e4ca4b9abc8370ce28ce9'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.206 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.208 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.bytes in the context of pollsters
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.208 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.device.read.bytes volume: 30513664 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.209 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.device.read.bytes volume: 274750 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '04001018-438c-4543-8fba-6739680f8c83', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 30513664, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'project_name': None, 'resource_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78-vda', 'timestamp': '2025-10-02T12:17:17.208419', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'instance-00000055', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'bc9adf76-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.800900577, 'message_signature': '200d97378c9856a0061ca62e6d9f259aad22d6e87cb255766d4ba0cf6f3d75db'}, {'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 274750, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'project_name': None, 
'resource_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78-sda', 'timestamp': '2025-10-02T12:17:17.208419', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'instance-00000055', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'bc9afa24-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.800900577, 'message_signature': 'a13bb3ddd6dff1a2183fabb7861cbeb6dfab52bcd2429820dfd5ab9950ebeabf'}]}, 'timestamp': '2025-10-02 12:17:17.209763', '_unique_id': '60f19da9e19d43dfb9c97dffeb0f2d13'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.211 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.212 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.allocation in the context of pollsters
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.213 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.device.allocation volume: 30154752 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.213 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.device.allocation volume: 487424 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '64081381-4f9b-4390-a616-b32b60656622', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 30154752, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'project_name': None, 'resource_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78-vda', 'timestamp': '2025-10-02T12:17:17.213199', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'instance-00000055', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'bc9b9a7e-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.833461024, 'message_signature': '126744a3c9a68f48692b6b34a9ff673f3e3ffbd8ca1334c291fb5d7b8b25b540'}, {'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 487424, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'project_name': None, 
'resource_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78-sda', 'timestamp': '2025-10-02T12:17:17.213199', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'instance-00000055', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'bc9bb536-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.833461024, 'message_signature': 'd11927e9e4dcfe3bdbf1a0ddbbadfb5aea2776e8219dbec461628d3f9bc50519'}]}, 'timestamp': '2025-10-02 12:17:17.214550', '_unique_id': '81b211e2ec6e4aa095a494253b8cba61'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.215 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.217 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.latency in the context of pollsters
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.217 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for PerDeviceDiskLatencyPollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.217 12 ERROR ceilometer.polling.manager [-] Prevent pollster disk.device.latency from polling [<NovaLikeServer: tempest-ListServerFiltersTestJSON-instance-1707027906>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-ListServerFiltersTestJSON-instance-1707027906>]
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.218 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes.delta in the context of pollsters
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.218 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/network.incoming.bytes.delta volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '426ebfda-7a87-455d-ab5f-b90e5e26f26f', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.bytes.delta', 'counter_type': 'delta', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'project_name': None, 'resource_id': 'instance-00000055-ebc56e2c-d3a3-4ade-8849-7e23fc710e78-tap73f6f99f-83', 'timestamp': '2025-10-02T12:17:17.218195', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'tap73f6f99f-83', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:b3:31:fe', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap73f6f99f-83'}, 'message_id': 'bc9c598c-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.793279059, 'message_signature': '35fb5f01f84065737de95ec92fa523436b18bec6ab5ff4298e35999e95b9ef6d'}]}, 'timestamp': '2025-10-02 12:17:17.218682', '_unique_id': '432218e745da45eeab7c85ab79704e7a'}: kombu.exceptions.OperationalError: [Errno 111] Connection 
refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.219 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.220 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes in the context of pollsters
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.221 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/network.incoming.bytes volume: 1562 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '0655bbf7-b7a8-4651-acd5-330551129dc6', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 1562, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'project_name': None, 'resource_id': 'instance-00000055-ebc56e2c-d3a3-4ade-8849-7e23fc710e78-tap73f6f99f-83', 'timestamp': '2025-10-02T12:17:17.220950', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'tap73f6f99f-83', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:b3:31:fe', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap73f6f99f-83'}, 'message_id': 'bc9cc656-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.793279059, 'message_signature': '0f23cae9edf8f2db9fa4b6a87be8eaa455d068d7d8db28483568a96a5a7020dc'}]}, 'timestamp': '2025-10-02 12:17:17.221466', '_unique_id': 'c3c142f06876413fbfae9dcee77a24cc'}: kombu.exceptions.OperationalError: [Errno 111] Connection 
refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.222 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.223 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.bytes in the context of pollsters
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.223 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.device.write.bytes volume: 72863744 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.224 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.device.write.bytes volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'cfd98b53-3502-48ba-9758-fc6b72dc2584', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 72863744, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'project_name': None, 'resource_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78-vda', 'timestamp': '2025-10-02T12:17:17.223639', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'instance-00000055', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'bc9d2dda-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.800900577, 'message_signature': '8121b04151e354b83179c40859f24439b3578509130667f7a384a4ae11bc2731'}, {'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'project_name': None, 
'resource_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78-sda', 'timestamp': '2025-10-02T12:17:17.223639', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'instance-00000055', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'bc9d40fe-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.800900577, 'message_signature': 'd27b0a164d2347cfef5ba88f78510efc2cd20da009c3ceb82e6e2bd4e6c7d5e1'}]}, 'timestamp': '2025-10-02 12:17:17.224578', '_unique_id': 'e53c84e263df4097b580a45740d7131d'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.225 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.226 12 INFO ceilometer.polling.manager [-] Polling pollster cpu in the context of pollsters
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.226 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/cpu volume: 11950000000 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'bdc1b14a-2b41-45b1-ba6b-1d602d048a64', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'cpu', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 11950000000, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'project_name': None, 'resource_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'timestamp': '2025-10-02T12:17:17.226774', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'instance-00000055', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'cpu_number': 1}, 'message_id': 'bc9da8fa-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.881156535, 'message_signature': 'e8eb7b5a8b158c3a717d293b3f5e1bddeadfccd4a67381de3f8090dd5269cb26'}]}, 'timestamp': '2025-10-02 12:17:17.227252', '_unique_id': 'e9f2308a0dbf42c09a0cc971577439ff'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.228 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.229 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.requests in the context of pollsters
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.229 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.device.write.requests volume: 310 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.229 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.device.write.requests volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '5e1454ed-c2da-4cbe-b7e2-87ab540753c7', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 310, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'project_name': None, 'resource_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78-vda', 'timestamp': '2025-10-02T12:17:17.229419', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'instance-00000055', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'bc9e0f34-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.800900577, 'message_signature': 'e45e740482c3b2b41e6e2081d834c80c5f496c4375bc6d7022459b780deaf1a4'}, {'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 0, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 
'project_name': None, 'resource_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78-sda', 'timestamp': '2025-10-02T12:17:17.229419', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'instance-00000055', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'bc9e2050-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.800900577, 'message_signature': '6b7052367e43f0f696cb9e3b85b95582a96cc614a91763f4e1fd42a264c7f1aa'}]}, 'timestamp': '2025-10-02 12:17:17.230295', '_unique_id': '47a5a4886ee646e3ab81c9c2aea08336'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.231 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.232 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets in the context of pollsters
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.232 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/network.outgoing.packets volume: 13 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '98d5ae04-7253-482e-91b3-f075edd0d25a', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 13, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'project_name': None, 'resource_id': 'instance-00000055-ebc56e2c-d3a3-4ade-8849-7e23fc710e78-tap73f6f99f-83', 'timestamp': '2025-10-02T12:17:17.232574', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'tap73f6f99f-83', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:b3:31:fe', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap73f6f99f-83'}, 'message_id': 'bc9e8b08-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.793279059, 'message_signature': '103873b647f23d5b3a6e74fc127254a34403632ff92e036a1216ee4cd992e1b0'}]}, 'timestamp': '2025-10-02 12:17:17.233083', '_unique_id': '58d0bc7aba724beb926689f378fc480f'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.233 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.235 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.latency in the context of pollsters
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.235 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.device.write.latency volume: 1907812641 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.235 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.device.write.latency volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '458d05bf-409d-466d-a41d-be19ab776d4c', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 1907812641, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'project_name': None, 'resource_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78-vda', 'timestamp': '2025-10-02T12:17:17.235253', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'instance-00000055', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'bc9ef318-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.800900577, 'message_signature': '85eb3018f00eab1f0f043409191645068d52bb42bcdd3b954869e19672fecaf2'}, {'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 0, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'project_name': 
None, 'resource_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78-sda', 'timestamp': '2025-10-02T12:17:17.235253', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'instance-00000055', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'bc9f034e-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.800900577, 'message_signature': '5395686339c26d3e9c0b4a3623ba8a4d865b5bd1dd3de125ce5fd4c8e0eaaa39'}]}, 'timestamp': '2025-10-02 12:17:17.236154', '_unique_id': '15c80b9f1a6d4e12b4ebeaaba5c5d409'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.237 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.238 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets.error in the context of pollsters
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.238 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/network.incoming.packets.error volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '7b6a5d2e-9de2-4414-8c6e-61a6f4ab5081', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets.error', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'project_name': None, 'resource_id': 'instance-00000055-ebc56e2c-d3a3-4ade-8849-7e23fc710e78-tap73f6f99f-83', 'timestamp': '2025-10-02T12:17:17.238413', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'tap73f6f99f-83', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:b3:31:fe', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap73f6f99f-83'}, 'message_id': 'bc9f6f00-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.793279059, 'message_signature': '4a1aa9ab37648dc15a218aa2b22686b07de96c551410049291a79700f073be74'}]}, 'timestamp': '2025-10-02 12:17:17.238888', '_unique_id': '31c5106e40384b5baa50fc1990e7be6e'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.239 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.241 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets in the context of pollsters
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.241 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/network.incoming.packets volume: 10 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'f42b865a-79aa-407b-b5c0-f3ade7291f97', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 10, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'project_name': None, 'resource_id': 'instance-00000055-ebc56e2c-d3a3-4ade-8849-7e23fc710e78-tap73f6f99f-83', 'timestamp': '2025-10-02T12:17:17.241180', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'tap73f6f99f-83', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:b3:31:fe', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap73f6f99f-83'}, 'message_id': 'bc9fdb7a-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.793279059, 'message_signature': '242d7c9f3bcd8b06aaff9e3b1ab0653a20dede320f74dbe84172702987f23633'}]}, 'timestamp': '2025-10-02 12:17:17.241673', '_unique_id': '35e19db282e04d478585db558e5ed451'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.242 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.243 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets.drop in the context of pollsters
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.243 12 DEBUG ceilometer.compute.pollsters [-] ebc56e2c-d3a3-4ade-8849-7e23fc710e78/network.incoming.packets.drop volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '8f0bf566-54b0-4635-8173-65827b4a1bd0', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets.drop', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '001d2d51902d4e299b775131f430a5db', 'user_name': None, 'project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'project_name': None, 'resource_id': 'instance-00000055-ebc56e2c-d3a3-4ade-8849-7e23fc710e78-tap73f6f99f-83', 'timestamp': '2025-10-02T12:17:17.243922', 'resource_metadata': {'display_name': 'tempest-ListServerFiltersTestJSON-instance-1707027906', 'name': 'tap73f6f99f-83', 'instance_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'instance_type': 'm1.nano', 'host': '685ad1ef54cbbe160d6f20f4ba6803736927e592ed0f2b172dd239fd', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:b3:31:fe', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap73f6f99f-83'}, 'message_id': 'bca0484e-9f89-11f0-af18-fa163efc5e78', 'monotonic_time': 5434.793279059, 'message_signature': '89279375a3a23f0a8cb5e50649633a07b7019f604bedefd39b8282cc9e000f5a'}]}, 'timestamp': '2025-10-02 12:17:17.244452', '_unique_id': '8bddb8c88cf74a439af3a48ba227194e'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:17:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:17:17.245 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:17:20 compute-0 nova_compute[192079]: 2025-10-02 12:17:20.091 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:21 compute-0 nova_compute[192079]: 2025-10-02 12:17:21.356 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:21 compute-0 nova_compute[192079]: 2025-10-02 12:17:21.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:17:21 compute-0 nova_compute[192079]: 2025-10-02 12:17:21.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:17:22 compute-0 podman[232871]: 2025-10-02 12:17:22.158976065 +0000 UTC m=+0.067554513 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, config_id=edpm, 
managed_by=edpm_ansible, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3)
Oct 02 12:17:22 compute-0 nova_compute[192079]: 2025-10-02 12:17:22.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:17:22 compute-0 nova_compute[192079]: 2025-10-02 12:17:22.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:17:22 compute-0 nova_compute[192079]: 2025-10-02 12:17:22.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:17:22 compute-0 nova_compute[192079]: 2025-10-02 12:17:22.918 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:17:22 compute-0 nova_compute[192079]: 2025-10-02 12:17:22.918 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:17:22 compute-0 nova_compute[192079]: 2025-10-02 12:17:22.918 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:17:22 compute-0 nova_compute[192079]: 2025-10-02 12:17:22.919 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:17:22 compute-0 nova_compute[192079]: 2025-10-02 12:17:22.986 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:17:23 compute-0 nova_compute[192079]: 2025-10-02 12:17:23.007 2 DEBUG oslo_concurrency.lockutils [None req-2dbdd85a-c69a-4b17-b0fc-dc9af093731b 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Acquiring lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:17:23 compute-0 nova_compute[192079]: 2025-10-02 12:17:23.008 2 DEBUG oslo_concurrency.lockutils [None req-2dbdd85a-c69a-4b17-b0fc-dc9af093731b 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:17:23 compute-0 nova_compute[192079]: 2025-10-02 12:17:23.008 2 DEBUG nova.compute.manager [None req-2dbdd85a-c69a-4b17-b0fc-dc9af093731b 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:17:23 compute-0 nova_compute[192079]: 2025-10-02 12:17:23.012 2 DEBUG nova.compute.manager [None req-2dbdd85a-c69a-4b17-b0fc-dc9af093731b 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 do_stop_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3338
Oct 02 12:17:23 compute-0 nova_compute[192079]: 2025-10-02 12:17:23.014 2 DEBUG nova.objects.instance [None req-2dbdd85a-c69a-4b17-b0fc-dc9af093731b 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lazy-loading 'flavor' on Instance uuid ebc56e2c-d3a3-4ade-8849-7e23fc710e78 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:17:23 compute-0 nova_compute[192079]: 2025-10-02 12:17:23.046 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk --force-share --output=json" returned: 0 in 0.060s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:17:23 compute-0 nova_compute[192079]: 2025-10-02 12:17:23.046 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:17:23 compute-0 nova_compute[192079]: 2025-10-02 12:17:23.102 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:17:23 compute-0 nova_compute[192079]: 2025-10-02 12:17:23.116 2 DEBUG nova.objects.instance [None req-2dbdd85a-c69a-4b17-b0fc-dc9af093731b 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lazy-loading 'info_cache' on Instance uuid ebc56e2c-d3a3-4ade-8849-7e23fc710e78 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:17:23 compute-0 nova_compute[192079]: 2025-10-02 12:17:23.199 2 DEBUG nova.virt.libvirt.driver [None req-2dbdd85a-c69a-4b17-b0fc-dc9af093731b 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Shutting down instance from state 1 _clean_shutdown /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4071
Oct 02 12:17:23 compute-0 nova_compute[192079]: 2025-10-02 12:17:23.263 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:17:23 compute-0 nova_compute[192079]: 2025-10-02 12:17:23.264 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5567MB free_disk=73.32070541381836GB free_vcpus=7 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:17:23 compute-0 nova_compute[192079]: 2025-10-02 12:17:23.264 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:17:23 compute-0 nova_compute[192079]: 2025-10-02 12:17:23.264 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:17:23 compute-0 nova_compute[192079]: 2025-10-02 12:17:23.372 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance ebc56e2c-d3a3-4ade-8849-7e23fc710e78 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:17:23 compute-0 nova_compute[192079]: 2025-10-02 12:17:23.373 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 1 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:17:23 compute-0 nova_compute[192079]: 2025-10-02 12:17:23.373 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=640MB phys_disk=79GB used_disk=1GB total_vcpus=8 used_vcpus=1 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:17:23 compute-0 nova_compute[192079]: 2025-10-02 12:17:23.447 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:17:23 compute-0 nova_compute[192079]: 2025-10-02 12:17:23.467 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:17:23 compute-0 nova_compute[192079]: 2025-10-02 12:17:23.496 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:17:23 compute-0 nova_compute[192079]: 2025-10-02 12:17:23.496 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.232s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:17:24 compute-0 nova_compute[192079]: 2025-10-02 12:17:24.497 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:17:24 compute-0 nova_compute[192079]: 2025-10-02 12:17:24.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:17:24 compute-0 nova_compute[192079]: 2025-10-02 12:17:24.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:17:24 compute-0 nova_compute[192079]: 2025-10-02 12:17:24.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:17:24 compute-0 nova_compute[192079]: 2025-10-02 12:17:24.720 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "refresh_cache-ebc56e2c-d3a3-4ade-8849-7e23fc710e78" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:17:24 compute-0 nova_compute[192079]: 2025-10-02 12:17:24.720 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquired lock "refresh_cache-ebc56e2c-d3a3-4ade-8849-7e23fc710e78" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:17:24 compute-0 nova_compute[192079]: 2025-10-02 12:17:24.721 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Forcefully refreshing network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2004
Oct 02 12:17:24 compute-0 nova_compute[192079]: 2025-10-02 12:17:24.721 2 DEBUG nova.objects.instance [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lazy-loading 'info_cache' on Instance uuid ebc56e2c-d3a3-4ade-8849-7e23fc710e78 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:17:25 compute-0 nova_compute[192079]: 2025-10-02 12:17:25.093 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:25 compute-0 kernel: tap73f6f99f-83 (unregistering): left promiscuous mode
Oct 02 12:17:25 compute-0 NetworkManager[51160]: <info>  [1759407445.3682] device (tap73f6f99f-83): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:17:25 compute-0 ovn_controller[94336]: 2025-10-02T12:17:25Z|00283|binding|INFO|Releasing lport 73f6f99f-8348-41c9-8194-e4cd3d448fd9 from this chassis (sb_readonly=0)
Oct 02 12:17:25 compute-0 nova_compute[192079]: 2025-10-02 12:17:25.380 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:25 compute-0 ovn_controller[94336]: 2025-10-02T12:17:25Z|00284|binding|INFO|Setting lport 73f6f99f-8348-41c9-8194-e4cd3d448fd9 down in Southbound
Oct 02 12:17:25 compute-0 ovn_controller[94336]: 2025-10-02T12:17:25Z|00285|binding|INFO|Removing iface tap73f6f99f-83 ovn-installed in OVS
Oct 02 12:17:25 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:25.397 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:b3:31:fe 10.100.0.11'], port_security=['fa:16:3e:b3:31:fe 10.100.0.11'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.11/28', 'neutron:device_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'neutron:revision_number': '4', 'neutron:security_group_ids': 'b4e0bc42-3cfd-4f42-a319-553606576b33', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=a043239b-039e-45fa-8277-43e361a8bae7, chassis=[], tunnel_key=2, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=73f6f99f-8348-41c9-8194-e4cd3d448fd9) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:17:25 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:25.400 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 73f6f99f-8348-41c9-8194-e4cd3d448fd9 in datapath bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5 unbound from our chassis
Oct 02 12:17:25 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:25.402 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:17:25 compute-0 nova_compute[192079]: 2025-10-02 12:17:25.402 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:25 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:25.405 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[10f601b3-7bd8-42d2-b94b-7984a1bcfd74]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:25 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:25.406 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5 namespace which is not needed anymore
Oct 02 12:17:25 compute-0 systemd[1]: machine-qemu\x2d39\x2dinstance\x2d00000055.scope: Deactivated successfully.
Oct 02 12:17:25 compute-0 systemd[1]: machine-qemu\x2d39\x2dinstance\x2d00000055.scope: Consumed 13.953s CPU time.
Oct 02 12:17:25 compute-0 systemd-machined[152150]: Machine qemu-39-instance-00000055 terminated.
Oct 02 12:17:25 compute-0 neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5[232730]: [NOTICE]   (232734) : haproxy version is 2.8.14-c23fe91
Oct 02 12:17:25 compute-0 neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5[232730]: [NOTICE]   (232734) : path to executable is /usr/sbin/haproxy
Oct 02 12:17:25 compute-0 neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5[232730]: [WARNING]  (232734) : Exiting Master process...
Oct 02 12:17:25 compute-0 neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5[232730]: [ALERT]    (232734) : Current worker (232736) exited with code 143 (Terminated)
Oct 02 12:17:25 compute-0 neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5[232730]: [WARNING]  (232734) : All workers exited. Exiting... (0)
Oct 02 12:17:25 compute-0 systemd[1]: libpod-4326bc2128ef700472a26297ff9dc9fc1f5fba4abbb2dcc09d1fbff2652f95cf.scope: Deactivated successfully.
Oct 02 12:17:25 compute-0 podman[232922]: 2025-10-02 12:17:25.56086811 +0000 UTC m=+0.046894070 container died 4326bc2128ef700472a26297ff9dc9fc1f5fba4abbb2dcc09d1fbff2652f95cf (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS)
Oct 02 12:17:25 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-4326bc2128ef700472a26297ff9dc9fc1f5fba4abbb2dcc09d1fbff2652f95cf-userdata-shm.mount: Deactivated successfully.
Oct 02 12:17:25 compute-0 systemd[1]: var-lib-containers-storage-overlay-e378bec0e99bbca9b4f332ad335218074a9b447cf903915046b576100b413fb4-merged.mount: Deactivated successfully.
Oct 02 12:17:25 compute-0 podman[232922]: 2025-10-02 12:17:25.61222349 +0000 UTC m=+0.098249480 container cleanup 4326bc2128ef700472a26297ff9dc9fc1f5fba4abbb2dcc09d1fbff2652f95cf (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, tcib_managed=true, org.label-schema.build-date=20251001)
Oct 02 12:17:25 compute-0 systemd[1]: libpod-conmon-4326bc2128ef700472a26297ff9dc9fc1f5fba4abbb2dcc09d1fbff2652f95cf.scope: Deactivated successfully.
Oct 02 12:17:25 compute-0 podman[232963]: 2025-10-02 12:17:25.674868028 +0000 UTC m=+0.039009554 container remove 4326bc2128ef700472a26297ff9dc9fc1f5fba4abbb2dcc09d1fbff2652f95cf (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, tcib_managed=true, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:17:25 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:25.679 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b0b82b51-fd40-4a64-8adc-f93305eaf3cc]: (4, ('Thu Oct  2 12:17:25 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5 (4326bc2128ef700472a26297ff9dc9fc1f5fba4abbb2dcc09d1fbff2652f95cf)\n4326bc2128ef700472a26297ff9dc9fc1f5fba4abbb2dcc09d1fbff2652f95cf\nThu Oct  2 12:17:25 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5 (4326bc2128ef700472a26297ff9dc9fc1f5fba4abbb2dcc09d1fbff2652f95cf)\n4326bc2128ef700472a26297ff9dc9fc1f5fba4abbb2dcc09d1fbff2652f95cf\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:25 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:25.680 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[175ca3e4-fd86-4619-93c3-83aaee24f4ce]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:25 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:25.681 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapbd543a6a-b0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:17:25 compute-0 nova_compute[192079]: 2025-10-02 12:17:25.682 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:25 compute-0 kernel: tapbd543a6a-b0: left promiscuous mode
Oct 02 12:17:25 compute-0 nova_compute[192079]: 2025-10-02 12:17:25.696 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:25 compute-0 nova_compute[192079]: 2025-10-02 12:17:25.698 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:25 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:25.700 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c5427364-d23b-41c2-bc91-b1b2a41bb307]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:25 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:25.736 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[cbb7c175-d53d-4cd6-b378-70cd78335343]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:25 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:25.737 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0e50b7bb-a8e4-4429-b35c-6450c9f45e03]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:25 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:25.749 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[baca905d-6f26-4b03-a3a8-53c311e580d2]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 541507, 'reachable_time': 20846, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 232989, 'error': None, 'target': 'ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:25 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:25.752 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:17:25 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:25.752 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[2c65748e-a483-492f-8bba-7d98a90ae47d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:25 compute-0 systemd[1]: run-netns-ovnmeta\x2dbd543a6a\x2dbba1\x2d4bd5\x2d9cbf\x2dfc87bf95cbe5.mount: Deactivated successfully.
Oct 02 12:17:25 compute-0 nova_compute[192079]: 2025-10-02 12:17:25.778 2 DEBUG nova.compute.manager [req-15f8ddca-2866-4205-b802-a89433d58c4e req-fa6433fc-961f-4ac0-803b-e956d8f44e77 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Received event network-vif-unplugged-73f6f99f-8348-41c9-8194-e4cd3d448fd9 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:17:25 compute-0 nova_compute[192079]: 2025-10-02 12:17:25.779 2 DEBUG oslo_concurrency.lockutils [req-15f8ddca-2866-4205-b802-a89433d58c4e req-fa6433fc-961f-4ac0-803b-e956d8f44e77 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:17:25 compute-0 nova_compute[192079]: 2025-10-02 12:17:25.779 2 DEBUG oslo_concurrency.lockutils [req-15f8ddca-2866-4205-b802-a89433d58c4e req-fa6433fc-961f-4ac0-803b-e956d8f44e77 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:17:25 compute-0 nova_compute[192079]: 2025-10-02 12:17:25.779 2 DEBUG oslo_concurrency.lockutils [req-15f8ddca-2866-4205-b802-a89433d58c4e req-fa6433fc-961f-4ac0-803b-e956d8f44e77 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:17:25 compute-0 nova_compute[192079]: 2025-10-02 12:17:25.779 2 DEBUG nova.compute.manager [req-15f8ddca-2866-4205-b802-a89433d58c4e req-fa6433fc-961f-4ac0-803b-e956d8f44e77 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] No waiting events found dispatching network-vif-unplugged-73f6f99f-8348-41c9-8194-e4cd3d448fd9 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:17:25 compute-0 nova_compute[192079]: 2025-10-02 12:17:25.779 2 WARNING nova.compute.manager [req-15f8ddca-2866-4205-b802-a89433d58c4e req-fa6433fc-961f-4ac0-803b-e956d8f44e77 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Received unexpected event network-vif-unplugged-73f6f99f-8348-41c9-8194-e4cd3d448fd9 for instance with vm_state active and task_state powering-off.
Oct 02 12:17:26 compute-0 nova_compute[192079]: 2025-10-02 12:17:26.034 2 DEBUG oslo_concurrency.lockutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Acquiring lock "21aa2a67-6284-4d30-9a7c-499db76c4042" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:17:26 compute-0 nova_compute[192079]: 2025-10-02 12:17:26.034 2 DEBUG oslo_concurrency.lockutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Lock "21aa2a67-6284-4d30-9a7c-499db76c4042" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:17:26 compute-0 nova_compute[192079]: 2025-10-02 12:17:26.134 2 DEBUG nova.compute.manager [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:17:26 compute-0 nova_compute[192079]: 2025-10-02 12:17:26.215 2 INFO nova.virt.libvirt.driver [None req-2dbdd85a-c69a-4b17-b0fc-dc9af093731b 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Instance shutdown successfully after 3 seconds.
Oct 02 12:17:26 compute-0 nova_compute[192079]: 2025-10-02 12:17:26.221 2 INFO nova.virt.libvirt.driver [-] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Instance destroyed successfully.
Oct 02 12:17:26 compute-0 nova_compute[192079]: 2025-10-02 12:17:26.221 2 DEBUG nova.objects.instance [None req-2dbdd85a-c69a-4b17-b0fc-dc9af093731b 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lazy-loading 'numa_topology' on Instance uuid ebc56e2c-d3a3-4ade-8849-7e23fc710e78 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:17:26 compute-0 nova_compute[192079]: 2025-10-02 12:17:26.350 2 DEBUG nova.compute.manager [None req-2dbdd85a-c69a-4b17-b0fc-dc9af093731b 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:17:26 compute-0 nova_compute[192079]: 2025-10-02 12:17:26.358 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:26 compute-0 nova_compute[192079]: 2025-10-02 12:17:26.483 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:26 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:26.487 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=22, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=21) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:17:26 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:26.488 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 9 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:17:26 compute-0 nova_compute[192079]: 2025-10-02 12:17:26.489 2 DEBUG oslo_concurrency.lockutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:17:26 compute-0 nova_compute[192079]: 2025-10-02 12:17:26.489 2 DEBUG oslo_concurrency.lockutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:17:26 compute-0 nova_compute[192079]: 2025-10-02 12:17:26.500 2 DEBUG nova.virt.hardware [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:17:26 compute-0 nova_compute[192079]: 2025-10-02 12:17:26.501 2 INFO nova.compute.claims [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:17:26 compute-0 nova_compute[192079]: 2025-10-02 12:17:26.527 2 DEBUG oslo_concurrency.lockutils [None req-2dbdd85a-c69a-4b17-b0fc-dc9af093731b 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 3.519s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:17:26 compute-0 nova_compute[192079]: 2025-10-02 12:17:26.702 2 DEBUG nova.compute.provider_tree [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:17:26 compute-0 nova_compute[192079]: 2025-10-02 12:17:26.719 2 DEBUG nova.scheduler.client.report [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:17:26 compute-0 nova_compute[192079]: 2025-10-02 12:17:26.744 2 DEBUG oslo_concurrency.lockutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.255s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:17:26 compute-0 nova_compute[192079]: 2025-10-02 12:17:26.745 2 DEBUG nova.compute.manager [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:17:28 compute-0 podman[232990]: 2025-10-02 12:17:28.144726398 +0000 UTC m=+0.060933065 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, vendor=Red Hat, Inc., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., vcs-type=git, build-date=2025-08-20T13:12:41, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, maintainer=Red Hat, Inc., version=9.6, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, managed_by=edpm_ansible, name=ubi9-minimal, url=https://catalog.redhat.com/en/search?searchType=containers, architecture=x86_64, distribution-scope=public, com.redhat.component=ubi9-minimal-container, io.buildah.version=1.33.7, config_id=edpm, io.openshift.expose-services=, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., container_name=openstack_network_exporter, release=1755695350, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.openshift.tags=minimal rhel9)
Oct 02 12:17:28 compute-0 podman[232991]: 2025-10-02 12:17:28.148869001 +0000 UTC m=+0.060895073 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, config_id=multipathd, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, managed_by=edpm_ansible)
Oct 02 12:17:28 compute-0 nova_compute[192079]: 2025-10-02 12:17:28.457 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Updating instance_info_cache with network_info: [{"id": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "address": "fa:16:3e:b3:31:fe", "network": {"id": "bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-542543245-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e0277f0bb0f4a349e2e6d8ddfa24edf", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap73f6f99f-83", "ovs_interfaceid": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:17:28 compute-0 nova_compute[192079]: 2025-10-02 12:17:28.843 2 DEBUG nova.compute.manager [req-50ec943a-c4c1-4c8f-8eea-9fcbd3a254cd req-efc0603d-fa34-443b-99f6-6c6c2e38a410 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Received event network-vif-plugged-73f6f99f-8348-41c9-8194-e4cd3d448fd9 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:17:28 compute-0 nova_compute[192079]: 2025-10-02 12:17:28.843 2 DEBUG oslo_concurrency.lockutils [req-50ec943a-c4c1-4c8f-8eea-9fcbd3a254cd req-efc0603d-fa34-443b-99f6-6c6c2e38a410 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:17:28 compute-0 nova_compute[192079]: 2025-10-02 12:17:28.843 2 DEBUG oslo_concurrency.lockutils [req-50ec943a-c4c1-4c8f-8eea-9fcbd3a254cd req-efc0603d-fa34-443b-99f6-6c6c2e38a410 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:17:28 compute-0 nova_compute[192079]: 2025-10-02 12:17:28.843 2 DEBUG oslo_concurrency.lockutils [req-50ec943a-c4c1-4c8f-8eea-9fcbd3a254cd req-efc0603d-fa34-443b-99f6-6c6c2e38a410 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:17:28 compute-0 nova_compute[192079]: 2025-10-02 12:17:28.843 2 DEBUG nova.compute.manager [req-50ec943a-c4c1-4c8f-8eea-9fcbd3a254cd req-efc0603d-fa34-443b-99f6-6c6c2e38a410 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] No waiting events found dispatching network-vif-plugged-73f6f99f-8348-41c9-8194-e4cd3d448fd9 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:17:28 compute-0 nova_compute[192079]: 2025-10-02 12:17:28.844 2 WARNING nova.compute.manager [req-50ec943a-c4c1-4c8f-8eea-9fcbd3a254cd req-efc0603d-fa34-443b-99f6-6c6c2e38a410 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Received unexpected event network-vif-plugged-73f6f99f-8348-41c9-8194-e4cd3d448fd9 for instance with vm_state stopped and task_state None.
Oct 02 12:17:28 compute-0 nova_compute[192079]: 2025-10-02 12:17:28.849 2 DEBUG nova.compute.manager [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:17:28 compute-0 nova_compute[192079]: 2025-10-02 12:17:28.850 2 DEBUG nova.network.neutron [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:17:28 compute-0 nova_compute[192079]: 2025-10-02 12:17:28.865 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Releasing lock "refresh_cache-ebc56e2c-d3a3-4ade-8849-7e23fc710e78" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:17:28 compute-0 nova_compute[192079]: 2025-10-02 12:17:28.865 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Updated the network info_cache for instance _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9929
Oct 02 12:17:28 compute-0 nova_compute[192079]: 2025-10-02 12:17:28.865 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:17:28 compute-0 nova_compute[192079]: 2025-10-02 12:17:28.865 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:17:28 compute-0 nova_compute[192079]: 2025-10-02 12:17:28.866 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:17:28 compute-0 nova_compute[192079]: 2025-10-02 12:17:28.882 2 INFO nova.virt.libvirt.driver [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:17:28 compute-0 nova_compute[192079]: 2025-10-02 12:17:28.898 2 DEBUG nova.compute.manager [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:17:28 compute-0 nova_compute[192079]: 2025-10-02 12:17:28.948 2 DEBUG nova.objects.instance [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lazy-loading 'flavor' on Instance uuid ebc56e2c-d3a3-4ade-8849-7e23fc710e78 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:17:28 compute-0 nova_compute[192079]: 2025-10-02 12:17:28.989 2 DEBUG nova.objects.instance [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lazy-loading 'info_cache' on Instance uuid ebc56e2c-d3a3-4ade-8849-7e23fc710e78 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.018 2 DEBUG oslo_concurrency.lockutils [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Acquiring lock "refresh_cache-ebc56e2c-d3a3-4ade-8849-7e23fc710e78" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.019 2 DEBUG oslo_concurrency.lockutils [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Acquired lock "refresh_cache-ebc56e2c-d3a3-4ade-8849-7e23fc710e78" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.019 2 DEBUG nova.network.neutron [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.107 2 DEBUG nova.policy [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0c0ba8ddde504431b51e593c63f40361', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd5db64e6714348c1a7f57bb53de80915', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.130 2 DEBUG nova.compute.manager [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.131 2 DEBUG nova.virt.libvirt.driver [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.131 2 INFO nova.virt.libvirt.driver [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Creating image(s)
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.132 2 DEBUG oslo_concurrency.lockutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Acquiring lock "/var/lib/nova/instances/21aa2a67-6284-4d30-9a7c-499db76c4042/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.132 2 DEBUG oslo_concurrency.lockutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Lock "/var/lib/nova/instances/21aa2a67-6284-4d30-9a7c-499db76c4042/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.133 2 DEBUG oslo_concurrency.lockutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Lock "/var/lib/nova/instances/21aa2a67-6284-4d30-9a7c-499db76c4042/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.147 2 DEBUG oslo_concurrency.processutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.202 2 DEBUG oslo_concurrency.processutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.203 2 DEBUG oslo_concurrency.lockutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.204 2 DEBUG oslo_concurrency.lockutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.216 2 DEBUG oslo_concurrency.processutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.287 2 DEBUG oslo_concurrency.processutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.071s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.288 2 DEBUG oslo_concurrency.processutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/21aa2a67-6284-4d30-9a7c-499db76c4042/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.339 2 DEBUG oslo_concurrency.processutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/21aa2a67-6284-4d30-9a7c-499db76c4042/disk 1073741824" returned: 0 in 0.051s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.340 2 DEBUG oslo_concurrency.lockutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.136s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.341 2 DEBUG oslo_concurrency.processutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.393 2 DEBUG oslo_concurrency.processutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.053s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.394 2 DEBUG nova.virt.disk.api [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Checking if we can resize image /var/lib/nova/instances/21aa2a67-6284-4d30-9a7c-499db76c4042/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.395 2 DEBUG oslo_concurrency.processutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/21aa2a67-6284-4d30-9a7c-499db76c4042/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.446 2 DEBUG oslo_concurrency.processutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/21aa2a67-6284-4d30-9a7c-499db76c4042/disk --force-share --output=json" returned: 0 in 0.052s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.447 2 DEBUG nova.virt.disk.api [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Cannot resize image /var/lib/nova/instances/21aa2a67-6284-4d30-9a7c-499db76c4042/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.448 2 DEBUG nova.objects.instance [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Lazy-loading 'migration_context' on Instance uuid 21aa2a67-6284-4d30-9a7c-499db76c4042 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.502 2 DEBUG nova.virt.libvirt.driver [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.502 2 DEBUG nova.virt.libvirt.driver [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Ensure instance console log exists: /var/lib/nova/instances/21aa2a67-6284-4d30-9a7c-499db76c4042/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.503 2 DEBUG oslo_concurrency.lockutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.503 2 DEBUG oslo_concurrency.lockutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:17:29 compute-0 nova_compute[192079]: 2025-10-02 12:17:29.503 2 DEBUG oslo_concurrency.lockutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:17:30 compute-0 nova_compute[192079]: 2025-10-02 12:17:30.096 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:30 compute-0 nova_compute[192079]: 2025-10-02 12:17:30.778 2 DEBUG nova.network.neutron [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Successfully created port: 61697d43-f76f-4fbc-9f9c-d624fa50ac8f _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:17:31 compute-0 nova_compute[192079]: 2025-10-02 12:17:31.360 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:31 compute-0 nova_compute[192079]: 2025-10-02 12:17:31.909 2 DEBUG nova.network.neutron [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Updating instance_info_cache with network_info: [{"id": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "address": "fa:16:3e:b3:31:fe", "network": {"id": "bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-542543245-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e0277f0bb0f4a349e2e6d8ddfa24edf", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap73f6f99f-83", "ovs_interfaceid": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.477 2 DEBUG oslo_concurrency.lockutils [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Releasing lock "refresh_cache-ebc56e2c-d3a3-4ade-8849-7e23fc710e78" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.524 2 INFO nova.virt.libvirt.driver [-] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Instance destroyed successfully.
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.524 2 DEBUG nova.objects.instance [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lazy-loading 'numa_topology' on Instance uuid ebc56e2c-d3a3-4ade-8849-7e23fc710e78 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.537 2 DEBUG nova.objects.instance [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lazy-loading 'resources' on Instance uuid ebc56e2c-d3a3-4ade-8849-7e23fc710e78 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.564 2 DEBUG nova.virt.libvirt.vif [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:16:50Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ListServerFiltersTestJSON-instance-1707027906',display_name='tempest-ListServerFiltersTestJSON-instance-1707027906',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-listserverfilterstestjson-instance-1707027906',id=85,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:16:58Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=4,progress=0,project_id='6e0277f0bb0f4a349e2e6d8ddfa24edf',ramdisk_id='',reservation_id='r-wvwxsid2',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=<?>,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_
disk='1',image_min_ram='0',owner_project_name='tempest-ListServerFiltersTestJSON-298715262',owner_user_name='tempest-ListServerFiltersTestJSON-298715262-project-member'},tags=<?>,task_state='powering-on',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:17:26Z,user_data=None,user_id='001d2d51902d4e299b775131f430a5db',uuid=ebc56e2c-d3a3-4ade-8849-7e23fc710e78,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='stopped') vif={"id": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "address": "fa:16:3e:b3:31:fe", "network": {"id": "bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-542543245-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e0277f0bb0f4a349e2e6d8ddfa24edf", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap73f6f99f-83", "ovs_interfaceid": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.565 2 DEBUG nova.network.os_vif_util [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Converting VIF {"id": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "address": "fa:16:3e:b3:31:fe", "network": {"id": "bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-542543245-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e0277f0bb0f4a349e2e6d8ddfa24edf", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap73f6f99f-83", "ovs_interfaceid": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.566 2 DEBUG nova.network.os_vif_util [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:b3:31:fe,bridge_name='br-int',has_traffic_filtering=True,id=73f6f99f-8348-41c9-8194-e4cd3d448fd9,network=Network(bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap73f6f99f-83') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.566 2 DEBUG os_vif [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:b3:31:fe,bridge_name='br-int',has_traffic_filtering=True,id=73f6f99f-8348-41c9-8194-e4cd3d448fd9,network=Network(bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap73f6f99f-83') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.567 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.568 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap73f6f99f-83, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.624 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.627 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.629 2 INFO os_vif [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:b3:31:fe,bridge_name='br-int',has_traffic_filtering=True,id=73f6f99f-8348-41c9-8194-e4cd3d448fd9,network=Network(bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap73f6f99f-83')
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.636 2 DEBUG nova.virt.libvirt.driver [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Start _get_guest_xml network_info=[{"id": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "address": "fa:16:3e:b3:31:fe", "network": {"id": "bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-542543245-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e0277f0bb0f4a349e2e6d8ddfa24edf", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap73f6f99f-83", "ovs_interfaceid": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} image_meta=ImageMeta(checksum=<?>,container_format='bare',created_at=<?>,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=1,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=<?>,status=<?>,tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>) rescue=None 
block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.639 2 WARNING nova.virt.libvirt.driver [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.643 2 DEBUG nova.virt.libvirt.host [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.644 2 DEBUG nova.virt.libvirt.host [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.646 2 DEBUG nova.virt.libvirt.host [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.647 2 DEBUG nova.virt.libvirt.host [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.648 2 DEBUG nova.virt.libvirt.driver [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.648 2 DEBUG nova.virt.hardware [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=<?>,container_format='bare',created_at=<?>,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=1,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=<?>,status=<?>,tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.649 2 DEBUG nova.virt.hardware [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.649 2 DEBUG nova.virt.hardware [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.649 2 DEBUG nova.virt.hardware [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.649 2 DEBUG nova.virt.hardware [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.650 2 DEBUG nova.virt.hardware [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.650 2 DEBUG nova.virt.hardware [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.650 2 DEBUG nova.virt.hardware [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.650 2 DEBUG nova.virt.hardware [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.651 2 DEBUG nova.virt.hardware [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.651 2 DEBUG nova.virt.hardware [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.651 2 DEBUG nova.objects.instance [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lazy-loading 'vcpu_model' on Instance uuid ebc56e2c-d3a3-4ade-8849-7e23fc710e78 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.680 2 DEBUG oslo_concurrency.processutils [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.config --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.737 2 DEBUG oslo_concurrency.processutils [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.config --force-share --output=json" returned: 0 in 0.057s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.738 2 DEBUG oslo_concurrency.lockutils [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Acquiring lock "/var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.739 2 DEBUG oslo_concurrency.lockutils [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lock "/var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.739 2 DEBUG oslo_concurrency.lockutils [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lock "/var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.740 2 DEBUG nova.virt.libvirt.vif [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:16:50Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ListServerFiltersTestJSON-instance-1707027906',display_name='tempest-ListServerFiltersTestJSON-instance-1707027906',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-listserverfilterstestjson-instance-1707027906',id=85,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:16:58Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=4,progress=0,project_id='6e0277f0bb0f4a349e2e6d8ddfa24edf',ramdisk_id='',reservation_id='r-wvwxsid2',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=<?>,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_
disk='1',image_min_ram='0',owner_project_name='tempest-ListServerFiltersTestJSON-298715262',owner_user_name='tempest-ListServerFiltersTestJSON-298715262-project-member'},tags=<?>,task_state='powering-on',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:17:26Z,user_data=None,user_id='001d2d51902d4e299b775131f430a5db',uuid=ebc56e2c-d3a3-4ade-8849-7e23fc710e78,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='stopped') vif={"id": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "address": "fa:16:3e:b3:31:fe", "network": {"id": "bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-542543245-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e0277f0bb0f4a349e2e6d8ddfa24edf", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap73f6f99f-83", "ovs_interfaceid": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.741 2 DEBUG nova.network.os_vif_util [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Converting VIF {"id": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "address": "fa:16:3e:b3:31:fe", "network": {"id": "bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-542543245-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e0277f0bb0f4a349e2e6d8ddfa24edf", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap73f6f99f-83", "ovs_interfaceid": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.741 2 DEBUG nova.network.os_vif_util [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:b3:31:fe,bridge_name='br-int',has_traffic_filtering=True,id=73f6f99f-8348-41c9-8194-e4cd3d448fd9,network=Network(bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap73f6f99f-83') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.742 2 DEBUG nova.objects.instance [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lazy-loading 'pci_devices' on Instance uuid ebc56e2c-d3a3-4ade-8849-7e23fc710e78 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.762 2 DEBUG nova.virt.libvirt.driver [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:17:33 compute-0 nova_compute[192079]:   <uuid>ebc56e2c-d3a3-4ade-8849-7e23fc710e78</uuid>
Oct 02 12:17:33 compute-0 nova_compute[192079]:   <name>instance-00000055</name>
Oct 02 12:17:33 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:17:33 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:17:33 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:17:33 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:       <nova:name>tempest-ListServerFiltersTestJSON-instance-1707027906</nova:name>
Oct 02 12:17:33 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:17:33</nova:creationTime>
Oct 02 12:17:33 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:17:33 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:17:33 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:17:33 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:17:33 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:17:33 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:17:33 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:17:33 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:17:33 compute-0 nova_compute[192079]:         <nova:user uuid="001d2d51902d4e299b775131f430a5db">tempest-ListServerFiltersTestJSON-298715262-project-member</nova:user>
Oct 02 12:17:33 compute-0 nova_compute[192079]:         <nova:project uuid="6e0277f0bb0f4a349e2e6d8ddfa24edf">tempest-ListServerFiltersTestJSON-298715262</nova:project>
Oct 02 12:17:33 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:17:33 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:17:33 compute-0 nova_compute[192079]:         <nova:port uuid="73f6f99f-8348-41c9-8194-e4cd3d448fd9">
Oct 02 12:17:33 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.11" ipVersion="4"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:17:33 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:17:33 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:17:33 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <system>
Oct 02 12:17:33 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:17:33 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:17:33 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:17:33 compute-0 nova_compute[192079]:       <entry name="serial">ebc56e2c-d3a3-4ade-8849-7e23fc710e78</entry>
Oct 02 12:17:33 compute-0 nova_compute[192079]:       <entry name="uuid">ebc56e2c-d3a3-4ade-8849-7e23fc710e78</entry>
Oct 02 12:17:33 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     </system>
Oct 02 12:17:33 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:17:33 compute-0 nova_compute[192079]:   <os>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:   </os>
Oct 02 12:17:33 compute-0 nova_compute[192079]:   <features>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:   </features>
Oct 02 12:17:33 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:17:33 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:17:33 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:17:33 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:17:33 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk.config"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:17:33 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:b3:31:fe"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:       <target dev="tap73f6f99f-83"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:17:33 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/console.log" append="off"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <video>
Oct 02 12:17:33 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     </video>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <input type="keyboard" bus="usb"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:17:33 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:17:33 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:17:33 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:17:33 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:17:33 compute-0 nova_compute[192079]: </domain>
Oct 02 12:17:33 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.764 2 DEBUG oslo_concurrency.processutils [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.826 2 DEBUG oslo_concurrency.processutils [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk --force-share --output=json" returned: 0 in 0.062s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.828 2 DEBUG oslo_concurrency.processutils [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.886 2 DEBUG oslo_concurrency.processutils [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk --force-share --output=json" returned: 0 in 0.058s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.888 2 DEBUG nova.objects.instance [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lazy-loading 'trusted_certs' on Instance uuid ebc56e2c-d3a3-4ade-8849-7e23fc710e78 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.912 2 DEBUG oslo_concurrency.processutils [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.973 2 DEBUG oslo_concurrency.processutils [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.060s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.974 2 DEBUG nova.virt.disk.api [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Checking if we can resize image /var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:17:33 compute-0 nova_compute[192079]: 2025-10-02 12:17:33.975 2 DEBUG oslo_concurrency.processutils [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.035 2 DEBUG oslo_concurrency.processutils [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk --force-share --output=json" returned: 0 in 0.059s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.036 2 DEBUG nova.virt.disk.api [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Cannot resize image /var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.037 2 DEBUG nova.objects.instance [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lazy-loading 'migration_context' on Instance uuid ebc56e2c-d3a3-4ade-8849-7e23fc710e78 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.064 2 DEBUG nova.virt.libvirt.vif [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:16:50Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ListServerFiltersTestJSON-instance-1707027906',display_name='tempest-ListServerFiltersTestJSON-instance-1707027906',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-listserverfilterstestjson-instance-1707027906',id=85,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:16:58Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=<?>,power_state=4,progress=0,project_id='6e0277f0bb0f4a349e2e6d8ddfa24edf',ramdisk_id='',reservation_id='r-wvwxsid2',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=<?>,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio'
,image_min_disk='1',image_min_ram='0',owner_project_name='tempest-ListServerFiltersTestJSON-298715262',owner_user_name='tempest-ListServerFiltersTestJSON-298715262-project-member'},tags=<?>,task_state='powering-on',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:17:26Z,user_data=None,user_id='001d2d51902d4e299b775131f430a5db',uuid=ebc56e2c-d3a3-4ade-8849-7e23fc710e78,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='stopped') vif={"id": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "address": "fa:16:3e:b3:31:fe", "network": {"id": "bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-542543245-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e0277f0bb0f4a349e2e6d8ddfa24edf", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap73f6f99f-83", "ovs_interfaceid": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.065 2 DEBUG nova.network.os_vif_util [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Converting VIF {"id": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "address": "fa:16:3e:b3:31:fe", "network": {"id": "bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-542543245-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e0277f0bb0f4a349e2e6d8ddfa24edf", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap73f6f99f-83", "ovs_interfaceid": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.066 2 DEBUG nova.network.os_vif_util [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:b3:31:fe,bridge_name='br-int',has_traffic_filtering=True,id=73f6f99f-8348-41c9-8194-e4cd3d448fd9,network=Network(bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap73f6f99f-83') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.067 2 DEBUG os_vif [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:b3:31:fe,bridge_name='br-int',has_traffic_filtering=True,id=73f6f99f-8348-41c9-8194-e4cd3d448fd9,network=Network(bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap73f6f99f-83') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.069 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.070 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.071 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.074 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.074 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap73f6f99f-83, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.076 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap73f6f99f-83, col_values=(('external_ids', {'iface-id': '73f6f99f-8348-41c9-8194-e4cd3d448fd9', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:b3:31:fe', 'vm-uuid': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:17:34 compute-0 NetworkManager[51160]: <info>  [1759407454.0790] manager: (tap73f6f99f-83): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/145)
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.079 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.083 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.085 2 INFO os_vif [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:b3:31:fe,bridge_name='br-int',has_traffic_filtering=True,id=73f6f99f-8348-41c9-8194-e4cd3d448fd9,network=Network(bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap73f6f99f-83')
Oct 02 12:17:34 compute-0 podman[233062]: 2025-10-02 12:17:34.164821938 +0000 UTC m=+0.067578003 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:17:34 compute-0 podman[233065]: 2025-10-02 12:17:34.165293681 +0000 UTC m=+0.067227744 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=iscsid, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_id=iscsid, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0)
Oct 02 12:17:34 compute-0 kernel: tap73f6f99f-83: entered promiscuous mode
Oct 02 12:17:34 compute-0 NetworkManager[51160]: <info>  [1759407454.1935] manager: (tap73f6f99f-83): new Tun device (/org/freedesktop/NetworkManager/Devices/146)
Oct 02 12:17:34 compute-0 ovn_controller[94336]: 2025-10-02T12:17:34Z|00286|binding|INFO|Claiming lport 73f6f99f-8348-41c9-8194-e4cd3d448fd9 for this chassis.
Oct 02 12:17:34 compute-0 ovn_controller[94336]: 2025-10-02T12:17:34Z|00287|binding|INFO|73f6f99f-8348-41c9-8194-e4cd3d448fd9: Claiming fa:16:3e:b3:31:fe 10.100.0.11
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.196 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:34.204 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:b3:31:fe 10.100.0.11'], port_security=['fa:16:3e:b3:31:fe 10.100.0.11'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.11/28', 'neutron:device_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'neutron:revision_number': '5', 'neutron:security_group_ids': 'b4e0bc42-3cfd-4f42-a319-553606576b33', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=a043239b-039e-45fa-8277-43e361a8bae7, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=2, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=73f6f99f-8348-41c9-8194-e4cd3d448fd9) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:34.206 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 73f6f99f-8348-41c9-8194-e4cd3d448fd9 in datapath bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5 bound to our chassis
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:34.207 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:34.218 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2dafee1a-179b-4ba9-8fe5-f410ea4b6eed]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:34.219 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tapbd543a6a-b1 in ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:17:34 compute-0 systemd-udevd[233119]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:34.224 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tapbd543a6a-b0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:34.225 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c136066b-abe6-44b9-b949-cf4a9ee0c955]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:34.225 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0c48dbdb-6220-4232-8d98-0da70a1a7941]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:34 compute-0 ovn_controller[94336]: 2025-10-02T12:17:34Z|00288|binding|INFO|Setting lport 73f6f99f-8348-41c9-8194-e4cd3d448fd9 ovn-installed in OVS
Oct 02 12:17:34 compute-0 ovn_controller[94336]: 2025-10-02T12:17:34Z|00289|binding|INFO|Setting lport 73f6f99f-8348-41c9-8194-e4cd3d448fd9 up in Southbound
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.227 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.230 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:34.240 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[961a5251-7764-47d3-8016-f2870a6bfefb]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:34 compute-0 NetworkManager[51160]: <info>  [1759407454.2428] device (tap73f6f99f-83): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:17:34 compute-0 NetworkManager[51160]: <info>  [1759407454.2437] device (tap73f6f99f-83): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:17:34 compute-0 systemd-machined[152150]: New machine qemu-40-instance-00000055.
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:34.263 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ffa9b211-d025-4c97-ab33-1770c4c80176]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:34 compute-0 systemd[1]: Started Virtual Machine qemu-40-instance-00000055.
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:34.293 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[e3619535-fdc4-40e8-bfd2-4988441c34b0]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:34 compute-0 NetworkManager[51160]: <info>  [1759407454.3023] manager: (tapbd543a6a-b0): new Veth device (/org/freedesktop/NetworkManager/Devices/147)
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:34.301 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[40de6b01-ae7a-4698-a628-e89d63399bec]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:34.332 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[57c18bb4-2a40-4b5d-a32f-b1f6867d1033]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:34.335 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[0ad54e85-c542-4e55-8a31-c527c1b8c0c9]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:34 compute-0 NetworkManager[51160]: <info>  [1759407454.3586] device (tapbd543a6a-b0): carrier: link connected
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:34.364 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[9595daf8-c330-4444-9e38-4e57b0f98b7d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:34.381 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4365d447-1069-434a-bb5e-aa17cd1ba3d5]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapbd543a6a-b1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:71:7a:4a'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 92], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 545198, 'reachable_time': 31228, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 233154, 'error': None, 'target': 'ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:34.396 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[420bb470-6f61-46c0-89b0-b0cda282d460]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe71:7a4a'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 545198, 'tstamp': 545198}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 233155, 'error': None, 'target': 'ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:34.413 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a7f27c18-e6a9-40df-a78b-cf91762386ed]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapbd543a6a-b1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:71:7a:4a'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 92], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 545198, 'reachable_time': 31228, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 233156, 'error': None, 'target': 'ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:34.439 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6fe7054e-b9c6-4e41-b086-530877d8bd0c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:34.495 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[470213ac-8b26-4a38-9617-7e14a0dc01a1]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:34.497 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapbd543a6a-b0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:34.497 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:34.497 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapbd543a6a-b0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:17:34 compute-0 NetworkManager[51160]: <info>  [1759407454.5005] manager: (tapbd543a6a-b0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/148)
Oct 02 12:17:34 compute-0 kernel: tapbd543a6a-b0: entered promiscuous mode
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:34.504 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tapbd543a6a-b0, col_values=(('external_ids', {'iface-id': '1bd1cb43-f90b-4e8c-92cc-e89ec36a0b0f'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.500 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:34 compute-0 ovn_controller[94336]: 2025-10-02T12:17:34Z|00290|binding|INFO|Releasing lport 1bd1cb43-f90b-4e8c-92cc-e89ec36a0b0f from this chassis (sb_readonly=0)
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:34.507 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:34.508 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[34ca3677-fc73-4159-ad24-c2f1aa518a3e]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:34.509 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5.pid.haproxy
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:17:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:34.510 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5', 'env', 'PROCESS_TAG=haproxy-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.518 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.524 2 DEBUG nova.network.neutron [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Successfully updated port: 61697d43-f76f-4fbc-9f9c-d624fa50ac8f _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.537 2 DEBUG nova.compute.manager [req-0f9476ee-01db-4dee-a48a-02ac883c7214 req-3fc79eb7-74c9-45f5-979e-165953925439 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Received event network-vif-plugged-73f6f99f-8348-41c9-8194-e4cd3d448fd9 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.537 2 DEBUG oslo_concurrency.lockutils [req-0f9476ee-01db-4dee-a48a-02ac883c7214 req-3fc79eb7-74c9-45f5-979e-165953925439 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.538 2 DEBUG oslo_concurrency.lockutils [req-0f9476ee-01db-4dee-a48a-02ac883c7214 req-3fc79eb7-74c9-45f5-979e-165953925439 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.538 2 DEBUG oslo_concurrency.lockutils [req-0f9476ee-01db-4dee-a48a-02ac883c7214 req-3fc79eb7-74c9-45f5-979e-165953925439 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.538 2 DEBUG nova.compute.manager [req-0f9476ee-01db-4dee-a48a-02ac883c7214 req-3fc79eb7-74c9-45f5-979e-165953925439 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] No waiting events found dispatching network-vif-plugged-73f6f99f-8348-41c9-8194-e4cd3d448fd9 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.538 2 WARNING nova.compute.manager [req-0f9476ee-01db-4dee-a48a-02ac883c7214 req-3fc79eb7-74c9-45f5-979e-165953925439 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Received unexpected event network-vif-plugged-73f6f99f-8348-41c9-8194-e4cd3d448fd9 for instance with vm_state stopped and task_state powering-on.
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.544 2 DEBUG oslo_concurrency.lockutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Acquiring lock "refresh_cache-21aa2a67-6284-4d30-9a7c-499db76c4042" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.545 2 DEBUG oslo_concurrency.lockutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Acquired lock "refresh_cache-21aa2a67-6284-4d30-9a7c-499db76c4042" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.545 2 DEBUG nova.network.neutron [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.688 2 DEBUG nova.compute.manager [req-f30b52c6-c44d-41b5-a788-1a0d73af16e6 req-0f7068f2-6210-481d-889a-075f1e77e995 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Received event network-changed-61697d43-f76f-4fbc-9f9c-d624fa50ac8f external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.689 2 DEBUG nova.compute.manager [req-f30b52c6-c44d-41b5-a788-1a0d73af16e6 req-0f7068f2-6210-481d-889a-075f1e77e995 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Refreshing instance network info cache due to event network-changed-61697d43-f76f-4fbc-9f9c-d624fa50ac8f. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.689 2 DEBUG oslo_concurrency.lockutils [req-f30b52c6-c44d-41b5-a788-1a0d73af16e6 req-0f7068f2-6210-481d-889a-075f1e77e995 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-21aa2a67-6284-4d30-9a7c-499db76c4042" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:17:34 compute-0 podman[233195]: 2025-10-02 12:17:34.85291145 +0000 UTC m=+0.054776415 container create 3e79bd21414ee2a47fff4393573df261ba9294834dfb2a23568b345c7fd27ab9 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0)
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.861 2 DEBUG nova.network.neutron [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:17:34 compute-0 systemd[1]: Started libpod-conmon-3e79bd21414ee2a47fff4393573df261ba9294834dfb2a23568b345c7fd27ab9.scope.
Oct 02 12:17:34 compute-0 podman[233195]: 2025-10-02 12:17:34.822953223 +0000 UTC m=+0.024818238 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:17:34 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:17:34 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/b39b048189479d67135c89bdac8d8f9c50fd299d0fc554f82f9362eb5a9aec1e/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:17:34 compute-0 podman[233195]: 2025-10-02 12:17:34.941196166 +0000 UTC m=+0.143061221 container init 3e79bd21414ee2a47fff4393573df261ba9294834dfb2a23568b345c7fd27ab9 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2)
Oct 02 12:17:34 compute-0 podman[233195]: 2025-10-02 12:17:34.949529264 +0000 UTC m=+0.151394259 container start 3e79bd21414ee2a47fff4393573df261ba9294834dfb2a23568b345c7fd27ab9 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.schema-version=1.0)
Oct 02 12:17:34 compute-0 neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5[233210]: [NOTICE]   (233214) : New worker (233216) forked
Oct 02 12:17:34 compute-0 neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5[233210]: [NOTICE]   (233214) : Loading success.
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.990 2 DEBUG nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Removed pending event for ebc56e2c-d3a3-4ade-8849-7e23fc710e78 due to event _event_emit_delayed /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:438
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.992 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407454.9897134, ebc56e2c-d3a3-4ade-8849-7e23fc710e78 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.993 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] VM Resumed (Lifecycle Event)
Oct 02 12:17:34 compute-0 nova_compute[192079]: 2025-10-02 12:17:34.995 2 DEBUG nova.compute.manager [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:17:35 compute-0 nova_compute[192079]: 2025-10-02 12:17:35.000 2 INFO nova.virt.libvirt.driver [-] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Instance rebooted successfully.
Oct 02 12:17:35 compute-0 nova_compute[192079]: 2025-10-02 12:17:35.000 2 DEBUG nova.compute.manager [None req-45bb48eb-4f92-4949-8368-4ec5bd4bf9d2 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:17:35 compute-0 nova_compute[192079]: 2025-10-02 12:17:35.046 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:17:35 compute-0 nova_compute[192079]: 2025-10-02 12:17:35.050 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: stopped, current task_state: powering-on, current DB power_state: 4, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:17:35 compute-0 nova_compute[192079]: 2025-10-02 12:17:35.098 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] During sync_power_state the instance has a pending task (powering-on). Skip.
Oct 02 12:17:35 compute-0 nova_compute[192079]: 2025-10-02 12:17:35.098 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407454.9898973, ebc56e2c-d3a3-4ade-8849-7e23fc710e78 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:17:35 compute-0 nova_compute[192079]: 2025-10-02 12:17:35.098 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] VM Started (Lifecycle Event)
Oct 02 12:17:35 compute-0 nova_compute[192079]: 2025-10-02 12:17:35.140 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:17:35 compute-0 nova_compute[192079]: 2025-10-02 12:17:35.145 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Synchronizing instance power state after lifecycle event "Started"; current vm_state: active, current task_state: None, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:17:35 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:35.490 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '22'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:17:36 compute-0 nova_compute[192079]: 2025-10-02 12:17:36.411 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:37 compute-0 nova_compute[192079]: 2025-10-02 12:17:37.074 2 DEBUG nova.network.neutron [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Updating instance_info_cache with network_info: [{"id": "61697d43-f76f-4fbc-9f9c-d624fa50ac8f", "address": "fa:16:3e:43:cf:5c", "network": {"id": "b97b8849-844c-4190-8b13-fd7a2d073ce8", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1299594383-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "d5db64e6714348c1a7f57bb53de80915", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap61697d43-f7", "ovs_interfaceid": "61697d43-f76f-4fbc-9f9c-d624fa50ac8f", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.615 2 DEBUG nova.compute.manager [req-b0a81b0a-ff82-4093-8a59-5ee88044c4c0 req-39d86f5d-32c7-4c2d-a8dd-f67b3350c444 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Received event network-vif-plugged-73f6f99f-8348-41c9-8194-e4cd3d448fd9 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.616 2 DEBUG oslo_concurrency.lockutils [req-b0a81b0a-ff82-4093-8a59-5ee88044c4c0 req-39d86f5d-32c7-4c2d-a8dd-f67b3350c444 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.616 2 DEBUG oslo_concurrency.lockutils [req-b0a81b0a-ff82-4093-8a59-5ee88044c4c0 req-39d86f5d-32c7-4c2d-a8dd-f67b3350c444 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.617 2 DEBUG oslo_concurrency.lockutils [req-b0a81b0a-ff82-4093-8a59-5ee88044c4c0 req-39d86f5d-32c7-4c2d-a8dd-f67b3350c444 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.617 2 DEBUG nova.compute.manager [req-b0a81b0a-ff82-4093-8a59-5ee88044c4c0 req-39d86f5d-32c7-4c2d-a8dd-f67b3350c444 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] No waiting events found dispatching network-vif-plugged-73f6f99f-8348-41c9-8194-e4cd3d448fd9 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.617 2 WARNING nova.compute.manager [req-b0a81b0a-ff82-4093-8a59-5ee88044c4c0 req-39d86f5d-32c7-4c2d-a8dd-f67b3350c444 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Received unexpected event network-vif-plugged-73f6f99f-8348-41c9-8194-e4cd3d448fd9 for instance with vm_state active and task_state None.
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.756 2 DEBUG oslo_concurrency.lockutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Releasing lock "refresh_cache-21aa2a67-6284-4d30-9a7c-499db76c4042" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.756 2 DEBUG nova.compute.manager [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Instance network_info: |[{"id": "61697d43-f76f-4fbc-9f9c-d624fa50ac8f", "address": "fa:16:3e:43:cf:5c", "network": {"id": "b97b8849-844c-4190-8b13-fd7a2d073ce8", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1299594383-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "d5db64e6714348c1a7f57bb53de80915", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap61697d43-f7", "ovs_interfaceid": "61697d43-f76f-4fbc-9f9c-d624fa50ac8f", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.757 2 DEBUG oslo_concurrency.lockutils [req-f30b52c6-c44d-41b5-a788-1a0d73af16e6 req-0f7068f2-6210-481d-889a-075f1e77e995 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-21aa2a67-6284-4d30-9a7c-499db76c4042" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.757 2 DEBUG nova.network.neutron [req-f30b52c6-c44d-41b5-a788-1a0d73af16e6 req-0f7068f2-6210-481d-889a-075f1e77e995 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Refreshing network info cache for port 61697d43-f76f-4fbc-9f9c-d624fa50ac8f _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.760 2 DEBUG nova.virt.libvirt.driver [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Start _get_guest_xml network_info=[{"id": "61697d43-f76f-4fbc-9f9c-d624fa50ac8f", "address": "fa:16:3e:43:cf:5c", "network": {"id": "b97b8849-844c-4190-8b13-fd7a2d073ce8", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1299594383-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "d5db64e6714348c1a7f57bb53de80915", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap61697d43-f7", "ovs_interfaceid": "61697d43-f76f-4fbc-9f9c-d624fa50ac8f", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.765 2 WARNING nova.virt.libvirt.driver [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.772 2 DEBUG nova.virt.libvirt.host [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.773 2 DEBUG nova.virt.libvirt.host [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.776 2 DEBUG nova.virt.libvirt.host [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.777 2 DEBUG nova.virt.libvirt.host [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.778 2 DEBUG nova.virt.libvirt.driver [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.778 2 DEBUG nova.virt.hardware [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.779 2 DEBUG nova.virt.hardware [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.779 2 DEBUG nova.virt.hardware [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.780 2 DEBUG nova.virt.hardware [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.781 2 DEBUG nova.virt.hardware [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.781 2 DEBUG nova.virt.hardware [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.781 2 DEBUG nova.virt.hardware [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.781 2 DEBUG nova.virt.hardware [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.782 2 DEBUG nova.virt.hardware [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.782 2 DEBUG nova.virt.hardware [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.782 2 DEBUG nova.virt.hardware [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.786 2 DEBUG nova.virt.libvirt.vif [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:17:24Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-DeleteServersTestJSON-server-831017505',display_name='tempest-DeleteServersTestJSON-server-831017505',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-deleteserverstestjson-server-831017505',id=88,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='d5db64e6714348c1a7f57bb53de80915',ramdisk_id='',reservation_id='r-sz03g7b2',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-DeleteServersTestJSON-548982240',owner_user_name='tempest-DeleteServersTestJSON-548982240
-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:17:28Z,user_data=None,user_id='0c0ba8ddde504431b51e593c63f40361',uuid=21aa2a67-6284-4d30-9a7c-499db76c4042,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "61697d43-f76f-4fbc-9f9c-d624fa50ac8f", "address": "fa:16:3e:43:cf:5c", "network": {"id": "b97b8849-844c-4190-8b13-fd7a2d073ce8", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1299594383-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "d5db64e6714348c1a7f57bb53de80915", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap61697d43-f7", "ovs_interfaceid": "61697d43-f76f-4fbc-9f9c-d624fa50ac8f", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.787 2 DEBUG nova.network.os_vif_util [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Converting VIF {"id": "61697d43-f76f-4fbc-9f9c-d624fa50ac8f", "address": "fa:16:3e:43:cf:5c", "network": {"id": "b97b8849-844c-4190-8b13-fd7a2d073ce8", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1299594383-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "d5db64e6714348c1a7f57bb53de80915", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap61697d43-f7", "ovs_interfaceid": "61697d43-f76f-4fbc-9f9c-d624fa50ac8f", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.787 2 DEBUG nova.network.os_vif_util [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:43:cf:5c,bridge_name='br-int',has_traffic_filtering=True,id=61697d43-f76f-4fbc-9f9c-d624fa50ac8f,network=Network(b97b8849-844c-4190-8b13-fd7a2d073ce8),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap61697d43-f7') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.788 2 DEBUG nova.objects.instance [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Lazy-loading 'pci_devices' on Instance uuid 21aa2a67-6284-4d30-9a7c-499db76c4042 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.835 2 DEBUG nova.virt.libvirt.driver [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:17:38 compute-0 nova_compute[192079]:   <uuid>21aa2a67-6284-4d30-9a7c-499db76c4042</uuid>
Oct 02 12:17:38 compute-0 nova_compute[192079]:   <name>instance-00000058</name>
Oct 02 12:17:38 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:17:38 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:17:38 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:17:38 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:       <nova:name>tempest-DeleteServersTestJSON-server-831017505</nova:name>
Oct 02 12:17:38 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:17:38</nova:creationTime>
Oct 02 12:17:38 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:17:38 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:17:38 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:17:38 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:17:38 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:17:38 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:17:38 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:17:38 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:17:38 compute-0 nova_compute[192079]:         <nova:user uuid="0c0ba8ddde504431b51e593c63f40361">tempest-DeleteServersTestJSON-548982240-project-member</nova:user>
Oct 02 12:17:38 compute-0 nova_compute[192079]:         <nova:project uuid="d5db64e6714348c1a7f57bb53de80915">tempest-DeleteServersTestJSON-548982240</nova:project>
Oct 02 12:17:38 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:17:38 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:17:38 compute-0 nova_compute[192079]:         <nova:port uuid="61697d43-f76f-4fbc-9f9c-d624fa50ac8f">
Oct 02 12:17:38 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.8" ipVersion="4"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:17:38 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:17:38 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:17:38 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <system>
Oct 02 12:17:38 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:17:38 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:17:38 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:17:38 compute-0 nova_compute[192079]:       <entry name="serial">21aa2a67-6284-4d30-9a7c-499db76c4042</entry>
Oct 02 12:17:38 compute-0 nova_compute[192079]:       <entry name="uuid">21aa2a67-6284-4d30-9a7c-499db76c4042</entry>
Oct 02 12:17:38 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     </system>
Oct 02 12:17:38 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:17:38 compute-0 nova_compute[192079]:   <os>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:   </os>
Oct 02 12:17:38 compute-0 nova_compute[192079]:   <features>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:   </features>
Oct 02 12:17:38 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:17:38 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:17:38 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:17:38 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/21aa2a67-6284-4d30-9a7c-499db76c4042/disk"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:17:38 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/21aa2a67-6284-4d30-9a7c-499db76c4042/disk.config"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:17:38 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:43:cf:5c"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:       <target dev="tap61697d43-f7"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:17:38 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/21aa2a67-6284-4d30-9a7c-499db76c4042/console.log" append="off"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <video>
Oct 02 12:17:38 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     </video>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:17:38 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:17:38 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:17:38 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:17:38 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:17:38 compute-0 nova_compute[192079]: </domain>
Oct 02 12:17:38 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.836 2 DEBUG nova.compute.manager [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Preparing to wait for external event network-vif-plugged-61697d43-f76f-4fbc-9f9c-d624fa50ac8f prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.836 2 DEBUG oslo_concurrency.lockutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Acquiring lock "21aa2a67-6284-4d30-9a7c-499db76c4042-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.837 2 DEBUG oslo_concurrency.lockutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Lock "21aa2a67-6284-4d30-9a7c-499db76c4042-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.837 2 DEBUG oslo_concurrency.lockutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Lock "21aa2a67-6284-4d30-9a7c-499db76c4042-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.838 2 DEBUG nova.virt.libvirt.vif [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:17:24Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-DeleteServersTestJSON-server-831017505',display_name='tempest-DeleteServersTestJSON-server-831017505',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-deleteserverstestjson-server-831017505',id=88,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='d5db64e6714348c1a7f57bb53de80915',ramdisk_id='',reservation_id='r-sz03g7b2',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-DeleteServersTestJSON-548982240',owner_user_name='tempest-DeleteServersTestJSON
-548982240-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:17:28Z,user_data=None,user_id='0c0ba8ddde504431b51e593c63f40361',uuid=21aa2a67-6284-4d30-9a7c-499db76c4042,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "61697d43-f76f-4fbc-9f9c-d624fa50ac8f", "address": "fa:16:3e:43:cf:5c", "network": {"id": "b97b8849-844c-4190-8b13-fd7a2d073ce8", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1299594383-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "d5db64e6714348c1a7f57bb53de80915", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap61697d43-f7", "ovs_interfaceid": "61697d43-f76f-4fbc-9f9c-d624fa50ac8f", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.838 2 DEBUG nova.network.os_vif_util [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Converting VIF {"id": "61697d43-f76f-4fbc-9f9c-d624fa50ac8f", "address": "fa:16:3e:43:cf:5c", "network": {"id": "b97b8849-844c-4190-8b13-fd7a2d073ce8", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1299594383-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "d5db64e6714348c1a7f57bb53de80915", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap61697d43-f7", "ovs_interfaceid": "61697d43-f76f-4fbc-9f9c-d624fa50ac8f", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.839 2 DEBUG nova.network.os_vif_util [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:43:cf:5c,bridge_name='br-int',has_traffic_filtering=True,id=61697d43-f76f-4fbc-9f9c-d624fa50ac8f,network=Network(b97b8849-844c-4190-8b13-fd7a2d073ce8),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap61697d43-f7') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.839 2 DEBUG os_vif [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:43:cf:5c,bridge_name='br-int',has_traffic_filtering=True,id=61697d43-f76f-4fbc-9f9c-d624fa50ac8f,network=Network(b97b8849-844c-4190-8b13-fd7a2d073ce8),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap61697d43-f7') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.840 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.840 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.840 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.843 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.843 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap61697d43-f7, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.844 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap61697d43-f7, col_values=(('external_ids', {'iface-id': '61697d43-f76f-4fbc-9f9c-d624fa50ac8f', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:43:cf:5c', 'vm-uuid': '21aa2a67-6284-4d30-9a7c-499db76c4042'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.845 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:38 compute-0 NetworkManager[51160]: <info>  [1759407458.8468] manager: (tap61697d43-f7): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/149)
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.848 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.852 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.852 2 INFO os_vif [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:43:cf:5c,bridge_name='br-int',has_traffic_filtering=True,id=61697d43-f76f-4fbc-9f9c-d624fa50ac8f,network=Network(b97b8849-844c-4190-8b13-fd7a2d073ce8),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap61697d43-f7')
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.945 2 DEBUG nova.virt.libvirt.driver [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.946 2 DEBUG nova.virt.libvirt.driver [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.946 2 DEBUG nova.virt.libvirt.driver [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] No VIF found with MAC fa:16:3e:43:cf:5c, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:17:38 compute-0 nova_compute[192079]: 2025-10-02 12:17:38.947 2 INFO nova.virt.libvirt.driver [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Using config drive
Oct 02 12:17:41 compute-0 nova_compute[192079]: 2025-10-02 12:17:41.149 2 INFO nova.virt.libvirt.driver [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Creating config drive at /var/lib/nova/instances/21aa2a67-6284-4d30-9a7c-499db76c4042/disk.config
Oct 02 12:17:41 compute-0 nova_compute[192079]: 2025-10-02 12:17:41.154 2 DEBUG oslo_concurrency.processutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/21aa2a67-6284-4d30-9a7c-499db76c4042/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpzrp5lgfq execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:17:41 compute-0 nova_compute[192079]: 2025-10-02 12:17:41.295 2 DEBUG oslo_concurrency.processutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/21aa2a67-6284-4d30-9a7c-499db76c4042/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpzrp5lgfq" returned: 0 in 0.141s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:17:41 compute-0 NetworkManager[51160]: <info>  [1759407461.3722] manager: (tap61697d43-f7): new Tun device (/org/freedesktop/NetworkManager/Devices/150)
Oct 02 12:17:41 compute-0 kernel: tap61697d43-f7: entered promiscuous mode
Oct 02 12:17:41 compute-0 systemd-udevd[233244]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:17:41 compute-0 nova_compute[192079]: 2025-10-02 12:17:41.426 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:41 compute-0 ovn_controller[94336]: 2025-10-02T12:17:41Z|00291|binding|INFO|Claiming lport 61697d43-f76f-4fbc-9f9c-d624fa50ac8f for this chassis.
Oct 02 12:17:41 compute-0 ovn_controller[94336]: 2025-10-02T12:17:41Z|00292|binding|INFO|61697d43-f76f-4fbc-9f9c-d624fa50ac8f: Claiming fa:16:3e:43:cf:5c 10.100.0.8
Oct 02 12:17:41 compute-0 NetworkManager[51160]: <info>  [1759407461.4376] device (tap61697d43-f7): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:17:41 compute-0 NetworkManager[51160]: <info>  [1759407461.4389] device (tap61697d43-f7): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:41.436 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:43:cf:5c 10.100.0.8'], port_security=['fa:16:3e:43:cf:5c 10.100.0.8'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.8/28', 'neutron:device_id': '21aa2a67-6284-4d30-9a7c-499db76c4042', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-b97b8849-844c-4190-8b13-fd7a2d073ce8', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'd5db64e6714348c1a7f57bb53de80915', 'neutron:revision_number': '2', 'neutron:security_group_ids': '063f732a-6071-414f-814d-a5d6c4e9e012', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=2011b0da-7062-465f-963e-59e92e88a653, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=61697d43-f76f-4fbc-9f9c-d624fa50ac8f) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:41.438 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 61697d43-f76f-4fbc-9f9c-d624fa50ac8f in datapath b97b8849-844c-4190-8b13-fd7a2d073ce8 bound to our chassis
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:41.439 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network b97b8849-844c-4190-8b13-fd7a2d073ce8
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:41.450 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[611e0039-6b81-4c7c-b8f4-25a872e860d0]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:41.451 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tapb97b8849-81 in ovnmeta-b97b8849-844c-4190-8b13-fd7a2d073ce8 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:41.453 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tapb97b8849-80 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:41.453 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a16a1622-d382-41d0-8c3e-abdb400234f1]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:41.454 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2eeb780d-23c9-4061-b469-38abd35283ff]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:41 compute-0 systemd-machined[152150]: New machine qemu-41-instance-00000058.
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:41.464 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[c53c9ccb-8a08-47ad-aa7c-0b07b1835f8a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:41 compute-0 ovn_controller[94336]: 2025-10-02T12:17:41Z|00293|binding|INFO|Setting lport 61697d43-f76f-4fbc-9f9c-d624fa50ac8f ovn-installed in OVS
Oct 02 12:17:41 compute-0 ovn_controller[94336]: 2025-10-02T12:17:41Z|00294|binding|INFO|Setting lport 61697d43-f76f-4fbc-9f9c-d624fa50ac8f up in Southbound
Oct 02 12:17:41 compute-0 nova_compute[192079]: 2025-10-02 12:17:41.482 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:41 compute-0 systemd[1]: Started Virtual Machine qemu-41-instance-00000058.
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:41.488 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[58bbde7d-ac95-49d8-95b0-286878ed2133]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:41.517 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[7a4d9d02-026f-40f5-93a6-4a19b47de35f]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:41.523 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[bbe0e133-183f-4e3d-9f99-e6b777b68c8b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:41 compute-0 NetworkManager[51160]: <info>  [1759407461.5244] manager: (tapb97b8849-80): new Veth device (/org/freedesktop/NetworkManager/Devices/151)
Oct 02 12:17:41 compute-0 systemd-udevd[233248]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:41.558 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[cb72b1e2-e7f8-4a02-a4f5-f14ab7db1b94]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:41.563 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[aaefbd52-2196-4bf3-9a7f-9a6596d20b59]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:41 compute-0 NetworkManager[51160]: <info>  [1759407461.5867] device (tapb97b8849-80): carrier: link connected
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:41.592 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[95cdc9f9-3824-4fbb-979d-5b3c36dcf989]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:41.609 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ea5afa75-92f7-490b-9a5f-e3e16a06b28d]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapb97b8849-81'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:ea:e0:b0'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 94], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 545921, 'reachable_time': 18433, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 233280, 'error': None, 'target': 'ovnmeta-b97b8849-844c-4190-8b13-fd7a2d073ce8', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:41.624 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2fad05c7-4c62-4417-a9dd-5855b3838faf]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:feea:e0b0'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 545921, 'tstamp': 545921}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 233281, 'error': None, 'target': 'ovnmeta-b97b8849-844c-4190-8b13-fd7a2d073ce8', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:41.641 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e7d9f3cc-4388-479a-9b2c-f68cae5a6b77]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapb97b8849-81'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:ea:e0:b0'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 94], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 545921, 'reachable_time': 18433, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 233282, 'error': None, 'target': 'ovnmeta-b97b8849-844c-4190-8b13-fd7a2d073ce8', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:41.668 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[bb4de9d1-9974-4c50-8689-438e3b0f6029]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:41.725 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[af03c611-0acb-4795-9be2-a6b9a9f30f8e]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:41.726 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapb97b8849-80, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:41.726 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:41.727 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapb97b8849-80, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:17:41 compute-0 NetworkManager[51160]: <info>  [1759407461.7293] manager: (tapb97b8849-80): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/152)
Oct 02 12:17:41 compute-0 kernel: tapb97b8849-80: entered promiscuous mode
Oct 02 12:17:41 compute-0 nova_compute[192079]: 2025-10-02 12:17:41.728 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:41 compute-0 nova_compute[192079]: 2025-10-02 12:17:41.731 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:41.732 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tapb97b8849-80, col_values=(('external_ids', {'iface-id': '055cf080-4472-4807-a697-69de84e96953'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:17:41 compute-0 nova_compute[192079]: 2025-10-02 12:17:41.733 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:41 compute-0 ovn_controller[94336]: 2025-10-02T12:17:41Z|00295|binding|INFO|Releasing lport 055cf080-4472-4807-a697-69de84e96953 from this chassis (sb_readonly=0)
Oct 02 12:17:41 compute-0 nova_compute[192079]: 2025-10-02 12:17:41.734 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:41.735 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/b97b8849-844c-4190-8b13-fd7a2d073ce8.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/b97b8849-844c-4190-8b13-fd7a2d073ce8.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:41.743 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[bc243071-1391-4073-9faa-f75a3fac451d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:41.744 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-b97b8849-844c-4190-8b13-fd7a2d073ce8
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/b97b8849-844c-4190-8b13-fd7a2d073ce8.pid.haproxy
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID b97b8849-844c-4190-8b13-fd7a2d073ce8
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:17:41 compute-0 nova_compute[192079]: 2025-10-02 12:17:41.744 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:41.745 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-b97b8849-844c-4190-8b13-fd7a2d073ce8', 'env', 'PROCESS_TAG=haproxy-b97b8849-844c-4190-8b13-fd7a2d073ce8', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/b97b8849-844c-4190-8b13-fd7a2d073ce8.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:17:42 compute-0 podman[233318]: 2025-10-02 12:17:42.109234286 +0000 UTC m=+0.034370798 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:17:42 compute-0 nova_compute[192079]: 2025-10-02 12:17:42.262 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407462.261085, 21aa2a67-6284-4d30-9a7c-499db76c4042 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:17:42 compute-0 nova_compute[192079]: 2025-10-02 12:17:42.262 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] VM Started (Lifecycle Event)
Oct 02 12:17:42 compute-0 nova_compute[192079]: 2025-10-02 12:17:42.305 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:17:42 compute-0 nova_compute[192079]: 2025-10-02 12:17:42.311 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407462.2612386, 21aa2a67-6284-4d30-9a7c-499db76c4042 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:17:42 compute-0 nova_compute[192079]: 2025-10-02 12:17:42.311 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] VM Paused (Lifecycle Event)
Oct 02 12:17:42 compute-0 podman[233318]: 2025-10-02 12:17:42.320202298 +0000 UTC m=+0.245338720 container create 62cb75ea34dd3769f8493caf35570ec15f2d0563f5d82a3258186699e04f6de7 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-b97b8849-844c-4190-8b13-fd7a2d073ce8, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true)
Oct 02 12:17:42 compute-0 nova_compute[192079]: 2025-10-02 12:17:42.337 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:17:42 compute-0 nova_compute[192079]: 2025-10-02 12:17:42.342 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:17:42 compute-0 systemd[1]: Started libpod-conmon-62cb75ea34dd3769f8493caf35570ec15f2d0563f5d82a3258186699e04f6de7.scope.
Oct 02 12:17:42 compute-0 nova_compute[192079]: 2025-10-02 12:17:42.377 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:17:42 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:17:42 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/66ad72e9b7e7e1d5c47a745218ae058ac0bdce2d6a478cac133aa801a5af02b6/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:17:42 compute-0 podman[233318]: 2025-10-02 12:17:42.436847948 +0000 UTC m=+0.361984390 container init 62cb75ea34dd3769f8493caf35570ec15f2d0563f5d82a3258186699e04f6de7 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-b97b8849-844c-4190-8b13-fd7a2d073ce8, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS)
Oct 02 12:17:42 compute-0 podman[233318]: 2025-10-02 12:17:42.442891693 +0000 UTC m=+0.368028115 container start 62cb75ea34dd3769f8493caf35570ec15f2d0563f5d82a3258186699e04f6de7 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-b97b8849-844c-4190-8b13-fd7a2d073ce8, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:17:42 compute-0 neutron-haproxy-ovnmeta-b97b8849-844c-4190-8b13-fd7a2d073ce8[233333]: [NOTICE]   (233337) : New worker (233339) forked
Oct 02 12:17:42 compute-0 neutron-haproxy-ovnmeta-b97b8849-844c-4190-8b13-fd7a2d073ce8[233333]: [NOTICE]   (233337) : Loading success.
Oct 02 12:17:43 compute-0 nova_compute[192079]: 2025-10-02 12:17:43.602 2 DEBUG nova.compute.manager [req-134b1919-dbf7-46ef-9561-3ff587def76d req-44337bc6-c7dd-419a-aea7-4e47f0598504 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Received event network-vif-plugged-61697d43-f76f-4fbc-9f9c-d624fa50ac8f external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:17:43 compute-0 nova_compute[192079]: 2025-10-02 12:17:43.603 2 DEBUG oslo_concurrency.lockutils [req-134b1919-dbf7-46ef-9561-3ff587def76d req-44337bc6-c7dd-419a-aea7-4e47f0598504 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "21aa2a67-6284-4d30-9a7c-499db76c4042-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:17:43 compute-0 nova_compute[192079]: 2025-10-02 12:17:43.603 2 DEBUG oslo_concurrency.lockutils [req-134b1919-dbf7-46ef-9561-3ff587def76d req-44337bc6-c7dd-419a-aea7-4e47f0598504 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "21aa2a67-6284-4d30-9a7c-499db76c4042-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:17:43 compute-0 nova_compute[192079]: 2025-10-02 12:17:43.603 2 DEBUG oslo_concurrency.lockutils [req-134b1919-dbf7-46ef-9561-3ff587def76d req-44337bc6-c7dd-419a-aea7-4e47f0598504 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "21aa2a67-6284-4d30-9a7c-499db76c4042-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:17:43 compute-0 nova_compute[192079]: 2025-10-02 12:17:43.604 2 DEBUG nova.compute.manager [req-134b1919-dbf7-46ef-9561-3ff587def76d req-44337bc6-c7dd-419a-aea7-4e47f0598504 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Processing event network-vif-plugged-61697d43-f76f-4fbc-9f9c-d624fa50ac8f _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:17:43 compute-0 nova_compute[192079]: 2025-10-02 12:17:43.604 2 DEBUG nova.compute.manager [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Instance event wait completed in 1 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:17:43 compute-0 nova_compute[192079]: 2025-10-02 12:17:43.609 2 DEBUG nova.virt.libvirt.driver [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:17:43 compute-0 nova_compute[192079]: 2025-10-02 12:17:43.610 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407463.610303, 21aa2a67-6284-4d30-9a7c-499db76c4042 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:17:43 compute-0 nova_compute[192079]: 2025-10-02 12:17:43.610 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] VM Resumed (Lifecycle Event)
Oct 02 12:17:43 compute-0 nova_compute[192079]: 2025-10-02 12:17:43.615 2 INFO nova.virt.libvirt.driver [-] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Instance spawned successfully.
Oct 02 12:17:43 compute-0 nova_compute[192079]: 2025-10-02 12:17:43.615 2 DEBUG nova.virt.libvirt.driver [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:17:43 compute-0 nova_compute[192079]: 2025-10-02 12:17:43.647 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:17:43 compute-0 nova_compute[192079]: 2025-10-02 12:17:43.650 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:17:43 compute-0 nova_compute[192079]: 2025-10-02 12:17:43.658 2 DEBUG nova.virt.libvirt.driver [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:17:43 compute-0 nova_compute[192079]: 2025-10-02 12:17:43.659 2 DEBUG nova.virt.libvirt.driver [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:17:43 compute-0 nova_compute[192079]: 2025-10-02 12:17:43.659 2 DEBUG nova.virt.libvirt.driver [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:17:43 compute-0 nova_compute[192079]: 2025-10-02 12:17:43.660 2 DEBUG nova.virt.libvirt.driver [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:17:43 compute-0 nova_compute[192079]: 2025-10-02 12:17:43.660 2 DEBUG nova.virt.libvirt.driver [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:17:43 compute-0 nova_compute[192079]: 2025-10-02 12:17:43.661 2 DEBUG nova.virt.libvirt.driver [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:17:43 compute-0 nova_compute[192079]: 2025-10-02 12:17:43.686 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:17:43 compute-0 nova_compute[192079]: 2025-10-02 12:17:43.762 2 INFO nova.compute.manager [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Took 14.63 seconds to spawn the instance on the hypervisor.
Oct 02 12:17:43 compute-0 nova_compute[192079]: 2025-10-02 12:17:43.762 2 DEBUG nova.compute.manager [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:17:43 compute-0 nova_compute[192079]: 2025-10-02 12:17:43.848 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:43 compute-0 nova_compute[192079]: 2025-10-02 12:17:43.889 2 INFO nova.compute.manager [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Took 17.45 seconds to build instance.
Oct 02 12:17:43 compute-0 nova_compute[192079]: 2025-10-02 12:17:43.913 2 DEBUG oslo_concurrency.lockutils [None req-81be4265-9cde-4231-849b-0c25177fbd9c 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Lock "21aa2a67-6284-4d30-9a7c-499db76c4042" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 17.879s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:17:44 compute-0 nova_compute[192079]: 2025-10-02 12:17:44.031 2 DEBUG nova.network.neutron [req-f30b52c6-c44d-41b5-a788-1a0d73af16e6 req-0f7068f2-6210-481d-889a-075f1e77e995 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Updated VIF entry in instance network info cache for port 61697d43-f76f-4fbc-9f9c-d624fa50ac8f. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:17:44 compute-0 nova_compute[192079]: 2025-10-02 12:17:44.031 2 DEBUG nova.network.neutron [req-f30b52c6-c44d-41b5-a788-1a0d73af16e6 req-0f7068f2-6210-481d-889a-075f1e77e995 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Updating instance_info_cache with network_info: [{"id": "61697d43-f76f-4fbc-9f9c-d624fa50ac8f", "address": "fa:16:3e:43:cf:5c", "network": {"id": "b97b8849-844c-4190-8b13-fd7a2d073ce8", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1299594383-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "d5db64e6714348c1a7f57bb53de80915", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap61697d43-f7", "ovs_interfaceid": "61697d43-f76f-4fbc-9f9c-d624fa50ac8f", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:17:44 compute-0 nova_compute[192079]: 2025-10-02 12:17:44.054 2 DEBUG oslo_concurrency.lockutils [req-f30b52c6-c44d-41b5-a788-1a0d73af16e6 req-0f7068f2-6210-481d-889a-075f1e77e995 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-21aa2a67-6284-4d30-9a7c-499db76c4042" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:17:44 compute-0 podman[233350]: 2025-10-02 12:17:44.159899388 +0000 UTC m=+0.067424820 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 12:17:44 compute-0 podman[233348]: 2025-10-02 12:17:44.187880141 +0000 UTC m=+0.095650889 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, managed_by=edpm_ansible, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_managed=true, config_id=ovn_metadata_agent)
Oct 02 12:17:44 compute-0 podman[233349]: 2025-10-02 12:17:44.221019234 +0000 UTC m=+0.128517815 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, config_id=ovn_controller, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, container_name=ovn_controller, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']})
Oct 02 12:17:46 compute-0 nova_compute[192079]: 2025-10-02 12:17:46.361 2 DEBUG nova.compute.manager [req-a4946320-3389-4f74-a901-3fc85365b54c req-8a1b18fd-462c-4ffc-aa46-6b250ea4e099 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Received event network-vif-plugged-61697d43-f76f-4fbc-9f9c-d624fa50ac8f external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:17:46 compute-0 nova_compute[192079]: 2025-10-02 12:17:46.361 2 DEBUG oslo_concurrency.lockutils [req-a4946320-3389-4f74-a901-3fc85365b54c req-8a1b18fd-462c-4ffc-aa46-6b250ea4e099 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "21aa2a67-6284-4d30-9a7c-499db76c4042-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:17:46 compute-0 nova_compute[192079]: 2025-10-02 12:17:46.361 2 DEBUG oslo_concurrency.lockutils [req-a4946320-3389-4f74-a901-3fc85365b54c req-8a1b18fd-462c-4ffc-aa46-6b250ea4e099 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "21aa2a67-6284-4d30-9a7c-499db76c4042-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:17:46 compute-0 nova_compute[192079]: 2025-10-02 12:17:46.362 2 DEBUG oslo_concurrency.lockutils [req-a4946320-3389-4f74-a901-3fc85365b54c req-8a1b18fd-462c-4ffc-aa46-6b250ea4e099 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "21aa2a67-6284-4d30-9a7c-499db76c4042-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:17:46 compute-0 nova_compute[192079]: 2025-10-02 12:17:46.362 2 DEBUG nova.compute.manager [req-a4946320-3389-4f74-a901-3fc85365b54c req-8a1b18fd-462c-4ffc-aa46-6b250ea4e099 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] No waiting events found dispatching network-vif-plugged-61697d43-f76f-4fbc-9f9c-d624fa50ac8f pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:17:46 compute-0 nova_compute[192079]: 2025-10-02 12:17:46.362 2 WARNING nova.compute.manager [req-a4946320-3389-4f74-a901-3fc85365b54c req-8a1b18fd-462c-4ffc-aa46-6b250ea4e099 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Received unexpected event network-vif-plugged-61697d43-f76f-4fbc-9f9c-d624fa50ac8f for instance with vm_state active and task_state None.
Oct 02 12:17:46 compute-0 nova_compute[192079]: 2025-10-02 12:17:46.457 2 DEBUG oslo_concurrency.lockutils [None req-d647eac1-3fa4-47d9-bfe8-4ba74b4b8f4b 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Acquiring lock "21aa2a67-6284-4d30-9a7c-499db76c4042" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:17:46 compute-0 nova_compute[192079]: 2025-10-02 12:17:46.458 2 DEBUG oslo_concurrency.lockutils [None req-d647eac1-3fa4-47d9-bfe8-4ba74b4b8f4b 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Lock "21aa2a67-6284-4d30-9a7c-499db76c4042" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:17:46 compute-0 nova_compute[192079]: 2025-10-02 12:17:46.458 2 DEBUG nova.compute.manager [None req-d647eac1-3fa4-47d9-bfe8-4ba74b4b8f4b 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:17:46 compute-0 nova_compute[192079]: 2025-10-02 12:17:46.462 2 DEBUG nova.compute.manager [None req-d647eac1-3fa4-47d9-bfe8-4ba74b4b8f4b 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 do_stop_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3338
Oct 02 12:17:46 compute-0 nova_compute[192079]: 2025-10-02 12:17:46.462 2 DEBUG nova.objects.instance [None req-d647eac1-3fa4-47d9-bfe8-4ba74b4b8f4b 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Lazy-loading 'flavor' on Instance uuid 21aa2a67-6284-4d30-9a7c-499db76c4042 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:17:46 compute-0 nova_compute[192079]: 2025-10-02 12:17:46.479 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:46 compute-0 nova_compute[192079]: 2025-10-02 12:17:46.497 2 DEBUG nova.objects.instance [None req-d647eac1-3fa4-47d9-bfe8-4ba74b4b8f4b 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Lazy-loading 'info_cache' on Instance uuid 21aa2a67-6284-4d30-9a7c-499db76c4042 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:17:46 compute-0 nova_compute[192079]: 2025-10-02 12:17:46.542 2 DEBUG nova.virt.libvirt.driver [None req-d647eac1-3fa4-47d9-bfe8-4ba74b4b8f4b 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Shutting down instance from state 1 _clean_shutdown /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4071
Oct 02 12:17:47 compute-0 ovn_controller[94336]: 2025-10-02T12:17:47Z|00028|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:b3:31:fe 10.100.0.11
Oct 02 12:17:48 compute-0 nova_compute[192079]: 2025-10-02 12:17:48.852 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:51 compute-0 nova_compute[192079]: 2025-10-02 12:17:51.480 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:53 compute-0 podman[233422]: 2025-10-02 12:17:53.151782065 +0000 UTC m=+0.061757255 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, config_id=edpm, io.buildah.version=1.41.3, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, org.label-schema.build-date=20251001, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, 
org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible)
Oct 02 12:17:53 compute-0 nova_compute[192079]: 2025-10-02 12:17:53.856 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:54 compute-0 nova_compute[192079]: 2025-10-02 12:17:54.844 2 DEBUG oslo_concurrency.lockutils [None req-cb2c6add-8dd1-4986-ae12-d5675e067437 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Acquiring lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:17:54 compute-0 nova_compute[192079]: 2025-10-02 12:17:54.845 2 DEBUG oslo_concurrency.lockutils [None req-cb2c6add-8dd1-4986-ae12-d5675e067437 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:17:54 compute-0 nova_compute[192079]: 2025-10-02 12:17:54.845 2 DEBUG oslo_concurrency.lockutils [None req-cb2c6add-8dd1-4986-ae12-d5675e067437 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Acquiring lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:17:54 compute-0 nova_compute[192079]: 2025-10-02 12:17:54.846 2 DEBUG oslo_concurrency.lockutils [None req-cb2c6add-8dd1-4986-ae12-d5675e067437 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:17:54 compute-0 nova_compute[192079]: 2025-10-02 12:17:54.846 2 DEBUG oslo_concurrency.lockutils [None req-cb2c6add-8dd1-4986-ae12-d5675e067437 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:17:54 compute-0 nova_compute[192079]: 2025-10-02 12:17:54.866 2 INFO nova.compute.manager [None req-cb2c6add-8dd1-4986-ae12-d5675e067437 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Terminating instance
Oct 02 12:17:54 compute-0 nova_compute[192079]: 2025-10-02 12:17:54.882 2 DEBUG nova.compute.manager [None req-cb2c6add-8dd1-4986-ae12-d5675e067437 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:17:54 compute-0 kernel: tap73f6f99f-83 (unregistering): left promiscuous mode
Oct 02 12:17:54 compute-0 NetworkManager[51160]: <info>  [1759407474.9052] device (tap73f6f99f-83): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:17:54 compute-0 ovn_controller[94336]: 2025-10-02T12:17:54Z|00296|binding|INFO|Releasing lport 73f6f99f-8348-41c9-8194-e4cd3d448fd9 from this chassis (sb_readonly=0)
Oct 02 12:17:54 compute-0 nova_compute[192079]: 2025-10-02 12:17:54.914 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:54 compute-0 ovn_controller[94336]: 2025-10-02T12:17:54Z|00297|binding|INFO|Setting lport 73f6f99f-8348-41c9-8194-e4cd3d448fd9 down in Southbound
Oct 02 12:17:54 compute-0 ovn_controller[94336]: 2025-10-02T12:17:54Z|00298|binding|INFO|Removing iface tap73f6f99f-83 ovn-installed in OVS
Oct 02 12:17:54 compute-0 nova_compute[192079]: 2025-10-02 12:17:54.916 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:54 compute-0 nova_compute[192079]: 2025-10-02 12:17:54.926 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:54.932 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:b3:31:fe 10.100.0.11'], port_security=['fa:16:3e:b3:31:fe 10.100.0.11'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.11/28', 'neutron:device_id': 'ebc56e2c-d3a3-4ade-8849-7e23fc710e78', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '6e0277f0bb0f4a349e2e6d8ddfa24edf', 'neutron:revision_number': '6', 'neutron:security_group_ids': 'b4e0bc42-3cfd-4f42-a319-553606576b33', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=a043239b-039e-45fa-8277-43e361a8bae7, chassis=[], tunnel_key=2, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=73f6f99f-8348-41c9-8194-e4cd3d448fd9) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:17:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:54.933 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 73f6f99f-8348-41c9-8194-e4cd3d448fd9 in datapath bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5 unbound from our chassis
Oct 02 12:17:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:54.935 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:17:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:54.936 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b4a31074-cb31-4da2-a666-e63e1070c8a1]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:54.936 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5 namespace which is not needed anymore
Oct 02 12:17:54 compute-0 systemd[1]: machine-qemu\x2d40\x2dinstance\x2d00000055.scope: Deactivated successfully.
Oct 02 12:17:54 compute-0 systemd[1]: machine-qemu\x2d40\x2dinstance\x2d00000055.scope: Consumed 13.488s CPU time.
Oct 02 12:17:54 compute-0 systemd-machined[152150]: Machine qemu-40-instance-00000055 terminated.
Oct 02 12:17:55 compute-0 neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5[233210]: [NOTICE]   (233214) : haproxy version is 2.8.14-c23fe91
Oct 02 12:17:55 compute-0 neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5[233210]: [NOTICE]   (233214) : path to executable is /usr/sbin/haproxy
Oct 02 12:17:55 compute-0 neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5[233210]: [WARNING]  (233214) : Exiting Master process...
Oct 02 12:17:55 compute-0 neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5[233210]: [WARNING]  (233214) : Exiting Master process...
Oct 02 12:17:55 compute-0 neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5[233210]: [ALERT]    (233214) : Current worker (233216) exited with code 143 (Terminated)
Oct 02 12:17:55 compute-0 neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5[233210]: [WARNING]  (233214) : All workers exited. Exiting... (0)
Oct 02 12:17:55 compute-0 systemd[1]: libpod-3e79bd21414ee2a47fff4393573df261ba9294834dfb2a23568b345c7fd27ab9.scope: Deactivated successfully.
Oct 02 12:17:55 compute-0 podman[233466]: 2025-10-02 12:17:55.059806618 +0000 UTC m=+0.041844372 container died 3e79bd21414ee2a47fff4393573df261ba9294834dfb2a23568b345c7fd27ab9 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5, org.label-schema.build-date=20251001, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:17:55 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-3e79bd21414ee2a47fff4393573df261ba9294834dfb2a23568b345c7fd27ab9-userdata-shm.mount: Deactivated successfully.
Oct 02 12:17:55 compute-0 systemd[1]: var-lib-containers-storage-overlay-b39b048189479d67135c89bdac8d8f9c50fd299d0fc554f82f9362eb5a9aec1e-merged.mount: Deactivated successfully.
Oct 02 12:17:55 compute-0 podman[233466]: 2025-10-02 12:17:55.09583559 +0000 UTC m=+0.077873334 container cleanup 3e79bd21414ee2a47fff4393573df261ba9294834dfb2a23568b345c7fd27ab9 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS)
Oct 02 12:17:55 compute-0 nova_compute[192079]: 2025-10-02 12:17:55.103 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:55 compute-0 nova_compute[192079]: 2025-10-02 12:17:55.108 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:55 compute-0 systemd[1]: libpod-conmon-3e79bd21414ee2a47fff4393573df261ba9294834dfb2a23568b345c7fd27ab9.scope: Deactivated successfully.
Oct 02 12:17:55 compute-0 nova_compute[192079]: 2025-10-02 12:17:55.139 2 INFO nova.virt.libvirt.driver [-] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Instance destroyed successfully.
Oct 02 12:17:55 compute-0 nova_compute[192079]: 2025-10-02 12:17:55.141 2 DEBUG nova.objects.instance [None req-cb2c6add-8dd1-4986-ae12-d5675e067437 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lazy-loading 'resources' on Instance uuid ebc56e2c-d3a3-4ade-8849-7e23fc710e78 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:17:55 compute-0 nova_compute[192079]: 2025-10-02 12:17:55.154 2 DEBUG nova.virt.libvirt.vif [None req-cb2c6add-8dd1-4986-ae12-d5675e067437 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:16:50Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ListServerFiltersTestJSON-instance-1707027906',display_name='tempest-ListServerFiltersTestJSON-instance-1707027906',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-listserverfilterstestjson-instance-1707027906',id=85,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:16:58Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='6e0277f0bb0f4a349e2e6d8ddfa24edf',ramdisk_id='',reservation_id='r-wvwxsid2',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virt
io',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-ListServerFiltersTestJSON-298715262',owner_user_name='tempest-ListServerFiltersTestJSON-298715262-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:17:35Z,user_data=None,user_id='001d2d51902d4e299b775131f430a5db',uuid=ebc56e2c-d3a3-4ade-8849-7e23fc710e78,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "address": "fa:16:3e:b3:31:fe", "network": {"id": "bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-542543245-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e0277f0bb0f4a349e2e6d8ddfa24edf", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap73f6f99f-83", "ovs_interfaceid": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:17:55 compute-0 nova_compute[192079]: 2025-10-02 12:17:55.154 2 DEBUG nova.network.os_vif_util [None req-cb2c6add-8dd1-4986-ae12-d5675e067437 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Converting VIF {"id": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "address": "fa:16:3e:b3:31:fe", "network": {"id": "bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-542543245-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e0277f0bb0f4a349e2e6d8ddfa24edf", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap73f6f99f-83", "ovs_interfaceid": "73f6f99f-8348-41c9-8194-e4cd3d448fd9", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:17:55 compute-0 nova_compute[192079]: 2025-10-02 12:17:55.155 2 DEBUG nova.network.os_vif_util [None req-cb2c6add-8dd1-4986-ae12-d5675e067437 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:b3:31:fe,bridge_name='br-int',has_traffic_filtering=True,id=73f6f99f-8348-41c9-8194-e4cd3d448fd9,network=Network(bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap73f6f99f-83') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:17:55 compute-0 nova_compute[192079]: 2025-10-02 12:17:55.155 2 DEBUG os_vif [None req-cb2c6add-8dd1-4986-ae12-d5675e067437 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:b3:31:fe,bridge_name='br-int',has_traffic_filtering=True,id=73f6f99f-8348-41c9-8194-e4cd3d448fd9,network=Network(bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap73f6f99f-83') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:17:55 compute-0 nova_compute[192079]: 2025-10-02 12:17:55.157 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:55 compute-0 nova_compute[192079]: 2025-10-02 12:17:55.157 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap73f6f99f-83, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:17:55 compute-0 podman[233501]: 2025-10-02 12:17:55.158669263 +0000 UTC m=+0.039522508 container remove 3e79bd21414ee2a47fff4393573df261ba9294834dfb2a23568b345c7fd27ab9 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001)
Oct 02 12:17:55 compute-0 nova_compute[192079]: 2025-10-02 12:17:55.158 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:55 compute-0 nova_compute[192079]: 2025-10-02 12:17:55.161 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:17:55 compute-0 nova_compute[192079]: 2025-10-02 12:17:55.163 2 INFO os_vif [None req-cb2c6add-8dd1-4986-ae12-d5675e067437 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:b3:31:fe,bridge_name='br-int',has_traffic_filtering=True,id=73f6f99f-8348-41c9-8194-e4cd3d448fd9,network=Network(bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap73f6f99f-83')
Oct 02 12:17:55 compute-0 nova_compute[192079]: 2025-10-02 12:17:55.163 2 INFO nova.virt.libvirt.driver [None req-cb2c6add-8dd1-4986-ae12-d5675e067437 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Deleting instance files /var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78_del
Oct 02 12:17:55 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:55.163 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1bf5a650-e65a-4b07-8f5c-fc5357446bef]: (4, ('Thu Oct  2 12:17:55 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5 (3e79bd21414ee2a47fff4393573df261ba9294834dfb2a23568b345c7fd27ab9)\n3e79bd21414ee2a47fff4393573df261ba9294834dfb2a23568b345c7fd27ab9\nThu Oct  2 12:17:55 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5 (3e79bd21414ee2a47fff4393573df261ba9294834dfb2a23568b345c7fd27ab9)\n3e79bd21414ee2a47fff4393573df261ba9294834dfb2a23568b345c7fd27ab9\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:55 compute-0 nova_compute[192079]: 2025-10-02 12:17:55.164 2 INFO nova.virt.libvirt.driver [None req-cb2c6add-8dd1-4986-ae12-d5675e067437 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Deletion of /var/lib/nova/instances/ebc56e2c-d3a3-4ade-8849-7e23fc710e78_del complete
Oct 02 12:17:55 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:55.164 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b5bd6746-648b-4d16-a41c-4c90ac445deb]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:55 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:55.165 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapbd543a6a-b0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:17:55 compute-0 nova_compute[192079]: 2025-10-02 12:17:55.168 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:55 compute-0 kernel: tapbd543a6a-b0: left promiscuous mode
Oct 02 12:17:55 compute-0 nova_compute[192079]: 2025-10-02 12:17:55.178 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:55 compute-0 nova_compute[192079]: 2025-10-02 12:17:55.179 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:55 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:55.180 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3c8da659-5c75-410b-a05c-eb348db3aa2a]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:55 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:55.209 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0967f0bd-7462-4c49-aa6c-fc40eed34e25]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:55 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:55.210 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[00646b04-38e9-444f-a388-97740668894c]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:55 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:55.224 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9994e532-1698-4802-b642-2588530f6634]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 545191, 'reachable_time': 17774, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 233527, 'error': None, 'target': 'ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:55 compute-0 systemd[1]: run-netns-ovnmeta\x2dbd543a6a\x2dbba1\x2d4bd5\x2d9cbf\x2dfc87bf95cbe5.mount: Deactivated successfully.
Oct 02 12:17:55 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:55.227 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-bd543a6a-bba1-4bd5-9cbf-fc87bf95cbe5 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:17:55 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:55.227 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[99220149-bf59-44d0-aad7-7a67858da039]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:55 compute-0 nova_compute[192079]: 2025-10-02 12:17:55.313 2 INFO nova.compute.manager [None req-cb2c6add-8dd1-4986-ae12-d5675e067437 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Took 0.43 seconds to destroy the instance on the hypervisor.
Oct 02 12:17:55 compute-0 nova_compute[192079]: 2025-10-02 12:17:55.313 2 DEBUG oslo.service.loopingcall [None req-cb2c6add-8dd1-4986-ae12-d5675e067437 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:17:55 compute-0 nova_compute[192079]: 2025-10-02 12:17:55.314 2 DEBUG nova.compute.manager [-] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:17:55 compute-0 nova_compute[192079]: 2025-10-02 12:17:55.314 2 DEBUG nova.network.neutron [-] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:17:55 compute-0 nova_compute[192079]: 2025-10-02 12:17:55.456 2 DEBUG nova.compute.manager [req-0d7db808-ad7e-499d-8811-823e6037cb22 req-1b9ec7ac-a0fc-4050-b9a9-3f110908f9ec 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Received event network-vif-unplugged-73f6f99f-8348-41c9-8194-e4cd3d448fd9 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:17:55 compute-0 nova_compute[192079]: 2025-10-02 12:17:55.457 2 DEBUG oslo_concurrency.lockutils [req-0d7db808-ad7e-499d-8811-823e6037cb22 req-1b9ec7ac-a0fc-4050-b9a9-3f110908f9ec 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:17:55 compute-0 nova_compute[192079]: 2025-10-02 12:17:55.458 2 DEBUG oslo_concurrency.lockutils [req-0d7db808-ad7e-499d-8811-823e6037cb22 req-1b9ec7ac-a0fc-4050-b9a9-3f110908f9ec 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:17:55 compute-0 nova_compute[192079]: 2025-10-02 12:17:55.459 2 DEBUG oslo_concurrency.lockutils [req-0d7db808-ad7e-499d-8811-823e6037cb22 req-1b9ec7ac-a0fc-4050-b9a9-3f110908f9ec 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:17:55 compute-0 nova_compute[192079]: 2025-10-02 12:17:55.459 2 DEBUG nova.compute.manager [req-0d7db808-ad7e-499d-8811-823e6037cb22 req-1b9ec7ac-a0fc-4050-b9a9-3f110908f9ec 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] No waiting events found dispatching network-vif-unplugged-73f6f99f-8348-41c9-8194-e4cd3d448fd9 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:17:55 compute-0 nova_compute[192079]: 2025-10-02 12:17:55.460 2 DEBUG nova.compute.manager [req-0d7db808-ad7e-499d-8811-823e6037cb22 req-1b9ec7ac-a0fc-4050-b9a9-3f110908f9ec 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Received event network-vif-unplugged-73f6f99f-8348-41c9-8194-e4cd3d448fd9 for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:17:56 compute-0 ovn_controller[94336]: 2025-10-02T12:17:56Z|00029|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:43:cf:5c 10.100.0.8
Oct 02 12:17:56 compute-0 ovn_controller[94336]: 2025-10-02T12:17:56Z|00030|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:43:cf:5c 10.100.0.8
Oct 02 12:17:56 compute-0 nova_compute[192079]: 2025-10-02 12:17:56.482 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:56 compute-0 nova_compute[192079]: 2025-10-02 12:17:56.583 2 DEBUG nova.virt.libvirt.driver [None req-d647eac1-3fa4-47d9-bfe8-4ba74b4b8f4b 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Instance in state 1 after 10 seconds - resending shutdown _clean_shutdown /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4101
Oct 02 12:17:56 compute-0 nova_compute[192079]: 2025-10-02 12:17:56.632 2 DEBUG nova.network.neutron [-] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:17:56 compute-0 nova_compute[192079]: 2025-10-02 12:17:56.665 2 INFO nova.compute.manager [-] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Took 1.35 seconds to deallocate network for instance.
Oct 02 12:17:56 compute-0 nova_compute[192079]: 2025-10-02 12:17:56.771 2 DEBUG oslo_concurrency.lockutils [None req-cb2c6add-8dd1-4986-ae12-d5675e067437 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:17:56 compute-0 nova_compute[192079]: 2025-10-02 12:17:56.772 2 DEBUG oslo_concurrency.lockutils [None req-cb2c6add-8dd1-4986-ae12-d5675e067437 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:17:56 compute-0 nova_compute[192079]: 2025-10-02 12:17:56.784 2 DEBUG nova.compute.manager [req-2411ecc6-d1c5-4ade-93e9-9f6d72010348 req-5c629e95-c84e-4094-bb32-12bd4eb31911 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Received event network-vif-deleted-73f6f99f-8348-41c9-8194-e4cd3d448fd9 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:17:56 compute-0 nova_compute[192079]: 2025-10-02 12:17:56.916 2 DEBUG nova.compute.provider_tree [None req-cb2c6add-8dd1-4986-ae12-d5675e067437 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:17:56 compute-0 nova_compute[192079]: 2025-10-02 12:17:56.937 2 DEBUG nova.scheduler.client.report [None req-cb2c6add-8dd1-4986-ae12-d5675e067437 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:17:56 compute-0 nova_compute[192079]: 2025-10-02 12:17:56.959 2 DEBUG oslo_concurrency.lockutils [None req-cb2c6add-8dd1-4986-ae12-d5675e067437 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.187s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:17:56 compute-0 nova_compute[192079]: 2025-10-02 12:17:56.985 2 INFO nova.scheduler.client.report [None req-cb2c6add-8dd1-4986-ae12-d5675e067437 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Deleted allocations for instance ebc56e2c-d3a3-4ade-8849-7e23fc710e78
Oct 02 12:17:57 compute-0 nova_compute[192079]: 2025-10-02 12:17:57.079 2 DEBUG oslo_concurrency.lockutils [None req-cb2c6add-8dd1-4986-ae12-d5675e067437 001d2d51902d4e299b775131f430a5db 6e0277f0bb0f4a349e2e6d8ddfa24edf - - default default] Lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 2.234s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:17:57 compute-0 nova_compute[192079]: 2025-10-02 12:17:57.590 2 DEBUG nova.compute.manager [req-255ec95f-1347-47f4-a572-a360557840ff req-a6ade316-fef5-4a97-8885-f059d5c249b0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Received event network-vif-plugged-73f6f99f-8348-41c9-8194-e4cd3d448fd9 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:17:57 compute-0 nova_compute[192079]: 2025-10-02 12:17:57.590 2 DEBUG oslo_concurrency.lockutils [req-255ec95f-1347-47f4-a572-a360557840ff req-a6ade316-fef5-4a97-8885-f059d5c249b0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:17:57 compute-0 nova_compute[192079]: 2025-10-02 12:17:57.591 2 DEBUG oslo_concurrency.lockutils [req-255ec95f-1347-47f4-a572-a360557840ff req-a6ade316-fef5-4a97-8885-f059d5c249b0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:17:57 compute-0 nova_compute[192079]: 2025-10-02 12:17:57.591 2 DEBUG oslo_concurrency.lockutils [req-255ec95f-1347-47f4-a572-a360557840ff req-a6ade316-fef5-4a97-8885-f059d5c249b0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ebc56e2c-d3a3-4ade-8849-7e23fc710e78-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:17:57 compute-0 nova_compute[192079]: 2025-10-02 12:17:57.591 2 DEBUG nova.compute.manager [req-255ec95f-1347-47f4-a572-a360557840ff req-a6ade316-fef5-4a97-8885-f059d5c249b0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] No waiting events found dispatching network-vif-plugged-73f6f99f-8348-41c9-8194-e4cd3d448fd9 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:17:57 compute-0 nova_compute[192079]: 2025-10-02 12:17:57.591 2 WARNING nova.compute.manager [req-255ec95f-1347-47f4-a572-a360557840ff req-a6ade316-fef5-4a97-8885-f059d5c249b0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Received unexpected event network-vif-plugged-73f6f99f-8348-41c9-8194-e4cd3d448fd9 for instance with vm_state deleted and task_state None.
Oct 02 12:17:58 compute-0 ovn_controller[94336]: 2025-10-02T12:17:58Z|00299|binding|INFO|Releasing lport 055cf080-4472-4807-a697-69de84e96953 from this chassis (sb_readonly=0)
Oct 02 12:17:58 compute-0 nova_compute[192079]: 2025-10-02 12:17:58.886 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:58 compute-0 kernel: tap61697d43-f7 (unregistering): left promiscuous mode
Oct 02 12:17:58 compute-0 NetworkManager[51160]: <info>  [1759407478.9885] device (tap61697d43-f7): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:17:58 compute-0 nova_compute[192079]: 2025-10-02 12:17:58.993 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:58 compute-0 ovn_controller[94336]: 2025-10-02T12:17:58Z|00300|binding|INFO|Releasing lport 61697d43-f76f-4fbc-9f9c-d624fa50ac8f from this chassis (sb_readonly=0)
Oct 02 12:17:58 compute-0 ovn_controller[94336]: 2025-10-02T12:17:58Z|00301|binding|INFO|Setting lport 61697d43-f76f-4fbc-9f9c-d624fa50ac8f down in Southbound
Oct 02 12:17:58 compute-0 ovn_controller[94336]: 2025-10-02T12:17:58Z|00302|binding|INFO|Removing iface tap61697d43-f7 ovn-installed in OVS
Oct 02 12:17:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:59.002 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:43:cf:5c 10.100.0.8'], port_security=['fa:16:3e:43:cf:5c 10.100.0.8'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.8/28', 'neutron:device_id': '21aa2a67-6284-4d30-9a7c-499db76c4042', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-b97b8849-844c-4190-8b13-fd7a2d073ce8', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'd5db64e6714348c1a7f57bb53de80915', 'neutron:revision_number': '4', 'neutron:security_group_ids': '063f732a-6071-414f-814d-a5d6c4e9e012', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=2011b0da-7062-465f-963e-59e92e88a653, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=61697d43-f76f-4fbc-9f9c-d624fa50ac8f) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:17:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:59.005 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 61697d43-f76f-4fbc-9f9c-d624fa50ac8f in datapath b97b8849-844c-4190-8b13-fd7a2d073ce8 unbound from our chassis
Oct 02 12:17:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:59.006 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network b97b8849-844c-4190-8b13-fd7a2d073ce8, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:17:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:59.007 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9439e21d-0249-4183-87ff-e4e5a61bf013]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:59.007 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-b97b8849-844c-4190-8b13-fd7a2d073ce8 namespace which is not needed anymore
Oct 02 12:17:59 compute-0 nova_compute[192079]: 2025-10-02 12:17:59.017 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:59 compute-0 systemd[1]: machine-qemu\x2d41\x2dinstance\x2d00000058.scope: Deactivated successfully.
Oct 02 12:17:59 compute-0 systemd[1]: machine-qemu\x2d41\x2dinstance\x2d00000058.scope: Consumed 13.298s CPU time.
Oct 02 12:17:59 compute-0 systemd-machined[152150]: Machine qemu-41-instance-00000058 terminated.
Oct 02 12:17:59 compute-0 podman[233547]: 2025-10-02 12:17:59.088714648 +0000 UTC m=+0.064122130 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=openstack_network_exporter, name=ubi9-minimal, vcs-type=git, io.buildah.version=1.33.7, release=1755695350, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., com.redhat.component=ubi9-minimal-container, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, io.openshift.tags=minimal rhel9, maintainer=Red Hat, Inc., architecture=x86_64, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., distribution-scope=public, build-date=2025-08-20T13:12:41, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vendor=Red Hat, Inc., managed_by=edpm_ansible, version=9.6, io.openshift.expose-services=, url=https://catalog.redhat.com/en/search?searchType=containers)
Oct 02 12:17:59 compute-0 podman[233550]: 2025-10-02 12:17:59.088837511 +0000 UTC m=+0.062395032 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, container_name=multipathd, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, tcib_managed=true)
Oct 02 12:17:59 compute-0 neutron-haproxy-ovnmeta-b97b8849-844c-4190-8b13-fd7a2d073ce8[233333]: [NOTICE]   (233337) : haproxy version is 2.8.14-c23fe91
Oct 02 12:17:59 compute-0 neutron-haproxy-ovnmeta-b97b8849-844c-4190-8b13-fd7a2d073ce8[233333]: [NOTICE]   (233337) : path to executable is /usr/sbin/haproxy
Oct 02 12:17:59 compute-0 neutron-haproxy-ovnmeta-b97b8849-844c-4190-8b13-fd7a2d073ce8[233333]: [WARNING]  (233337) : Exiting Master process...
Oct 02 12:17:59 compute-0 neutron-haproxy-ovnmeta-b97b8849-844c-4190-8b13-fd7a2d073ce8[233333]: [ALERT]    (233337) : Current worker (233339) exited with code 143 (Terminated)
Oct 02 12:17:59 compute-0 neutron-haproxy-ovnmeta-b97b8849-844c-4190-8b13-fd7a2d073ce8[233333]: [WARNING]  (233337) : All workers exited. Exiting... (0)
Oct 02 12:17:59 compute-0 systemd[1]: libpod-62cb75ea34dd3769f8493caf35570ec15f2d0563f5d82a3258186699e04f6de7.scope: Deactivated successfully.
Oct 02 12:17:59 compute-0 podman[233607]: 2025-10-02 12:17:59.134855176 +0000 UTC m=+0.041812711 container died 62cb75ea34dd3769f8493caf35570ec15f2d0563f5d82a3258186699e04f6de7 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-b97b8849-844c-4190-8b13-fd7a2d073ce8, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3)
Oct 02 12:17:59 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-62cb75ea34dd3769f8493caf35570ec15f2d0563f5d82a3258186699e04f6de7-userdata-shm.mount: Deactivated successfully.
Oct 02 12:17:59 compute-0 systemd[1]: var-lib-containers-storage-overlay-66ad72e9b7e7e1d5c47a745218ae058ac0bdce2d6a478cac133aa801a5af02b6-merged.mount: Deactivated successfully.
Oct 02 12:17:59 compute-0 podman[233607]: 2025-10-02 12:17:59.169265904 +0000 UTC m=+0.076223439 container cleanup 62cb75ea34dd3769f8493caf35570ec15f2d0563f5d82a3258186699e04f6de7 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-b97b8849-844c-4190-8b13-fd7a2d073ce8, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 12:17:59 compute-0 systemd[1]: libpod-conmon-62cb75ea34dd3769f8493caf35570ec15f2d0563f5d82a3258186699e04f6de7.scope: Deactivated successfully.
Oct 02 12:17:59 compute-0 podman[233636]: 2025-10-02 12:17:59.233057714 +0000 UTC m=+0.044632398 container remove 62cb75ea34dd3769f8493caf35570ec15f2d0563f5d82a3258186699e04f6de7 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-b97b8849-844c-4190-8b13-fd7a2d073ce8, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:17:59 compute-0 nova_compute[192079]: 2025-10-02 12:17:59.239 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:59.238 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3a425c35-5622-45fd-9a67-0820c9f4b26f]: (4, ('Thu Oct  2 12:17:59 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-b97b8849-844c-4190-8b13-fd7a2d073ce8 (62cb75ea34dd3769f8493caf35570ec15f2d0563f5d82a3258186699e04f6de7)\n62cb75ea34dd3769f8493caf35570ec15f2d0563f5d82a3258186699e04f6de7\nThu Oct  2 12:17:59 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-b97b8849-844c-4190-8b13-fd7a2d073ce8 (62cb75ea34dd3769f8493caf35570ec15f2d0563f5d82a3258186699e04f6de7)\n62cb75ea34dd3769f8493caf35570ec15f2d0563f5d82a3258186699e04f6de7\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:59.241 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c8a82972-c427-41a6-9fc5-0f6e4f2698b1]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:59.241 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapb97b8849-80, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:17:59 compute-0 nova_compute[192079]: 2025-10-02 12:17:59.243 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:59 compute-0 nova_compute[192079]: 2025-10-02 12:17:59.254 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:59 compute-0 nova_compute[192079]: 2025-10-02 12:17:59.254 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:59 compute-0 kernel: tapb97b8849-80: left promiscuous mode
Oct 02 12:17:59 compute-0 nova_compute[192079]: 2025-10-02 12:17:59.261 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:17:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:59.263 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c9659191-e2b3-4795-937d-70082acb1fa0]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:59.289 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c2ddd1f8-79bc-4318-aa70-d63b3ab68338]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:59.290 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[778eea1a-75d0-4e3f-9e41-8a5931c1a510]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:59.306 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[827da506-a391-41c9-8b60-88c6c47fc901]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 545913, 'reachable_time': 26292, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 233670, 'error': None, 'target': 'ovnmeta-b97b8849-844c-4190-8b13-fd7a2d073ce8', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:59.308 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-b97b8849-844c-4190-8b13-fd7a2d073ce8 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:17:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:17:59.308 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[e44defa1-d8f1-4129-b971-7f7f02135dfe]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:17:59 compute-0 systemd[1]: run-netns-ovnmeta\x2db97b8849\x2d844c\x2d4190\x2d8b13\x2dfd7a2d073ce8.mount: Deactivated successfully.
Oct 02 12:17:59 compute-0 nova_compute[192079]: 2025-10-02 12:17:59.601 2 INFO nova.virt.libvirt.driver [None req-d647eac1-3fa4-47d9-bfe8-4ba74b4b8f4b 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Instance shutdown successfully after 13 seconds.
Oct 02 12:17:59 compute-0 nova_compute[192079]: 2025-10-02 12:17:59.606 2 INFO nova.virt.libvirt.driver [-] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Instance destroyed successfully.
Oct 02 12:17:59 compute-0 nova_compute[192079]: 2025-10-02 12:17:59.607 2 DEBUG nova.objects.instance [None req-d647eac1-3fa4-47d9-bfe8-4ba74b4b8f4b 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Lazy-loading 'numa_topology' on Instance uuid 21aa2a67-6284-4d30-9a7c-499db76c4042 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:17:59 compute-0 nova_compute[192079]: 2025-10-02 12:17:59.620 2 DEBUG nova.compute.manager [None req-d647eac1-3fa4-47d9-bfe8-4ba74b4b8f4b 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:17:59 compute-0 nova_compute[192079]: 2025-10-02 12:17:59.704 2 DEBUG oslo_concurrency.lockutils [None req-d647eac1-3fa4-47d9-bfe8-4ba74b4b8f4b 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Lock "21aa2a67-6284-4d30-9a7c-499db76c4042" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 13.246s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:00 compute-0 nova_compute[192079]: 2025-10-02 12:18:00.160 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:01 compute-0 nova_compute[192079]: 2025-10-02 12:18:01.484 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:01 compute-0 nova_compute[192079]: 2025-10-02 12:18:01.799 2 DEBUG oslo_concurrency.lockutils [None req-0c5bcfe8-da0e-413a-a4f4-b2448a99de44 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Acquiring lock "21aa2a67-6284-4d30-9a7c-499db76c4042" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:01 compute-0 nova_compute[192079]: 2025-10-02 12:18:01.799 2 DEBUG oslo_concurrency.lockutils [None req-0c5bcfe8-da0e-413a-a4f4-b2448a99de44 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Lock "21aa2a67-6284-4d30-9a7c-499db76c4042" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:01 compute-0 nova_compute[192079]: 2025-10-02 12:18:01.800 2 DEBUG oslo_concurrency.lockutils [None req-0c5bcfe8-da0e-413a-a4f4-b2448a99de44 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Acquiring lock "21aa2a67-6284-4d30-9a7c-499db76c4042-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:01 compute-0 nova_compute[192079]: 2025-10-02 12:18:01.800 2 DEBUG oslo_concurrency.lockutils [None req-0c5bcfe8-da0e-413a-a4f4-b2448a99de44 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Lock "21aa2a67-6284-4d30-9a7c-499db76c4042-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:01 compute-0 nova_compute[192079]: 2025-10-02 12:18:01.800 2 DEBUG oslo_concurrency.lockutils [None req-0c5bcfe8-da0e-413a-a4f4-b2448a99de44 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Lock "21aa2a67-6284-4d30-9a7c-499db76c4042-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:01 compute-0 nova_compute[192079]: 2025-10-02 12:18:01.819 2 INFO nova.compute.manager [None req-0c5bcfe8-da0e-413a-a4f4-b2448a99de44 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Terminating instance
Oct 02 12:18:01 compute-0 nova_compute[192079]: 2025-10-02 12:18:01.828 2 DEBUG nova.compute.manager [None req-0c5bcfe8-da0e-413a-a4f4-b2448a99de44 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:18:01 compute-0 nova_compute[192079]: 2025-10-02 12:18:01.834 2 INFO nova.virt.libvirt.driver [-] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Instance destroyed successfully.
Oct 02 12:18:01 compute-0 nova_compute[192079]: 2025-10-02 12:18:01.834 2 DEBUG nova.objects.instance [None req-0c5bcfe8-da0e-413a-a4f4-b2448a99de44 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Lazy-loading 'resources' on Instance uuid 21aa2a67-6284-4d30-9a7c-499db76c4042 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:18:01 compute-0 nova_compute[192079]: 2025-10-02 12:18:01.851 2 DEBUG nova.virt.libvirt.vif [None req-0c5bcfe8-da0e-413a-a4f4-b2448a99de44 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:17:24Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-DeleteServersTestJSON-server-831017505',display_name='tempest-DeleteServersTestJSON-server-831017505',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-deleteserverstestjson-server-831017505',id=88,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:17:43Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=4,progress=0,project_id='d5db64e6714348c1a7f57bb53de80915',ramdisk_id='',reservation_id='r-sz03g7b2',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1
',image_min_ram='0',owner_project_name='tempest-DeleteServersTestJSON-548982240',owner_user_name='tempest-DeleteServersTestJSON-548982240-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:17:59Z,user_data=None,user_id='0c0ba8ddde504431b51e593c63f40361',uuid=21aa2a67-6284-4d30-9a7c-499db76c4042,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='stopped') vif={"id": "61697d43-f76f-4fbc-9f9c-d624fa50ac8f", "address": "fa:16:3e:43:cf:5c", "network": {"id": "b97b8849-844c-4190-8b13-fd7a2d073ce8", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1299594383-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "d5db64e6714348c1a7f57bb53de80915", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap61697d43-f7", "ovs_interfaceid": "61697d43-f76f-4fbc-9f9c-d624fa50ac8f", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:18:01 compute-0 nova_compute[192079]: 2025-10-02 12:18:01.852 2 DEBUG nova.network.os_vif_util [None req-0c5bcfe8-da0e-413a-a4f4-b2448a99de44 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Converting VIF {"id": "61697d43-f76f-4fbc-9f9c-d624fa50ac8f", "address": "fa:16:3e:43:cf:5c", "network": {"id": "b97b8849-844c-4190-8b13-fd7a2d073ce8", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1299594383-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "d5db64e6714348c1a7f57bb53de80915", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap61697d43-f7", "ovs_interfaceid": "61697d43-f76f-4fbc-9f9c-d624fa50ac8f", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:18:01 compute-0 nova_compute[192079]: 2025-10-02 12:18:01.853 2 DEBUG nova.network.os_vif_util [None req-0c5bcfe8-da0e-413a-a4f4-b2448a99de44 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:43:cf:5c,bridge_name='br-int',has_traffic_filtering=True,id=61697d43-f76f-4fbc-9f9c-d624fa50ac8f,network=Network(b97b8849-844c-4190-8b13-fd7a2d073ce8),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap61697d43-f7') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:18:01 compute-0 nova_compute[192079]: 2025-10-02 12:18:01.853 2 DEBUG os_vif [None req-0c5bcfe8-da0e-413a-a4f4-b2448a99de44 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:43:cf:5c,bridge_name='br-int',has_traffic_filtering=True,id=61697d43-f76f-4fbc-9f9c-d624fa50ac8f,network=Network(b97b8849-844c-4190-8b13-fd7a2d073ce8),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap61697d43-f7') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:18:01 compute-0 nova_compute[192079]: 2025-10-02 12:18:01.854 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:01 compute-0 nova_compute[192079]: 2025-10-02 12:18:01.854 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap61697d43-f7, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:18:01 compute-0 nova_compute[192079]: 2025-10-02 12:18:01.856 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:01 compute-0 nova_compute[192079]: 2025-10-02 12:18:01.857 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:01 compute-0 nova_compute[192079]: 2025-10-02 12:18:01.859 2 INFO os_vif [None req-0c5bcfe8-da0e-413a-a4f4-b2448a99de44 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:43:cf:5c,bridge_name='br-int',has_traffic_filtering=True,id=61697d43-f76f-4fbc-9f9c-d624fa50ac8f,network=Network(b97b8849-844c-4190-8b13-fd7a2d073ce8),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap61697d43-f7')
Oct 02 12:18:01 compute-0 nova_compute[192079]: 2025-10-02 12:18:01.860 2 INFO nova.virt.libvirt.driver [None req-0c5bcfe8-da0e-413a-a4f4-b2448a99de44 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Deleting instance files /var/lib/nova/instances/21aa2a67-6284-4d30-9a7c-499db76c4042_del
Oct 02 12:18:01 compute-0 nova_compute[192079]: 2025-10-02 12:18:01.860 2 INFO nova.virt.libvirt.driver [None req-0c5bcfe8-da0e-413a-a4f4-b2448a99de44 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Deletion of /var/lib/nova/instances/21aa2a67-6284-4d30-9a7c-499db76c4042_del complete
Oct 02 12:18:01 compute-0 nova_compute[192079]: 2025-10-02 12:18:01.944 2 INFO nova.compute.manager [None req-0c5bcfe8-da0e-413a-a4f4-b2448a99de44 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Took 0.12 seconds to destroy the instance on the hypervisor.
Oct 02 12:18:01 compute-0 nova_compute[192079]: 2025-10-02 12:18:01.960 2 DEBUG oslo.service.loopingcall [None req-0c5bcfe8-da0e-413a-a4f4-b2448a99de44 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:18:01 compute-0 nova_compute[192079]: 2025-10-02 12:18:01.962 2 DEBUG nova.compute.manager [-] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:18:01 compute-0 nova_compute[192079]: 2025-10-02 12:18:01.962 2 DEBUG nova.network.neutron [-] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:18:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:02.217 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:02.217 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:02.217 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:02 compute-0 nova_compute[192079]: 2025-10-02 12:18:02.725 2 DEBUG nova.network.neutron [-] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:18:02 compute-0 nova_compute[192079]: 2025-10-02 12:18:02.751 2 INFO nova.compute.manager [-] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Took 0.79 seconds to deallocate network for instance.
Oct 02 12:18:02 compute-0 nova_compute[192079]: 2025-10-02 12:18:02.827 2 DEBUG nova.compute.manager [req-22b35c3a-b69e-4431-9079-7c5d99f68fb3 req-2b99026c-3752-42fe-93f2-ad22df29644d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Received event network-vif-deleted-61697d43-f76f-4fbc-9f9c-d624fa50ac8f external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:18:02 compute-0 nova_compute[192079]: 2025-10-02 12:18:02.836 2 DEBUG oslo_concurrency.lockutils [None req-0c5bcfe8-da0e-413a-a4f4-b2448a99de44 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:02 compute-0 nova_compute[192079]: 2025-10-02 12:18:02.836 2 DEBUG oslo_concurrency.lockutils [None req-0c5bcfe8-da0e-413a-a4f4-b2448a99de44 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:03 compute-0 nova_compute[192079]: 2025-10-02 12:18:03.020 2 DEBUG nova.compute.provider_tree [None req-0c5bcfe8-da0e-413a-a4f4-b2448a99de44 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:18:03 compute-0 nova_compute[192079]: 2025-10-02 12:18:03.039 2 DEBUG nova.scheduler.client.report [None req-0c5bcfe8-da0e-413a-a4f4-b2448a99de44 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:18:03 compute-0 nova_compute[192079]: 2025-10-02 12:18:03.067 2 DEBUG oslo_concurrency.lockutils [None req-0c5bcfe8-da0e-413a-a4f4-b2448a99de44 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.230s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:03 compute-0 nova_compute[192079]: 2025-10-02 12:18:03.099 2 INFO nova.scheduler.client.report [None req-0c5bcfe8-da0e-413a-a4f4-b2448a99de44 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Deleted allocations for instance 21aa2a67-6284-4d30-9a7c-499db76c4042
Oct 02 12:18:03 compute-0 nova_compute[192079]: 2025-10-02 12:18:03.190 2 DEBUG oslo_concurrency.lockutils [None req-0c5bcfe8-da0e-413a-a4f4-b2448a99de44 0c0ba8ddde504431b51e593c63f40361 d5db64e6714348c1a7f57bb53de80915 - - default default] Lock "21aa2a67-6284-4d30-9a7c-499db76c4042" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.391s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:05 compute-0 podman[233671]: 2025-10-02 12:18:05.143232085 +0000 UTC m=+0.057034846 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:18:05 compute-0 podman[233672]: 2025-10-02 12:18:05.143566325 +0000 UTC m=+0.058500186 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_id=iscsid, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, io.buildah.version=1.41.3, container_name=iscsid, org.label-schema.schema-version=1.0, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.license=GPLv2)
Oct 02 12:18:06 compute-0 nova_compute[192079]: 2025-10-02 12:18:06.487 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:06 compute-0 nova_compute[192079]: 2025-10-02 12:18:06.856 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:10 compute-0 nova_compute[192079]: 2025-10-02 12:18:10.138 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759407475.1371188, ebc56e2c-d3a3-4ade-8849-7e23fc710e78 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:18:10 compute-0 nova_compute[192079]: 2025-10-02 12:18:10.139 2 INFO nova.compute.manager [-] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] VM Stopped (Lifecycle Event)
Oct 02 12:18:10 compute-0 nova_compute[192079]: 2025-10-02 12:18:10.167 2 DEBUG nova.compute.manager [None req-e4372171-d69c-4100-ab77-038187976d4d - - - - - -] [instance: ebc56e2c-d3a3-4ade-8849-7e23fc710e78] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:18:11 compute-0 nova_compute[192079]: 2025-10-02 12:18:11.490 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:11 compute-0 nova_compute[192079]: 2025-10-02 12:18:11.858 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:14 compute-0 nova_compute[192079]: 2025-10-02 12:18:14.285 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759407479.2841582, 21aa2a67-6284-4d30-9a7c-499db76c4042 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:18:14 compute-0 nova_compute[192079]: 2025-10-02 12:18:14.285 2 INFO nova.compute.manager [-] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] VM Stopped (Lifecycle Event)
Oct 02 12:18:14 compute-0 nova_compute[192079]: 2025-10-02 12:18:14.310 2 DEBUG nova.compute.manager [None req-181af6d8-9921-468a-8599-fffddc15730a - - - - - -] [instance: 21aa2a67-6284-4d30-9a7c-499db76c4042] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:18:15 compute-0 podman[233717]: 2025-10-02 12:18:15.147710211 +0000 UTC m=+0.058672561 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 12:18:15 compute-0 podman[233715]: 2025-10-02 12:18:15.154510366 +0000 UTC m=+0.072868128 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, container_name=ovn_metadata_agent, 
maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_id=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true)
Oct 02 12:18:15 compute-0 podman[233716]: 2025-10-02 12:18:15.196247424 +0000 UTC m=+0.111444040 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_controller, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=ovn_controller, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS)
Oct 02 12:18:16 compute-0 nova_compute[192079]: 2025-10-02 12:18:16.491 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:16 compute-0 nova_compute[192079]: 2025-10-02 12:18:16.637 2 DEBUG oslo_concurrency.lockutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Acquiring lock "19251892-5108-4594-94b2-8779316aac1b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:16 compute-0 nova_compute[192079]: 2025-10-02 12:18:16.638 2 DEBUG oslo_concurrency.lockutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Lock "19251892-5108-4594-94b2-8779316aac1b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:16 compute-0 nova_compute[192079]: 2025-10-02 12:18:16.677 2 DEBUG nova.compute.manager [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:18:16 compute-0 nova_compute[192079]: 2025-10-02 12:18:16.789 2 DEBUG oslo_concurrency.lockutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:16 compute-0 nova_compute[192079]: 2025-10-02 12:18:16.790 2 DEBUG oslo_concurrency.lockutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:16 compute-0 nova_compute[192079]: 2025-10-02 12:18:16.798 2 DEBUG nova.virt.hardware [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:18:16 compute-0 nova_compute[192079]: 2025-10-02 12:18:16.798 2 INFO nova.compute.claims [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:18:16 compute-0 nova_compute[192079]: 2025-10-02 12:18:16.863 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:16 compute-0 nova_compute[192079]: 2025-10-02 12:18:16.914 2 DEBUG nova.compute.provider_tree [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:18:16 compute-0 nova_compute[192079]: 2025-10-02 12:18:16.925 2 DEBUG nova.scheduler.client.report [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:18:16 compute-0 nova_compute[192079]: 2025-10-02 12:18:16.943 2 DEBUG oslo_concurrency.lockutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.153s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:16 compute-0 nova_compute[192079]: 2025-10-02 12:18:16.944 2 DEBUG nova.compute.manager [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:18:16 compute-0 nova_compute[192079]: 2025-10-02 12:18:16.997 2 DEBUG nova.compute.manager [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:18:16 compute-0 nova_compute[192079]: 2025-10-02 12:18:16.998 2 DEBUG nova.network.neutron [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.014 2 INFO nova.virt.libvirt.driver [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.029 2 DEBUG nova.compute.manager [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.149 2 DEBUG nova.compute.manager [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.150 2 DEBUG nova.virt.libvirt.driver [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.151 2 INFO nova.virt.libvirt.driver [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Creating image(s)
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.151 2 DEBUG oslo_concurrency.lockutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Acquiring lock "/var/lib/nova/instances/19251892-5108-4594-94b2-8779316aac1b/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.151 2 DEBUG oslo_concurrency.lockutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Lock "/var/lib/nova/instances/19251892-5108-4594-94b2-8779316aac1b/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.152 2 DEBUG oslo_concurrency.lockutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Lock "/var/lib/nova/instances/19251892-5108-4594-94b2-8779316aac1b/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.163 2 DEBUG oslo_concurrency.processutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.260 2 DEBUG oslo_concurrency.processutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.097s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.261 2 DEBUG oslo_concurrency.lockutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.262 2 DEBUG oslo_concurrency.lockutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.272 2 DEBUG oslo_concurrency.processutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.323 2 DEBUG oslo_concurrency.processutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.051s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.324 2 DEBUG oslo_concurrency.processutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/19251892-5108-4594-94b2-8779316aac1b/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.355 2 DEBUG oslo_concurrency.processutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/19251892-5108-4594-94b2-8779316aac1b/disk 1073741824" returned: 0 in 0.031s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.356 2 DEBUG oslo_concurrency.lockutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.094s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.356 2 DEBUG oslo_concurrency.processutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.407 2 DEBUG oslo_concurrency.processutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.051s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.408 2 DEBUG nova.virt.disk.api [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Checking if we can resize image /var/lib/nova/instances/19251892-5108-4594-94b2-8779316aac1b/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.409 2 DEBUG oslo_concurrency.processutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/19251892-5108-4594-94b2-8779316aac1b/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.461 2 DEBUG oslo_concurrency.processutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/19251892-5108-4594-94b2-8779316aac1b/disk --force-share --output=json" returned: 0 in 0.052s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.463 2 DEBUG nova.virt.disk.api [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Cannot resize image /var/lib/nova/instances/19251892-5108-4594-94b2-8779316aac1b/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.463 2 DEBUG nova.objects.instance [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Lazy-loading 'migration_context' on Instance uuid 19251892-5108-4594-94b2-8779316aac1b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.483 2 DEBUG nova.virt.libvirt.driver [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.484 2 DEBUG nova.virt.libvirt.driver [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Ensure instance console log exists: /var/lib/nova/instances/19251892-5108-4594-94b2-8779316aac1b/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.484 2 DEBUG oslo_concurrency.lockutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.485 2 DEBUG oslo_concurrency.lockutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.485 2 DEBUG oslo_concurrency.lockutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:17 compute-0 nova_compute[192079]: 2025-10-02 12:18:17.487 2 DEBUG nova.policy [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8c91fa3e559044609ddabc81368d7546', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fa03c570c52a4c2a9445090389d03c6d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:18:18 compute-0 nova_compute[192079]: 2025-10-02 12:18:18.259 2 DEBUG nova.network.neutron [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Successfully created port: fe949a9a-bb0c-4664-8f69-767387ac0552 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:18:19 compute-0 nova_compute[192079]: 2025-10-02 12:18:19.065 2 DEBUG nova.network.neutron [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Successfully updated port: fe949a9a-bb0c-4664-8f69-767387ac0552 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:18:19 compute-0 nova_compute[192079]: 2025-10-02 12:18:19.085 2 DEBUG oslo_concurrency.lockutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Acquiring lock "refresh_cache-19251892-5108-4594-94b2-8779316aac1b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:18:19 compute-0 nova_compute[192079]: 2025-10-02 12:18:19.085 2 DEBUG oslo_concurrency.lockutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Acquired lock "refresh_cache-19251892-5108-4594-94b2-8779316aac1b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:18:19 compute-0 nova_compute[192079]: 2025-10-02 12:18:19.086 2 DEBUG nova.network.neutron [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:18:19 compute-0 nova_compute[192079]: 2025-10-02 12:18:19.183 2 DEBUG nova.compute.manager [req-8cb12b1f-6db5-4995-9c3a-7457581ed0d3 req-67779e14-9e28-4a43-afb0-dfe87cc804e0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Received event network-changed-fe949a9a-bb0c-4664-8f69-767387ac0552 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:18:19 compute-0 nova_compute[192079]: 2025-10-02 12:18:19.184 2 DEBUG nova.compute.manager [req-8cb12b1f-6db5-4995-9c3a-7457581ed0d3 req-67779e14-9e28-4a43-afb0-dfe87cc804e0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Refreshing instance network info cache due to event network-changed-fe949a9a-bb0c-4664-8f69-767387ac0552. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:18:19 compute-0 nova_compute[192079]: 2025-10-02 12:18:19.184 2 DEBUG oslo_concurrency.lockutils [req-8cb12b1f-6db5-4995-9c3a-7457581ed0d3 req-67779e14-9e28-4a43-afb0-dfe87cc804e0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-19251892-5108-4594-94b2-8779316aac1b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:18:19 compute-0 nova_compute[192079]: 2025-10-02 12:18:19.276 2 DEBUG nova.network.neutron [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:18:19 compute-0 nova_compute[192079]: 2025-10-02 12:18:19.454 2 DEBUG oslo_concurrency.lockutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Acquiring lock "12a1b5e0-e387-4171-8ff0-8d3aeb439ca7" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:19 compute-0 nova_compute[192079]: 2025-10-02 12:18:19.455 2 DEBUG oslo_concurrency.lockutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Lock "12a1b5e0-e387-4171-8ff0-8d3aeb439ca7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:19 compute-0 nova_compute[192079]: 2025-10-02 12:18:19.477 2 DEBUG nova.compute.manager [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:18:19 compute-0 nova_compute[192079]: 2025-10-02 12:18:19.638 2 DEBUG oslo_concurrency.lockutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:19 compute-0 nova_compute[192079]: 2025-10-02 12:18:19.638 2 DEBUG oslo_concurrency.lockutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:19 compute-0 nova_compute[192079]: 2025-10-02 12:18:19.690 2 DEBUG nova.virt.hardware [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:18:19 compute-0 nova_compute[192079]: 2025-10-02 12:18:19.691 2 INFO nova.compute.claims [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:18:19 compute-0 nova_compute[192079]: 2025-10-02 12:18:19.945 2 DEBUG nova.compute.provider_tree [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:18:19 compute-0 nova_compute[192079]: 2025-10-02 12:18:19.984 2 DEBUG nova.scheduler.client.report [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:18:20 compute-0 nova_compute[192079]: 2025-10-02 12:18:20.023 2 DEBUG oslo_concurrency.lockutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.385s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:20 compute-0 nova_compute[192079]: 2025-10-02 12:18:20.025 2 DEBUG nova.compute.manager [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:18:20 compute-0 nova_compute[192079]: 2025-10-02 12:18:20.474 2 DEBUG nova.compute.manager [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:18:20 compute-0 nova_compute[192079]: 2025-10-02 12:18:20.475 2 DEBUG nova.network.neutron [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:18:20 compute-0 nova_compute[192079]: 2025-10-02 12:18:20.579 2 INFO nova.virt.libvirt.driver [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:18:20 compute-0 rsyslogd[1013]: imjournal: journal files changed, reloading...  [v8.2506.0-2.el9 try https://www.rsyslog.com/e/0 ]
Oct 02 12:18:20 compute-0 nova_compute[192079]: 2025-10-02 12:18:20.694 2 DEBUG nova.compute.manager [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.233 2 DEBUG nova.compute.manager [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.235 2 DEBUG nova.virt.libvirt.driver [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.236 2 INFO nova.virt.libvirt.driver [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Creating image(s)
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.237 2 DEBUG oslo_concurrency.lockutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Acquiring lock "/var/lib/nova/instances/12a1b5e0-e387-4171-8ff0-8d3aeb439ca7/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.237 2 DEBUG oslo_concurrency.lockutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Lock "/var/lib/nova/instances/12a1b5e0-e387-4171-8ff0-8d3aeb439ca7/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.238 2 DEBUG oslo_concurrency.lockutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Lock "/var/lib/nova/instances/12a1b5e0-e387-4171-8ff0-8d3aeb439ca7/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.262 2 DEBUG nova.network.neutron [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Updating instance_info_cache with network_info: [{"id": "fe949a9a-bb0c-4664-8f69-767387ac0552", "address": "fa:16:3e:c0:2b:76", "network": {"id": "e895cece-6b67-405e-b05d-5b86ddbf8385", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-117197461-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fa03c570c52a4c2a9445090389d03c6d", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapfe949a9a-bb", "ovs_interfaceid": "fe949a9a-bb0c-4664-8f69-767387ac0552", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.265 2 DEBUG oslo_concurrency.processutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.297 2 DEBUG oslo_concurrency.lockutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Releasing lock "refresh_cache-19251892-5108-4594-94b2-8779316aac1b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.298 2 DEBUG nova.compute.manager [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Instance network_info: |[{"id": "fe949a9a-bb0c-4664-8f69-767387ac0552", "address": "fa:16:3e:c0:2b:76", "network": {"id": "e895cece-6b67-405e-b05d-5b86ddbf8385", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-117197461-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fa03c570c52a4c2a9445090389d03c6d", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapfe949a9a-bb", "ovs_interfaceid": "fe949a9a-bb0c-4664-8f69-767387ac0552", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.299 2 DEBUG oslo_concurrency.lockutils [req-8cb12b1f-6db5-4995-9c3a-7457581ed0d3 req-67779e14-9e28-4a43-afb0-dfe87cc804e0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-19251892-5108-4594-94b2-8779316aac1b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.300 2 DEBUG nova.network.neutron [req-8cb12b1f-6db5-4995-9c3a-7457581ed0d3 req-67779e14-9e28-4a43-afb0-dfe87cc804e0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Refreshing network info cache for port fe949a9a-bb0c-4664-8f69-767387ac0552 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.307 2 DEBUG nova.virt.libvirt.driver [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Start _get_guest_xml network_info=[{"id": "fe949a9a-bb0c-4664-8f69-767387ac0552", "address": "fa:16:3e:c0:2b:76", "network": {"id": "e895cece-6b67-405e-b05d-5b86ddbf8385", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-117197461-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fa03c570c52a4c2a9445090389d03c6d", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapfe949a9a-bb", "ovs_interfaceid": "fe949a9a-bb0c-4664-8f69-767387ac0552", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.315 2 WARNING nova.virt.libvirt.driver [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.330 2 DEBUG nova.policy [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '836c60c20a0f48dd994c9d659781fc06', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '49c6a5f4c4c84d7ba686d98befbc981a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.333 2 DEBUG nova.virt.libvirt.host [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.334 2 DEBUG nova.virt.libvirt.host [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.341 2 DEBUG nova.virt.libvirt.host [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.342 2 DEBUG nova.virt.libvirt.host [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.344 2 DEBUG nova.virt.libvirt.driver [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.344 2 DEBUG nova.virt.hardware [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.345 2 DEBUG nova.virt.hardware [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.345 2 DEBUG nova.virt.hardware [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.346 2 DEBUG nova.virt.hardware [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.346 2 DEBUG nova.virt.hardware [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.346 2 DEBUG nova.virt.hardware [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.347 2 DEBUG nova.virt.hardware [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.347 2 DEBUG nova.virt.hardware [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.347 2 DEBUG nova.virt.hardware [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.348 2 DEBUG nova.virt.hardware [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.348 2 DEBUG nova.virt.hardware [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.356 2 DEBUG nova.virt.libvirt.vif [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:18:15Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ServerRescueNegativeTestJSON-server-624951240',display_name='tempest-ServerRescueNegativeTestJSON-server-624951240',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverrescuenegativetestjson-server-624951240',id=90,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='fa03c570c52a4c2a9445090389d03c6d',ramdisk_id='',reservation_id='r-enwnk660',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServerRescueNegativeTestJSON-1968496116',owner_user_name='tempest-Se
rverRescueNegativeTestJSON-1968496116-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:18:17Z,user_data=None,user_id='8c91fa3e559044609ddabc81368d7546',uuid=19251892-5108-4594-94b2-8779316aac1b,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "fe949a9a-bb0c-4664-8f69-767387ac0552", "address": "fa:16:3e:c0:2b:76", "network": {"id": "e895cece-6b67-405e-b05d-5b86ddbf8385", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-117197461-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fa03c570c52a4c2a9445090389d03c6d", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapfe949a9a-bb", "ovs_interfaceid": "fe949a9a-bb0c-4664-8f69-767387ac0552", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.357 2 DEBUG nova.network.os_vif_util [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Converting VIF {"id": "fe949a9a-bb0c-4664-8f69-767387ac0552", "address": "fa:16:3e:c0:2b:76", "network": {"id": "e895cece-6b67-405e-b05d-5b86ddbf8385", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-117197461-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fa03c570c52a4c2a9445090389d03c6d", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapfe949a9a-bb", "ovs_interfaceid": "fe949a9a-bb0c-4664-8f69-767387ac0552", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.358 2 DEBUG nova.network.os_vif_util [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:c0:2b:76,bridge_name='br-int',has_traffic_filtering=True,id=fe949a9a-bb0c-4664-8f69-767387ac0552,network=Network(e895cece-6b67-405e-b05d-5b86ddbf8385),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapfe949a9a-bb') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.360 2 DEBUG nova.objects.instance [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Lazy-loading 'pci_devices' on Instance uuid 19251892-5108-4594-94b2-8779316aac1b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.363 2 DEBUG oslo_concurrency.processutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.098s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.364 2 DEBUG oslo_concurrency.lockutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.365 2 DEBUG oslo_concurrency.lockutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.381 2 DEBUG oslo_concurrency.processutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.405 2 DEBUG nova.virt.libvirt.driver [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:18:21 compute-0 nova_compute[192079]:   <uuid>19251892-5108-4594-94b2-8779316aac1b</uuid>
Oct 02 12:18:21 compute-0 nova_compute[192079]:   <name>instance-0000005a</name>
Oct 02 12:18:21 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:18:21 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:18:21 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:18:21 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:       <nova:name>tempest-ServerRescueNegativeTestJSON-server-624951240</nova:name>
Oct 02 12:18:21 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:18:21</nova:creationTime>
Oct 02 12:18:21 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:18:21 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:18:21 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:18:21 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:18:21 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:18:21 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:18:21 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:18:21 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:18:21 compute-0 nova_compute[192079]:         <nova:user uuid="8c91fa3e559044609ddabc81368d7546">tempest-ServerRescueNegativeTestJSON-1968496116-project-member</nova:user>
Oct 02 12:18:21 compute-0 nova_compute[192079]:         <nova:project uuid="fa03c570c52a4c2a9445090389d03c6d">tempest-ServerRescueNegativeTestJSON-1968496116</nova:project>
Oct 02 12:18:21 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:18:21 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:18:21 compute-0 nova_compute[192079]:         <nova:port uuid="fe949a9a-bb0c-4664-8f69-767387ac0552">
Oct 02 12:18:21 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.13" ipVersion="4"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:18:21 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:18:21 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:18:21 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <system>
Oct 02 12:18:21 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:18:21 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:18:21 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:18:21 compute-0 nova_compute[192079]:       <entry name="serial">19251892-5108-4594-94b2-8779316aac1b</entry>
Oct 02 12:18:21 compute-0 nova_compute[192079]:       <entry name="uuid">19251892-5108-4594-94b2-8779316aac1b</entry>
Oct 02 12:18:21 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     </system>
Oct 02 12:18:21 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:18:21 compute-0 nova_compute[192079]:   <os>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:   </os>
Oct 02 12:18:21 compute-0 nova_compute[192079]:   <features>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:   </features>
Oct 02 12:18:21 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:18:21 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:18:21 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:18:21 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/19251892-5108-4594-94b2-8779316aac1b/disk"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:18:21 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/19251892-5108-4594-94b2-8779316aac1b/disk.config"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:18:21 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:c0:2b:76"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:       <target dev="tapfe949a9a-bb"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:18:21 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/19251892-5108-4594-94b2-8779316aac1b/console.log" append="off"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <video>
Oct 02 12:18:21 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     </video>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:18:21 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:18:21 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:18:21 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:18:21 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:18:21 compute-0 nova_compute[192079]: </domain>
Oct 02 12:18:21 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.406 2 DEBUG nova.compute.manager [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Preparing to wait for external event network-vif-plugged-fe949a9a-bb0c-4664-8f69-767387ac0552 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.406 2 DEBUG oslo_concurrency.lockutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Acquiring lock "19251892-5108-4594-94b2-8779316aac1b-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.406 2 DEBUG oslo_concurrency.lockutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Lock "19251892-5108-4594-94b2-8779316aac1b-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.407 2 DEBUG oslo_concurrency.lockutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Lock "19251892-5108-4594-94b2-8779316aac1b-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.408 2 DEBUG nova.virt.libvirt.vif [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:18:15Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ServerRescueNegativeTestJSON-server-624951240',display_name='tempest-ServerRescueNegativeTestJSON-server-624951240',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverrescuenegativetestjson-server-624951240',id=90,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='fa03c570c52a4c2a9445090389d03c6d',ramdisk_id='',reservation_id='r-enwnk660',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServerRescueNegativeTestJSON-1968496116',owner_user_name='
tempest-ServerRescueNegativeTestJSON-1968496116-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:18:17Z,user_data=None,user_id='8c91fa3e559044609ddabc81368d7546',uuid=19251892-5108-4594-94b2-8779316aac1b,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "fe949a9a-bb0c-4664-8f69-767387ac0552", "address": "fa:16:3e:c0:2b:76", "network": {"id": "e895cece-6b67-405e-b05d-5b86ddbf8385", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-117197461-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fa03c570c52a4c2a9445090389d03c6d", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapfe949a9a-bb", "ovs_interfaceid": "fe949a9a-bb0c-4664-8f69-767387ac0552", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.409 2 DEBUG nova.network.os_vif_util [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Converting VIF {"id": "fe949a9a-bb0c-4664-8f69-767387ac0552", "address": "fa:16:3e:c0:2b:76", "network": {"id": "e895cece-6b67-405e-b05d-5b86ddbf8385", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-117197461-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fa03c570c52a4c2a9445090389d03c6d", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapfe949a9a-bb", "ovs_interfaceid": "fe949a9a-bb0c-4664-8f69-767387ac0552", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.410 2 DEBUG nova.network.os_vif_util [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:c0:2b:76,bridge_name='br-int',has_traffic_filtering=True,id=fe949a9a-bb0c-4664-8f69-767387ac0552,network=Network(e895cece-6b67-405e-b05d-5b86ddbf8385),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapfe949a9a-bb') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.410 2 DEBUG os_vif [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:c0:2b:76,bridge_name='br-int',has_traffic_filtering=True,id=fe949a9a-bb0c-4664-8f69-767387ac0552,network=Network(e895cece-6b67-405e-b05d-5b86ddbf8385),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapfe949a9a-bb') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.411 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.412 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.413 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.416 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.416 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapfe949a9a-bb, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.417 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapfe949a9a-bb, col_values=(('external_ids', {'iface-id': 'fe949a9a-bb0c-4664-8f69-767387ac0552', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:c0:2b:76', 'vm-uuid': '19251892-5108-4594-94b2-8779316aac1b'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.438 2 DEBUG oslo_concurrency.processutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.438 2 DEBUG oslo_concurrency.processutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/12a1b5e0-e387-4171-8ff0-8d3aeb439ca7/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.458 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:21 compute-0 NetworkManager[51160]: <info>  [1759407501.4593] manager: (tapfe949a9a-bb): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/153)
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.463 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.464 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.464 2 INFO os_vif [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:c0:2b:76,bridge_name='br-int',has_traffic_filtering=True,id=fe949a9a-bb0c-4664-8f69-767387ac0552,network=Network(e895cece-6b67-405e-b05d-5b86ddbf8385),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapfe949a9a-bb')
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.470 2 DEBUG oslo_concurrency.processutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/12a1b5e0-e387-4171-8ff0-8d3aeb439ca7/disk 1073741824" returned: 0 in 0.032s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.470 2 DEBUG oslo_concurrency.lockutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.106s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.471 2 DEBUG oslo_concurrency.processutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.492 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.525 2 DEBUG oslo_concurrency.processutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.526 2 DEBUG nova.virt.disk.api [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Checking if we can resize image /var/lib/nova/instances/12a1b5e0-e387-4171-8ff0-8d3aeb439ca7/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.526 2 DEBUG oslo_concurrency.processutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/12a1b5e0-e387-4171-8ff0-8d3aeb439ca7/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.549 2 DEBUG nova.virt.libvirt.driver [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.549 2 DEBUG nova.virt.libvirt.driver [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.550 2 DEBUG nova.virt.libvirt.driver [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] No VIF found with MAC fa:16:3e:c0:2b:76, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.550 2 INFO nova.virt.libvirt.driver [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Using config drive
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.580 2 DEBUG oslo_concurrency.processutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/12a1b5e0-e387-4171-8ff0-8d3aeb439ca7/disk --force-share --output=json" returned: 0 in 0.053s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.580 2 DEBUG nova.virt.disk.api [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Cannot resize image /var/lib/nova/instances/12a1b5e0-e387-4171-8ff0-8d3aeb439ca7/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.580 2 DEBUG nova.objects.instance [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Lazy-loading 'migration_context' on Instance uuid 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.594 2 DEBUG nova.virt.libvirt.driver [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.594 2 DEBUG nova.virt.libvirt.driver [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Ensure instance console log exists: /var/lib/nova/instances/12a1b5e0-e387-4171-8ff0-8d3aeb439ca7/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.594 2 DEBUG oslo_concurrency.lockutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.595 2 DEBUG oslo_concurrency.lockutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:21 compute-0 nova_compute[192079]: 2025-10-02 12:18:21.595 2 DEBUG oslo_concurrency.lockutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:22 compute-0 nova_compute[192079]: 2025-10-02 12:18:22.326 2 INFO nova.virt.libvirt.driver [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Creating config drive at /var/lib/nova/instances/19251892-5108-4594-94b2-8779316aac1b/disk.config
Oct 02 12:18:22 compute-0 nova_compute[192079]: 2025-10-02 12:18:22.331 2 DEBUG oslo_concurrency.processutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/19251892-5108-4594-94b2-8779316aac1b/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmps5z68r4p execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:18:22 compute-0 nova_compute[192079]: 2025-10-02 12:18:22.473 2 DEBUG oslo_concurrency.processutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/19251892-5108-4594-94b2-8779316aac1b/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmps5z68r4p" returned: 0 in 0.142s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:18:22 compute-0 kernel: tapfe949a9a-bb: entered promiscuous mode
Oct 02 12:18:22 compute-0 NetworkManager[51160]: <info>  [1759407502.5653] manager: (tapfe949a9a-bb): new Tun device (/org/freedesktop/NetworkManager/Devices/154)
Oct 02 12:18:22 compute-0 ovn_controller[94336]: 2025-10-02T12:18:22Z|00303|binding|INFO|Claiming lport fe949a9a-bb0c-4664-8f69-767387ac0552 for this chassis.
Oct 02 12:18:22 compute-0 ovn_controller[94336]: 2025-10-02T12:18:22Z|00304|binding|INFO|fe949a9a-bb0c-4664-8f69-767387ac0552: Claiming fa:16:3e:c0:2b:76 10.100.0.13
Oct 02 12:18:22 compute-0 nova_compute[192079]: 2025-10-02 12:18:22.580 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:22 compute-0 nova_compute[192079]: 2025-10-02 12:18:22.592 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:22.601 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:c0:2b:76 10.100.0.13'], port_security=['fa:16:3e:c0:2b:76 10.100.0.13'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.13/28', 'neutron:device_id': '19251892-5108-4594-94b2-8779316aac1b', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-e895cece-6b67-405e-b05d-5b86ddbf8385', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fa03c570c52a4c2a9445090389d03c6d', 'neutron:revision_number': '2', 'neutron:security_group_ids': '86713f8f-e4ad-44d5-8c6e-92e3b3c5f67c', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=42f687d5-26a0-4ae5-91cd-f49120fff442, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=fe949a9a-bb0c-4664-8f69-767387ac0552) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:22.603 103294 INFO neutron.agent.ovn.metadata.agent [-] Port fe949a9a-bb0c-4664-8f69-767387ac0552 in datapath e895cece-6b67-405e-b05d-5b86ddbf8385 bound to our chassis
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:22.606 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network e895cece-6b67-405e-b05d-5b86ddbf8385
Oct 02 12:18:22 compute-0 systemd-udevd[233836]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:22.627 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c1b58186-4e0f-4856-84dd-0e5811f3aa4b]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:22.628 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tape895cece-61 in ovnmeta-e895cece-6b67-405e-b05d-5b86ddbf8385 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:22.630 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tape895cece-60 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:22.630 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[591f90bb-2381-4705-8d51-0640342b92a3]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:22.631 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0714bf24-7462-4c0f-a48c-a02d6b3d9b9e]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:22 compute-0 NetworkManager[51160]: <info>  [1759407502.6388] device (tapfe949a9a-bb): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:18:22 compute-0 systemd-machined[152150]: New machine qemu-42-instance-0000005a.
Oct 02 12:18:22 compute-0 NetworkManager[51160]: <info>  [1759407502.6397] device (tapfe949a9a-bb): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:22.648 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[eaf7b74a-62ea-4bd7-8ed1-2a0f9c78650a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:22 compute-0 nova_compute[192079]: 2025-10-02 12:18:22.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:18:22 compute-0 nova_compute[192079]: 2025-10-02 12:18:22.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:18:22 compute-0 systemd[1]: Started Virtual Machine qemu-42-instance-0000005a.
Oct 02 12:18:22 compute-0 nova_compute[192079]: 2025-10-02 12:18:22.679 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:22.682 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[86aef7c9-4ac9-42e4-ac38-eb23f7e3c5eb]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:22 compute-0 nova_compute[192079]: 2025-10-02 12:18:22.695 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:22 compute-0 nova_compute[192079]: 2025-10-02 12:18:22.695 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:22 compute-0 nova_compute[192079]: 2025-10-02 12:18:22.695 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:22 compute-0 nova_compute[192079]: 2025-10-02 12:18:22.696 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:18:22 compute-0 ovn_controller[94336]: 2025-10-02T12:18:22Z|00305|binding|INFO|Setting lport fe949a9a-bb0c-4664-8f69-767387ac0552 ovn-installed in OVS
Oct 02 12:18:22 compute-0 ovn_controller[94336]: 2025-10-02T12:18:22Z|00306|binding|INFO|Setting lport fe949a9a-bb0c-4664-8f69-767387ac0552 up in Southbound
Oct 02 12:18:22 compute-0 nova_compute[192079]: 2025-10-02 12:18:22.716 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:22.723 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[97742ad8-a73f-4e36-a39a-e9df622586c7]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:22.728 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[530c3f61-417e-4f7c-ba91-58548dafdbfb]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:22 compute-0 NetworkManager[51160]: <info>  [1759407502.7292] manager: (tape895cece-60): new Veth device (/org/freedesktop/NetworkManager/Devices/155)
Oct 02 12:18:22 compute-0 nova_compute[192079]: 2025-10-02 12:18:22.753 2 DEBUG nova.network.neutron [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Successfully created port: b44f12b2-8936-4a7b-815a-12f5996325c7 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:22.754 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[dc588d52-f358-4ea3-b557-80acdfec5724]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:22.758 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[18a4c0e1-0660-4221-9d2d-9d5969a784ff]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:22 compute-0 NetworkManager[51160]: <info>  [1759407502.7764] device (tape895cece-60): carrier: link connected
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:22.780 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[ab5893c2-e53a-489d-a7f1-456bb0e6c1ee]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:22.794 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[08560c76-4028-4699-a4c2-b4d1a3f56009]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tape895cece-61'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:03:96:29'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 98], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 550040, 'reachable_time': 18748, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 233870, 'error': None, 'target': 'ovnmeta-e895cece-6b67-405e-b05d-5b86ddbf8385', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:22.812 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8b622cc3-5471-4379-9684-c38f87d53734]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe03:9629'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 550040, 'tstamp': 550040}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 233871, 'error': None, 'target': 'ovnmeta-e895cece-6b67-405e-b05d-5b86ddbf8385', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:22 compute-0 nova_compute[192079]: 2025-10-02 12:18:22.809 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/19251892-5108-4594-94b2-8779316aac1b/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:22.828 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0776fafc-e542-4e19-afc6-676a26efba52]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tape895cece-61'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:03:96:29'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 2, 'tx_packets': 1, 'rx_bytes': 196, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 2, 'tx_packets': 1, 'rx_bytes': 196, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 98], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 550040, 'reachable_time': 18748, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 2, 'inoctets': 168, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 2, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 168, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 2, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 233873, 'error': None, 'target': 'ovnmeta-e895cece-6b67-405e-b05d-5b86ddbf8385', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:22.866 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[18d130d3-5752-48bc-ae7a-e7804a9b0cb9]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:22 compute-0 nova_compute[192079]: 2025-10-02 12:18:22.897 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/19251892-5108-4594-94b2-8779316aac1b/disk --force-share --output=json" returned: 0 in 0.088s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:18:22 compute-0 nova_compute[192079]: 2025-10-02 12:18:22.898 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/19251892-5108-4594-94b2-8779316aac1b/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:22.930 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e9147cf3-d5f2-4125-a0da-ed6cdf2cc3b0]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:22.932 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tape895cece-60, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:22.932 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:22.933 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tape895cece-60, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:18:22 compute-0 kernel: tape895cece-60: entered promiscuous mode
Oct 02 12:18:22 compute-0 NetworkManager[51160]: <info>  [1759407502.9365] manager: (tape895cece-60): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/156)
Oct 02 12:18:22 compute-0 nova_compute[192079]: 2025-10-02 12:18:22.936 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:22.941 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tape895cece-60, col_values=(('external_ids', {'iface-id': '893d58a9-c253-4923-8cf4-03927d247550'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:18:22 compute-0 ovn_controller[94336]: 2025-10-02T12:18:22Z|00307|binding|INFO|Releasing lport 893d58a9-c253-4923-8cf4-03927d247550 from this chassis (sb_readonly=0)
Oct 02 12:18:22 compute-0 nova_compute[192079]: 2025-10-02 12:18:22.943 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:22.944 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/e895cece-6b67-405e-b05d-5b86ddbf8385.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/e895cece-6b67-405e-b05d-5b86ddbf8385.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:22.945 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[039ef1b6-c332-4433-8373-1bb33faf1b09]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:22.946 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-e895cece-6b67-405e-b05d-5b86ddbf8385
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/e895cece-6b67-405e-b05d-5b86ddbf8385.pid.haproxy
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID e895cece-6b67-405e-b05d-5b86ddbf8385
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:18:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:22.949 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-e895cece-6b67-405e-b05d-5b86ddbf8385', 'env', 'PROCESS_TAG=haproxy-e895cece-6b67-405e-b05d-5b86ddbf8385', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/e895cece-6b67-405e-b05d-5b86ddbf8385.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:18:22 compute-0 nova_compute[192079]: 2025-10-02 12:18:22.954 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:22 compute-0 nova_compute[192079]: 2025-10-02 12:18:22.965 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/19251892-5108-4594-94b2-8779316aac1b/disk --force-share --output=json" returned: 0 in 0.068s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.133 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.135 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5735MB free_disk=73.34839630126953GB free_vcpus=7 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.135 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.135 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.158 2 DEBUG nova.network.neutron [req-8cb12b1f-6db5-4995-9c3a-7457581ed0d3 req-67779e14-9e28-4a43-afb0-dfe87cc804e0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Updated VIF entry in instance network info cache for port fe949a9a-bb0c-4664-8f69-767387ac0552. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.159 2 DEBUG nova.network.neutron [req-8cb12b1f-6db5-4995-9c3a-7457581ed0d3 req-67779e14-9e28-4a43-afb0-dfe87cc804e0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Updating instance_info_cache with network_info: [{"id": "fe949a9a-bb0c-4664-8f69-767387ac0552", "address": "fa:16:3e:c0:2b:76", "network": {"id": "e895cece-6b67-405e-b05d-5b86ddbf8385", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-117197461-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fa03c570c52a4c2a9445090389d03c6d", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapfe949a9a-bb", "ovs_interfaceid": "fe949a9a-bb0c-4664-8f69-767387ac0552", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.182 2 DEBUG nova.compute.manager [req-e33a89e4-7d14-46cf-a1eb-a2e55efc7588 req-8788d59b-023e-46bf-a9a0-947a54f27c54 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Received event network-vif-plugged-fe949a9a-bb0c-4664-8f69-767387ac0552 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.182 2 DEBUG oslo_concurrency.lockutils [req-e33a89e4-7d14-46cf-a1eb-a2e55efc7588 req-8788d59b-023e-46bf-a9a0-947a54f27c54 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "19251892-5108-4594-94b2-8779316aac1b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.183 2 DEBUG oslo_concurrency.lockutils [req-e33a89e4-7d14-46cf-a1eb-a2e55efc7588 req-8788d59b-023e-46bf-a9a0-947a54f27c54 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "19251892-5108-4594-94b2-8779316aac1b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.183 2 DEBUG oslo_concurrency.lockutils [req-e33a89e4-7d14-46cf-a1eb-a2e55efc7588 req-8788d59b-023e-46bf-a9a0-947a54f27c54 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "19251892-5108-4594-94b2-8779316aac1b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.183 2 DEBUG nova.compute.manager [req-e33a89e4-7d14-46cf-a1eb-a2e55efc7588 req-8788d59b-023e-46bf-a9a0-947a54f27c54 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Processing event network-vif-plugged-fe949a9a-bb0c-4664-8f69-767387ac0552 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.185 2 DEBUG oslo_concurrency.lockutils [req-8cb12b1f-6db5-4995-9c3a-7457581ed0d3 req-67779e14-9e28-4a43-afb0-dfe87cc804e0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-19251892-5108-4594-94b2-8779316aac1b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.258 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance 19251892-5108-4594-94b2-8779316aac1b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.258 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.258 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 2 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.259 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=768MB phys_disk=79GB used_disk=2GB total_vcpus=8 used_vcpus=2 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:18:23 compute-0 podman[233918]: 2025-10-02 12:18:23.295802022 +0000 UTC m=+0.041851822 container create 176db12ef7c818d4a751772c7dfb5fd3f0378430393a635f3935e277a2751e76 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-e895cece-6b67-405e-b05d-5b86ddbf8385, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:18:23 compute-0 systemd[1]: Started libpod-conmon-176db12ef7c818d4a751772c7dfb5fd3f0378430393a635f3935e277a2751e76.scope.
Oct 02 12:18:23 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:18:23 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/0cbb0246dfd002cba1dcd1e76a2e4d8a9b105389b19813e08f176bd1f2fa392c/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:18:23 compute-0 podman[233918]: 2025-10-02 12:18:23.274395169 +0000 UTC m=+0.020444979 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:18:23 compute-0 podman[233918]: 2025-10-02 12:18:23.37640264 +0000 UTC m=+0.122452470 container init 176db12ef7c818d4a751772c7dfb5fd3f0378430393a635f3935e277a2751e76 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-e895cece-6b67-405e-b05d-5b86ddbf8385, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true)
Oct 02 12:18:23 compute-0 podman[233932]: 2025-10-02 12:18:23.377303234 +0000 UTC m=+0.047353842 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=edpm, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes 
Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0)
Oct 02 12:18:23 compute-0 podman[233918]: 2025-10-02 12:18:23.381962411 +0000 UTC m=+0.128012211 container start 176db12ef7c818d4a751772c7dfb5fd3f0378430393a635f3935e277a2751e76 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-e895cece-6b67-405e-b05d-5b86ddbf8385, io.buildah.version=1.41.3, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS)
Oct 02 12:18:23 compute-0 neutron-haproxy-ovnmeta-e895cece-6b67-405e-b05d-5b86ddbf8385[233935]: [NOTICE]   (233958) : New worker (233960) forked
Oct 02 12:18:23 compute-0 neutron-haproxy-ovnmeta-e895cece-6b67-405e-b05d-5b86ddbf8385[233935]: [NOTICE]   (233958) : Loading success.
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.428 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407503.4278438, 19251892-5108-4594-94b2-8779316aac1b => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.428 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 19251892-5108-4594-94b2-8779316aac1b] VM Started (Lifecycle Event)
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.429 2 DEBUG nova.compute.manager [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.432 2 DEBUG nova.virt.libvirt.driver [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.437 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.439 2 INFO nova.virt.libvirt.driver [-] [instance: 19251892-5108-4594-94b2-8779316aac1b] Instance spawned successfully.
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.439 2 DEBUG nova.virt.libvirt.driver [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.477 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 19251892-5108-4594-94b2-8779316aac1b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.479 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.483 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 19251892-5108-4594-94b2-8779316aac1b] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.544 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 19251892-5108-4594-94b2-8779316aac1b] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.545 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407503.4280202, 19251892-5108-4594-94b2-8779316aac1b => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.545 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 19251892-5108-4594-94b2-8779316aac1b] VM Paused (Lifecycle Event)
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.552 2 DEBUG nova.virt.libvirt.driver [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.552 2 DEBUG nova.virt.libvirt.driver [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.553 2 DEBUG nova.virt.libvirt.driver [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.553 2 DEBUG nova.virt.libvirt.driver [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.553 2 DEBUG nova.virt.libvirt.driver [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.554 2 DEBUG nova.virt.libvirt.driver [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.557 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.558 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.422s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.582 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 19251892-5108-4594-94b2-8779316aac1b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.585 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407503.4318025, 19251892-5108-4594-94b2-8779316aac1b => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.586 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 19251892-5108-4594-94b2-8779316aac1b] VM Resumed (Lifecycle Event)
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.634 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 19251892-5108-4594-94b2-8779316aac1b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.638 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 19251892-5108-4594-94b2-8779316aac1b] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.715 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 19251892-5108-4594-94b2-8779316aac1b] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.727 2 INFO nova.compute.manager [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Took 6.58 seconds to spawn the instance on the hypervisor.
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.728 2 DEBUG nova.compute.manager [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.951 2 INFO nova.compute.manager [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Took 7.20 seconds to build instance.
Oct 02 12:18:23 compute-0 nova_compute[192079]: 2025-10-02 12:18:23.979 2 DEBUG oslo_concurrency.lockutils [None req-dc75cb7f-9ef6-4c29-9101-d8cfc6f57043 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Lock "19251892-5108-4594-94b2-8779316aac1b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 7.341s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:24 compute-0 nova_compute[192079]: 2025-10-02 12:18:24.558 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:18:24 compute-0 nova_compute[192079]: 2025-10-02 12:18:24.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:18:24 compute-0 nova_compute[192079]: 2025-10-02 12:18:24.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:18:24 compute-0 nova_compute[192079]: 2025-10-02 12:18:24.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:18:24 compute-0 nova_compute[192079]: 2025-10-02 12:18:24.734 2 DEBUG nova.network.neutron [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Successfully updated port: b44f12b2-8936-4a7b-815a-12f5996325c7 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:18:24 compute-0 nova_compute[192079]: 2025-10-02 12:18:24.751 2 DEBUG oslo_concurrency.lockutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Acquiring lock "refresh_cache-12a1b5e0-e387-4171-8ff0-8d3aeb439ca7" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:18:24 compute-0 nova_compute[192079]: 2025-10-02 12:18:24.751 2 DEBUG oslo_concurrency.lockutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Acquired lock "refresh_cache-12a1b5e0-e387-4171-8ff0-8d3aeb439ca7" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:18:24 compute-0 nova_compute[192079]: 2025-10-02 12:18:24.752 2 DEBUG nova.network.neutron [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:18:25 compute-0 nova_compute[192079]: 2025-10-02 12:18:25.202 2 DEBUG nova.network.neutron [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:18:25 compute-0 nova_compute[192079]: 2025-10-02 12:18:25.339 2 DEBUG nova.compute.manager [req-8d31a323-560d-4ef6-a3d9-d98725c7f878 req-5803fab9-0506-4642-8809-fd9036cacc3c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Received event network-vif-plugged-fe949a9a-bb0c-4664-8f69-767387ac0552 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:18:25 compute-0 nova_compute[192079]: 2025-10-02 12:18:25.340 2 DEBUG oslo_concurrency.lockutils [req-8d31a323-560d-4ef6-a3d9-d98725c7f878 req-5803fab9-0506-4642-8809-fd9036cacc3c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "19251892-5108-4594-94b2-8779316aac1b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:25 compute-0 nova_compute[192079]: 2025-10-02 12:18:25.340 2 DEBUG oslo_concurrency.lockutils [req-8d31a323-560d-4ef6-a3d9-d98725c7f878 req-5803fab9-0506-4642-8809-fd9036cacc3c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "19251892-5108-4594-94b2-8779316aac1b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:25 compute-0 nova_compute[192079]: 2025-10-02 12:18:25.340 2 DEBUG oslo_concurrency.lockutils [req-8d31a323-560d-4ef6-a3d9-d98725c7f878 req-5803fab9-0506-4642-8809-fd9036cacc3c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "19251892-5108-4594-94b2-8779316aac1b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:25 compute-0 nova_compute[192079]: 2025-10-02 12:18:25.341 2 DEBUG nova.compute.manager [req-8d31a323-560d-4ef6-a3d9-d98725c7f878 req-5803fab9-0506-4642-8809-fd9036cacc3c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] No waiting events found dispatching network-vif-plugged-fe949a9a-bb0c-4664-8f69-767387ac0552 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:18:25 compute-0 nova_compute[192079]: 2025-10-02 12:18:25.341 2 WARNING nova.compute.manager [req-8d31a323-560d-4ef6-a3d9-d98725c7f878 req-5803fab9-0506-4642-8809-fd9036cacc3c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Received unexpected event network-vif-plugged-fe949a9a-bb0c-4664-8f69-767387ac0552 for instance with vm_state active and task_state None.
Oct 02 12:18:25 compute-0 nova_compute[192079]: 2025-10-02 12:18:25.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:18:25 compute-0 nova_compute[192079]: 2025-10-02 12:18:25.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:18:25 compute-0 nova_compute[192079]: 2025-10-02 12:18:25.970 2 DEBUG nova.compute.manager [req-f196b5f5-ca21-47ef-945d-927927ae0c11 req-45014e58-6505-427a-8dcb-9c6ae3b09b77 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Received event network-changed-b44f12b2-8936-4a7b-815a-12f5996325c7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:18:25 compute-0 nova_compute[192079]: 2025-10-02 12:18:25.970 2 DEBUG nova.compute.manager [req-f196b5f5-ca21-47ef-945d-927927ae0c11 req-45014e58-6505-427a-8dcb-9c6ae3b09b77 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Refreshing instance network info cache due to event network-changed-b44f12b2-8936-4a7b-815a-12f5996325c7. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:18:25 compute-0 nova_compute[192079]: 2025-10-02 12:18:25.971 2 DEBUG oslo_concurrency.lockutils [req-f196b5f5-ca21-47ef-945d-927927ae0c11 req-45014e58-6505-427a-8dcb-9c6ae3b09b77 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-12a1b5e0-e387-4171-8ff0-8d3aeb439ca7" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.459 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.493 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.667 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.694 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Skipping network cache update for instance because it is Building. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9871
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.733 2 DEBUG nova.network.neutron [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Updating instance_info_cache with network_info: [{"id": "b44f12b2-8936-4a7b-815a-12f5996325c7", "address": "fa:16:3e:14:7c:1c", "network": {"id": "5b886deb-ac8b-4d5e-a6d4-b19699c6ae92", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1740420896-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "49c6a5f4c4c84d7ba686d98befbc981a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb44f12b2-89", "ovs_interfaceid": "b44f12b2-8936-4a7b-815a-12f5996325c7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.775 2 DEBUG oslo_concurrency.lockutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Releasing lock "refresh_cache-12a1b5e0-e387-4171-8ff0-8d3aeb439ca7" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.776 2 DEBUG nova.compute.manager [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Instance network_info: |[{"id": "b44f12b2-8936-4a7b-815a-12f5996325c7", "address": "fa:16:3e:14:7c:1c", "network": {"id": "5b886deb-ac8b-4d5e-a6d4-b19699c6ae92", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1740420896-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "49c6a5f4c4c84d7ba686d98befbc981a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb44f12b2-89", "ovs_interfaceid": "b44f12b2-8936-4a7b-815a-12f5996325c7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.777 2 DEBUG oslo_concurrency.lockutils [req-f196b5f5-ca21-47ef-945d-927927ae0c11 req-45014e58-6505-427a-8dcb-9c6ae3b09b77 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-12a1b5e0-e387-4171-8ff0-8d3aeb439ca7" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.778 2 DEBUG nova.network.neutron [req-f196b5f5-ca21-47ef-945d-927927ae0c11 req-45014e58-6505-427a-8dcb-9c6ae3b09b77 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Refreshing network info cache for port b44f12b2-8936-4a7b-815a-12f5996325c7 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.784 2 DEBUG nova.virt.libvirt.driver [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Start _get_guest_xml network_info=[{"id": "b44f12b2-8936-4a7b-815a-12f5996325c7", "address": "fa:16:3e:14:7c:1c", "network": {"id": "5b886deb-ac8b-4d5e-a6d4-b19699c6ae92", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1740420896-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "49c6a5f4c4c84d7ba686d98befbc981a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb44f12b2-89", "ovs_interfaceid": "b44f12b2-8936-4a7b-815a-12f5996325c7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.791 2 WARNING nova.virt.libvirt.driver [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.840 2 DEBUG nova.virt.libvirt.host [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.842 2 DEBUG nova.virt.libvirt.host [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.848 2 DEBUG nova.virt.libvirt.host [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.849 2 DEBUG nova.virt.libvirt.host [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.850 2 DEBUG nova.virt.libvirt.driver [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.851 2 DEBUG nova.virt.hardware [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.851 2 DEBUG nova.virt.hardware [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.852 2 DEBUG nova.virt.hardware [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.852 2 DEBUG nova.virt.hardware [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.853 2 DEBUG nova.virt.hardware [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.853 2 DEBUG nova.virt.hardware [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.853 2 DEBUG nova.virt.hardware [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.854 2 DEBUG nova.virt.hardware [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.854 2 DEBUG nova.virt.hardware [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.854 2 DEBUG nova.virt.hardware [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.855 2 DEBUG nova.virt.hardware [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.859 2 DEBUG nova.virt.libvirt.vif [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:18:18Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ListServersNegativeTestJSON-server-636246405',display_name='tempest-ListServersNegativeTestJSON-server-636246405-3',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-listserversnegativetestjson-server-636246405-3',id=94,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=2,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='49c6a5f4c4c84d7ba686d98befbc981a',ramdisk_id='',reservation_id='r-pj0lxy21',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ListServersNegativeTestJSON-1724341867',owner_user_name='tempest-Li
stServersNegativeTestJSON-1724341867-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:18:20Z,user_data=None,user_id='836c60c20a0f48dd994c9d659781fc06',uuid=12a1b5e0-e387-4171-8ff0-8d3aeb439ca7,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "b44f12b2-8936-4a7b-815a-12f5996325c7", "address": "fa:16:3e:14:7c:1c", "network": {"id": "5b886deb-ac8b-4d5e-a6d4-b19699c6ae92", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1740420896-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "49c6a5f4c4c84d7ba686d98befbc981a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb44f12b2-89", "ovs_interfaceid": "b44f12b2-8936-4a7b-815a-12f5996325c7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.859 2 DEBUG nova.network.os_vif_util [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Converting VIF {"id": "b44f12b2-8936-4a7b-815a-12f5996325c7", "address": "fa:16:3e:14:7c:1c", "network": {"id": "5b886deb-ac8b-4d5e-a6d4-b19699c6ae92", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1740420896-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "49c6a5f4c4c84d7ba686d98befbc981a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb44f12b2-89", "ovs_interfaceid": "b44f12b2-8936-4a7b-815a-12f5996325c7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.861 2 DEBUG nova.network.os_vif_util [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:14:7c:1c,bridge_name='br-int',has_traffic_filtering=True,id=b44f12b2-8936-4a7b-815a-12f5996325c7,network=Network(5b886deb-ac8b-4d5e-a6d4-b19699c6ae92),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapb44f12b2-89') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.862 2 DEBUG nova.objects.instance [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Lazy-loading 'pci_devices' on Instance uuid 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.887 2 DEBUG nova.virt.libvirt.driver [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:18:26 compute-0 nova_compute[192079]:   <uuid>12a1b5e0-e387-4171-8ff0-8d3aeb439ca7</uuid>
Oct 02 12:18:26 compute-0 nova_compute[192079]:   <name>instance-0000005e</name>
Oct 02 12:18:26 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:18:26 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:18:26 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:18:26 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:       <nova:name>tempest-ListServersNegativeTestJSON-server-636246405-3</nova:name>
Oct 02 12:18:26 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:18:26</nova:creationTime>
Oct 02 12:18:26 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:18:26 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:18:26 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:18:26 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:18:26 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:18:26 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:18:26 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:18:26 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:18:26 compute-0 nova_compute[192079]:         <nova:user uuid="836c60c20a0f48dd994c9d659781fc06">tempest-ListServersNegativeTestJSON-1724341867-project-member</nova:user>
Oct 02 12:18:26 compute-0 nova_compute[192079]:         <nova:project uuid="49c6a5f4c4c84d7ba686d98befbc981a">tempest-ListServersNegativeTestJSON-1724341867</nova:project>
Oct 02 12:18:26 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:18:26 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:18:26 compute-0 nova_compute[192079]:         <nova:port uuid="b44f12b2-8936-4a7b-815a-12f5996325c7">
Oct 02 12:18:26 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.7" ipVersion="4"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:18:26 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:18:26 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:18:26 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <system>
Oct 02 12:18:26 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:18:26 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:18:26 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:18:26 compute-0 nova_compute[192079]:       <entry name="serial">12a1b5e0-e387-4171-8ff0-8d3aeb439ca7</entry>
Oct 02 12:18:26 compute-0 nova_compute[192079]:       <entry name="uuid">12a1b5e0-e387-4171-8ff0-8d3aeb439ca7</entry>
Oct 02 12:18:26 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     </system>
Oct 02 12:18:26 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:18:26 compute-0 nova_compute[192079]:   <os>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:   </os>
Oct 02 12:18:26 compute-0 nova_compute[192079]:   <features>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:   </features>
Oct 02 12:18:26 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:18:26 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:18:26 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:18:26 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/12a1b5e0-e387-4171-8ff0-8d3aeb439ca7/disk"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:18:26 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/12a1b5e0-e387-4171-8ff0-8d3aeb439ca7/disk.config"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:18:26 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:14:7c:1c"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:       <target dev="tapb44f12b2-89"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:18:26 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/12a1b5e0-e387-4171-8ff0-8d3aeb439ca7/console.log" append="off"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <video>
Oct 02 12:18:26 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     </video>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:18:26 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:18:26 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:18:26 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:18:26 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:18:26 compute-0 nova_compute[192079]: </domain>
Oct 02 12:18:26 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.895 2 DEBUG nova.compute.manager [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Preparing to wait for external event network-vif-plugged-b44f12b2-8936-4a7b-815a-12f5996325c7 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.895 2 DEBUG oslo_concurrency.lockutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Acquiring lock "12a1b5e0-e387-4171-8ff0-8d3aeb439ca7-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.895 2 DEBUG oslo_concurrency.lockutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Lock "12a1b5e0-e387-4171-8ff0-8d3aeb439ca7-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.895 2 DEBUG oslo_concurrency.lockutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Lock "12a1b5e0-e387-4171-8ff0-8d3aeb439ca7-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.896 2 DEBUG nova.virt.libvirt.vif [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:18:18Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ListServersNegativeTestJSON-server-636246405',display_name='tempest-ListServersNegativeTestJSON-server-636246405-3',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-listserversnegativetestjson-server-636246405-3',id=94,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=2,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='49c6a5f4c4c84d7ba686d98befbc981a',ramdisk_id='',reservation_id='r-pj0lxy21',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ListServersNegativeTestJSON-1724341867',owner_user_name='
tempest-ListServersNegativeTestJSON-1724341867-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:18:20Z,user_data=None,user_id='836c60c20a0f48dd994c9d659781fc06',uuid=12a1b5e0-e387-4171-8ff0-8d3aeb439ca7,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "b44f12b2-8936-4a7b-815a-12f5996325c7", "address": "fa:16:3e:14:7c:1c", "network": {"id": "5b886deb-ac8b-4d5e-a6d4-b19699c6ae92", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1740420896-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "49c6a5f4c4c84d7ba686d98befbc981a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb44f12b2-89", "ovs_interfaceid": "b44f12b2-8936-4a7b-815a-12f5996325c7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.897 2 DEBUG nova.network.os_vif_util [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Converting VIF {"id": "b44f12b2-8936-4a7b-815a-12f5996325c7", "address": "fa:16:3e:14:7c:1c", "network": {"id": "5b886deb-ac8b-4d5e-a6d4-b19699c6ae92", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1740420896-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "49c6a5f4c4c84d7ba686d98befbc981a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb44f12b2-89", "ovs_interfaceid": "b44f12b2-8936-4a7b-815a-12f5996325c7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.898 2 DEBUG nova.network.os_vif_util [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:14:7c:1c,bridge_name='br-int',has_traffic_filtering=True,id=b44f12b2-8936-4a7b-815a-12f5996325c7,network=Network(5b886deb-ac8b-4d5e-a6d4-b19699c6ae92),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapb44f12b2-89') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.898 2 DEBUG os_vif [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:14:7c:1c,bridge_name='br-int',has_traffic_filtering=True,id=b44f12b2-8936-4a7b-815a-12f5996325c7,network=Network(5b886deb-ac8b-4d5e-a6d4-b19699c6ae92),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapb44f12b2-89') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.899 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.899 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.900 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.903 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.903 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapb44f12b2-89, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.904 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapb44f12b2-89, col_values=(('external_ids', {'iface-id': 'b44f12b2-8936-4a7b-815a-12f5996325c7', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:14:7c:1c', 'vm-uuid': '12a1b5e0-e387-4171-8ff0-8d3aeb439ca7'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.906 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:26 compute-0 NetworkManager[51160]: <info>  [1759407506.9067] manager: (tapb44f12b2-89): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/157)
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.908 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.918 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.919 2 INFO os_vif [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:14:7c:1c,bridge_name='br-int',has_traffic_filtering=True,id=b44f12b2-8936-4a7b-815a-12f5996325c7,network=Network(5b886deb-ac8b-4d5e-a6d4-b19699c6ae92),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapb44f12b2-89')
Oct 02 12:18:26 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.999 2 DEBUG nova.virt.libvirt.driver [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:18:27 compute-0 nova_compute[192079]: 2025-10-02 12:18:26.999 2 DEBUG nova.virt.libvirt.driver [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:18:27 compute-0 nova_compute[192079]: 2025-10-02 12:18:27.000 2 DEBUG nova.virt.libvirt.driver [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] No VIF found with MAC fa:16:3e:14:7c:1c, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:18:27 compute-0 nova_compute[192079]: 2025-10-02 12:18:27.000 2 INFO nova.virt.libvirt.driver [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Using config drive
Oct 02 12:18:27 compute-0 nova_compute[192079]: 2025-10-02 12:18:27.339 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "refresh_cache-19251892-5108-4594-94b2-8779316aac1b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:18:27 compute-0 nova_compute[192079]: 2025-10-02 12:18:27.340 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquired lock "refresh_cache-19251892-5108-4594-94b2-8779316aac1b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:18:27 compute-0 nova_compute[192079]: 2025-10-02 12:18:27.340 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 19251892-5108-4594-94b2-8779316aac1b] Forcefully refreshing network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2004
Oct 02 12:18:27 compute-0 nova_compute[192079]: 2025-10-02 12:18:27.341 2 DEBUG nova.objects.instance [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lazy-loading 'info_cache' on Instance uuid 19251892-5108-4594-94b2-8779316aac1b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:18:28 compute-0 nova_compute[192079]: 2025-10-02 12:18:28.184 2 INFO nova.virt.libvirt.driver [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Creating config drive at /var/lib/nova/instances/12a1b5e0-e387-4171-8ff0-8d3aeb439ca7/disk.config
Oct 02 12:18:28 compute-0 nova_compute[192079]: 2025-10-02 12:18:28.189 2 DEBUG oslo_concurrency.processutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/12a1b5e0-e387-4171-8ff0-8d3aeb439ca7/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp6v2bgnlj execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:18:28 compute-0 nova_compute[192079]: 2025-10-02 12:18:28.314 2 DEBUG oslo_concurrency.processutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/12a1b5e0-e387-4171-8ff0-8d3aeb439ca7/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp6v2bgnlj" returned: 0 in 0.125s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:18:28 compute-0 NetworkManager[51160]: <info>  [1759407508.3746] manager: (tapb44f12b2-89): new Tun device (/org/freedesktop/NetworkManager/Devices/158)
Oct 02 12:18:28 compute-0 kernel: tapb44f12b2-89: entered promiscuous mode
Oct 02 12:18:28 compute-0 nova_compute[192079]: 2025-10-02 12:18:28.378 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:28 compute-0 ovn_controller[94336]: 2025-10-02T12:18:28Z|00308|binding|INFO|Claiming lport b44f12b2-8936-4a7b-815a-12f5996325c7 for this chassis.
Oct 02 12:18:28 compute-0 ovn_controller[94336]: 2025-10-02T12:18:28Z|00309|binding|INFO|b44f12b2-8936-4a7b-815a-12f5996325c7: Claiming fa:16:3e:14:7c:1c 10.100.0.7
Oct 02 12:18:28 compute-0 nova_compute[192079]: 2025-10-02 12:18:28.384 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:28.393 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:14:7c:1c 10.100.0.7'], port_security=['fa:16:3e:14:7c:1c 10.100.0.7'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.7/28', 'neutron:device_id': '12a1b5e0-e387-4171-8ff0-8d3aeb439ca7', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-5b886deb-ac8b-4d5e-a6d4-b19699c6ae92', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '49c6a5f4c4c84d7ba686d98befbc981a', 'neutron:revision_number': '2', 'neutron:security_group_ids': '55283a5f-31d5-4a4d-bc9f-4b8e3fc9f6b5', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=2cc3415d-eee4-499b-a06c-93196fe04768, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=5, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=b44f12b2-8936-4a7b-815a-12f5996325c7) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:28.395 103294 INFO neutron.agent.ovn.metadata.agent [-] Port b44f12b2-8936-4a7b-815a-12f5996325c7 in datapath 5b886deb-ac8b-4d5e-a6d4-b19699c6ae92 bound to our chassis
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:28.397 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 5b886deb-ac8b-4d5e-a6d4-b19699c6ae92
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:28.410 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c6cd3e6f-6544-4534-87b7-50aec6761d3e]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:28.411 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap5b886deb-a1 in ovnmeta-5b886deb-ac8b-4d5e-a6d4-b19699c6ae92 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:28.412 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap5b886deb-a0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:28.413 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[eb03f5ed-1b09-44d5-ac8e-22cc0528e8ff]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:28.413 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[745691aa-74ce-4e81-9b68-760b5c0980d6]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:28 compute-0 systemd-udevd[233990]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:28.426 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[58a3f98a-d365-4ebc-9ca3-9f4fea4c8e04]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:28 compute-0 systemd-machined[152150]: New machine qemu-43-instance-0000005e.
Oct 02 12:18:28 compute-0 NetworkManager[51160]: <info>  [1759407508.4360] device (tapb44f12b2-89): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:18:28 compute-0 NetworkManager[51160]: <info>  [1759407508.4372] device (tapb44f12b2-89): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:18:28 compute-0 systemd[1]: Started Virtual Machine qemu-43-instance-0000005e.
Oct 02 12:18:28 compute-0 nova_compute[192079]: 2025-10-02 12:18:28.438 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:28 compute-0 ovn_controller[94336]: 2025-10-02T12:18:28Z|00310|binding|INFO|Setting lport b44f12b2-8936-4a7b-815a-12f5996325c7 ovn-installed in OVS
Oct 02 12:18:28 compute-0 ovn_controller[94336]: 2025-10-02T12:18:28Z|00311|binding|INFO|Setting lport b44f12b2-8936-4a7b-815a-12f5996325c7 up in Southbound
Oct 02 12:18:28 compute-0 nova_compute[192079]: 2025-10-02 12:18:28.441 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:28.455 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[08c4f879-06a5-465b-af79-52e0437c82c9]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:28.490 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[7fc8837c-cedd-4b6a-89f0-a400f0c72f38]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:28 compute-0 systemd-udevd[233994]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:18:28 compute-0 NetworkManager[51160]: <info>  [1759407508.5156] manager: (tap5b886deb-a0): new Veth device (/org/freedesktop/NetworkManager/Devices/159)
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:28.515 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[cc83320c-398c-4490-b280-0dcd23ba3922]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:28.550 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[1ed101c3-a18f-476e-8722-b875e617eac7]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:28.553 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[a0b57656-3b07-4ea6-9fc0-888409628ba5]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:28 compute-0 NetworkManager[51160]: <info>  [1759407508.5746] device (tap5b886deb-a0): carrier: link connected
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:28.580 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[186821b3-799a-40fe-8d80-2c79bc83eec7]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:28.595 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b5da0fd7-7405-4b21-81f1-ec175d15568f]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap5b886deb-a1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:77:39:f1'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 100], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 550620, 'reachable_time': 44286, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 234022, 'error': None, 'target': 'ovnmeta-5b886deb-ac8b-4d5e-a6d4-b19699c6ae92', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:28.609 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[867f54bb-c0a2-4743-9256-2efbf53ef67f]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe77:39f1'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 550620, 'tstamp': 550620}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 234025, 'error': None, 'target': 'ovnmeta-5b886deb-ac8b-4d5e-a6d4-b19699c6ae92', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:28.626 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1a10ed8c-2838-4d28-b321-5683c6f7d0fa]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap5b886deb-a1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:77:39:f1'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 100], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 550620, 'reachable_time': 44286, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 234030, 'error': None, 'target': 'ovnmeta-5b886deb-ac8b-4d5e-a6d4-b19699c6ae92', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:28.652 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0e54729e-fa78-40a0-93f8-353ac75e6d38]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:28.707 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[124b4849-52eb-43f5-a255-37dda9c7a1e3]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:28.709 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap5b886deb-a0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:28.710 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:28.711 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap5b886deb-a0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:18:28 compute-0 NetworkManager[51160]: <info>  [1759407508.7150] manager: (tap5b886deb-a0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/160)
Oct 02 12:18:28 compute-0 kernel: tap5b886deb-a0: entered promiscuous mode
Oct 02 12:18:28 compute-0 nova_compute[192079]: 2025-10-02 12:18:28.719 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:28.719 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap5b886deb-a0, col_values=(('external_ids', {'iface-id': '444f6470-b3a4-44de-9f71-88b373acc28c'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:18:28 compute-0 ovn_controller[94336]: 2025-10-02T12:18:28Z|00312|binding|INFO|Releasing lport 444f6470-b3a4-44de-9f71-88b373acc28c from this chassis (sb_readonly=0)
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:28.722 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/5b886deb-ac8b-4d5e-a6d4-b19699c6ae92.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/5b886deb-ac8b-4d5e-a6d4-b19699c6ae92.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:28.731 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f70c34fe-f26d-4ff4-863f-28559ecec9f5]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:28 compute-0 nova_compute[192079]: 2025-10-02 12:18:28.732 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:28.733 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-5b886deb-ac8b-4d5e-a6d4-b19699c6ae92
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/5b886deb-ac8b-4d5e-a6d4-b19699c6ae92.pid.haproxy
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 5b886deb-ac8b-4d5e-a6d4-b19699c6ae92
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:18:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:28.733 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-5b886deb-ac8b-4d5e-a6d4-b19699c6ae92', 'env', 'PROCESS_TAG=haproxy-5b886deb-ac8b-4d5e-a6d4-b19699c6ae92', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/5b886deb-ac8b-4d5e-a6d4-b19699c6ae92.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:18:29 compute-0 nova_compute[192079]: 2025-10-02 12:18:29.090 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407509.0902977, 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:18:29 compute-0 nova_compute[192079]: 2025-10-02 12:18:29.091 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] VM Started (Lifecycle Event)
Oct 02 12:18:29 compute-0 nova_compute[192079]: 2025-10-02 12:18:29.126 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:18:29 compute-0 nova_compute[192079]: 2025-10-02 12:18:29.130 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407509.090376, 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:18:29 compute-0 nova_compute[192079]: 2025-10-02 12:18:29.130 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] VM Paused (Lifecycle Event)
Oct 02 12:18:29 compute-0 nova_compute[192079]: 2025-10-02 12:18:29.147 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:18:29 compute-0 nova_compute[192079]: 2025-10-02 12:18:29.150 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:18:29 compute-0 nova_compute[192079]: 2025-10-02 12:18:29.169 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:18:29 compute-0 podman[234063]: 2025-10-02 12:18:29.195418227 +0000 UTC m=+0.072760715 container create 9f813e066ce69f637ddb9a585634d2d118790e81170673df01e933bfe694d66f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-5b886deb-ac8b-4d5e-a6d4-b19699c6ae92, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3)
Oct 02 12:18:29 compute-0 nova_compute[192079]: 2025-10-02 12:18:29.223 2 DEBUG nova.network.neutron [req-f196b5f5-ca21-47ef-945d-927927ae0c11 req-45014e58-6505-427a-8dcb-9c6ae3b09b77 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Updated VIF entry in instance network info cache for port b44f12b2-8936-4a7b-815a-12f5996325c7. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:18:29 compute-0 nova_compute[192079]: 2025-10-02 12:18:29.224 2 DEBUG nova.network.neutron [req-f196b5f5-ca21-47ef-945d-927927ae0c11 req-45014e58-6505-427a-8dcb-9c6ae3b09b77 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Updating instance_info_cache with network_info: [{"id": "b44f12b2-8936-4a7b-815a-12f5996325c7", "address": "fa:16:3e:14:7c:1c", "network": {"id": "5b886deb-ac8b-4d5e-a6d4-b19699c6ae92", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1740420896-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "49c6a5f4c4c84d7ba686d98befbc981a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb44f12b2-89", "ovs_interfaceid": "b44f12b2-8936-4a7b-815a-12f5996325c7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:18:29 compute-0 nova_compute[192079]: 2025-10-02 12:18:29.236 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 19251892-5108-4594-94b2-8779316aac1b] Updating instance_info_cache with network_info: [{"id": "fe949a9a-bb0c-4664-8f69-767387ac0552", "address": "fa:16:3e:c0:2b:76", "network": {"id": "e895cece-6b67-405e-b05d-5b86ddbf8385", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-117197461-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fa03c570c52a4c2a9445090389d03c6d", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapfe949a9a-bb", "ovs_interfaceid": "fe949a9a-bb0c-4664-8f69-767387ac0552", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:18:29 compute-0 systemd[1]: Started libpod-conmon-9f813e066ce69f637ddb9a585634d2d118790e81170673df01e933bfe694d66f.scope.
Oct 02 12:18:29 compute-0 podman[234063]: 2025-10-02 12:18:29.162585422 +0000 UTC m=+0.039927910 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:18:29 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:18:29 compute-0 nova_compute[192079]: 2025-10-02 12:18:29.277 2 DEBUG oslo_concurrency.lockutils [req-f196b5f5-ca21-47ef-945d-927927ae0c11 req-45014e58-6505-427a-8dcb-9c6ae3b09b77 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-12a1b5e0-e387-4171-8ff0-8d3aeb439ca7" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:18:29 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/f0955c381b1927f88a4fe3957feb9970cd80fbf80cf45f8291c9337486a01791/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:18:29 compute-0 nova_compute[192079]: 2025-10-02 12:18:29.289 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Releasing lock "refresh_cache-19251892-5108-4594-94b2-8779316aac1b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:18:29 compute-0 nova_compute[192079]: 2025-10-02 12:18:29.290 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 19251892-5108-4594-94b2-8779316aac1b] Updated the network info_cache for instance _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9929
Oct 02 12:18:29 compute-0 nova_compute[192079]: 2025-10-02 12:18:29.290 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:18:29 compute-0 podman[234063]: 2025-10-02 12:18:29.293028509 +0000 UTC m=+0.170370967 container init 9f813e066ce69f637ddb9a585634d2d118790e81170673df01e933bfe694d66f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-5b886deb-ac8b-4d5e-a6d4-b19699c6ae92, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 12:18:29 compute-0 podman[234078]: 2025-10-02 12:18:29.29381969 +0000 UTC m=+0.061511398 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, config_id=multipathd, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, container_name=multipathd, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_managed=true)
Oct 02 12:18:29 compute-0 podman[234076]: 2025-10-02 12:18:29.294471709 +0000 UTC m=+0.070292959 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, config_id=edpm, maintainer=Red Hat, Inc., architecture=x86_64, io.openshift.expose-services=, io.openshift.tags=minimal rhel9, com.redhat.component=ubi9-minimal-container, url=https://catalog.redhat.com/en/search?searchType=containers, vendor=Red Hat, Inc., distribution-scope=public, name=ubi9-minimal, build-date=2025-08-20T13:12:41, release=1755695350, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, version=9.6, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. 
This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., vcs-type=git, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., container_name=openstack_network_exporter, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.buildah.version=1.33.7, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, managed_by=edpm_ansible)
Oct 02 12:18:29 compute-0 podman[234063]: 2025-10-02 12:18:29.299439383 +0000 UTC m=+0.176781841 container start 9f813e066ce69f637ddb9a585634d2d118790e81170673df01e933bfe694d66f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-5b886deb-ac8b-4d5e-a6d4-b19699c6ae92, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:18:29 compute-0 neutron-haproxy-ovnmeta-5b886deb-ac8b-4d5e-a6d4-b19699c6ae92[234101]: [NOTICE]   (234120) : New worker (234122) forked
Oct 02 12:18:29 compute-0 neutron-haproxy-ovnmeta-5b886deb-ac8b-4d5e-a6d4-b19699c6ae92[234101]: [NOTICE]   (234120) : Loading success.
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.272 2 DEBUG nova.compute.manager [req-106c5a64-3f20-433a-b1ba-079d0661853f req-9f5c2829-5676-4f5f-8d45-63bb2b30650b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Received event network-vif-plugged-b44f12b2-8936-4a7b-815a-12f5996325c7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.273 2 DEBUG oslo_concurrency.lockutils [req-106c5a64-3f20-433a-b1ba-079d0661853f req-9f5c2829-5676-4f5f-8d45-63bb2b30650b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "12a1b5e0-e387-4171-8ff0-8d3aeb439ca7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.273 2 DEBUG oslo_concurrency.lockutils [req-106c5a64-3f20-433a-b1ba-079d0661853f req-9f5c2829-5676-4f5f-8d45-63bb2b30650b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "12a1b5e0-e387-4171-8ff0-8d3aeb439ca7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.274 2 DEBUG oslo_concurrency.lockutils [req-106c5a64-3f20-433a-b1ba-079d0661853f req-9f5c2829-5676-4f5f-8d45-63bb2b30650b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "12a1b5e0-e387-4171-8ff0-8d3aeb439ca7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.274 2 DEBUG nova.compute.manager [req-106c5a64-3f20-433a-b1ba-079d0661853f req-9f5c2829-5676-4f5f-8d45-63bb2b30650b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Processing event network-vif-plugged-b44f12b2-8936-4a7b-815a-12f5996325c7 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.274 2 DEBUG nova.compute.manager [req-106c5a64-3f20-433a-b1ba-079d0661853f req-9f5c2829-5676-4f5f-8d45-63bb2b30650b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Received event network-vif-plugged-b44f12b2-8936-4a7b-815a-12f5996325c7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.274 2 DEBUG oslo_concurrency.lockutils [req-106c5a64-3f20-433a-b1ba-079d0661853f req-9f5c2829-5676-4f5f-8d45-63bb2b30650b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "12a1b5e0-e387-4171-8ff0-8d3aeb439ca7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.275 2 DEBUG oslo_concurrency.lockutils [req-106c5a64-3f20-433a-b1ba-079d0661853f req-9f5c2829-5676-4f5f-8d45-63bb2b30650b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "12a1b5e0-e387-4171-8ff0-8d3aeb439ca7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.275 2 DEBUG oslo_concurrency.lockutils [req-106c5a64-3f20-433a-b1ba-079d0661853f req-9f5c2829-5676-4f5f-8d45-63bb2b30650b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "12a1b5e0-e387-4171-8ff0-8d3aeb439ca7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.275 2 DEBUG nova.compute.manager [req-106c5a64-3f20-433a-b1ba-079d0661853f req-9f5c2829-5676-4f5f-8d45-63bb2b30650b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] No waiting events found dispatching network-vif-plugged-b44f12b2-8936-4a7b-815a-12f5996325c7 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.275 2 WARNING nova.compute.manager [req-106c5a64-3f20-433a-b1ba-079d0661853f req-9f5c2829-5676-4f5f-8d45-63bb2b30650b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Received unexpected event network-vif-plugged-b44f12b2-8936-4a7b-815a-12f5996325c7 for instance with vm_state building and task_state spawning.
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.276 2 DEBUG nova.compute.manager [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Instance event wait completed in 1 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.281 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407510.2811449, 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.281 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] VM Resumed (Lifecycle Event)
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.283 2 DEBUG nova.virt.libvirt.driver [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.284 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.287 2 INFO nova.virt.libvirt.driver [-] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Instance spawned successfully.
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.287 2 DEBUG nova.virt.libvirt.driver [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.340 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.345 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.348 2 DEBUG nova.virt.libvirt.driver [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.348 2 DEBUG nova.virt.libvirt.driver [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.349 2 DEBUG nova.virt.libvirt.driver [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.349 2 DEBUG nova.virt.libvirt.driver [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.349 2 DEBUG nova.virt.libvirt.driver [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.350 2 DEBUG nova.virt.libvirt.driver [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.414 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.500 2 INFO nova.compute.manager [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Took 9.27 seconds to spawn the instance on the hypervisor.
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.501 2 DEBUG nova.compute.manager [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.680 2 INFO nova.compute.manager [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Took 11.11 seconds to build instance.
Oct 02 12:18:30 compute-0 nova_compute[192079]: 2025-10-02 12:18:30.762 2 DEBUG oslo_concurrency.lockutils [None req-8e065fde-4167-4cc4-96d0-33bdccc52d90 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Lock "12a1b5e0-e387-4171-8ff0-8d3aeb439ca7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 11.307s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:31 compute-0 nova_compute[192079]: 2025-10-02 12:18:31.495 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:31 compute-0 nova_compute[192079]: 2025-10-02 12:18:31.906 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:36 compute-0 podman[234148]: 2025-10-02 12:18:36.149984925 +0000 UTC m=+0.053662074 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:18:36 compute-0 podman[234149]: 2025-10-02 12:18:36.183769306 +0000 UTC m=+0.086752587 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, container_name=iscsid, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=iscsid, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.schema-version=1.0)
Oct 02 12:18:36 compute-0 nova_compute[192079]: 2025-10-02 12:18:36.497 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:36 compute-0 nova_compute[192079]: 2025-10-02 12:18:36.908 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:37 compute-0 ovn_controller[94336]: 2025-10-02T12:18:37Z|00031|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:c0:2b:76 10.100.0.13
Oct 02 12:18:37 compute-0 ovn_controller[94336]: 2025-10-02T12:18:37Z|00032|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:c0:2b:76 10.100.0.13
Oct 02 12:18:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:37.815 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=23, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=22) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:18:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:37.817 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 6 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:18:37 compute-0 nova_compute[192079]: 2025-10-02 12:18:37.818 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.192 2 DEBUG oslo_concurrency.lockutils [None req-5fa84191-4810-4cdb-8d7f-9b4b740bfe36 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Acquiring lock "12a1b5e0-e387-4171-8ff0-8d3aeb439ca7" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.193 2 DEBUG oslo_concurrency.lockutils [None req-5fa84191-4810-4cdb-8d7f-9b4b740bfe36 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Lock "12a1b5e0-e387-4171-8ff0-8d3aeb439ca7" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.193 2 DEBUG oslo_concurrency.lockutils [None req-5fa84191-4810-4cdb-8d7f-9b4b740bfe36 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Acquiring lock "12a1b5e0-e387-4171-8ff0-8d3aeb439ca7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.194 2 DEBUG oslo_concurrency.lockutils [None req-5fa84191-4810-4cdb-8d7f-9b4b740bfe36 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Lock "12a1b5e0-e387-4171-8ff0-8d3aeb439ca7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.195 2 DEBUG oslo_concurrency.lockutils [None req-5fa84191-4810-4cdb-8d7f-9b4b740bfe36 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Lock "12a1b5e0-e387-4171-8ff0-8d3aeb439ca7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.210 2 INFO nova.compute.manager [None req-5fa84191-4810-4cdb-8d7f-9b4b740bfe36 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Terminating instance
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.224 2 DEBUG nova.compute.manager [None req-5fa84191-4810-4cdb-8d7f-9b4b740bfe36 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:18:38 compute-0 kernel: tapb44f12b2-89 (unregistering): left promiscuous mode
Oct 02 12:18:38 compute-0 NetworkManager[51160]: <info>  [1759407518.2431] device (tapb44f12b2-89): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.254 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:38 compute-0 ovn_controller[94336]: 2025-10-02T12:18:38Z|00313|binding|INFO|Releasing lport b44f12b2-8936-4a7b-815a-12f5996325c7 from this chassis (sb_readonly=0)
Oct 02 12:18:38 compute-0 ovn_controller[94336]: 2025-10-02T12:18:38Z|00314|binding|INFO|Setting lport b44f12b2-8936-4a7b-815a-12f5996325c7 down in Southbound
Oct 02 12:18:38 compute-0 ovn_controller[94336]: 2025-10-02T12:18:38Z|00315|binding|INFO|Removing iface tapb44f12b2-89 ovn-installed in OVS
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.257 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:38 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:38.263 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:14:7c:1c 10.100.0.7'], port_security=['fa:16:3e:14:7c:1c 10.100.0.7'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.7/28', 'neutron:device_id': '12a1b5e0-e387-4171-8ff0-8d3aeb439ca7', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-5b886deb-ac8b-4d5e-a6d4-b19699c6ae92', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '49c6a5f4c4c84d7ba686d98befbc981a', 'neutron:revision_number': '4', 'neutron:security_group_ids': '55283a5f-31d5-4a4d-bc9f-4b8e3fc9f6b5', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=2cc3415d-eee4-499b-a06c-93196fe04768, chassis=[], tunnel_key=5, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=b44f12b2-8936-4a7b-815a-12f5996325c7) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:18:38 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:38.264 103294 INFO neutron.agent.ovn.metadata.agent [-] Port b44f12b2-8936-4a7b-815a-12f5996325c7 in datapath 5b886deb-ac8b-4d5e-a6d4-b19699c6ae92 unbound from our chassis
Oct 02 12:18:38 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:38.265 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 5b886deb-ac8b-4d5e-a6d4-b19699c6ae92, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:18:38 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:38.266 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4e3e83ee-ee54-4bda-a4c9-ba19f9b98309]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:38 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:38.267 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-5b886deb-ac8b-4d5e-a6d4-b19699c6ae92 namespace which is not needed anymore
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.268 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:38 compute-0 systemd[1]: machine-qemu\x2d43\x2dinstance\x2d0000005e.scope: Deactivated successfully.
Oct 02 12:18:38 compute-0 systemd[1]: machine-qemu\x2d43\x2dinstance\x2d0000005e.scope: Consumed 8.585s CPU time.
Oct 02 12:18:38 compute-0 systemd-machined[152150]: Machine qemu-43-instance-0000005e terminated.
Oct 02 12:18:38 compute-0 neutron-haproxy-ovnmeta-5b886deb-ac8b-4d5e-a6d4-b19699c6ae92[234101]: [NOTICE]   (234120) : haproxy version is 2.8.14-c23fe91
Oct 02 12:18:38 compute-0 neutron-haproxy-ovnmeta-5b886deb-ac8b-4d5e-a6d4-b19699c6ae92[234101]: [NOTICE]   (234120) : path to executable is /usr/sbin/haproxy
Oct 02 12:18:38 compute-0 neutron-haproxy-ovnmeta-5b886deb-ac8b-4d5e-a6d4-b19699c6ae92[234101]: [WARNING]  (234120) : Exiting Master process...
Oct 02 12:18:38 compute-0 neutron-haproxy-ovnmeta-5b886deb-ac8b-4d5e-a6d4-b19699c6ae92[234101]: [WARNING]  (234120) : Exiting Master process...
Oct 02 12:18:38 compute-0 neutron-haproxy-ovnmeta-5b886deb-ac8b-4d5e-a6d4-b19699c6ae92[234101]: [ALERT]    (234120) : Current worker (234122) exited with code 143 (Terminated)
Oct 02 12:18:38 compute-0 neutron-haproxy-ovnmeta-5b886deb-ac8b-4d5e-a6d4-b19699c6ae92[234101]: [WARNING]  (234120) : All workers exited. Exiting... (0)
Oct 02 12:18:38 compute-0 systemd[1]: libpod-9f813e066ce69f637ddb9a585634d2d118790e81170673df01e933bfe694d66f.scope: Deactivated successfully.
Oct 02 12:18:38 compute-0 conmon[234101]: conmon 9f813e066ce69f637ddb <nwarn>: Failed to open cgroups file: /sys/fs/cgroup/machine.slice/libpod-9f813e066ce69f637ddb9a585634d2d118790e81170673df01e933bfe694d66f.scope/container/memory.events
Oct 02 12:18:38 compute-0 podman[234215]: 2025-10-02 12:18:38.418167156 +0000 UTC m=+0.050662572 container died 9f813e066ce69f637ddb9a585634d2d118790e81170673df01e933bfe694d66f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-5b886deb-ac8b-4d5e-a6d4-b19699c6ae92, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.build-date=20251001)
Oct 02 12:18:38 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-9f813e066ce69f637ddb9a585634d2d118790e81170673df01e933bfe694d66f-userdata-shm.mount: Deactivated successfully.
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.451 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:38 compute-0 systemd[1]: var-lib-containers-storage-overlay-f0955c381b1927f88a4fe3957feb9970cd80fbf80cf45f8291c9337486a01791-merged.mount: Deactivated successfully.
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.453 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:38 compute-0 podman[234215]: 2025-10-02 12:18:38.470937865 +0000 UTC m=+0.103433281 container cleanup 9f813e066ce69f637ddb9a585634d2d118790e81170673df01e933bfe694d66f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-5b886deb-ac8b-4d5e-a6d4-b19699c6ae92, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:18:38 compute-0 systemd[1]: libpod-conmon-9f813e066ce69f637ddb9a585634d2d118790e81170673df01e933bfe694d66f.scope: Deactivated successfully.
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.490 2 INFO nova.virt.libvirt.driver [-] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Instance destroyed successfully.
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.491 2 DEBUG nova.objects.instance [None req-5fa84191-4810-4cdb-8d7f-9b4b740bfe36 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Lazy-loading 'resources' on Instance uuid 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.508 2 DEBUG nova.virt.libvirt.vif [None req-5fa84191-4810-4cdb-8d7f-9b4b740bfe36 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:18:18Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ListServersNegativeTestJSON-server-636246405',display_name='tempest-ListServersNegativeTestJSON-server-636246405-3',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-listserversnegativetestjson-server-636246405-3',id=94,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=2,launched_at=2025-10-02T12:18:30Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='49c6a5f4c4c84d7ba686d98befbc981a',ramdisk_id='',reservation_id='r-pj0lxy21',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='vir
tio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-ListServersNegativeTestJSON-1724341867',owner_user_name='tempest-ListServersNegativeTestJSON-1724341867-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:18:30Z,user_data=None,user_id='836c60c20a0f48dd994c9d659781fc06',uuid=12a1b5e0-e387-4171-8ff0-8d3aeb439ca7,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "b44f12b2-8936-4a7b-815a-12f5996325c7", "address": "fa:16:3e:14:7c:1c", "network": {"id": "5b886deb-ac8b-4d5e-a6d4-b19699c6ae92", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1740420896-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "49c6a5f4c4c84d7ba686d98befbc981a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb44f12b2-89", "ovs_interfaceid": "b44f12b2-8936-4a7b-815a-12f5996325c7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.508 2 DEBUG nova.network.os_vif_util [None req-5fa84191-4810-4cdb-8d7f-9b4b740bfe36 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Converting VIF {"id": "b44f12b2-8936-4a7b-815a-12f5996325c7", "address": "fa:16:3e:14:7c:1c", "network": {"id": "5b886deb-ac8b-4d5e-a6d4-b19699c6ae92", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1740420896-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "49c6a5f4c4c84d7ba686d98befbc981a", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb44f12b2-89", "ovs_interfaceid": "b44f12b2-8936-4a7b-815a-12f5996325c7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.509 2 DEBUG nova.network.os_vif_util [None req-5fa84191-4810-4cdb-8d7f-9b4b740bfe36 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:14:7c:1c,bridge_name='br-int',has_traffic_filtering=True,id=b44f12b2-8936-4a7b-815a-12f5996325c7,network=Network(5b886deb-ac8b-4d5e-a6d4-b19699c6ae92),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapb44f12b2-89') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.509 2 DEBUG os_vif [None req-5fa84191-4810-4cdb-8d7f-9b4b740bfe36 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:14:7c:1c,bridge_name='br-int',has_traffic_filtering=True,id=b44f12b2-8936-4a7b-815a-12f5996325c7,network=Network(5b886deb-ac8b-4d5e-a6d4-b19699c6ae92),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapb44f12b2-89') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.511 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.512 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapb44f12b2-89, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.513 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.515 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.517 2 INFO os_vif [None req-5fa84191-4810-4cdb-8d7f-9b4b740bfe36 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:14:7c:1c,bridge_name='br-int',has_traffic_filtering=True,id=b44f12b2-8936-4a7b-815a-12f5996325c7,network=Network(5b886deb-ac8b-4d5e-a6d4-b19699c6ae92),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapb44f12b2-89')
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.518 2 INFO nova.virt.libvirt.driver [None req-5fa84191-4810-4cdb-8d7f-9b4b740bfe36 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Deleting instance files /var/lib/nova/instances/12a1b5e0-e387-4171-8ff0-8d3aeb439ca7_del
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.518 2 INFO nova.virt.libvirt.driver [None req-5fa84191-4810-4cdb-8d7f-9b4b740bfe36 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Deletion of /var/lib/nova/instances/12a1b5e0-e387-4171-8ff0-8d3aeb439ca7_del complete
Oct 02 12:18:38 compute-0 podman[234261]: 2025-10-02 12:18:38.545714084 +0000 UTC m=+0.047815944 container remove 9f813e066ce69f637ddb9a585634d2d118790e81170673df01e933bfe694d66f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-5b886deb-ac8b-4d5e-a6d4-b19699c6ae92, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:18:38 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:38.551 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[abc9b1b4-d380-478f-9fcf-be5ad5551a69]: (4, ('Thu Oct  2 12:18:38 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-5b886deb-ac8b-4d5e-a6d4-b19699c6ae92 (9f813e066ce69f637ddb9a585634d2d118790e81170673df01e933bfe694d66f)\n9f813e066ce69f637ddb9a585634d2d118790e81170673df01e933bfe694d66f\nThu Oct  2 12:18:38 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-5b886deb-ac8b-4d5e-a6d4-b19699c6ae92 (9f813e066ce69f637ddb9a585634d2d118790e81170673df01e933bfe694d66f)\n9f813e066ce69f637ddb9a585634d2d118790e81170673df01e933bfe694d66f\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:38 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:38.554 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[11906a22-5af8-4720-a266-1e752c2ff17e]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:38 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:38.555 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap5b886deb-a0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.557 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:38 compute-0 kernel: tap5b886deb-a0: left promiscuous mode
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.580 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:38 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:38.583 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[681dd547-8149-456f-b52a-e812f880f11e]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:38 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:38.608 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a010c695-1617-4761-a72a-2be858335b31]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:38 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:38.609 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8abe9be9-cbd1-4bc5-9bc1-52195882619c]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.610 2 INFO nova.compute.manager [None req-5fa84191-4810-4cdb-8d7f-9b4b740bfe36 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Took 0.39 seconds to destroy the instance on the hypervisor.
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.611 2 DEBUG oslo.service.loopingcall [None req-5fa84191-4810-4cdb-8d7f-9b4b740bfe36 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.612 2 DEBUG nova.compute.manager [-] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:18:38 compute-0 nova_compute[192079]: 2025-10-02 12:18:38.612 2 DEBUG nova.network.neutron [-] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:18:38 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:38.626 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7565bcc2-2929-41e5-8c09-ba4254f2e0ea]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 550610, 'reachable_time': 38255, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 234276, 'error': None, 'target': 'ovnmeta-5b886deb-ac8b-4d5e-a6d4-b19699c6ae92', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:38 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:38.628 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-5b886deb-ac8b-4d5e-a6d4-b19699c6ae92 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:18:38 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:38.628 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[dc314809-ef44-4f5b-8820-a74f84ee52ad]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:38 compute-0 systemd[1]: run-netns-ovnmeta\x2d5b886deb\x2dac8b\x2d4d5e\x2da6d4\x2db19699c6ae92.mount: Deactivated successfully.
Oct 02 12:18:39 compute-0 nova_compute[192079]: 2025-10-02 12:18:39.846 2 DEBUG nova.network.neutron [-] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:18:39 compute-0 nova_compute[192079]: 2025-10-02 12:18:39.875 2 INFO nova.compute.manager [-] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Took 1.26 seconds to deallocate network for instance.
Oct 02 12:18:39 compute-0 nova_compute[192079]: 2025-10-02 12:18:39.936 2 DEBUG nova.compute.manager [req-7df906d8-bfc0-41d6-9f25-b7bcefd95fe8 req-fd5115f0-22fb-44eb-a47c-de1fb2d5a791 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Received event network-vif-deleted-b44f12b2-8936-4a7b-815a-12f5996325c7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:18:40 compute-0 nova_compute[192079]: 2025-10-02 12:18:40.005 2 DEBUG oslo_concurrency.lockutils [None req-5fa84191-4810-4cdb-8d7f-9b4b740bfe36 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:40 compute-0 nova_compute[192079]: 2025-10-02 12:18:40.005 2 DEBUG oslo_concurrency.lockutils [None req-5fa84191-4810-4cdb-8d7f-9b4b740bfe36 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:40 compute-0 nova_compute[192079]: 2025-10-02 12:18:40.080 2 DEBUG nova.compute.provider_tree [None req-5fa84191-4810-4cdb-8d7f-9b4b740bfe36 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:18:40 compute-0 nova_compute[192079]: 2025-10-02 12:18:40.095 2 DEBUG nova.scheduler.client.report [None req-5fa84191-4810-4cdb-8d7f-9b4b740bfe36 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:18:40 compute-0 nova_compute[192079]: 2025-10-02 12:18:40.121 2 DEBUG oslo_concurrency.lockutils [None req-5fa84191-4810-4cdb-8d7f-9b4b740bfe36 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.116s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:40 compute-0 nova_compute[192079]: 2025-10-02 12:18:40.166 2 INFO nova.scheduler.client.report [None req-5fa84191-4810-4cdb-8d7f-9b4b740bfe36 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Deleted allocations for instance 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7
Oct 02 12:18:40 compute-0 nova_compute[192079]: 2025-10-02 12:18:40.243 2 DEBUG oslo_concurrency.lockutils [None req-5fa84191-4810-4cdb-8d7f-9b4b740bfe36 836c60c20a0f48dd994c9d659781fc06 49c6a5f4c4c84d7ba686d98befbc981a - - default default] Lock "12a1b5e0-e387-4171-8ff0-8d3aeb439ca7" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 2.051s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:40 compute-0 nova_compute[192079]: 2025-10-02 12:18:40.465 2 DEBUG nova.compute.manager [req-262f72fb-4a05-424f-a54c-182d4586d435 req-f429ed85-a75e-4b3a-829c-db87398c00bc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Received event network-vif-unplugged-b44f12b2-8936-4a7b-815a-12f5996325c7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:18:40 compute-0 nova_compute[192079]: 2025-10-02 12:18:40.466 2 DEBUG oslo_concurrency.lockutils [req-262f72fb-4a05-424f-a54c-182d4586d435 req-f429ed85-a75e-4b3a-829c-db87398c00bc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "12a1b5e0-e387-4171-8ff0-8d3aeb439ca7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:40 compute-0 nova_compute[192079]: 2025-10-02 12:18:40.466 2 DEBUG oslo_concurrency.lockutils [req-262f72fb-4a05-424f-a54c-182d4586d435 req-f429ed85-a75e-4b3a-829c-db87398c00bc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "12a1b5e0-e387-4171-8ff0-8d3aeb439ca7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:40 compute-0 nova_compute[192079]: 2025-10-02 12:18:40.466 2 DEBUG oslo_concurrency.lockutils [req-262f72fb-4a05-424f-a54c-182d4586d435 req-f429ed85-a75e-4b3a-829c-db87398c00bc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "12a1b5e0-e387-4171-8ff0-8d3aeb439ca7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:40 compute-0 nova_compute[192079]: 2025-10-02 12:18:40.466 2 DEBUG nova.compute.manager [req-262f72fb-4a05-424f-a54c-182d4586d435 req-f429ed85-a75e-4b3a-829c-db87398c00bc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] No waiting events found dispatching network-vif-unplugged-b44f12b2-8936-4a7b-815a-12f5996325c7 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:18:40 compute-0 nova_compute[192079]: 2025-10-02 12:18:40.466 2 WARNING nova.compute.manager [req-262f72fb-4a05-424f-a54c-182d4586d435 req-f429ed85-a75e-4b3a-829c-db87398c00bc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Received unexpected event network-vif-unplugged-b44f12b2-8936-4a7b-815a-12f5996325c7 for instance with vm_state deleted and task_state None.
Oct 02 12:18:40 compute-0 nova_compute[192079]: 2025-10-02 12:18:40.467 2 DEBUG nova.compute.manager [req-262f72fb-4a05-424f-a54c-182d4586d435 req-f429ed85-a75e-4b3a-829c-db87398c00bc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Received event network-vif-plugged-b44f12b2-8936-4a7b-815a-12f5996325c7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:18:40 compute-0 nova_compute[192079]: 2025-10-02 12:18:40.467 2 DEBUG oslo_concurrency.lockutils [req-262f72fb-4a05-424f-a54c-182d4586d435 req-f429ed85-a75e-4b3a-829c-db87398c00bc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "12a1b5e0-e387-4171-8ff0-8d3aeb439ca7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:40 compute-0 nova_compute[192079]: 2025-10-02 12:18:40.467 2 DEBUG oslo_concurrency.lockutils [req-262f72fb-4a05-424f-a54c-182d4586d435 req-f429ed85-a75e-4b3a-829c-db87398c00bc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "12a1b5e0-e387-4171-8ff0-8d3aeb439ca7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:40 compute-0 nova_compute[192079]: 2025-10-02 12:18:40.467 2 DEBUG oslo_concurrency.lockutils [req-262f72fb-4a05-424f-a54c-182d4586d435 req-f429ed85-a75e-4b3a-829c-db87398c00bc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "12a1b5e0-e387-4171-8ff0-8d3aeb439ca7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:40 compute-0 nova_compute[192079]: 2025-10-02 12:18:40.467 2 DEBUG nova.compute.manager [req-262f72fb-4a05-424f-a54c-182d4586d435 req-f429ed85-a75e-4b3a-829c-db87398c00bc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] No waiting events found dispatching network-vif-plugged-b44f12b2-8936-4a7b-815a-12f5996325c7 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:18:40 compute-0 nova_compute[192079]: 2025-10-02 12:18:40.468 2 WARNING nova.compute.manager [req-262f72fb-4a05-424f-a54c-182d4586d435 req-f429ed85-a75e-4b3a-829c-db87398c00bc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Received unexpected event network-vif-plugged-b44f12b2-8936-4a7b-815a-12f5996325c7 for instance with vm_state deleted and task_state None.
Oct 02 12:18:41 compute-0 nova_compute[192079]: 2025-10-02 12:18:41.499 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:43 compute-0 nova_compute[192079]: 2025-10-02 12:18:43.516 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:43 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:43.820 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '23'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:18:44 compute-0 ovn_controller[94336]: 2025-10-02T12:18:44Z|00316|binding|INFO|Releasing lport 893d58a9-c253-4923-8cf4-03927d247550 from this chassis (sb_readonly=0)
Oct 02 12:18:44 compute-0 nova_compute[192079]: 2025-10-02 12:18:44.836 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:46 compute-0 podman[234277]: 2025-10-02 12:18:46.138887511 +0000 UTC m=+0.054149357 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, org.label-schema.vendor=CentOS, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:18:46 compute-0 podman[234278]: 2025-10-02 12:18:46.165043055 +0000 UTC m=+0.078064580 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=ovn_controller, container_name=ovn_controller)
Oct 02 12:18:46 compute-0 podman[234279]: 2025-10-02 12:18:46.165202059 +0000 UTC m=+0.075385126 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:18:46 compute-0 nova_compute[192079]: 2025-10-02 12:18:46.502 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:48 compute-0 nova_compute[192079]: 2025-10-02 12:18:48.520 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:50 compute-0 nova_compute[192079]: 2025-10-02 12:18:50.449 2 INFO nova.compute.manager [None req-22fa3812-ca8b-46c4-8e08-df6af54708a0 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Pausing
Oct 02 12:18:50 compute-0 nova_compute[192079]: 2025-10-02 12:18:50.450 2 DEBUG nova.objects.instance [None req-22fa3812-ca8b-46c4-8e08-df6af54708a0 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Lazy-loading 'flavor' on Instance uuid 19251892-5108-4594-94b2-8779316aac1b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:18:50 compute-0 nova_compute[192079]: 2025-10-02 12:18:50.801 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407530.8011749, 19251892-5108-4594-94b2-8779316aac1b => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:18:50 compute-0 nova_compute[192079]: 2025-10-02 12:18:50.803 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 19251892-5108-4594-94b2-8779316aac1b] VM Paused (Lifecycle Event)
Oct 02 12:18:50 compute-0 nova_compute[192079]: 2025-10-02 12:18:50.805 2 DEBUG nova.compute.manager [None req-22fa3812-ca8b-46c4-8e08-df6af54708a0 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:18:50 compute-0 nova_compute[192079]: 2025-10-02 12:18:50.843 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 19251892-5108-4594-94b2-8779316aac1b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:18:50 compute-0 nova_compute[192079]: 2025-10-02 12:18:50.846 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 19251892-5108-4594-94b2-8779316aac1b] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: active, current task_state: pausing, current DB power_state: 1, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:18:50 compute-0 nova_compute[192079]: 2025-10-02 12:18:50.999 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 19251892-5108-4594-94b2-8779316aac1b] During sync_power_state the instance has a pending task (pausing). Skip.
Oct 02 12:18:51 compute-0 nova_compute[192079]: 2025-10-02 12:18:51.505 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:53 compute-0 nova_compute[192079]: 2025-10-02 12:18:53.463 2 INFO nova.compute.manager [None req-a8e00c64-f616-4f04-a9ef-d01c9ef7ad42 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Unpausing
Oct 02 12:18:53 compute-0 nova_compute[192079]: 2025-10-02 12:18:53.464 2 DEBUG nova.objects.instance [None req-a8e00c64-f616-4f04-a9ef-d01c9ef7ad42 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Lazy-loading 'flavor' on Instance uuid 19251892-5108-4594-94b2-8779316aac1b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:18:53 compute-0 nova_compute[192079]: 2025-10-02 12:18:53.488 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759407518.4870818, 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:18:53 compute-0 nova_compute[192079]: 2025-10-02 12:18:53.488 2 INFO nova.compute.manager [-] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] VM Stopped (Lifecycle Event)
Oct 02 12:18:53 compute-0 nova_compute[192079]: 2025-10-02 12:18:53.497 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407533.4975393, 19251892-5108-4594-94b2-8779316aac1b => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:18:53 compute-0 nova_compute[192079]: 2025-10-02 12:18:53.498 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 19251892-5108-4594-94b2-8779316aac1b] VM Resumed (Lifecycle Event)
Oct 02 12:18:53 compute-0 virtqemud[191807]: argument unsupported: QEMU guest agent is not configured
Oct 02 12:18:53 compute-0 nova_compute[192079]: 2025-10-02 12:18:53.503 2 DEBUG nova.virt.libvirt.guest [None req-a8e00c64-f616-4f04-a9ef-d01c9ef7ad42 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Failed to set time: agent not configured sync_guest_time /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:200
Oct 02 12:18:53 compute-0 nova_compute[192079]: 2025-10-02 12:18:53.505 2 DEBUG nova.compute.manager [None req-a8e00c64-f616-4f04-a9ef-d01c9ef7ad42 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:18:53 compute-0 nova_compute[192079]: 2025-10-02 12:18:53.523 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:53 compute-0 nova_compute[192079]: 2025-10-02 12:18:53.547 2 DEBUG nova.compute.manager [None req-2263be0a-a0e9-4071-9c7e-c1bdd7cb4ed2 - - - - - -] [instance: 12a1b5e0-e387-4171-8ff0-8d3aeb439ca7] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:18:53 compute-0 nova_compute[192079]: 2025-10-02 12:18:53.572 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 19251892-5108-4594-94b2-8779316aac1b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:18:53 compute-0 nova_compute[192079]: 2025-10-02 12:18:53.575 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 19251892-5108-4594-94b2-8779316aac1b] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: paused, current task_state: unpausing, current DB power_state: 3, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:18:54 compute-0 podman[234345]: 2025-10-02 12:18:54.150756458 +0000 UTC m=+0.067460720 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, config_id=edpm, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, 
tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3)
Oct 02 12:18:56 compute-0 nova_compute[192079]: 2025-10-02 12:18:56.507 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:58 compute-0 nova_compute[192079]: 2025-10-02 12:18:58.526 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:58 compute-0 nova_compute[192079]: 2025-10-02 12:18:58.621 2 DEBUG oslo_concurrency.lockutils [None req-f114efcb-a9db-4c5b-a211-83f1736bd829 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Acquiring lock "19251892-5108-4594-94b2-8779316aac1b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:58 compute-0 nova_compute[192079]: 2025-10-02 12:18:58.622 2 DEBUG oslo_concurrency.lockutils [None req-f114efcb-a9db-4c5b-a211-83f1736bd829 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Lock "19251892-5108-4594-94b2-8779316aac1b" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:58 compute-0 nova_compute[192079]: 2025-10-02 12:18:58.622 2 DEBUG oslo_concurrency.lockutils [None req-f114efcb-a9db-4c5b-a211-83f1736bd829 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Acquiring lock "19251892-5108-4594-94b2-8779316aac1b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:58 compute-0 nova_compute[192079]: 2025-10-02 12:18:58.622 2 DEBUG oslo_concurrency.lockutils [None req-f114efcb-a9db-4c5b-a211-83f1736bd829 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Lock "19251892-5108-4594-94b2-8779316aac1b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:58 compute-0 nova_compute[192079]: 2025-10-02 12:18:58.622 2 DEBUG oslo_concurrency.lockutils [None req-f114efcb-a9db-4c5b-a211-83f1736bd829 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Lock "19251892-5108-4594-94b2-8779316aac1b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:58 compute-0 nova_compute[192079]: 2025-10-02 12:18:58.636 2 INFO nova.compute.manager [None req-f114efcb-a9db-4c5b-a211-83f1736bd829 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Terminating instance
Oct 02 12:18:58 compute-0 nova_compute[192079]: 2025-10-02 12:18:58.647 2 DEBUG nova.compute.manager [None req-f114efcb-a9db-4c5b-a211-83f1736bd829 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:18:58 compute-0 kernel: tapfe949a9a-bb (unregistering): left promiscuous mode
Oct 02 12:18:58 compute-0 NetworkManager[51160]: <info>  [1759407538.6685] device (tapfe949a9a-bb): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:18:58 compute-0 ovn_controller[94336]: 2025-10-02T12:18:58Z|00317|binding|INFO|Releasing lport fe949a9a-bb0c-4664-8f69-767387ac0552 from this chassis (sb_readonly=0)
Oct 02 12:18:58 compute-0 ovn_controller[94336]: 2025-10-02T12:18:58Z|00318|binding|INFO|Setting lport fe949a9a-bb0c-4664-8f69-767387ac0552 down in Southbound
Oct 02 12:18:58 compute-0 nova_compute[192079]: 2025-10-02 12:18:58.675 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:58 compute-0 ovn_controller[94336]: 2025-10-02T12:18:58Z|00319|binding|INFO|Removing iface tapfe949a9a-bb ovn-installed in OVS
Oct 02 12:18:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:58.684 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:c0:2b:76 10.100.0.13'], port_security=['fa:16:3e:c0:2b:76 10.100.0.13'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.13/28', 'neutron:device_id': '19251892-5108-4594-94b2-8779316aac1b', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-e895cece-6b67-405e-b05d-5b86ddbf8385', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fa03c570c52a4c2a9445090389d03c6d', 'neutron:revision_number': '4', 'neutron:security_group_ids': '86713f8f-e4ad-44d5-8c6e-92e3b3c5f67c', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=42f687d5-26a0-4ae5-91cd-f49120fff442, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=fe949a9a-bb0c-4664-8f69-767387ac0552) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:18:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:58.686 103294 INFO neutron.agent.ovn.metadata.agent [-] Port fe949a9a-bb0c-4664-8f69-767387ac0552 in datapath e895cece-6b67-405e-b05d-5b86ddbf8385 unbound from our chassis
Oct 02 12:18:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:58.687 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network e895cece-6b67-405e-b05d-5b86ddbf8385, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:18:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:58.688 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[28b2a833-8cc2-4519-9063-8ec605135b0c]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:58.689 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-e895cece-6b67-405e-b05d-5b86ddbf8385 namespace which is not needed anymore
Oct 02 12:18:58 compute-0 nova_compute[192079]: 2025-10-02 12:18:58.691 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:58 compute-0 systemd[1]: machine-qemu\x2d42\x2dinstance\x2d0000005a.scope: Deactivated successfully.
Oct 02 12:18:58 compute-0 systemd[1]: machine-qemu\x2d42\x2dinstance\x2d0000005a.scope: Consumed 14.337s CPU time.
Oct 02 12:18:58 compute-0 systemd-machined[152150]: Machine qemu-42-instance-0000005a terminated.
Oct 02 12:18:58 compute-0 neutron-haproxy-ovnmeta-e895cece-6b67-405e-b05d-5b86ddbf8385[233935]: [NOTICE]   (233958) : haproxy version is 2.8.14-c23fe91
Oct 02 12:18:58 compute-0 neutron-haproxy-ovnmeta-e895cece-6b67-405e-b05d-5b86ddbf8385[233935]: [NOTICE]   (233958) : path to executable is /usr/sbin/haproxy
Oct 02 12:18:58 compute-0 neutron-haproxy-ovnmeta-e895cece-6b67-405e-b05d-5b86ddbf8385[233935]: [WARNING]  (233958) : Exiting Master process...
Oct 02 12:18:58 compute-0 neutron-haproxy-ovnmeta-e895cece-6b67-405e-b05d-5b86ddbf8385[233935]: [WARNING]  (233958) : Exiting Master process...
Oct 02 12:18:58 compute-0 neutron-haproxy-ovnmeta-e895cece-6b67-405e-b05d-5b86ddbf8385[233935]: [ALERT]    (233958) : Current worker (233960) exited with code 143 (Terminated)
Oct 02 12:18:58 compute-0 neutron-haproxy-ovnmeta-e895cece-6b67-405e-b05d-5b86ddbf8385[233935]: [WARNING]  (233958) : All workers exited. Exiting... (0)
Oct 02 12:18:58 compute-0 systemd[1]: libpod-176db12ef7c818d4a751772c7dfb5fd3f0378430393a635f3935e277a2751e76.scope: Deactivated successfully.
Oct 02 12:18:58 compute-0 podman[234389]: 2025-10-02 12:18:58.866958208 +0000 UTC m=+0.071864561 container died 176db12ef7c818d4a751772c7dfb5fd3f0378430393a635f3935e277a2751e76 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-e895cece-6b67-405e-b05d-5b86ddbf8385, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS)
Oct 02 12:18:58 compute-0 nova_compute[192079]: 2025-10-02 12:18:58.916 2 INFO nova.virt.libvirt.driver [-] [instance: 19251892-5108-4594-94b2-8779316aac1b] Instance destroyed successfully.
Oct 02 12:18:58 compute-0 nova_compute[192079]: 2025-10-02 12:18:58.917 2 DEBUG nova.objects.instance [None req-f114efcb-a9db-4c5b-a211-83f1736bd829 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Lazy-loading 'resources' on Instance uuid 19251892-5108-4594-94b2-8779316aac1b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:18:58 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-176db12ef7c818d4a751772c7dfb5fd3f0378430393a635f3935e277a2751e76-userdata-shm.mount: Deactivated successfully.
Oct 02 12:18:58 compute-0 systemd[1]: var-lib-containers-storage-overlay-0cbb0246dfd002cba1dcd1e76a2e4d8a9b105389b19813e08f176bd1f2fa392c-merged.mount: Deactivated successfully.
Oct 02 12:18:58 compute-0 podman[234389]: 2025-10-02 12:18:58.943568506 +0000 UTC m=+0.148474829 container cleanup 176db12ef7c818d4a751772c7dfb5fd3f0378430393a635f3935e277a2751e76 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-e895cece-6b67-405e-b05d-5b86ddbf8385, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.vendor=CentOS, tcib_managed=true, io.buildah.version=1.41.3)
Oct 02 12:18:58 compute-0 nova_compute[192079]: 2025-10-02 12:18:58.950 2 DEBUG nova.virt.libvirt.vif [None req-f114efcb-a9db-4c5b-a211-83f1736bd829 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:18:15Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerRescueNegativeTestJSON-server-624951240',display_name='tempest-ServerRescueNegativeTestJSON-server-624951240',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverrescuenegativetestjson-server-624951240',id=90,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:18:23Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='fa03c570c52a4c2a9445090389d03c6d',ramdisk_id='',reservation_id='r-enwnk660',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virt
io',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-ServerRescueNegativeTestJSON-1968496116',owner_user_name='tempest-ServerRescueNegativeTestJSON-1968496116-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:18:53Z,user_data=None,user_id='8c91fa3e559044609ddabc81368d7546',uuid=19251892-5108-4594-94b2-8779316aac1b,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "fe949a9a-bb0c-4664-8f69-767387ac0552", "address": "fa:16:3e:c0:2b:76", "network": {"id": "e895cece-6b67-405e-b05d-5b86ddbf8385", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-117197461-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fa03c570c52a4c2a9445090389d03c6d", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapfe949a9a-bb", "ovs_interfaceid": "fe949a9a-bb0c-4664-8f69-767387ac0552", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:18:58 compute-0 nova_compute[192079]: 2025-10-02 12:18:58.950 2 DEBUG nova.network.os_vif_util [None req-f114efcb-a9db-4c5b-a211-83f1736bd829 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Converting VIF {"id": "fe949a9a-bb0c-4664-8f69-767387ac0552", "address": "fa:16:3e:c0:2b:76", "network": {"id": "e895cece-6b67-405e-b05d-5b86ddbf8385", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-117197461-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fa03c570c52a4c2a9445090389d03c6d", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapfe949a9a-bb", "ovs_interfaceid": "fe949a9a-bb0c-4664-8f69-767387ac0552", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:18:58 compute-0 nova_compute[192079]: 2025-10-02 12:18:58.951 2 DEBUG nova.network.os_vif_util [None req-f114efcb-a9db-4c5b-a211-83f1736bd829 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:c0:2b:76,bridge_name='br-int',has_traffic_filtering=True,id=fe949a9a-bb0c-4664-8f69-767387ac0552,network=Network(e895cece-6b67-405e-b05d-5b86ddbf8385),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapfe949a9a-bb') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:18:58 compute-0 systemd[1]: libpod-conmon-176db12ef7c818d4a751772c7dfb5fd3f0378430393a635f3935e277a2751e76.scope: Deactivated successfully.
Oct 02 12:18:58 compute-0 nova_compute[192079]: 2025-10-02 12:18:58.951 2 DEBUG os_vif [None req-f114efcb-a9db-4c5b-a211-83f1736bd829 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:c0:2b:76,bridge_name='br-int',has_traffic_filtering=True,id=fe949a9a-bb0c-4664-8f69-767387ac0552,network=Network(e895cece-6b67-405e-b05d-5b86ddbf8385),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapfe949a9a-bb') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:18:58 compute-0 nova_compute[192079]: 2025-10-02 12:18:58.953 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:58 compute-0 nova_compute[192079]: 2025-10-02 12:18:58.953 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapfe949a9a-bb, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:18:58 compute-0 nova_compute[192079]: 2025-10-02 12:18:58.954 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:58 compute-0 nova_compute[192079]: 2025-10-02 12:18:58.955 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:58 compute-0 nova_compute[192079]: 2025-10-02 12:18:58.957 2 INFO os_vif [None req-f114efcb-a9db-4c5b-a211-83f1736bd829 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:c0:2b:76,bridge_name='br-int',has_traffic_filtering=True,id=fe949a9a-bb0c-4664-8f69-767387ac0552,network=Network(e895cece-6b67-405e-b05d-5b86ddbf8385),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapfe949a9a-bb')
Oct 02 12:18:58 compute-0 nova_compute[192079]: 2025-10-02 12:18:58.958 2 INFO nova.virt.libvirt.driver [None req-f114efcb-a9db-4c5b-a211-83f1736bd829 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Deleting instance files /var/lib/nova/instances/19251892-5108-4594-94b2-8779316aac1b_del
Oct 02 12:18:58 compute-0 nova_compute[192079]: 2025-10-02 12:18:58.958 2 INFO nova.virt.libvirt.driver [None req-f114efcb-a9db-4c5b-a211-83f1736bd829 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Deletion of /var/lib/nova/instances/19251892-5108-4594-94b2-8779316aac1b_del complete
Oct 02 12:18:59 compute-0 podman[234435]: 2025-10-02 12:18:59.030504357 +0000 UTC m=+0.065301502 container remove 176db12ef7c818d4a751772c7dfb5fd3f0378430393a635f3935e277a2751e76 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-e895cece-6b67-405e-b05d-5b86ddbf8385, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:18:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:59.035 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7aee51af-850e-424d-8c34-72f22f28d035]: (4, ('Thu Oct  2 12:18:58 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-e895cece-6b67-405e-b05d-5b86ddbf8385 (176db12ef7c818d4a751772c7dfb5fd3f0378430393a635f3935e277a2751e76)\n176db12ef7c818d4a751772c7dfb5fd3f0378430393a635f3935e277a2751e76\nThu Oct  2 12:18:58 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-e895cece-6b67-405e-b05d-5b86ddbf8385 (176db12ef7c818d4a751772c7dfb5fd3f0378430393a635f3935e277a2751e76)\n176db12ef7c818d4a751772c7dfb5fd3f0378430393a635f3935e277a2751e76\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:59.036 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[12838b0b-5426-4126-8a18-aa01658b8347]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:59.037 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tape895cece-60, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:18:59 compute-0 kernel: tape895cece-60: left promiscuous mode
Oct 02 12:18:59 compute-0 nova_compute[192079]: 2025-10-02 12:18:59.045 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:59.052 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[33856538-a5b1-4dae-a88f-694a32d0415d]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:59 compute-0 nova_compute[192079]: 2025-10-02 12:18:59.054 2 INFO nova.compute.manager [None req-f114efcb-a9db-4c5b-a211-83f1736bd829 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Took 0.41 seconds to destroy the instance on the hypervisor.
Oct 02 12:18:59 compute-0 nova_compute[192079]: 2025-10-02 12:18:59.054 2 DEBUG oslo.service.loopingcall [None req-f114efcb-a9db-4c5b-a211-83f1736bd829 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:18:59 compute-0 nova_compute[192079]: 2025-10-02 12:18:59.055 2 DEBUG nova.compute.manager [-] [instance: 19251892-5108-4594-94b2-8779316aac1b] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:18:59 compute-0 nova_compute[192079]: 2025-10-02 12:18:59.055 2 DEBUG nova.network.neutron [-] [instance: 19251892-5108-4594-94b2-8779316aac1b] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:18:59 compute-0 nova_compute[192079]: 2025-10-02 12:18:59.062 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:18:59 compute-0 nova_compute[192079]: 2025-10-02 12:18:59.065 2 DEBUG nova.compute.manager [req-f303fdf6-f26c-4280-b8ce-a27aaa2c0d77 req-c71da070-4124-4319-9168-56ce6eba3a1c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Received event network-vif-unplugged-fe949a9a-bb0c-4664-8f69-767387ac0552 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:18:59 compute-0 nova_compute[192079]: 2025-10-02 12:18:59.066 2 DEBUG oslo_concurrency.lockutils [req-f303fdf6-f26c-4280-b8ce-a27aaa2c0d77 req-c71da070-4124-4319-9168-56ce6eba3a1c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "19251892-5108-4594-94b2-8779316aac1b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:59 compute-0 nova_compute[192079]: 2025-10-02 12:18:59.066 2 DEBUG oslo_concurrency.lockutils [req-f303fdf6-f26c-4280-b8ce-a27aaa2c0d77 req-c71da070-4124-4319-9168-56ce6eba3a1c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "19251892-5108-4594-94b2-8779316aac1b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:59 compute-0 nova_compute[192079]: 2025-10-02 12:18:59.066 2 DEBUG oslo_concurrency.lockutils [req-f303fdf6-f26c-4280-b8ce-a27aaa2c0d77 req-c71da070-4124-4319-9168-56ce6eba3a1c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "19251892-5108-4594-94b2-8779316aac1b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:18:59 compute-0 nova_compute[192079]: 2025-10-02 12:18:59.067 2 DEBUG nova.compute.manager [req-f303fdf6-f26c-4280-b8ce-a27aaa2c0d77 req-c71da070-4124-4319-9168-56ce6eba3a1c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] No waiting events found dispatching network-vif-unplugged-fe949a9a-bb0c-4664-8f69-767387ac0552 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:18:59 compute-0 nova_compute[192079]: 2025-10-02 12:18:59.067 2 DEBUG nova.compute.manager [req-f303fdf6-f26c-4280-b8ce-a27aaa2c0d77 req-c71da070-4124-4319-9168-56ce6eba3a1c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Received event network-vif-unplugged-fe949a9a-bb0c-4664-8f69-767387ac0552 for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:18:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:59.093 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2cb04df0-6459-49a9-8657-8fb2137ba08a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:59.095 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[28f9d940-a6ec-4421-8c0b-0f3a724d4fbe]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:59.111 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d261ef6d-ed09-49a7-92c9-aca64a204956]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 550034, 'reachable_time': 31163, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 234450, 'error': None, 'target': 'ovnmeta-e895cece-6b67-405e-b05d-5b86ddbf8385', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:59.114 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-e895cece-6b67-405e-b05d-5b86ddbf8385 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:18:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:18:59.114 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[39274cb9-7ab9-4ff8-b523-b0fc305bf100]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:18:59 compute-0 systemd[1]: run-netns-ovnmeta\x2de895cece\x2d6b67\x2d405e\x2db05d\x2d5b86ddbf8385.mount: Deactivated successfully.
Oct 02 12:18:59 compute-0 nova_compute[192079]: 2025-10-02 12:18:59.817 2 DEBUG nova.network.neutron [-] [instance: 19251892-5108-4594-94b2-8779316aac1b] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:18:59 compute-0 nova_compute[192079]: 2025-10-02 12:18:59.837 2 INFO nova.compute.manager [-] [instance: 19251892-5108-4594-94b2-8779316aac1b] Took 0.78 seconds to deallocate network for instance.
Oct 02 12:18:59 compute-0 nova_compute[192079]: 2025-10-02 12:18:59.926 2 DEBUG nova.compute.manager [req-f558e5f4-e619-432b-a5cf-6c801875dd15 req-22ecb0ce-40cf-4f65-91b0-f57d2d08f733 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Received event network-vif-deleted-fe949a9a-bb0c-4664-8f69-767387ac0552 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:18:59 compute-0 nova_compute[192079]: 2025-10-02 12:18:59.945 2 DEBUG oslo_concurrency.lockutils [None req-f114efcb-a9db-4c5b-a211-83f1736bd829 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:18:59 compute-0 nova_compute[192079]: 2025-10-02 12:18:59.946 2 DEBUG oslo_concurrency.lockutils [None req-f114efcb-a9db-4c5b-a211-83f1736bd829 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:18:59 compute-0 nova_compute[192079]: 2025-10-02 12:18:59.996 2 DEBUG nova.compute.provider_tree [None req-f114efcb-a9db-4c5b-a211-83f1736bd829 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:19:00 compute-0 nova_compute[192079]: 2025-10-02 12:19:00.014 2 DEBUG nova.scheduler.client.report [None req-f114efcb-a9db-4c5b-a211-83f1736bd829 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:19:00 compute-0 nova_compute[192079]: 2025-10-02 12:19:00.033 2 DEBUG oslo_concurrency.lockutils [None req-f114efcb-a9db-4c5b-a211-83f1736bd829 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.087s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:00 compute-0 nova_compute[192079]: 2025-10-02 12:19:00.066 2 INFO nova.scheduler.client.report [None req-f114efcb-a9db-4c5b-a211-83f1736bd829 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Deleted allocations for instance 19251892-5108-4594-94b2-8779316aac1b
Oct 02 12:19:00 compute-0 podman[234451]: 2025-10-02 12:19:00.136609606 +0000 UTC m=+0.056057240 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, release=1755695350, architecture=x86_64, managed_by=edpm_ansible, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, vendor=Red Hat, Inc., io.openshift.expose-services=, url=https://catalog.redhat.com/en/search?searchType=containers, distribution-scope=public, build-date=2025-08-20T13:12:41, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, maintainer=Red Hat, Inc., io.buildah.version=1.33.7, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., vcs-type=git, io.openshift.tags=minimal rhel9, container_name=openstack_network_exporter, com.redhat.component=ubi9-minimal-container, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., name=ubi9-minimal, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., version=9.6, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']})
Oct 02 12:19:00 compute-0 nova_compute[192079]: 2025-10-02 12:19:00.149 2 DEBUG oslo_concurrency.lockutils [None req-f114efcb-a9db-4c5b-a211-83f1736bd829 8c91fa3e559044609ddabc81368d7546 fa03c570c52a4c2a9445090389d03c6d - - default default] Lock "19251892-5108-4594-94b2-8779316aac1b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.528s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:00 compute-0 podman[234452]: 2025-10-02 12:19:00.164021693 +0000 UTC m=+0.079087087 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=multipathd, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, tcib_managed=true, config_id=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.build-date=20251001)
Oct 02 12:19:01 compute-0 nova_compute[192079]: 2025-10-02 12:19:01.295 2 DEBUG nova.compute.manager [req-aab07147-8086-4557-a536-d81fd7847cb7 req-569c9c9f-8cd8-4c0a-b959-053387f28c22 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Received event network-vif-plugged-fe949a9a-bb0c-4664-8f69-767387ac0552 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:19:01 compute-0 nova_compute[192079]: 2025-10-02 12:19:01.295 2 DEBUG oslo_concurrency.lockutils [req-aab07147-8086-4557-a536-d81fd7847cb7 req-569c9c9f-8cd8-4c0a-b959-053387f28c22 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "19251892-5108-4594-94b2-8779316aac1b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:01 compute-0 nova_compute[192079]: 2025-10-02 12:19:01.296 2 DEBUG oslo_concurrency.lockutils [req-aab07147-8086-4557-a536-d81fd7847cb7 req-569c9c9f-8cd8-4c0a-b959-053387f28c22 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "19251892-5108-4594-94b2-8779316aac1b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:01 compute-0 nova_compute[192079]: 2025-10-02 12:19:01.296 2 DEBUG oslo_concurrency.lockutils [req-aab07147-8086-4557-a536-d81fd7847cb7 req-569c9c9f-8cd8-4c0a-b959-053387f28c22 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "19251892-5108-4594-94b2-8779316aac1b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:01 compute-0 nova_compute[192079]: 2025-10-02 12:19:01.296 2 DEBUG nova.compute.manager [req-aab07147-8086-4557-a536-d81fd7847cb7 req-569c9c9f-8cd8-4c0a-b959-053387f28c22 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] No waiting events found dispatching network-vif-plugged-fe949a9a-bb0c-4664-8f69-767387ac0552 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:19:01 compute-0 nova_compute[192079]: 2025-10-02 12:19:01.296 2 WARNING nova.compute.manager [req-aab07147-8086-4557-a536-d81fd7847cb7 req-569c9c9f-8cd8-4c0a-b959-053387f28c22 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 19251892-5108-4594-94b2-8779316aac1b] Received unexpected event network-vif-plugged-fe949a9a-bb0c-4664-8f69-767387ac0552 for instance with vm_state deleted and task_state None.
Oct 02 12:19:01 compute-0 nova_compute[192079]: 2025-10-02 12:19:01.509 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:02.218 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:02.218 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:02.219 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:03 compute-0 nova_compute[192079]: 2025-10-02 12:19:03.945 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:03 compute-0 nova_compute[192079]: 2025-10-02 12:19:03.954 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:06 compute-0 nova_compute[192079]: 2025-10-02 12:19:06.510 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:07 compute-0 podman[234489]: 2025-10-02 12:19:07.136031148 +0000 UTC m=+0.050742454 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>)
Oct 02 12:19:07 compute-0 podman[234490]: 2025-10-02 12:19:07.142918525 +0000 UTC m=+0.053230712 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, org.label-schema.schema-version=1.0, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=iscsid, container_name=iscsid, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2)
Oct 02 12:19:08 compute-0 nova_compute[192079]: 2025-10-02 12:19:08.958 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:11 compute-0 nova_compute[192079]: 2025-10-02 12:19:11.512 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:11 compute-0 nova_compute[192079]: 2025-10-02 12:19:11.833 2 DEBUG oslo_concurrency.lockutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Acquiring lock "6059254e-5c4b-4d87-991a-cf72fab61216" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:11 compute-0 nova_compute[192079]: 2025-10-02 12:19:11.834 2 DEBUG oslo_concurrency.lockutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Lock "6059254e-5c4b-4d87-991a-cf72fab61216" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:11 compute-0 nova_compute[192079]: 2025-10-02 12:19:11.987 2 DEBUG nova.compute.manager [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:19:12 compute-0 nova_compute[192079]: 2025-10-02 12:19:12.451 2 DEBUG oslo_concurrency.lockutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:12 compute-0 nova_compute[192079]: 2025-10-02 12:19:12.452 2 DEBUG oslo_concurrency.lockutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:12 compute-0 nova_compute[192079]: 2025-10-02 12:19:12.458 2 DEBUG nova.virt.hardware [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:19:12 compute-0 nova_compute[192079]: 2025-10-02 12:19:12.459 2 INFO nova.compute.claims [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:19:12 compute-0 nova_compute[192079]: 2025-10-02 12:19:12.593 2 DEBUG nova.compute.provider_tree [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:19:12 compute-0 nova_compute[192079]: 2025-10-02 12:19:12.609 2 DEBUG nova.scheduler.client.report [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:19:12 compute-0 nova_compute[192079]: 2025-10-02 12:19:12.634 2 DEBUG oslo_concurrency.lockutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.183s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:12 compute-0 nova_compute[192079]: 2025-10-02 12:19:12.635 2 DEBUG nova.compute.manager [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:19:12 compute-0 nova_compute[192079]: 2025-10-02 12:19:12.706 2 DEBUG nova.compute.manager [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:19:12 compute-0 nova_compute[192079]: 2025-10-02 12:19:12.706 2 DEBUG nova.network.neutron [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:19:12 compute-0 nova_compute[192079]: 2025-10-02 12:19:12.727 2 INFO nova.virt.libvirt.driver [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:19:12 compute-0 nova_compute[192079]: 2025-10-02 12:19:12.746 2 DEBUG nova.compute.manager [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:19:12 compute-0 nova_compute[192079]: 2025-10-02 12:19:12.862 2 DEBUG nova.compute.manager [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:19:12 compute-0 nova_compute[192079]: 2025-10-02 12:19:12.865 2 DEBUG nova.virt.libvirt.driver [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:19:12 compute-0 nova_compute[192079]: 2025-10-02 12:19:12.866 2 INFO nova.virt.libvirt.driver [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Creating image(s)
Oct 02 12:19:12 compute-0 nova_compute[192079]: 2025-10-02 12:19:12.867 2 DEBUG oslo_concurrency.lockutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Acquiring lock "/var/lib/nova/instances/6059254e-5c4b-4d87-991a-cf72fab61216/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:12 compute-0 nova_compute[192079]: 2025-10-02 12:19:12.867 2 DEBUG oslo_concurrency.lockutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Lock "/var/lib/nova/instances/6059254e-5c4b-4d87-991a-cf72fab61216/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:12 compute-0 nova_compute[192079]: 2025-10-02 12:19:12.868 2 DEBUG oslo_concurrency.lockutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Lock "/var/lib/nova/instances/6059254e-5c4b-4d87-991a-cf72fab61216/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:12 compute-0 nova_compute[192079]: 2025-10-02 12:19:12.881 2 DEBUG oslo_concurrency.processutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:19:12 compute-0 nova_compute[192079]: 2025-10-02 12:19:12.952 2 DEBUG oslo_concurrency.processutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.071s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:19:12 compute-0 nova_compute[192079]: 2025-10-02 12:19:12.953 2 DEBUG oslo_concurrency.lockutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:12 compute-0 nova_compute[192079]: 2025-10-02 12:19:12.954 2 DEBUG oslo_concurrency.lockutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:12 compute-0 nova_compute[192079]: 2025-10-02 12:19:12.969 2 DEBUG oslo_concurrency.processutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:19:13 compute-0 nova_compute[192079]: 2025-10-02 12:19:13.034 2 DEBUG oslo_concurrency.processutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.065s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:19:13 compute-0 nova_compute[192079]: 2025-10-02 12:19:13.035 2 DEBUG oslo_concurrency.processutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/6059254e-5c4b-4d87-991a-cf72fab61216/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:19:13 compute-0 nova_compute[192079]: 2025-10-02 12:19:13.344 2 DEBUG nova.policy [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1793e510b24c43859b967a36edab096a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '48b51f55b7294d64bf3395e17ff310a8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:19:13 compute-0 nova_compute[192079]: 2025-10-02 12:19:13.519 2 DEBUG oslo_concurrency.processutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/6059254e-5c4b-4d87-991a-cf72fab61216/disk 1073741824" returned: 0 in 0.484s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:19:13 compute-0 nova_compute[192079]: 2025-10-02 12:19:13.521 2 DEBUG oslo_concurrency.lockutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.567s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:13 compute-0 nova_compute[192079]: 2025-10-02 12:19:13.522 2 DEBUG oslo_concurrency.processutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:19:13 compute-0 nova_compute[192079]: 2025-10-02 12:19:13.617 2 DEBUG oslo_concurrency.processutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.096s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:19:13 compute-0 nova_compute[192079]: 2025-10-02 12:19:13.619 2 DEBUG nova.virt.disk.api [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Checking if we can resize image /var/lib/nova/instances/6059254e-5c4b-4d87-991a-cf72fab61216/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:19:13 compute-0 nova_compute[192079]: 2025-10-02 12:19:13.619 2 DEBUG oslo_concurrency.processutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/6059254e-5c4b-4d87-991a-cf72fab61216/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:19:13 compute-0 nova_compute[192079]: 2025-10-02 12:19:13.677 2 DEBUG oslo_concurrency.processutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/6059254e-5c4b-4d87-991a-cf72fab61216/disk --force-share --output=json" returned: 0 in 0.058s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:19:13 compute-0 nova_compute[192079]: 2025-10-02 12:19:13.678 2 DEBUG nova.virt.disk.api [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Cannot resize image /var/lib/nova/instances/6059254e-5c4b-4d87-991a-cf72fab61216/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:19:13 compute-0 nova_compute[192079]: 2025-10-02 12:19:13.678 2 DEBUG nova.objects.instance [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Lazy-loading 'migration_context' on Instance uuid 6059254e-5c4b-4d87-991a-cf72fab61216 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:19:13 compute-0 nova_compute[192079]: 2025-10-02 12:19:13.702 2 DEBUG nova.virt.libvirt.driver [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:19:13 compute-0 nova_compute[192079]: 2025-10-02 12:19:13.702 2 DEBUG nova.virt.libvirt.driver [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Ensure instance console log exists: /var/lib/nova/instances/6059254e-5c4b-4d87-991a-cf72fab61216/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:19:13 compute-0 nova_compute[192079]: 2025-10-02 12:19:13.702 2 DEBUG oslo_concurrency.lockutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:13 compute-0 nova_compute[192079]: 2025-10-02 12:19:13.703 2 DEBUG oslo_concurrency.lockutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:13 compute-0 nova_compute[192079]: 2025-10-02 12:19:13.703 2 DEBUG oslo_concurrency.lockutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:13 compute-0 nova_compute[192079]: 2025-10-02 12:19:13.914 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759407538.914119, 19251892-5108-4594-94b2-8779316aac1b => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:19:13 compute-0 nova_compute[192079]: 2025-10-02 12:19:13.915 2 INFO nova.compute.manager [-] [instance: 19251892-5108-4594-94b2-8779316aac1b] VM Stopped (Lifecycle Event)
Oct 02 12:19:13 compute-0 nova_compute[192079]: 2025-10-02 12:19:13.943 2 DEBUG nova.compute.manager [None req-a76dc415-ab4c-48b6-9ca6-c1eaf5bcf84e - - - - - -] [instance: 19251892-5108-4594-94b2-8779316aac1b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:19:13 compute-0 nova_compute[192079]: 2025-10-02 12:19:13.962 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:14 compute-0 nova_compute[192079]: 2025-10-02 12:19:14.985 2 DEBUG nova.network.neutron [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Successfully created port: d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:19:15 compute-0 nova_compute[192079]: 2025-10-02 12:19:15.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_incomplete_migrations run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:19:15 compute-0 nova_compute[192079]: 2025-10-02 12:19:15.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Cleaning up deleted instances with incomplete migration  _cleanup_incomplete_migrations /usr/lib/python3.9/site-packages/nova/compute/manager.py:11183
Oct 02 12:19:16 compute-0 nova_compute[192079]: 2025-10-02 12:19:16.392 2 DEBUG nova.network.neutron [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Successfully updated port: d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:19:16 compute-0 nova_compute[192079]: 2025-10-02 12:19:16.408 2 DEBUG oslo_concurrency.lockutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Acquiring lock "refresh_cache-6059254e-5c4b-4d87-991a-cf72fab61216" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:19:16 compute-0 nova_compute[192079]: 2025-10-02 12:19:16.408 2 DEBUG oslo_concurrency.lockutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Acquired lock "refresh_cache-6059254e-5c4b-4d87-991a-cf72fab61216" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:19:16 compute-0 nova_compute[192079]: 2025-10-02 12:19:16.409 2 DEBUG nova.network.neutron [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:19:16 compute-0 nova_compute[192079]: 2025-10-02 12:19:16.514 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:16 compute-0 nova_compute[192079]: 2025-10-02 12:19:16.625 2 DEBUG nova.network.neutron [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:19:16 compute-0 nova_compute[192079]: 2025-10-02 12:19:16.964 2 DEBUG nova.compute.manager [req-c57d1c4a-35fd-4b08-878d-e5a4a945c967 req-9b4e49a4-59a5-479f-8443-df33e9c96fc2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Received event network-changed-d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:19:16 compute-0 nova_compute[192079]: 2025-10-02 12:19:16.965 2 DEBUG nova.compute.manager [req-c57d1c4a-35fd-4b08-878d-e5a4a945c967 req-9b4e49a4-59a5-479f-8443-df33e9c96fc2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Refreshing instance network info cache due to event network-changed-d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:19:16 compute-0 nova_compute[192079]: 2025-10-02 12:19:16.966 2 DEBUG oslo_concurrency.lockutils [req-c57d1c4a-35fd-4b08-878d-e5a4a945c967 req-9b4e49a4-59a5-479f-8443-df33e9c96fc2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-6059254e-5c4b-4d87-991a-cf72fab61216" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:19:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:19:17.102 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.capacity, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:19:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:19:17.103 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:19:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:19:17.103 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:19:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:19:17.103 12 DEBUG ceilometer.polling.manager [-] Skip pollster memory.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:19:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:19:17.103 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:19:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:19:17.103 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:19:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:19:17.103 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:19:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:19:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:19:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:19:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:19:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:19:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:19:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:19:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:19:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:19:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:19:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:19:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:19:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:19:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.iops, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:19:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:19:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:19:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:19:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:19:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:19:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.allocation, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:19:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:19:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:19:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:19:17.105 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:19:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:19:17.105 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:19:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:19:17.105 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:19:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:19:17.105 12 DEBUG ceilometer.polling.manager [-] Skip pollster cpu, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:19:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:19:17.105 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:19:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:19:17.105 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:19:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:19:17.105 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:19:17 compute-0 podman[234548]: 2025-10-02 12:19:17.145250667 +0000 UTC m=+0.058727282 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, org.label-schema.build-date=20251001)
Oct 02 12:19:17 compute-0 podman[234550]: 2025-10-02 12:19:17.16885644 +0000 UTC m=+0.059759599 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:19:17 compute-0 podman[234549]: 2025-10-02 12:19:17.184371884 +0000 UTC m=+0.089948623 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, container_name=ovn_controller, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, config_id=ovn_controller, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:19:17 compute-0 nova_compute[192079]: 2025-10-02 12:19:17.991 2 DEBUG nova.network.neutron [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Updating instance_info_cache with network_info: [{"id": "d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4", "address": "fa:16:3e:49:2a:3a", "network": {"id": "a8ec7a12-2cd0-4482-af67-574169d36973", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-2001340159-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "48b51f55b7294d64bf3395e17ff310a8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd04f9a8b-2c", "ovs_interfaceid": "d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.023 2 DEBUG oslo_concurrency.lockutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Releasing lock "refresh_cache-6059254e-5c4b-4d87-991a-cf72fab61216" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.023 2 DEBUG nova.compute.manager [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Instance network_info: |[{"id": "d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4", "address": "fa:16:3e:49:2a:3a", "network": {"id": "a8ec7a12-2cd0-4482-af67-574169d36973", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-2001340159-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "48b51f55b7294d64bf3395e17ff310a8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd04f9a8b-2c", "ovs_interfaceid": "d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.023 2 DEBUG oslo_concurrency.lockutils [req-c57d1c4a-35fd-4b08-878d-e5a4a945c967 req-9b4e49a4-59a5-479f-8443-df33e9c96fc2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-6059254e-5c4b-4d87-991a-cf72fab61216" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.024 2 DEBUG nova.network.neutron [req-c57d1c4a-35fd-4b08-878d-e5a4a945c967 req-9b4e49a4-59a5-479f-8443-df33e9c96fc2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Refreshing network info cache for port d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.027 2 DEBUG nova.virt.libvirt.driver [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Start _get_guest_xml network_info=[{"id": "d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4", "address": "fa:16:3e:49:2a:3a", "network": {"id": "a8ec7a12-2cd0-4482-af67-574169d36973", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-2001340159-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "48b51f55b7294d64bf3395e17ff310a8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd04f9a8b-2c", "ovs_interfaceid": "d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.031 2 WARNING nova.virt.libvirt.driver [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.037 2 DEBUG nova.virt.libvirt.host [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.037 2 DEBUG nova.virt.libvirt.host [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.042 2 DEBUG nova.virt.libvirt.host [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.043 2 DEBUG nova.virt.libvirt.host [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.044 2 DEBUG nova.virt.libvirt.driver [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.044 2 DEBUG nova.virt.hardware [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.044 2 DEBUG nova.virt.hardware [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.045 2 DEBUG nova.virt.hardware [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.045 2 DEBUG nova.virt.hardware [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.045 2 DEBUG nova.virt.hardware [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.045 2 DEBUG nova.virt.hardware [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.046 2 DEBUG nova.virt.hardware [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.046 2 DEBUG nova.virt.hardware [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.046 2 DEBUG nova.virt.hardware [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.046 2 DEBUG nova.virt.hardware [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.046 2 DEBUG nova.virt.hardware [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.050 2 DEBUG nova.virt.libvirt.vif [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:19:10Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ServerPasswordTestJSON-server-1400810981',display_name='tempest-ServerPasswordTestJSON-server-1400810981',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverpasswordtestjson-server-1400810981',id=96,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='48b51f55b7294d64bf3395e17ff310a8',ramdisk_id='',reservation_id='r-nnyvkhk2',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServerPasswordTestJSON-807356368',owner_user_name='tempest-ServerPasswordTestJSON-8
07356368-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:19:12Z,user_data=None,user_id='1793e510b24c43859b967a36edab096a',uuid=6059254e-5c4b-4d87-991a-cf72fab61216,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4", "address": "fa:16:3e:49:2a:3a", "network": {"id": "a8ec7a12-2cd0-4482-af67-574169d36973", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-2001340159-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "48b51f55b7294d64bf3395e17ff310a8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd04f9a8b-2c", "ovs_interfaceid": "d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.050 2 DEBUG nova.network.os_vif_util [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Converting VIF {"id": "d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4", "address": "fa:16:3e:49:2a:3a", "network": {"id": "a8ec7a12-2cd0-4482-af67-574169d36973", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-2001340159-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "48b51f55b7294d64bf3395e17ff310a8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd04f9a8b-2c", "ovs_interfaceid": "d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.051 2 DEBUG nova.network.os_vif_util [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:49:2a:3a,bridge_name='br-int',has_traffic_filtering=True,id=d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4,network=Network(a8ec7a12-2cd0-4482-af67-574169d36973),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd04f9a8b-2c') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.052 2 DEBUG nova.objects.instance [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Lazy-loading 'pci_devices' on Instance uuid 6059254e-5c4b-4d87-991a-cf72fab61216 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.071 2 DEBUG nova.virt.libvirt.driver [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:19:18 compute-0 nova_compute[192079]:   <uuid>6059254e-5c4b-4d87-991a-cf72fab61216</uuid>
Oct 02 12:19:18 compute-0 nova_compute[192079]:   <name>instance-00000060</name>
Oct 02 12:19:18 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:19:18 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:19:18 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:19:18 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:       <nova:name>tempest-ServerPasswordTestJSON-server-1400810981</nova:name>
Oct 02 12:19:18 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:19:18</nova:creationTime>
Oct 02 12:19:18 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:19:18 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:19:18 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:19:18 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:19:18 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:19:18 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:19:18 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:19:18 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:19:18 compute-0 nova_compute[192079]:         <nova:user uuid="1793e510b24c43859b967a36edab096a">tempest-ServerPasswordTestJSON-807356368-project-member</nova:user>
Oct 02 12:19:18 compute-0 nova_compute[192079]:         <nova:project uuid="48b51f55b7294d64bf3395e17ff310a8">tempest-ServerPasswordTestJSON-807356368</nova:project>
Oct 02 12:19:18 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:19:18 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:19:18 compute-0 nova_compute[192079]:         <nova:port uuid="d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4">
Oct 02 12:19:18 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.11" ipVersion="4"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:19:18 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:19:18 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:19:18 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <system>
Oct 02 12:19:18 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:19:18 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:19:18 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:19:18 compute-0 nova_compute[192079]:       <entry name="serial">6059254e-5c4b-4d87-991a-cf72fab61216</entry>
Oct 02 12:19:18 compute-0 nova_compute[192079]:       <entry name="uuid">6059254e-5c4b-4d87-991a-cf72fab61216</entry>
Oct 02 12:19:18 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     </system>
Oct 02 12:19:18 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:19:18 compute-0 nova_compute[192079]:   <os>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:   </os>
Oct 02 12:19:18 compute-0 nova_compute[192079]:   <features>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:   </features>
Oct 02 12:19:18 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:19:18 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:19:18 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:19:18 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/6059254e-5c4b-4d87-991a-cf72fab61216/disk"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:19:18 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/6059254e-5c4b-4d87-991a-cf72fab61216/disk.config"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:19:18 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:49:2a:3a"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:       <target dev="tapd04f9a8b-2c"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:19:18 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/6059254e-5c4b-4d87-991a-cf72fab61216/console.log" append="off"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <video>
Oct 02 12:19:18 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     </video>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:19:18 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:19:18 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:19:18 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:19:18 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:19:18 compute-0 nova_compute[192079]: </domain>
Oct 02 12:19:18 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.072 2 DEBUG nova.compute.manager [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Preparing to wait for external event network-vif-plugged-d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.073 2 DEBUG oslo_concurrency.lockutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Acquiring lock "6059254e-5c4b-4d87-991a-cf72fab61216-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.073 2 DEBUG oslo_concurrency.lockutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Lock "6059254e-5c4b-4d87-991a-cf72fab61216-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.073 2 DEBUG oslo_concurrency.lockutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Lock "6059254e-5c4b-4d87-991a-cf72fab61216-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.074 2 DEBUG nova.virt.libvirt.vif [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:19:10Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ServerPasswordTestJSON-server-1400810981',display_name='tempest-ServerPasswordTestJSON-server-1400810981',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverpasswordtestjson-server-1400810981',id=96,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='48b51f55b7294d64bf3395e17ff310a8',ramdisk_id='',reservation_id='r-nnyvkhk2',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServerPasswordTestJSON-807356368',owner_user_name='tempest-ServerPassword
TestJSON-807356368-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:19:12Z,user_data=None,user_id='1793e510b24c43859b967a36edab096a',uuid=6059254e-5c4b-4d87-991a-cf72fab61216,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4", "address": "fa:16:3e:49:2a:3a", "network": {"id": "a8ec7a12-2cd0-4482-af67-574169d36973", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-2001340159-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "48b51f55b7294d64bf3395e17ff310a8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd04f9a8b-2c", "ovs_interfaceid": "d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.074 2 DEBUG nova.network.os_vif_util [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Converting VIF {"id": "d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4", "address": "fa:16:3e:49:2a:3a", "network": {"id": "a8ec7a12-2cd0-4482-af67-574169d36973", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-2001340159-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "48b51f55b7294d64bf3395e17ff310a8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd04f9a8b-2c", "ovs_interfaceid": "d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.075 2 DEBUG nova.network.os_vif_util [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:49:2a:3a,bridge_name='br-int',has_traffic_filtering=True,id=d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4,network=Network(a8ec7a12-2cd0-4482-af67-574169d36973),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd04f9a8b-2c') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.075 2 DEBUG os_vif [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:49:2a:3a,bridge_name='br-int',has_traffic_filtering=True,id=d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4,network=Network(a8ec7a12-2cd0-4482-af67-574169d36973),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd04f9a8b-2c') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.076 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.076 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.076 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.079 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.079 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapd04f9a8b-2c, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.079 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapd04f9a8b-2c, col_values=(('external_ids', {'iface-id': 'd04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:49:2a:3a', 'vm-uuid': '6059254e-5c4b-4d87-991a-cf72fab61216'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.081 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:18 compute-0 NetworkManager[51160]: <info>  [1759407558.0822] manager: (tapd04f9a8b-2c): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/161)
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.083 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.089 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.090 2 INFO os_vif [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:49:2a:3a,bridge_name='br-int',has_traffic_filtering=True,id=d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4,network=Network(a8ec7a12-2cd0-4482-af67-574169d36973),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd04f9a8b-2c')
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.164 2 DEBUG nova.virt.libvirt.driver [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.166 2 DEBUG nova.virt.libvirt.driver [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.167 2 DEBUG nova.virt.libvirt.driver [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] No VIF found with MAC fa:16:3e:49:2a:3a, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.168 2 INFO nova.virt.libvirt.driver [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Using config drive
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.523 2 INFO nova.virt.libvirt.driver [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Creating config drive at /var/lib/nova/instances/6059254e-5c4b-4d87-991a-cf72fab61216/disk.config
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.533 2 DEBUG oslo_concurrency.processutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/6059254e-5c4b-4d87-991a-cf72fab61216/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp_a7pv0t5 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.663 2 DEBUG oslo_concurrency.processutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/6059254e-5c4b-4d87-991a-cf72fab61216/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp_a7pv0t5" returned: 0 in 0.130s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:19:18 compute-0 kernel: tapd04f9a8b-2c: entered promiscuous mode
Oct 02 12:19:18 compute-0 NetworkManager[51160]: <info>  [1759407558.7357] manager: (tapd04f9a8b-2c): new Tun device (/org/freedesktop/NetworkManager/Devices/162)
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.776 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:18 compute-0 ovn_controller[94336]: 2025-10-02T12:19:18Z|00320|binding|INFO|Claiming lport d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4 for this chassis.
Oct 02 12:19:18 compute-0 ovn_controller[94336]: 2025-10-02T12:19:18Z|00321|binding|INFO|d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4: Claiming fa:16:3e:49:2a:3a 10.100.0.11
Oct 02 12:19:18 compute-0 systemd-udevd[234632]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.779 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.783 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:18.789 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:49:2a:3a 10.100.0.11'], port_security=['fa:16:3e:49:2a:3a 10.100.0.11'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.11/28', 'neutron:device_id': '6059254e-5c4b-4d87-991a-cf72fab61216', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-a8ec7a12-2cd0-4482-af67-574169d36973', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '48b51f55b7294d64bf3395e17ff310a8', 'neutron:revision_number': '2', 'neutron:security_group_ids': '514ef4ae-4aea-4dd5-b0e2-1a2f52985029', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=c84c9884-c04d-4b89-9454-14af6f2e48a6, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:19:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:18.790 103294 INFO neutron.agent.ovn.metadata.agent [-] Port d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4 in datapath a8ec7a12-2cd0-4482-af67-574169d36973 bound to our chassis
Oct 02 12:19:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:18.791 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network a8ec7a12-2cd0-4482-af67-574169d36973
Oct 02 12:19:18 compute-0 NetworkManager[51160]: <info>  [1759407558.8017] device (tapd04f9a8b-2c): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:19:18 compute-0 NetworkManager[51160]: <info>  [1759407558.8024] device (tapd04f9a8b-2c): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:19:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:18.801 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[cbbacf52-7714-4485-9b1a-915ab40e9791]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:18.802 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tapa8ec7a12-21 in ovnmeta-a8ec7a12-2cd0-4482-af67-574169d36973 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:19:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:18.804 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tapa8ec7a12-20 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:19:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:18.804 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[598c3227-36ab-47a1-8140-d7cb3677b243]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:18.805 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[43dfc345-d897-4c88-a7cd-b02d3b63e20c]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:18.814 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[cb1fa0b0-c961-43cb-99b7-0cf251067ab8]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:18 compute-0 systemd-machined[152150]: New machine qemu-44-instance-00000060.
Oct 02 12:19:18 compute-0 ovn_controller[94336]: 2025-10-02T12:19:18Z|00322|binding|INFO|Setting lport d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4 ovn-installed in OVS
Oct 02 12:19:18 compute-0 ovn_controller[94336]: 2025-10-02T12:19:18Z|00323|binding|INFO|Setting lport d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4 up in Southbound
Oct 02 12:19:18 compute-0 systemd[1]: Started Virtual Machine qemu-44-instance-00000060.
Oct 02 12:19:18 compute-0 nova_compute[192079]: 2025-10-02 12:19:18.855 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:18.858 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[96d82a77-c0a7-4769-b841-62a46e13e1c0]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:18.892 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[f57dc594-ff52-438b-af70-692d13701fc6]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:18 compute-0 systemd-udevd[234637]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:19:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:18.899 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d14c3fb2-4709-4baa-894a-28e21af644ef]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:18 compute-0 NetworkManager[51160]: <info>  [1759407558.9004] manager: (tapa8ec7a12-20): new Veth device (/org/freedesktop/NetworkManager/Devices/163)
Oct 02 12:19:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:18.930 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[2bd41e3e-f899-4b17-a66a-044840f7fb2f]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:18.934 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[6bb9afe4-a855-4ad9-b41f-0faa2de6444a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:18 compute-0 NetworkManager[51160]: <info>  [1759407558.9630] device (tapa8ec7a12-20): carrier: link connected
Oct 02 12:19:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:18.966 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[1c575cff-f199-402d-9845-8a0c47569412]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:18.982 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f1b7604c-9e5b-45fe-ac11-ba00db16fd21]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapa8ec7a12-21'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:61:ad:7f'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 104], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 555659, 'reachable_time': 40816, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 234668, 'error': None, 'target': 'ovnmeta-a8ec7a12-2cd0-4482-af67-574169d36973', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:18.997 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[07223107-3d99-47b6-8f47-5e7c5fa6501a]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe61:ad7f'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 555659, 'tstamp': 555659}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 234669, 'error': None, 'target': 'ovnmeta-a8ec7a12-2cd0-4482-af67-574169d36973', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:19.014 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7365a0ba-e133-45dd-b725-720c85608612]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapa8ec7a12-21'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:61:ad:7f'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 104], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 555659, 'reachable_time': 40816, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 234670, 'error': None, 'target': 'ovnmeta-a8ec7a12-2cd0-4482-af67-574169d36973', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:19.050 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6009e65f-85b6-4ed9-bdcd-b77ec7b9fadf]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:19.108 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[28dd191f-10b9-400d-a22f-08d19f61f725]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:19.109 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapa8ec7a12-20, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:19.109 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:19.110 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapa8ec7a12-20, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:19:19 compute-0 nova_compute[192079]: 2025-10-02 12:19:19.111 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:19 compute-0 NetworkManager[51160]: <info>  [1759407559.1132] manager: (tapa8ec7a12-20): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/164)
Oct 02 12:19:19 compute-0 kernel: tapa8ec7a12-20: entered promiscuous mode
Oct 02 12:19:19 compute-0 nova_compute[192079]: 2025-10-02 12:19:19.118 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:19.119 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tapa8ec7a12-20, col_values=(('external_ids', {'iface-id': 'e5482cd2-fe03-4f8c-9729-b806cb28b339'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:19:19 compute-0 nova_compute[192079]: 2025-10-02 12:19:19.120 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:19 compute-0 ovn_controller[94336]: 2025-10-02T12:19:19Z|00324|binding|INFO|Releasing lport e5482cd2-fe03-4f8c-9729-b806cb28b339 from this chassis (sb_readonly=0)
Oct 02 12:19:19 compute-0 nova_compute[192079]: 2025-10-02 12:19:19.145 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:19 compute-0 nova_compute[192079]: 2025-10-02 12:19:19.146 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:19.147 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/a8ec7a12-2cd0-4482-af67-574169d36973.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/a8ec7a12-2cd0-4482-af67-574169d36973.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:19.148 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[52a713e9-5da8-4c65-8446-e514400324a5]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:19.149 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-a8ec7a12-2cd0-4482-af67-574169d36973
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/a8ec7a12-2cd0-4482-af67-574169d36973.pid.haproxy
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID a8ec7a12-2cd0-4482-af67-574169d36973
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:19:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:19.150 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-a8ec7a12-2cd0-4482-af67-574169d36973', 'env', 'PROCESS_TAG=haproxy-a8ec7a12-2cd0-4482-af67-574169d36973', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/a8ec7a12-2cd0-4482-af67-574169d36973.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:19:19 compute-0 nova_compute[192079]: 2025-10-02 12:19:19.427 2 DEBUG nova.compute.manager [req-bab1f84f-f150-4aca-b1c1-ed3048e19b2e req-e2dd4501-5135-4eae-9a32-e949fbbc61c5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Received event network-vif-plugged-d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:19:19 compute-0 nova_compute[192079]: 2025-10-02 12:19:19.428 2 DEBUG oslo_concurrency.lockutils [req-bab1f84f-f150-4aca-b1c1-ed3048e19b2e req-e2dd4501-5135-4eae-9a32-e949fbbc61c5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "6059254e-5c4b-4d87-991a-cf72fab61216-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:19 compute-0 nova_compute[192079]: 2025-10-02 12:19:19.428 2 DEBUG oslo_concurrency.lockutils [req-bab1f84f-f150-4aca-b1c1-ed3048e19b2e req-e2dd4501-5135-4eae-9a32-e949fbbc61c5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6059254e-5c4b-4d87-991a-cf72fab61216-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:19 compute-0 nova_compute[192079]: 2025-10-02 12:19:19.429 2 DEBUG oslo_concurrency.lockutils [req-bab1f84f-f150-4aca-b1c1-ed3048e19b2e req-e2dd4501-5135-4eae-9a32-e949fbbc61c5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6059254e-5c4b-4d87-991a-cf72fab61216-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:19 compute-0 nova_compute[192079]: 2025-10-02 12:19:19.429 2 DEBUG nova.compute.manager [req-bab1f84f-f150-4aca-b1c1-ed3048e19b2e req-e2dd4501-5135-4eae-9a32-e949fbbc61c5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Processing event network-vif-plugged-d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:19:19 compute-0 podman[234702]: 2025-10-02 12:19:19.574860693 +0000 UTC m=+0.068070117 container create aa85080db314c1ee010e780839b3a121d1d862217279d9df39fa97e3601fd48f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a8ec7a12-2cd0-4482-af67-574169d36973, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001)
Oct 02 12:19:19 compute-0 systemd[1]: Started libpod-conmon-aa85080db314c1ee010e780839b3a121d1d862217279d9df39fa97e3601fd48f.scope.
Oct 02 12:19:19 compute-0 podman[234702]: 2025-10-02 12:19:19.537245248 +0000 UTC m=+0.030454751 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:19:19 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:19:19 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/110a8760c5a8b7c743871245305605490ab9c60c6d33114b7d9a9f2efe32b0be/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:19:19 compute-0 podman[234702]: 2025-10-02 12:19:19.680397421 +0000 UTC m=+0.173606914 container init aa85080db314c1ee010e780839b3a121d1d862217279d9df39fa97e3601fd48f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a8ec7a12-2cd0-4482-af67-574169d36973, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3)
Oct 02 12:19:19 compute-0 podman[234702]: 2025-10-02 12:19:19.689838168 +0000 UTC m=+0.183047601 container start aa85080db314c1ee010e780839b3a121d1d862217279d9df39fa97e3601fd48f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a8ec7a12-2cd0-4482-af67-574169d36973, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:19:19 compute-0 neutron-haproxy-ovnmeta-a8ec7a12-2cd0-4482-af67-574169d36973[234717]: [NOTICE]   (234721) : New worker (234724) forked
Oct 02 12:19:19 compute-0 neutron-haproxy-ovnmeta-a8ec7a12-2cd0-4482-af67-574169d36973[234717]: [NOTICE]   (234721) : Loading success.
Oct 02 12:19:19 compute-0 nova_compute[192079]: 2025-10-02 12:19:19.847 2 DEBUG nova.network.neutron [req-c57d1c4a-35fd-4b08-878d-e5a4a945c967 req-9b4e49a4-59a5-479f-8443-df33e9c96fc2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Updated VIF entry in instance network info cache for port d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:19:19 compute-0 nova_compute[192079]: 2025-10-02 12:19:19.848 2 DEBUG nova.network.neutron [req-c57d1c4a-35fd-4b08-878d-e5a4a945c967 req-9b4e49a4-59a5-479f-8443-df33e9c96fc2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Updating instance_info_cache with network_info: [{"id": "d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4", "address": "fa:16:3e:49:2a:3a", "network": {"id": "a8ec7a12-2cd0-4482-af67-574169d36973", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-2001340159-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "48b51f55b7294d64bf3395e17ff310a8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd04f9a8b-2c", "ovs_interfaceid": "d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:19:19 compute-0 nova_compute[192079]: 2025-10-02 12:19:19.868 2 DEBUG oslo_concurrency.lockutils [req-c57d1c4a-35fd-4b08-878d-e5a4a945c967 req-9b4e49a4-59a5-479f-8443-df33e9c96fc2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-6059254e-5c4b-4d87-991a-cf72fab61216" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:19:20 compute-0 nova_compute[192079]: 2025-10-02 12:19:20.184 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407560.1840723, 6059254e-5c4b-4d87-991a-cf72fab61216 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:19:20 compute-0 nova_compute[192079]: 2025-10-02 12:19:20.185 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] VM Started (Lifecycle Event)
Oct 02 12:19:20 compute-0 nova_compute[192079]: 2025-10-02 12:19:20.187 2 DEBUG nova.compute.manager [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:19:20 compute-0 nova_compute[192079]: 2025-10-02 12:19:20.190 2 DEBUG nova.virt.libvirt.driver [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:19:20 compute-0 nova_compute[192079]: 2025-10-02 12:19:20.194 2 INFO nova.virt.libvirt.driver [-] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Instance spawned successfully.
Oct 02 12:19:20 compute-0 nova_compute[192079]: 2025-10-02 12:19:20.194 2 DEBUG nova.virt.libvirt.driver [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:19:20 compute-0 nova_compute[192079]: 2025-10-02 12:19:20.208 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:19:20 compute-0 nova_compute[192079]: 2025-10-02 12:19:20.217 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:19:20 compute-0 nova_compute[192079]: 2025-10-02 12:19:20.224 2 DEBUG nova.virt.libvirt.driver [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:19:20 compute-0 nova_compute[192079]: 2025-10-02 12:19:20.225 2 DEBUG nova.virt.libvirt.driver [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:19:20 compute-0 nova_compute[192079]: 2025-10-02 12:19:20.226 2 DEBUG nova.virt.libvirt.driver [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:19:20 compute-0 nova_compute[192079]: 2025-10-02 12:19:20.227 2 DEBUG nova.virt.libvirt.driver [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:19:20 compute-0 nova_compute[192079]: 2025-10-02 12:19:20.228 2 DEBUG nova.virt.libvirt.driver [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:19:20 compute-0 nova_compute[192079]: 2025-10-02 12:19:20.229 2 DEBUG nova.virt.libvirt.driver [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:19:20 compute-0 nova_compute[192079]: 2025-10-02 12:19:20.241 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:19:20 compute-0 nova_compute[192079]: 2025-10-02 12:19:20.242 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407560.1842165, 6059254e-5c4b-4d87-991a-cf72fab61216 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:19:20 compute-0 nova_compute[192079]: 2025-10-02 12:19:20.243 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] VM Paused (Lifecycle Event)
Oct 02 12:19:20 compute-0 nova_compute[192079]: 2025-10-02 12:19:20.269 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:19:20 compute-0 nova_compute[192079]: 2025-10-02 12:19:20.272 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407560.1893928, 6059254e-5c4b-4d87-991a-cf72fab61216 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:19:20 compute-0 nova_compute[192079]: 2025-10-02 12:19:20.273 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] VM Resumed (Lifecycle Event)
Oct 02 12:19:20 compute-0 nova_compute[192079]: 2025-10-02 12:19:20.307 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:19:20 compute-0 nova_compute[192079]: 2025-10-02 12:19:20.311 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:19:20 compute-0 nova_compute[192079]: 2025-10-02 12:19:20.337 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:19:20 compute-0 nova_compute[192079]: 2025-10-02 12:19:20.363 2 INFO nova.compute.manager [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Took 7.50 seconds to spawn the instance on the hypervisor.
Oct 02 12:19:20 compute-0 nova_compute[192079]: 2025-10-02 12:19:20.364 2 DEBUG nova.compute.manager [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:19:20 compute-0 nova_compute[192079]: 2025-10-02 12:19:20.498 2 INFO nova.compute.manager [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Took 8.28 seconds to build instance.
Oct 02 12:19:20 compute-0 nova_compute[192079]: 2025-10-02 12:19:20.525 2 DEBUG oslo_concurrency.lockutils [None req-8516fe9d-d313-44ef-940b-24e740112bda 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Lock "6059254e-5c4b-4d87-991a-cf72fab61216" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 8.691s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:21 compute-0 nova_compute[192079]: 2025-10-02 12:19:21.515 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:21 compute-0 nova_compute[192079]: 2025-10-02 12:19:21.564 2 DEBUG nova.compute.manager [req-4431a0af-ad1d-42ea-8896-0fbfe6e46086 req-1bb82429-8c8d-482f-9ab3-eea1358df941 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Received event network-vif-plugged-d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:19:21 compute-0 nova_compute[192079]: 2025-10-02 12:19:21.565 2 DEBUG oslo_concurrency.lockutils [req-4431a0af-ad1d-42ea-8896-0fbfe6e46086 req-1bb82429-8c8d-482f-9ab3-eea1358df941 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "6059254e-5c4b-4d87-991a-cf72fab61216-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:21 compute-0 nova_compute[192079]: 2025-10-02 12:19:21.565 2 DEBUG oslo_concurrency.lockutils [req-4431a0af-ad1d-42ea-8896-0fbfe6e46086 req-1bb82429-8c8d-482f-9ab3-eea1358df941 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6059254e-5c4b-4d87-991a-cf72fab61216-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:21 compute-0 nova_compute[192079]: 2025-10-02 12:19:21.566 2 DEBUG oslo_concurrency.lockutils [req-4431a0af-ad1d-42ea-8896-0fbfe6e46086 req-1bb82429-8c8d-482f-9ab3-eea1358df941 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6059254e-5c4b-4d87-991a-cf72fab61216-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:21 compute-0 nova_compute[192079]: 2025-10-02 12:19:21.566 2 DEBUG nova.compute.manager [req-4431a0af-ad1d-42ea-8896-0fbfe6e46086 req-1bb82429-8c8d-482f-9ab3-eea1358df941 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] No waiting events found dispatching network-vif-plugged-d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:19:21 compute-0 nova_compute[192079]: 2025-10-02 12:19:21.567 2 WARNING nova.compute.manager [req-4431a0af-ad1d-42ea-8896-0fbfe6e46086 req-1bb82429-8c8d-482f-9ab3-eea1358df941 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Received unexpected event network-vif-plugged-d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4 for instance with vm_state active and task_state None.
Oct 02 12:19:21 compute-0 nova_compute[192079]: 2025-10-02 12:19:21.689 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:19:22 compute-0 nova_compute[192079]: 2025-10-02 12:19:22.536 2 DEBUG oslo_concurrency.lockutils [None req-31818b9f-eb68-42b6-bbac-f77871cde4dc 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Acquiring lock "6059254e-5c4b-4d87-991a-cf72fab61216" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:22 compute-0 nova_compute[192079]: 2025-10-02 12:19:22.537 2 DEBUG oslo_concurrency.lockutils [None req-31818b9f-eb68-42b6-bbac-f77871cde4dc 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Lock "6059254e-5c4b-4d87-991a-cf72fab61216" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:22 compute-0 nova_compute[192079]: 2025-10-02 12:19:22.537 2 DEBUG oslo_concurrency.lockutils [None req-31818b9f-eb68-42b6-bbac-f77871cde4dc 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Acquiring lock "6059254e-5c4b-4d87-991a-cf72fab61216-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:22 compute-0 nova_compute[192079]: 2025-10-02 12:19:22.538 2 DEBUG oslo_concurrency.lockutils [None req-31818b9f-eb68-42b6-bbac-f77871cde4dc 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Lock "6059254e-5c4b-4d87-991a-cf72fab61216-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:22 compute-0 nova_compute[192079]: 2025-10-02 12:19:22.538 2 DEBUG oslo_concurrency.lockutils [None req-31818b9f-eb68-42b6-bbac-f77871cde4dc 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Lock "6059254e-5c4b-4d87-991a-cf72fab61216-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:22 compute-0 nova_compute[192079]: 2025-10-02 12:19:22.556 2 INFO nova.compute.manager [None req-31818b9f-eb68-42b6-bbac-f77871cde4dc 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Terminating instance
Oct 02 12:19:22 compute-0 nova_compute[192079]: 2025-10-02 12:19:22.568 2 DEBUG nova.compute.manager [None req-31818b9f-eb68-42b6-bbac-f77871cde4dc 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:19:22 compute-0 kernel: tapd04f9a8b-2c (unregistering): left promiscuous mode
Oct 02 12:19:22 compute-0 NetworkManager[51160]: <info>  [1759407562.5939] device (tapd04f9a8b-2c): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:19:22 compute-0 nova_compute[192079]: 2025-10-02 12:19:22.606 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:22 compute-0 ovn_controller[94336]: 2025-10-02T12:19:22Z|00325|binding|INFO|Releasing lport d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4 from this chassis (sb_readonly=0)
Oct 02 12:19:22 compute-0 ovn_controller[94336]: 2025-10-02T12:19:22Z|00326|binding|INFO|Setting lport d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4 down in Southbound
Oct 02 12:19:22 compute-0 ovn_controller[94336]: 2025-10-02T12:19:22Z|00327|binding|INFO|Removing iface tapd04f9a8b-2c ovn-installed in OVS
Oct 02 12:19:22 compute-0 nova_compute[192079]: 2025-10-02 12:19:22.610 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:22.629 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:49:2a:3a 10.100.0.11'], port_security=['fa:16:3e:49:2a:3a 10.100.0.11'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.11/28', 'neutron:device_id': '6059254e-5c4b-4d87-991a-cf72fab61216', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-a8ec7a12-2cd0-4482-af67-574169d36973', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '48b51f55b7294d64bf3395e17ff310a8', 'neutron:revision_number': '4', 'neutron:security_group_ids': '514ef4ae-4aea-4dd5-b0e2-1a2f52985029', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=c84c9884-c04d-4b89-9454-14af6f2e48a6, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:19:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:22.630 103294 INFO neutron.agent.ovn.metadata.agent [-] Port d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4 in datapath a8ec7a12-2cd0-4482-af67-574169d36973 unbound from our chassis
Oct 02 12:19:22 compute-0 nova_compute[192079]: 2025-10-02 12:19:22.631 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:22.631 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network a8ec7a12-2cd0-4482-af67-574169d36973, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:19:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:22.633 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[27b7cb00-93e9-421c-b051-bf6b038e3a92]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:22.633 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-a8ec7a12-2cd0-4482-af67-574169d36973 namespace which is not needed anymore
Oct 02 12:19:22 compute-0 systemd[1]: machine-qemu\x2d44\x2dinstance\x2d00000060.scope: Deactivated successfully.
Oct 02 12:19:22 compute-0 systemd[1]: machine-qemu\x2d44\x2dinstance\x2d00000060.scope: Consumed 3.694s CPU time.
Oct 02 12:19:22 compute-0 systemd-machined[152150]: Machine qemu-44-instance-00000060 terminated.
Oct 02 12:19:22 compute-0 neutron-haproxy-ovnmeta-a8ec7a12-2cd0-4482-af67-574169d36973[234717]: [NOTICE]   (234721) : haproxy version is 2.8.14-c23fe91
Oct 02 12:19:22 compute-0 neutron-haproxy-ovnmeta-a8ec7a12-2cd0-4482-af67-574169d36973[234717]: [NOTICE]   (234721) : path to executable is /usr/sbin/haproxy
Oct 02 12:19:22 compute-0 neutron-haproxy-ovnmeta-a8ec7a12-2cd0-4482-af67-574169d36973[234717]: [WARNING]  (234721) : Exiting Master process...
Oct 02 12:19:22 compute-0 neutron-haproxy-ovnmeta-a8ec7a12-2cd0-4482-af67-574169d36973[234717]: [ALERT]    (234721) : Current worker (234724) exited with code 143 (Terminated)
Oct 02 12:19:22 compute-0 neutron-haproxy-ovnmeta-a8ec7a12-2cd0-4482-af67-574169d36973[234717]: [WARNING]  (234721) : All workers exited. Exiting... (0)
Oct 02 12:19:22 compute-0 systemd[1]: libpod-aa85080db314c1ee010e780839b3a121d1d862217279d9df39fa97e3601fd48f.scope: Deactivated successfully.
Oct 02 12:19:22 compute-0 podman[234763]: 2025-10-02 12:19:22.765002654 +0000 UTC m=+0.047354693 container died aa85080db314c1ee010e780839b3a121d1d862217279d9df39fa97e3601fd48f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a8ec7a12-2cd0-4482-af67-574169d36973, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0)
Oct 02 12:19:22 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-aa85080db314c1ee010e780839b3a121d1d862217279d9df39fa97e3601fd48f-userdata-shm.mount: Deactivated successfully.
Oct 02 12:19:22 compute-0 systemd[1]: var-lib-containers-storage-overlay-110a8760c5a8b7c743871245305605490ab9c60c6d33114b7d9a9f2efe32b0be-merged.mount: Deactivated successfully.
Oct 02 12:19:22 compute-0 podman[234763]: 2025-10-02 12:19:22.802774023 +0000 UTC m=+0.085126062 container cleanup aa85080db314c1ee010e780839b3a121d1d862217279d9df39fa97e3601fd48f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a8ec7a12-2cd0-4482-af67-574169d36973, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS)
Oct 02 12:19:22 compute-0 systemd[1]: libpod-conmon-aa85080db314c1ee010e780839b3a121d1d862217279d9df39fa97e3601fd48f.scope: Deactivated successfully.
Oct 02 12:19:22 compute-0 nova_compute[192079]: 2025-10-02 12:19:22.824 2 INFO nova.virt.libvirt.driver [-] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Instance destroyed successfully.
Oct 02 12:19:22 compute-0 nova_compute[192079]: 2025-10-02 12:19:22.824 2 DEBUG nova.objects.instance [None req-31818b9f-eb68-42b6-bbac-f77871cde4dc 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Lazy-loading 'resources' on Instance uuid 6059254e-5c4b-4d87-991a-cf72fab61216 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:19:22 compute-0 nova_compute[192079]: 2025-10-02 12:19:22.838 2 DEBUG nova.virt.libvirt.vif [None req-31818b9f-eb68-42b6-bbac-f77871cde4dc 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:19:10Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerPasswordTestJSON-server-1400810981',display_name='tempest-ServerPasswordTestJSON-server-1400810981',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverpasswordtestjson-server-1400810981',id=96,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:19:20Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='48b51f55b7294d64bf3395e17ff310a8',ramdisk_id='',reservation_id='r-nnyvkhk2',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_d
isk='1',image_min_ram='0',owner_project_name='tempest-ServerPasswordTestJSON-807356368',owner_user_name='tempest-ServerPasswordTestJSON-807356368-project-member',password_0='',password_1='',password_2='',password_3=''},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:19:22Z,user_data=None,user_id='1793e510b24c43859b967a36edab096a',uuid=6059254e-5c4b-4d87-991a-cf72fab61216,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4", "address": "fa:16:3e:49:2a:3a", "network": {"id": "a8ec7a12-2cd0-4482-af67-574169d36973", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-2001340159-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "48b51f55b7294d64bf3395e17ff310a8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd04f9a8b-2c", "ovs_interfaceid": "d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:19:22 compute-0 nova_compute[192079]: 2025-10-02 12:19:22.838 2 DEBUG nova.network.os_vif_util [None req-31818b9f-eb68-42b6-bbac-f77871cde4dc 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Converting VIF {"id": "d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4", "address": "fa:16:3e:49:2a:3a", "network": {"id": "a8ec7a12-2cd0-4482-af67-574169d36973", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-2001340159-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "48b51f55b7294d64bf3395e17ff310a8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd04f9a8b-2c", "ovs_interfaceid": "d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:19:22 compute-0 nova_compute[192079]: 2025-10-02 12:19:22.839 2 DEBUG nova.network.os_vif_util [None req-31818b9f-eb68-42b6-bbac-f77871cde4dc 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:49:2a:3a,bridge_name='br-int',has_traffic_filtering=True,id=d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4,network=Network(a8ec7a12-2cd0-4482-af67-574169d36973),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd04f9a8b-2c') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:19:22 compute-0 nova_compute[192079]: 2025-10-02 12:19:22.839 2 DEBUG os_vif [None req-31818b9f-eb68-42b6-bbac-f77871cde4dc 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:49:2a:3a,bridge_name='br-int',has_traffic_filtering=True,id=d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4,network=Network(a8ec7a12-2cd0-4482-af67-574169d36973),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd04f9a8b-2c') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:19:22 compute-0 nova_compute[192079]: 2025-10-02 12:19:22.841 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:22 compute-0 nova_compute[192079]: 2025-10-02 12:19:22.841 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapd04f9a8b-2c, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:19:22 compute-0 nova_compute[192079]: 2025-10-02 12:19:22.843 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:22 compute-0 nova_compute[192079]: 2025-10-02 12:19:22.844 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:22 compute-0 nova_compute[192079]: 2025-10-02 12:19:22.846 2 INFO os_vif [None req-31818b9f-eb68-42b6-bbac-f77871cde4dc 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:49:2a:3a,bridge_name='br-int',has_traffic_filtering=True,id=d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4,network=Network(a8ec7a12-2cd0-4482-af67-574169d36973),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd04f9a8b-2c')
Oct 02 12:19:22 compute-0 nova_compute[192079]: 2025-10-02 12:19:22.846 2 INFO nova.virt.libvirt.driver [None req-31818b9f-eb68-42b6-bbac-f77871cde4dc 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Deleting instance files /var/lib/nova/instances/6059254e-5c4b-4d87-991a-cf72fab61216_del
Oct 02 12:19:22 compute-0 nova_compute[192079]: 2025-10-02 12:19:22.847 2 INFO nova.virt.libvirt.driver [None req-31818b9f-eb68-42b6-bbac-f77871cde4dc 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Deletion of /var/lib/nova/instances/6059254e-5c4b-4d87-991a-cf72fab61216_del complete
Oct 02 12:19:22 compute-0 podman[234807]: 2025-10-02 12:19:22.862373968 +0000 UTC m=+0.038140461 container remove aa85080db314c1ee010e780839b3a121d1d862217279d9df39fa97e3601fd48f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a8ec7a12-2cd0-4482-af67-574169d36973, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true)
Oct 02 12:19:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:22.869 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[75a7c761-ccbb-4aae-96c5-1bf597db951a]: (4, ('Thu Oct  2 12:19:22 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-a8ec7a12-2cd0-4482-af67-574169d36973 (aa85080db314c1ee010e780839b3a121d1d862217279d9df39fa97e3601fd48f)\naa85080db314c1ee010e780839b3a121d1d862217279d9df39fa97e3601fd48f\nThu Oct  2 12:19:22 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-a8ec7a12-2cd0-4482-af67-574169d36973 (aa85080db314c1ee010e780839b3a121d1d862217279d9df39fa97e3601fd48f)\naa85080db314c1ee010e780839b3a121d1d862217279d9df39fa97e3601fd48f\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:22.871 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ffd12f66-a361-4aef-87fa-853c5ad138ab]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:22.871 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapa8ec7a12-20, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:19:22 compute-0 nova_compute[192079]: 2025-10-02 12:19:22.873 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:22 compute-0 kernel: tapa8ec7a12-20: left promiscuous mode
Oct 02 12:19:22 compute-0 nova_compute[192079]: 2025-10-02 12:19:22.885 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:22.887 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[33d446a4-23cc-4b39-8f1a-30b93c1687d4]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:22.914 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ecd0e65d-ca6f-4fe1-aec0-8bce7598bbe0]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:22.915 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0cbcb20d-376f-4882-bd66-ebc7933fa29c]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:22 compute-0 nova_compute[192079]: 2025-10-02 12:19:22.918 2 INFO nova.compute.manager [None req-31818b9f-eb68-42b6-bbac-f77871cde4dc 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Took 0.35 seconds to destroy the instance on the hypervisor.
Oct 02 12:19:22 compute-0 nova_compute[192079]: 2025-10-02 12:19:22.918 2 DEBUG oslo.service.loopingcall [None req-31818b9f-eb68-42b6-bbac-f77871cde4dc 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:19:22 compute-0 nova_compute[192079]: 2025-10-02 12:19:22.919 2 DEBUG nova.compute.manager [-] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:19:22 compute-0 nova_compute[192079]: 2025-10-02 12:19:22.919 2 DEBUG nova.network.neutron [-] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:19:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:22.930 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a206d4e8-45ec-492d-aded-102d1469064a]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 555651, 'reachable_time': 33554, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 234823, 'error': None, 'target': 'ovnmeta-a8ec7a12-2cd0-4482-af67-574169d36973', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:22 compute-0 systemd[1]: run-netns-ovnmeta\x2da8ec7a12\x2d2cd0\x2d4482\x2daf67\x2d574169d36973.mount: Deactivated successfully.
Oct 02 12:19:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:22.933 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-a8ec7a12-2cd0-4482-af67-574169d36973 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:19:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:22.933 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[b68a0655-77e6-4dbf-96d7-ce7218dd1548]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:23 compute-0 nova_compute[192079]: 2025-10-02 12:19:23.729 2 DEBUG nova.compute.manager [req-1c0a8d53-f9c2-4569-b956-7ee903d7432a req-9a89b2a9-d235-4593-a332-d82f6d9b1bb7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Received event network-vif-unplugged-d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:19:23 compute-0 nova_compute[192079]: 2025-10-02 12:19:23.730 2 DEBUG oslo_concurrency.lockutils [req-1c0a8d53-f9c2-4569-b956-7ee903d7432a req-9a89b2a9-d235-4593-a332-d82f6d9b1bb7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "6059254e-5c4b-4d87-991a-cf72fab61216-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:23 compute-0 nova_compute[192079]: 2025-10-02 12:19:23.731 2 DEBUG oslo_concurrency.lockutils [req-1c0a8d53-f9c2-4569-b956-7ee903d7432a req-9a89b2a9-d235-4593-a332-d82f6d9b1bb7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6059254e-5c4b-4d87-991a-cf72fab61216-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:23 compute-0 nova_compute[192079]: 2025-10-02 12:19:23.731 2 DEBUG oslo_concurrency.lockutils [req-1c0a8d53-f9c2-4569-b956-7ee903d7432a req-9a89b2a9-d235-4593-a332-d82f6d9b1bb7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6059254e-5c4b-4d87-991a-cf72fab61216-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:23 compute-0 nova_compute[192079]: 2025-10-02 12:19:23.732 2 DEBUG nova.compute.manager [req-1c0a8d53-f9c2-4569-b956-7ee903d7432a req-9a89b2a9-d235-4593-a332-d82f6d9b1bb7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] No waiting events found dispatching network-vif-unplugged-d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:19:23 compute-0 nova_compute[192079]: 2025-10-02 12:19:23.732 2 DEBUG nova.compute.manager [req-1c0a8d53-f9c2-4569-b956-7ee903d7432a req-9a89b2a9-d235-4593-a332-d82f6d9b1bb7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Received event network-vif-unplugged-d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4 for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:19:24 compute-0 nova_compute[192079]: 2025-10-02 12:19:24.042 2 DEBUG nova.network.neutron [-] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:19:24 compute-0 nova_compute[192079]: 2025-10-02 12:19:24.058 2 INFO nova.compute.manager [-] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Took 1.14 seconds to deallocate network for instance.
Oct 02 12:19:24 compute-0 nova_compute[192079]: 2025-10-02 12:19:24.137 2 DEBUG oslo_concurrency.lockutils [None req-31818b9f-eb68-42b6-bbac-f77871cde4dc 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:24 compute-0 nova_compute[192079]: 2025-10-02 12:19:24.137 2 DEBUG oslo_concurrency.lockutils [None req-31818b9f-eb68-42b6-bbac-f77871cde4dc 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:24 compute-0 nova_compute[192079]: 2025-10-02 12:19:24.208 2 DEBUG nova.compute.provider_tree [None req-31818b9f-eb68-42b6-bbac-f77871cde4dc 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:19:24 compute-0 nova_compute[192079]: 2025-10-02 12:19:24.230 2 DEBUG nova.scheduler.client.report [None req-31818b9f-eb68-42b6-bbac-f77871cde4dc 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:19:24 compute-0 nova_compute[192079]: 2025-10-02 12:19:24.255 2 DEBUG oslo_concurrency.lockutils [None req-31818b9f-eb68-42b6-bbac-f77871cde4dc 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.117s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:24 compute-0 nova_compute[192079]: 2025-10-02 12:19:24.274 2 INFO nova.scheduler.client.report [None req-31818b9f-eb68-42b6-bbac-f77871cde4dc 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Deleted allocations for instance 6059254e-5c4b-4d87-991a-cf72fab61216
Oct 02 12:19:24 compute-0 nova_compute[192079]: 2025-10-02 12:19:24.393 2 DEBUG oslo_concurrency.lockutils [None req-31818b9f-eb68-42b6-bbac-f77871cde4dc 1793e510b24c43859b967a36edab096a 48b51f55b7294d64bf3395e17ff310a8 - - default default] Lock "6059254e-5c4b-4d87-991a-cf72fab61216" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.857s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:24 compute-0 nova_compute[192079]: 2025-10-02 12:19:24.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:19:24 compute-0 nova_compute[192079]: 2025-10-02 12:19:24.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:19:24 compute-0 nova_compute[192079]: 2025-10-02 12:19:24.686 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:24 compute-0 nova_compute[192079]: 2025-10-02 12:19:24.687 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:24 compute-0 nova_compute[192079]: 2025-10-02 12:19:24.687 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:24 compute-0 nova_compute[192079]: 2025-10-02 12:19:24.687 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:19:24 compute-0 podman[234825]: 2025-10-02 12:19:24.79004736 +0000 UTC m=+0.058408463 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, container_name=ceilometer_agent_compute, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm, 
org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:19:24 compute-0 nova_compute[192079]: 2025-10-02 12:19:24.846 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:19:24 compute-0 nova_compute[192079]: 2025-10-02 12:19:24.847 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5719MB free_disk=73.34913635253906GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:19:24 compute-0 nova_compute[192079]: 2025-10-02 12:19:24.847 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:24 compute-0 nova_compute[192079]: 2025-10-02 12:19:24.847 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:24 compute-0 nova_compute[192079]: 2025-10-02 12:19:24.894 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:19:24 compute-0 nova_compute[192079]: 2025-10-02 12:19:24.895 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:19:24 compute-0 nova_compute[192079]: 2025-10-02 12:19:24.917 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:19:24 compute-0 nova_compute[192079]: 2025-10-02 12:19:24.938 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:19:24 compute-0 nova_compute[192079]: 2025-10-02 12:19:24.964 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:19:24 compute-0 nova_compute[192079]: 2025-10-02 12:19:24.964 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.117s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:25 compute-0 nova_compute[192079]: 2025-10-02 12:19:25.928 2 DEBUG oslo_concurrency.lockutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Acquiring lock "7f331800-f718-4dc3-b740-1a9574a65fb1" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:25 compute-0 nova_compute[192079]: 2025-10-02 12:19:25.928 2 DEBUG oslo_concurrency.lockutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Lock "7f331800-f718-4dc3-b740-1a9574a65fb1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:25 compute-0 nova_compute[192079]: 2025-10-02 12:19:25.948 2 DEBUG nova.compute.manager [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:19:25 compute-0 nova_compute[192079]: 2025-10-02 12:19:25.964 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.017 2 DEBUG nova.compute.manager [req-744ca1b6-e627-40a5-9623-3246dce0329e req-54512862-e9ae-4c50-810a-72cf2320d713 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Received event network-vif-plugged-d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.017 2 DEBUG oslo_concurrency.lockutils [req-744ca1b6-e627-40a5-9623-3246dce0329e req-54512862-e9ae-4c50-810a-72cf2320d713 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "6059254e-5c4b-4d87-991a-cf72fab61216-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.018 2 DEBUG oslo_concurrency.lockutils [req-744ca1b6-e627-40a5-9623-3246dce0329e req-54512862-e9ae-4c50-810a-72cf2320d713 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6059254e-5c4b-4d87-991a-cf72fab61216-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.018 2 DEBUG oslo_concurrency.lockutils [req-744ca1b6-e627-40a5-9623-3246dce0329e req-54512862-e9ae-4c50-810a-72cf2320d713 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6059254e-5c4b-4d87-991a-cf72fab61216-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.018 2 DEBUG nova.compute.manager [req-744ca1b6-e627-40a5-9623-3246dce0329e req-54512862-e9ae-4c50-810a-72cf2320d713 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] No waiting events found dispatching network-vif-plugged-d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.018 2 WARNING nova.compute.manager [req-744ca1b6-e627-40a5-9623-3246dce0329e req-54512862-e9ae-4c50-810a-72cf2320d713 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Received unexpected event network-vif-plugged-d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4 for instance with vm_state deleted and task_state None.
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.019 2 DEBUG nova.compute.manager [req-744ca1b6-e627-40a5-9623-3246dce0329e req-54512862-e9ae-4c50-810a-72cf2320d713 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Received event network-vif-deleted-d04f9a8b-2c50-4b59-8ca9-8bf1aacdbbc4 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.051 2 DEBUG oslo_concurrency.lockutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.051 2 DEBUG oslo_concurrency.lockutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.056 2 DEBUG nova.virt.hardware [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.056 2 INFO nova.compute.claims [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.181 2 DEBUG nova.compute.provider_tree [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.196 2 DEBUG nova.scheduler.client.report [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.218 2 DEBUG oslo_concurrency.lockutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.167s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.219 2 DEBUG nova.compute.manager [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.290 2 DEBUG nova.compute.manager [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.290 2 DEBUG nova.network.neutron [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.326 2 INFO nova.virt.libvirt.driver [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.349 2 DEBUG nova.compute.manager [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.485 2 DEBUG nova.compute.manager [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.486 2 DEBUG nova.virt.libvirt.driver [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.487 2 INFO nova.virt.libvirt.driver [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Creating image(s)
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.487 2 DEBUG oslo_concurrency.lockutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Acquiring lock "/var/lib/nova/instances/7f331800-f718-4dc3-b740-1a9574a65fb1/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.488 2 DEBUG oslo_concurrency.lockutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Lock "/var/lib/nova/instances/7f331800-f718-4dc3-b740-1a9574a65fb1/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.488 2 DEBUG oslo_concurrency.lockutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Lock "/var/lib/nova/instances/7f331800-f718-4dc3-b740-1a9574a65fb1/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.501 2 DEBUG oslo_concurrency.processutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.521 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.559 2 DEBUG oslo_concurrency.processutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.058s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.559 2 DEBUG oslo_concurrency.lockutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.560 2 DEBUG oslo_concurrency.lockutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.571 2 DEBUG oslo_concurrency.processutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.625 2 DEBUG oslo_concurrency.processutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.626 2 DEBUG oslo_concurrency.processutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/7f331800-f718-4dc3-b740-1a9574a65fb1/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.667 2 DEBUG oslo_concurrency.processutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/7f331800-f718-4dc3-b740-1a9574a65fb1/disk 1073741824" returned: 0 in 0.041s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.668 2 DEBUG oslo_concurrency.lockutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.108s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.668 2 DEBUG oslo_concurrency.processutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.689 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Skipping network cache update for instance because it is Building. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9871
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.690 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.690 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.691 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.691 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.741 2 DEBUG oslo_concurrency.processutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.073s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.741 2 DEBUG nova.virt.disk.api [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Checking if we can resize image /var/lib/nova/instances/7f331800-f718-4dc3-b740-1a9574a65fb1/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.742 2 DEBUG oslo_concurrency.processutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/7f331800-f718-4dc3-b740-1a9574a65fb1/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.800 2 DEBUG oslo_concurrency.processutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/7f331800-f718-4dc3-b740-1a9574a65fb1/disk --force-share --output=json" returned: 0 in 0.058s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.801 2 DEBUG nova.virt.disk.api [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Cannot resize image /var/lib/nova/instances/7f331800-f718-4dc3-b740-1a9574a65fb1/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.801 2 DEBUG nova.objects.instance [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Lazy-loading 'migration_context' on Instance uuid 7f331800-f718-4dc3-b740-1a9574a65fb1 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.819 2 DEBUG nova.virt.libvirt.driver [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.819 2 DEBUG nova.virt.libvirt.driver [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Ensure instance console log exists: /var/lib/nova/instances/7f331800-f718-4dc3-b740-1a9574a65fb1/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.820 2 DEBUG oslo_concurrency.lockutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.820 2 DEBUG oslo_concurrency.lockutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.820 2 DEBUG oslo_concurrency.lockutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:26 compute-0 nova_compute[192079]: 2025-10-02 12:19:26.865 2 DEBUG nova.policy [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af12a0d863d849fc869fc92f700cedde', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '99286619ca844589aacc016f9c8f009c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:19:27 compute-0 nova_compute[192079]: 2025-10-02 12:19:27.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:19:27 compute-0 nova_compute[192079]: 2025-10-02 12:19:27.845 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:28 compute-0 nova_compute[192079]: 2025-10-02 12:19:28.222 2 DEBUG nova.network.neutron [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Successfully created port: f78bfe61-0ef6-4167-a26d-b3ce52b05bba _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:19:28 compute-0 nova_compute[192079]: 2025-10-02 12:19:28.904 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:29 compute-0 nova_compute[192079]: 2025-10-02 12:19:29.030 2 DEBUG nova.network.neutron [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Successfully updated port: f78bfe61-0ef6-4167-a26d-b3ce52b05bba _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:19:29 compute-0 nova_compute[192079]: 2025-10-02 12:19:29.048 2 DEBUG oslo_concurrency.lockutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Acquiring lock "refresh_cache-7f331800-f718-4dc3-b740-1a9574a65fb1" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:19:29 compute-0 nova_compute[192079]: 2025-10-02 12:19:29.048 2 DEBUG oslo_concurrency.lockutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Acquired lock "refresh_cache-7f331800-f718-4dc3-b740-1a9574a65fb1" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:19:29 compute-0 nova_compute[192079]: 2025-10-02 12:19:29.049 2 DEBUG nova.network.neutron [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:19:29 compute-0 nova_compute[192079]: 2025-10-02 12:19:29.229 2 DEBUG nova.compute.manager [req-4e2bbf8c-5d14-4e37-89c1-4bc8af7270eb req-787d5ca8-57f2-4733-9d5d-295302d5bc59 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Received event network-changed-f78bfe61-0ef6-4167-a26d-b3ce52b05bba external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:19:29 compute-0 nova_compute[192079]: 2025-10-02 12:19:29.230 2 DEBUG nova.compute.manager [req-4e2bbf8c-5d14-4e37-89c1-4bc8af7270eb req-787d5ca8-57f2-4733-9d5d-295302d5bc59 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Refreshing instance network info cache due to event network-changed-f78bfe61-0ef6-4167-a26d-b3ce52b05bba. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:19:29 compute-0 nova_compute[192079]: 2025-10-02 12:19:29.230 2 DEBUG oslo_concurrency.lockutils [req-4e2bbf8c-5d14-4e37-89c1-4bc8af7270eb req-787d5ca8-57f2-4733-9d5d-295302d5bc59 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-7f331800-f718-4dc3-b740-1a9574a65fb1" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:19:29 compute-0 nova_compute[192079]: 2025-10-02 12:19:29.268 2 DEBUG nova.network.neutron [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:19:29 compute-0 nova_compute[192079]: 2025-10-02 12:19:29.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:19:30 compute-0 nova_compute[192079]: 2025-10-02 12:19:30.927 2 DEBUG nova.network.neutron [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Updating instance_info_cache with network_info: [{"id": "f78bfe61-0ef6-4167-a26d-b3ce52b05bba", "address": "fa:16:3e:99:3e:89", "network": {"id": "6745e688-adb3-4658-a494-7169101829e9", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-420012573-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "99286619ca844589aacc016f9c8f009c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf78bfe61-0e", "ovs_interfaceid": "f78bfe61-0ef6-4167-a26d-b3ce52b05bba", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:19:30 compute-0 nova_compute[192079]: 2025-10-02 12:19:30.966 2 DEBUG oslo_concurrency.lockutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Releasing lock "refresh_cache-7f331800-f718-4dc3-b740-1a9574a65fb1" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:19:30 compute-0 nova_compute[192079]: 2025-10-02 12:19:30.966 2 DEBUG nova.compute.manager [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Instance network_info: |[{"id": "f78bfe61-0ef6-4167-a26d-b3ce52b05bba", "address": "fa:16:3e:99:3e:89", "network": {"id": "6745e688-adb3-4658-a494-7169101829e9", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-420012573-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "99286619ca844589aacc016f9c8f009c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf78bfe61-0e", "ovs_interfaceid": "f78bfe61-0ef6-4167-a26d-b3ce52b05bba", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:19:30 compute-0 nova_compute[192079]: 2025-10-02 12:19:30.966 2 DEBUG oslo_concurrency.lockutils [req-4e2bbf8c-5d14-4e37-89c1-4bc8af7270eb req-787d5ca8-57f2-4733-9d5d-295302d5bc59 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-7f331800-f718-4dc3-b740-1a9574a65fb1" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:19:30 compute-0 nova_compute[192079]: 2025-10-02 12:19:30.967 2 DEBUG nova.network.neutron [req-4e2bbf8c-5d14-4e37-89c1-4bc8af7270eb req-787d5ca8-57f2-4733-9d5d-295302d5bc59 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Refreshing network info cache for port f78bfe61-0ef6-4167-a26d-b3ce52b05bba _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:19:30 compute-0 nova_compute[192079]: 2025-10-02 12:19:30.970 2 DEBUG nova.virt.libvirt.driver [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Start _get_guest_xml network_info=[{"id": "f78bfe61-0ef6-4167-a26d-b3ce52b05bba", "address": "fa:16:3e:99:3e:89", "network": {"id": "6745e688-adb3-4658-a494-7169101829e9", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-420012573-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "99286619ca844589aacc016f9c8f009c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf78bfe61-0e", "ovs_interfaceid": "f78bfe61-0ef6-4167-a26d-b3ce52b05bba", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:19:30 compute-0 nova_compute[192079]: 2025-10-02 12:19:30.974 2 WARNING nova.virt.libvirt.driver [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:19:30 compute-0 nova_compute[192079]: 2025-10-02 12:19:30.979 2 DEBUG nova.virt.libvirt.host [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:19:30 compute-0 nova_compute[192079]: 2025-10-02 12:19:30.980 2 DEBUG nova.virt.libvirt.host [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:19:30 compute-0 nova_compute[192079]: 2025-10-02 12:19:30.986 2 DEBUG nova.virt.libvirt.host [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:19:30 compute-0 nova_compute[192079]: 2025-10-02 12:19:30.986 2 DEBUG nova.virt.libvirt.host [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:19:30 compute-0 nova_compute[192079]: 2025-10-02 12:19:30.987 2 DEBUG nova.virt.libvirt.driver [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:19:30 compute-0 nova_compute[192079]: 2025-10-02 12:19:30.988 2 DEBUG nova.virt.hardware [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:19:30 compute-0 nova_compute[192079]: 2025-10-02 12:19:30.988 2 DEBUG nova.virt.hardware [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:19:30 compute-0 nova_compute[192079]: 2025-10-02 12:19:30.988 2 DEBUG nova.virt.hardware [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:19:30 compute-0 nova_compute[192079]: 2025-10-02 12:19:30.989 2 DEBUG nova.virt.hardware [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:19:30 compute-0 nova_compute[192079]: 2025-10-02 12:19:30.989 2 DEBUG nova.virt.hardware [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:19:30 compute-0 nova_compute[192079]: 2025-10-02 12:19:30.989 2 DEBUG nova.virt.hardware [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:19:30 compute-0 nova_compute[192079]: 2025-10-02 12:19:30.989 2 DEBUG nova.virt.hardware [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:19:30 compute-0 nova_compute[192079]: 2025-10-02 12:19:30.990 2 DEBUG nova.virt.hardware [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:19:30 compute-0 nova_compute[192079]: 2025-10-02 12:19:30.990 2 DEBUG nova.virt.hardware [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:19:30 compute-0 nova_compute[192079]: 2025-10-02 12:19:30.990 2 DEBUG nova.virt.hardware [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:19:30 compute-0 nova_compute[192079]: 2025-10-02 12:19:30.990 2 DEBUG nova.virt.hardware [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:19:30 compute-0 nova_compute[192079]: 2025-10-02 12:19:30.994 2 DEBUG nova.virt.libvirt.vif [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:19:24Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description=None,display_name='tempest-ServerTagsTestJSON-server-1058268671',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-servertagstestjson-server-1058268671',id=98,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='99286619ca844589aacc016f9c8f009c',ramdisk_id='',reservation_id='r-18awkruy',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServerTagsTestJSON-1903451521',owner_user_name='tempest-ServerTagsTestJSON-1903451521-project-member'},tags=TagList,task_state='spawning'
,terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:19:26Z,user_data=None,user_id='af12a0d863d849fc869fc92f700cedde',uuid=7f331800-f718-4dc3-b740-1a9574a65fb1,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "f78bfe61-0ef6-4167-a26d-b3ce52b05bba", "address": "fa:16:3e:99:3e:89", "network": {"id": "6745e688-adb3-4658-a494-7169101829e9", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-420012573-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "99286619ca844589aacc016f9c8f009c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf78bfe61-0e", "ovs_interfaceid": "f78bfe61-0ef6-4167-a26d-b3ce52b05bba", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:19:30 compute-0 nova_compute[192079]: 2025-10-02 12:19:30.994 2 DEBUG nova.network.os_vif_util [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Converting VIF {"id": "f78bfe61-0ef6-4167-a26d-b3ce52b05bba", "address": "fa:16:3e:99:3e:89", "network": {"id": "6745e688-adb3-4658-a494-7169101829e9", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-420012573-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "99286619ca844589aacc016f9c8f009c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf78bfe61-0e", "ovs_interfaceid": "f78bfe61-0ef6-4167-a26d-b3ce52b05bba", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:19:30 compute-0 nova_compute[192079]: 2025-10-02 12:19:30.995 2 DEBUG nova.network.os_vif_util [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:99:3e:89,bridge_name='br-int',has_traffic_filtering=True,id=f78bfe61-0ef6-4167-a26d-b3ce52b05bba,network=Network(6745e688-adb3-4658-a494-7169101829e9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf78bfe61-0e') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:19:30 compute-0 nova_compute[192079]: 2025-10-02 12:19:30.996 2 DEBUG nova.objects.instance [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Lazy-loading 'pci_devices' on Instance uuid 7f331800-f718-4dc3-b740-1a9574a65fb1 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.010 2 DEBUG nova.virt.libvirt.driver [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:19:31 compute-0 nova_compute[192079]:   <uuid>7f331800-f718-4dc3-b740-1a9574a65fb1</uuid>
Oct 02 12:19:31 compute-0 nova_compute[192079]:   <name>instance-00000062</name>
Oct 02 12:19:31 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:19:31 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:19:31 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:19:31 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:       <nova:name>tempest-ServerTagsTestJSON-server-1058268671</nova:name>
Oct 02 12:19:31 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:19:30</nova:creationTime>
Oct 02 12:19:31 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:19:31 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:19:31 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:19:31 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:19:31 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:19:31 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:19:31 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:19:31 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:19:31 compute-0 nova_compute[192079]:         <nova:user uuid="af12a0d863d849fc869fc92f700cedde">tempest-ServerTagsTestJSON-1903451521-project-member</nova:user>
Oct 02 12:19:31 compute-0 nova_compute[192079]:         <nova:project uuid="99286619ca844589aacc016f9c8f009c">tempest-ServerTagsTestJSON-1903451521</nova:project>
Oct 02 12:19:31 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:19:31 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:19:31 compute-0 nova_compute[192079]:         <nova:port uuid="f78bfe61-0ef6-4167-a26d-b3ce52b05bba">
Oct 02 12:19:31 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.10" ipVersion="4"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:19:31 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:19:31 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:19:31 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <system>
Oct 02 12:19:31 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:19:31 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:19:31 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:19:31 compute-0 nova_compute[192079]:       <entry name="serial">7f331800-f718-4dc3-b740-1a9574a65fb1</entry>
Oct 02 12:19:31 compute-0 nova_compute[192079]:       <entry name="uuid">7f331800-f718-4dc3-b740-1a9574a65fb1</entry>
Oct 02 12:19:31 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     </system>
Oct 02 12:19:31 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:19:31 compute-0 nova_compute[192079]:   <os>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:   </os>
Oct 02 12:19:31 compute-0 nova_compute[192079]:   <features>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:   </features>
Oct 02 12:19:31 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:19:31 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:19:31 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:19:31 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/7f331800-f718-4dc3-b740-1a9574a65fb1/disk"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:19:31 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/7f331800-f718-4dc3-b740-1a9574a65fb1/disk.config"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:19:31 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:99:3e:89"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:       <target dev="tapf78bfe61-0e"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:19:31 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/7f331800-f718-4dc3-b740-1a9574a65fb1/console.log" append="off"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <video>
Oct 02 12:19:31 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     </video>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:19:31 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:19:31 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:19:31 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:19:31 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:19:31 compute-0 nova_compute[192079]: </domain>
Oct 02 12:19:31 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.012 2 DEBUG nova.compute.manager [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Preparing to wait for external event network-vif-plugged-f78bfe61-0ef6-4167-a26d-b3ce52b05bba prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.012 2 DEBUG oslo_concurrency.lockutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Acquiring lock "7f331800-f718-4dc3-b740-1a9574a65fb1-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.013 2 DEBUG oslo_concurrency.lockutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Lock "7f331800-f718-4dc3-b740-1a9574a65fb1-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.013 2 DEBUG oslo_concurrency.lockutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Lock "7f331800-f718-4dc3-b740-1a9574a65fb1-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.014 2 DEBUG nova.virt.libvirt.vif [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:19:24Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description=None,display_name='tempest-ServerTagsTestJSON-server-1058268671',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-servertagstestjson-server-1058268671',id=98,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='99286619ca844589aacc016f9c8f009c',ramdisk_id='',reservation_id='r-18awkruy',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServerTagsTestJSON-1903451521',owner_user_name='tempest-ServerTagsTestJSON-1903451521-project-member'},tags=TagList,task_state=
'spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:19:26Z,user_data=None,user_id='af12a0d863d849fc869fc92f700cedde',uuid=7f331800-f718-4dc3-b740-1a9574a65fb1,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "f78bfe61-0ef6-4167-a26d-b3ce52b05bba", "address": "fa:16:3e:99:3e:89", "network": {"id": "6745e688-adb3-4658-a494-7169101829e9", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-420012573-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "99286619ca844589aacc016f9c8f009c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf78bfe61-0e", "ovs_interfaceid": "f78bfe61-0ef6-4167-a26d-b3ce52b05bba", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.014 2 DEBUG nova.network.os_vif_util [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Converting VIF {"id": "f78bfe61-0ef6-4167-a26d-b3ce52b05bba", "address": "fa:16:3e:99:3e:89", "network": {"id": "6745e688-adb3-4658-a494-7169101829e9", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-420012573-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "99286619ca844589aacc016f9c8f009c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf78bfe61-0e", "ovs_interfaceid": "f78bfe61-0ef6-4167-a26d-b3ce52b05bba", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.015 2 DEBUG nova.network.os_vif_util [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:99:3e:89,bridge_name='br-int',has_traffic_filtering=True,id=f78bfe61-0ef6-4167-a26d-b3ce52b05bba,network=Network(6745e688-adb3-4658-a494-7169101829e9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf78bfe61-0e') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.015 2 DEBUG os_vif [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:99:3e:89,bridge_name='br-int',has_traffic_filtering=True,id=f78bfe61-0ef6-4167-a26d-b3ce52b05bba,network=Network(6745e688-adb3-4658-a494-7169101829e9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf78bfe61-0e') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.016 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.016 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.016 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.019 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.019 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapf78bfe61-0e, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.019 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapf78bfe61-0e, col_values=(('external_ids', {'iface-id': 'f78bfe61-0ef6-4167-a26d-b3ce52b05bba', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:99:3e:89', 'vm-uuid': '7f331800-f718-4dc3-b740-1a9574a65fb1'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.091 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:31 compute-0 NetworkManager[51160]: <info>  [1759407571.0922] manager: (tapf78bfe61-0e): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/165)
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.094 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.097 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.098 2 INFO os_vif [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:99:3e:89,bridge_name='br-int',has_traffic_filtering=True,id=f78bfe61-0ef6-4167-a26d-b3ce52b05bba,network=Network(6745e688-adb3-4658-a494-7169101829e9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf78bfe61-0e')
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.144 2 DEBUG nova.virt.libvirt.driver [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.145 2 DEBUG nova.virt.libvirt.driver [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.145 2 DEBUG nova.virt.libvirt.driver [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] No VIF found with MAC fa:16:3e:99:3e:89, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.145 2 INFO nova.virt.libvirt.driver [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Using config drive
Oct 02 12:19:31 compute-0 podman[234863]: 2025-10-02 12:19:31.169729294 +0000 UTC m=+0.051601248 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=multipathd, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, container_name=multipathd, managed_by=edpm_ansible, io.buildah.version=1.41.3)
Oct 02 12:19:31 compute-0 podman[234861]: 2025-10-02 12:19:31.169723794 +0000 UTC m=+0.053924851 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, vendor=Red Hat, Inc., io.openshift.tags=minimal rhel9, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, distribution-scope=public, version=9.6, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., container_name=openstack_network_exporter, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., architecture=x86_64, release=1755695350, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., io.openshift.expose-services=, maintainer=Red Hat, Inc., build-date=2025-08-20T13:12:41, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, name=ubi9-minimal, com.redhat.component=ubi9-minimal-container, io.buildah.version=1.33.7, vcs-type=git, managed_by=edpm_ansible, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, config_id=edpm, url=https://catalog.redhat.com/en/search?searchType=containers, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b)
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.518 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.551 2 INFO nova.virt.libvirt.driver [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Creating config drive at /var/lib/nova/instances/7f331800-f718-4dc3-b740-1a9574a65fb1/disk.config
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.556 2 DEBUG oslo_concurrency.processutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/7f331800-f718-4dc3-b740-1a9574a65fb1/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp43fv6s3u execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.680 2 DEBUG oslo_concurrency.processutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/7f331800-f718-4dc3-b740-1a9574a65fb1/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp43fv6s3u" returned: 0 in 0.125s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:19:31 compute-0 kernel: tapf78bfe61-0e: entered promiscuous mode
Oct 02 12:19:31 compute-0 ovn_controller[94336]: 2025-10-02T12:19:31Z|00328|binding|INFO|Claiming lport f78bfe61-0ef6-4167-a26d-b3ce52b05bba for this chassis.
Oct 02 12:19:31 compute-0 ovn_controller[94336]: 2025-10-02T12:19:31Z|00329|binding|INFO|f78bfe61-0ef6-4167-a26d-b3ce52b05bba: Claiming fa:16:3e:99:3e:89 10.100.0.10
Oct 02 12:19:31 compute-0 NetworkManager[51160]: <info>  [1759407571.7347] manager: (tapf78bfe61-0e): new Tun device (/org/freedesktop/NetworkManager/Devices/166)
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.735 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.738 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.740 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:31.754 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:99:3e:89 10.100.0.10'], port_security=['fa:16:3e:99:3e:89 10.100.0.10'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.10/28', 'neutron:device_id': '7f331800-f718-4dc3-b740-1a9574a65fb1', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-6745e688-adb3-4658-a494-7169101829e9', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '99286619ca844589aacc016f9c8f009c', 'neutron:revision_number': '2', 'neutron:security_group_ids': '62e95c2d-cdae-4568-acd4-9be2bfb56811', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=4f32c1f4-7e7b-4049-bfe2-0cff06b02083, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=f78bfe61-0ef6-4167-a26d-b3ce52b05bba) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:19:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:31.755 103294 INFO neutron.agent.ovn.metadata.agent [-] Port f78bfe61-0ef6-4167-a26d-b3ce52b05bba in datapath 6745e688-adb3-4658-a494-7169101829e9 bound to our chassis
Oct 02 12:19:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:31.756 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 6745e688-adb3-4658-a494-7169101829e9
Oct 02 12:19:31 compute-0 systemd-udevd[234918]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:19:31 compute-0 NetworkManager[51160]: <info>  [1759407571.7716] device (tapf78bfe61-0e): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:19:31 compute-0 NetworkManager[51160]: <info>  [1759407571.7732] device (tapf78bfe61-0e): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:19:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:31.772 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[35b62b7e-128c-4dd6-b52f-fb220ce9a991]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:31.773 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap6745e688-a1 in ovnmeta-6745e688-adb3-4658-a494-7169101829e9 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:19:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:31.775 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap6745e688-a0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:19:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:31.775 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[54f59baf-b914-467e-a2a6-a579763f07e1]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:31.776 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[96c0cd7e-c939-4ec5-897f-8964e6d7f829]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:31 compute-0 systemd-machined[152150]: New machine qemu-45-instance-00000062.
Oct 02 12:19:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:31.787 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[202d57e7-90a1-44ff-9fe7-af3880096a5a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.792 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:31 compute-0 systemd[1]: Started Virtual Machine qemu-45-instance-00000062.
Oct 02 12:19:31 compute-0 ovn_controller[94336]: 2025-10-02T12:19:31Z|00330|binding|INFO|Setting lport f78bfe61-0ef6-4167-a26d-b3ce52b05bba ovn-installed in OVS
Oct 02 12:19:31 compute-0 ovn_controller[94336]: 2025-10-02T12:19:31Z|00331|binding|INFO|Setting lport f78bfe61-0ef6-4167-a26d-b3ce52b05bba up in Southbound
Oct 02 12:19:31 compute-0 nova_compute[192079]: 2025-10-02 12:19:31.796 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:31.810 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[30811aba-2ab1-4c18-a174-fd7dca365c61]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:31.837 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[593c69c9-993f-49f2-a2b0-2143b7b46b90]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:31.842 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e518039f-0417-46c7-822d-38d33364f2df]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:31 compute-0 systemd-udevd[234922]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:19:31 compute-0 NetworkManager[51160]: <info>  [1759407571.8432] manager: (tap6745e688-a0): new Veth device (/org/freedesktop/NetworkManager/Devices/167)
Oct 02 12:19:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:31.877 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[f5e832a9-1985-446e-a92f-78ddfde87b47]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:31.880 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[34a6b18e-103c-45f5-b061-452b2dd4500b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:31 compute-0 NetworkManager[51160]: <info>  [1759407571.9021] device (tap6745e688-a0): carrier: link connected
Oct 02 12:19:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:31.907 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[48ea0598-0255-4806-8171-2ccd5e845104]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:31.921 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[cc39b1f0-cb80-42f5-ad9b-c73a8dd8ca50]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap6745e688-a1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:74:31:52'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 2, 'tx_packets': 1, 'rx_bytes': 196, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 2, 'tx_packets': 1, 'rx_bytes': 196, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 107], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 556953, 'reachable_time': 36479, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 2, 'inoctets': 168, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 2, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 168, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 2, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 234952, 'error': None, 'target': 'ovnmeta-6745e688-adb3-4658-a494-7169101829e9', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:31.936 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e001df9d-4988-4d7e-b7b6-fe1f21f48be9]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe74:3152'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 556953, 'tstamp': 556953}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 234953, 'error': None, 'target': 'ovnmeta-6745e688-adb3-4658-a494-7169101829e9', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:31.950 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b8826735-4d38-49c6-996d-f113dd818a70]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap6745e688-a1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:74:31:52'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 2, 'tx_packets': 1, 'rx_bytes': 196, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 2, 'tx_packets': 1, 'rx_bytes': 196, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 107], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 556953, 'reachable_time': 36479, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 2, 'inoctets': 168, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 2, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 168, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 2, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 234954, 'error': None, 'target': 'ovnmeta-6745e688-adb3-4658-a494-7169101829e9', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:31.978 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a3556709-e7de-42b7-9f44-8168d4848ddd]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:32.032 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[59da30f2-bf34-4aa9-8d36-f620fd59bada]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:32.033 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap6745e688-a0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:32.033 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:32.034 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap6745e688-a0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.035 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:32 compute-0 kernel: tap6745e688-a0: entered promiscuous mode
Oct 02 12:19:32 compute-0 NetworkManager[51160]: <info>  [1759407572.0362] manager: (tap6745e688-a0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/168)
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:32.038 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap6745e688-a0, col_values=(('external_ids', {'iface-id': 'e7860b16-7332-4956-844c-c1b6ee82d1fd'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.040 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:32 compute-0 ovn_controller[94336]: 2025-10-02T12:19:32Z|00332|binding|INFO|Releasing lport e7860b16-7332-4956-844c-c1b6ee82d1fd from this chassis (sb_readonly=0)
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.041 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:32.041 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/6745e688-adb3-4658-a494-7169101829e9.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/6745e688-adb3-4658-a494-7169101829e9.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:32.042 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[04977bf0-bf37-4a05-a81c-e65101cf5bd9]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:32.043 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-6745e688-adb3-4658-a494-7169101829e9
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/6745e688-adb3-4658-a494-7169101829e9.pid.haproxy
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 6745e688-adb3-4658-a494-7169101829e9
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:19:32 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:32.044 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-6745e688-adb3-4658-a494-7169101829e9', 'env', 'PROCESS_TAG=haproxy-6745e688-adb3-4658-a494-7169101829e9', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/6745e688-adb3-4658-a494-7169101829e9.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.051 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.311 2 DEBUG nova.compute.manager [req-6e71b582-92df-4810-99ff-c5d9ab6f7ee7 req-fecb7b40-bfb2-4634-9c99-6b25502de5cb 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Received event network-vif-plugged-f78bfe61-0ef6-4167-a26d-b3ce52b05bba external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.312 2 DEBUG oslo_concurrency.lockutils [req-6e71b582-92df-4810-99ff-c5d9ab6f7ee7 req-fecb7b40-bfb2-4634-9c99-6b25502de5cb 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "7f331800-f718-4dc3-b740-1a9574a65fb1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.312 2 DEBUG oslo_concurrency.lockutils [req-6e71b582-92df-4810-99ff-c5d9ab6f7ee7 req-fecb7b40-bfb2-4634-9c99-6b25502de5cb 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "7f331800-f718-4dc3-b740-1a9574a65fb1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.312 2 DEBUG oslo_concurrency.lockutils [req-6e71b582-92df-4810-99ff-c5d9ab6f7ee7 req-fecb7b40-bfb2-4634-9c99-6b25502de5cb 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "7f331800-f718-4dc3-b740-1a9574a65fb1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.312 2 DEBUG nova.compute.manager [req-6e71b582-92df-4810-99ff-c5d9ab6f7ee7 req-fecb7b40-bfb2-4634-9c99-6b25502de5cb 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Processing event network-vif-plugged-f78bfe61-0ef6-4167-a26d-b3ce52b05bba _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:19:32 compute-0 podman[234993]: 2025-10-02 12:19:32.356751595 +0000 UTC m=+0.022402672 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:19:32 compute-0 podman[234993]: 2025-10-02 12:19:32.526677117 +0000 UTC m=+0.192328174 container create 793ddf70c1f2df1af6a4c1cc6e56c0fc94e0cd49bac9245816a85d4d849b0bbe (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-6745e688-adb3-4658-a494-7169101829e9, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.license=GPLv2)
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.572 2 DEBUG nova.compute.manager [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.573 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407572.5714374, 7f331800-f718-4dc3-b740-1a9574a65fb1 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.573 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] VM Started (Lifecycle Event)
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.576 2 DEBUG nova.virt.libvirt.driver [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.579 2 INFO nova.virt.libvirt.driver [-] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Instance spawned successfully.
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.579 2 DEBUG nova.virt.libvirt.driver [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:19:32 compute-0 systemd[1]: Started libpod-conmon-793ddf70c1f2df1af6a4c1cc6e56c0fc94e0cd49bac9245816a85d4d849b0bbe.scope.
Oct 02 12:19:32 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:19:32 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/d95d799d9e1e79508fbdbb67f4146aaa9c1d44958535848ead237f289a70bcbf/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:19:32 compute-0 podman[234993]: 2025-10-02 12:19:32.617640727 +0000 UTC m=+0.283291824 container init 793ddf70c1f2df1af6a4c1cc6e56c0fc94e0cd49bac9245816a85d4d849b0bbe (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-6745e688-adb3-4658-a494-7169101829e9, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:19:32 compute-0 podman[234993]: 2025-10-02 12:19:32.622585912 +0000 UTC m=+0.288236969 container start 793ddf70c1f2df1af6a4c1cc6e56c0fc94e0cd49bac9245816a85d4d849b0bbe (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-6745e688-adb3-4658-a494-7169101829e9, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, io.buildah.version=1.41.3)
Oct 02 12:19:32 compute-0 neutron-haproxy-ovnmeta-6745e688-adb3-4658-a494-7169101829e9[235008]: [NOTICE]   (235012) : New worker (235014) forked
Oct 02 12:19:32 compute-0 neutron-haproxy-ovnmeta-6745e688-adb3-4658-a494-7169101829e9[235008]: [NOTICE]   (235012) : Loading success.
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.646 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.649 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.663 2 DEBUG nova.virt.libvirt.driver [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.663 2 DEBUG nova.virt.libvirt.driver [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.664 2 DEBUG nova.virt.libvirt.driver [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.664 2 DEBUG nova.virt.libvirt.driver [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.665 2 DEBUG nova.virt.libvirt.driver [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.666 2 DEBUG nova.virt.libvirt.driver [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.678 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.679 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407572.5715406, 7f331800-f718-4dc3-b740-1a9574a65fb1 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.679 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] VM Paused (Lifecycle Event)
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.752 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.755 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407572.5763175, 7f331800-f718-4dc3-b740-1a9574a65fb1 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.756 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] VM Resumed (Lifecycle Event)
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.795 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.799 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.809 2 DEBUG nova.network.neutron [req-4e2bbf8c-5d14-4e37-89c1-4bc8af7270eb req-787d5ca8-57f2-4733-9d5d-295302d5bc59 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Updated VIF entry in instance network info cache for port f78bfe61-0ef6-4167-a26d-b3ce52b05bba. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.810 2 DEBUG nova.network.neutron [req-4e2bbf8c-5d14-4e37-89c1-4bc8af7270eb req-787d5ca8-57f2-4733-9d5d-295302d5bc59 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Updating instance_info_cache with network_info: [{"id": "f78bfe61-0ef6-4167-a26d-b3ce52b05bba", "address": "fa:16:3e:99:3e:89", "network": {"id": "6745e688-adb3-4658-a494-7169101829e9", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-420012573-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "99286619ca844589aacc016f9c8f009c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf78bfe61-0e", "ovs_interfaceid": "f78bfe61-0ef6-4167-a26d-b3ce52b05bba", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.818 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.868 2 DEBUG oslo_concurrency.lockutils [req-4e2bbf8c-5d14-4e37-89c1-4bc8af7270eb req-787d5ca8-57f2-4733-9d5d-295302d5bc59 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-7f331800-f718-4dc3-b740-1a9574a65fb1" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.875 2 INFO nova.compute.manager [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Took 6.39 seconds to spawn the instance on the hypervisor.
Oct 02 12:19:32 compute-0 nova_compute[192079]: 2025-10-02 12:19:32.875 2 DEBUG nova.compute.manager [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:19:33 compute-0 nova_compute[192079]: 2025-10-02 12:19:33.040 2 INFO nova.compute.manager [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Took 7.02 seconds to build instance.
Oct 02 12:19:33 compute-0 nova_compute[192079]: 2025-10-02 12:19:33.075 2 DEBUG oslo_concurrency.lockutils [None req-a01c63c9-37f0-4da2-99e9-04c673d8253a af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Lock "7f331800-f718-4dc3-b740-1a9574a65fb1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 7.147s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:34 compute-0 nova_compute[192079]: 2025-10-02 12:19:34.907 2 DEBUG nova.compute.manager [req-8eccc30f-2940-4a99-970d-de2d10fe780c req-cbba55c7-c0a6-4b23-8bf4-90c9f59fdb08 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Received event network-vif-plugged-f78bfe61-0ef6-4167-a26d-b3ce52b05bba external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:19:34 compute-0 nova_compute[192079]: 2025-10-02 12:19:34.908 2 DEBUG oslo_concurrency.lockutils [req-8eccc30f-2940-4a99-970d-de2d10fe780c req-cbba55c7-c0a6-4b23-8bf4-90c9f59fdb08 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "7f331800-f718-4dc3-b740-1a9574a65fb1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:34 compute-0 nova_compute[192079]: 2025-10-02 12:19:34.908 2 DEBUG oslo_concurrency.lockutils [req-8eccc30f-2940-4a99-970d-de2d10fe780c req-cbba55c7-c0a6-4b23-8bf4-90c9f59fdb08 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "7f331800-f718-4dc3-b740-1a9574a65fb1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:34 compute-0 nova_compute[192079]: 2025-10-02 12:19:34.908 2 DEBUG oslo_concurrency.lockutils [req-8eccc30f-2940-4a99-970d-de2d10fe780c req-cbba55c7-c0a6-4b23-8bf4-90c9f59fdb08 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "7f331800-f718-4dc3-b740-1a9574a65fb1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:34 compute-0 nova_compute[192079]: 2025-10-02 12:19:34.909 2 DEBUG nova.compute.manager [req-8eccc30f-2940-4a99-970d-de2d10fe780c req-cbba55c7-c0a6-4b23-8bf4-90c9f59fdb08 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] No waiting events found dispatching network-vif-plugged-f78bfe61-0ef6-4167-a26d-b3ce52b05bba pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:19:34 compute-0 nova_compute[192079]: 2025-10-02 12:19:34.909 2 WARNING nova.compute.manager [req-8eccc30f-2940-4a99-970d-de2d10fe780c req-cbba55c7-c0a6-4b23-8bf4-90c9f59fdb08 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Received unexpected event network-vif-plugged-f78bfe61-0ef6-4167-a26d-b3ce52b05bba for instance with vm_state active and task_state None.
Oct 02 12:19:36 compute-0 nova_compute[192079]: 2025-10-02 12:19:36.094 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:36 compute-0 nova_compute[192079]: 2025-10-02 12:19:36.521 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:37 compute-0 nova_compute[192079]: 2025-10-02 12:19:37.823 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759407562.8221054, 6059254e-5c4b-4d87-991a-cf72fab61216 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:19:37 compute-0 nova_compute[192079]: 2025-10-02 12:19:37.824 2 INFO nova.compute.manager [-] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] VM Stopped (Lifecycle Event)
Oct 02 12:19:37 compute-0 nova_compute[192079]: 2025-10-02 12:19:37.859 2 DEBUG nova.compute.manager [None req-7846d7f2-5de2-4cc4-9186-4fff40553a92 - - - - - -] [instance: 6059254e-5c4b-4d87-991a-cf72fab61216] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:19:38 compute-0 podman[235023]: 2025-10-02 12:19:38.133694446 +0000 UTC m=+0.050451006 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:19:38 compute-0 podman[235024]: 2025-10-02 12:19:38.186706761 +0000 UTC m=+0.094654131 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=iscsid, org.label-schema.build-date=20251001, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 12:19:38 compute-0 nova_compute[192079]: 2025-10-02 12:19:38.825 2 DEBUG oslo_concurrency.lockutils [None req-1dd7c744-1ae5-4786-b01b-4ca8dca5896e af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Acquiring lock "7f331800-f718-4dc3-b740-1a9574a65fb1" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:38 compute-0 nova_compute[192079]: 2025-10-02 12:19:38.826 2 DEBUG oslo_concurrency.lockutils [None req-1dd7c744-1ae5-4786-b01b-4ca8dca5896e af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Lock "7f331800-f718-4dc3-b740-1a9574a65fb1" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:38 compute-0 nova_compute[192079]: 2025-10-02 12:19:38.827 2 DEBUG oslo_concurrency.lockutils [None req-1dd7c744-1ae5-4786-b01b-4ca8dca5896e af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Acquiring lock "7f331800-f718-4dc3-b740-1a9574a65fb1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:38 compute-0 nova_compute[192079]: 2025-10-02 12:19:38.827 2 DEBUG oslo_concurrency.lockutils [None req-1dd7c744-1ae5-4786-b01b-4ca8dca5896e af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Lock "7f331800-f718-4dc3-b740-1a9574a65fb1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:38 compute-0 nova_compute[192079]: 2025-10-02 12:19:38.828 2 DEBUG oslo_concurrency.lockutils [None req-1dd7c744-1ae5-4786-b01b-4ca8dca5896e af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Lock "7f331800-f718-4dc3-b740-1a9574a65fb1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:38 compute-0 nova_compute[192079]: 2025-10-02 12:19:38.843 2 INFO nova.compute.manager [None req-1dd7c744-1ae5-4786-b01b-4ca8dca5896e af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Terminating instance
Oct 02 12:19:38 compute-0 nova_compute[192079]: 2025-10-02 12:19:38.857 2 DEBUG nova.compute.manager [None req-1dd7c744-1ae5-4786-b01b-4ca8dca5896e af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:19:38 compute-0 kernel: tapf78bfe61-0e (unregistering): left promiscuous mode
Oct 02 12:19:38 compute-0 NetworkManager[51160]: <info>  [1759407578.8885] device (tapf78bfe61-0e): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:19:38 compute-0 ovn_controller[94336]: 2025-10-02T12:19:38Z|00333|binding|INFO|Releasing lport f78bfe61-0ef6-4167-a26d-b3ce52b05bba from this chassis (sb_readonly=0)
Oct 02 12:19:38 compute-0 ovn_controller[94336]: 2025-10-02T12:19:38Z|00334|binding|INFO|Setting lport f78bfe61-0ef6-4167-a26d-b3ce52b05bba down in Southbound
Oct 02 12:19:38 compute-0 nova_compute[192079]: 2025-10-02 12:19:38.901 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:38 compute-0 ovn_controller[94336]: 2025-10-02T12:19:38Z|00335|binding|INFO|Removing iface tapf78bfe61-0e ovn-installed in OVS
Oct 02 12:19:38 compute-0 nova_compute[192079]: 2025-10-02 12:19:38.906 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:38 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:38.917 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:99:3e:89 10.100.0.10'], port_security=['fa:16:3e:99:3e:89 10.100.0.10'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.10/28', 'neutron:device_id': '7f331800-f718-4dc3-b740-1a9574a65fb1', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-6745e688-adb3-4658-a494-7169101829e9', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '99286619ca844589aacc016f9c8f009c', 'neutron:revision_number': '4', 'neutron:security_group_ids': '62e95c2d-cdae-4568-acd4-9be2bfb56811', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=4f32c1f4-7e7b-4049-bfe2-0cff06b02083, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=f78bfe61-0ef6-4167-a26d-b3ce52b05bba) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:19:38 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:38.918 103294 INFO neutron.agent.ovn.metadata.agent [-] Port f78bfe61-0ef6-4167-a26d-b3ce52b05bba in datapath 6745e688-adb3-4658-a494-7169101829e9 unbound from our chassis
Oct 02 12:19:38 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:38.919 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 6745e688-adb3-4658-a494-7169101829e9, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:19:38 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:38.920 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f0dc6079-f9cc-4de6-b298-61482120a769]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:38 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:38.920 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-6745e688-adb3-4658-a494-7169101829e9 namespace which is not needed anymore
Oct 02 12:19:38 compute-0 nova_compute[192079]: 2025-10-02 12:19:38.939 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:38 compute-0 systemd[1]: machine-qemu\x2d45\x2dinstance\x2d00000062.scope: Deactivated successfully.
Oct 02 12:19:38 compute-0 systemd[1]: machine-qemu\x2d45\x2dinstance\x2d00000062.scope: Consumed 7.095s CPU time.
Oct 02 12:19:38 compute-0 systemd-machined[152150]: Machine qemu-45-instance-00000062 terminated.
Oct 02 12:19:39 compute-0 neutron-haproxy-ovnmeta-6745e688-adb3-4658-a494-7169101829e9[235008]: [NOTICE]   (235012) : haproxy version is 2.8.14-c23fe91
Oct 02 12:19:39 compute-0 neutron-haproxy-ovnmeta-6745e688-adb3-4658-a494-7169101829e9[235008]: [NOTICE]   (235012) : path to executable is /usr/sbin/haproxy
Oct 02 12:19:39 compute-0 neutron-haproxy-ovnmeta-6745e688-adb3-4658-a494-7169101829e9[235008]: [WARNING]  (235012) : Exiting Master process...
Oct 02 12:19:39 compute-0 neutron-haproxy-ovnmeta-6745e688-adb3-4658-a494-7169101829e9[235008]: [WARNING]  (235012) : Exiting Master process...
Oct 02 12:19:39 compute-0 neutron-haproxy-ovnmeta-6745e688-adb3-4658-a494-7169101829e9[235008]: [ALERT]    (235012) : Current worker (235014) exited with code 143 (Terminated)
Oct 02 12:19:39 compute-0 neutron-haproxy-ovnmeta-6745e688-adb3-4658-a494-7169101829e9[235008]: [WARNING]  (235012) : All workers exited. Exiting... (0)
Oct 02 12:19:39 compute-0 systemd[1]: libpod-793ddf70c1f2df1af6a4c1cc6e56c0fc94e0cd49bac9245816a85d4d849b0bbe.scope: Deactivated successfully.
Oct 02 12:19:39 compute-0 podman[235089]: 2025-10-02 12:19:39.058391655 +0000 UTC m=+0.049153711 container died 793ddf70c1f2df1af6a4c1cc6e56c0fc94e0cd49bac9245816a85d4d849b0bbe (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-6745e688-adb3-4658-a494-7169101829e9, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3)
Oct 02 12:19:39 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-793ddf70c1f2df1af6a4c1cc6e56c0fc94e0cd49bac9245816a85d4d849b0bbe-userdata-shm.mount: Deactivated successfully.
Oct 02 12:19:39 compute-0 systemd[1]: var-lib-containers-storage-overlay-d95d799d9e1e79508fbdbb67f4146aaa9c1d44958535848ead237f289a70bcbf-merged.mount: Deactivated successfully.
Oct 02 12:19:39 compute-0 nova_compute[192079]: 2025-10-02 12:19:39.089 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:39 compute-0 nova_compute[192079]: 2025-10-02 12:19:39.097 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:39 compute-0 podman[235089]: 2025-10-02 12:19:39.103517865 +0000 UTC m=+0.094279921 container cleanup 793ddf70c1f2df1af6a4c1cc6e56c0fc94e0cd49bac9245816a85d4d849b0bbe (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-6745e688-adb3-4658-a494-7169101829e9, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0)
Oct 02 12:19:39 compute-0 systemd[1]: libpod-conmon-793ddf70c1f2df1af6a4c1cc6e56c0fc94e0cd49bac9245816a85d4d849b0bbe.scope: Deactivated successfully.
Oct 02 12:19:39 compute-0 nova_compute[192079]: 2025-10-02 12:19:39.134 2 INFO nova.virt.libvirt.driver [-] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Instance destroyed successfully.
Oct 02 12:19:39 compute-0 nova_compute[192079]: 2025-10-02 12:19:39.134 2 DEBUG nova.objects.instance [None req-1dd7c744-1ae5-4786-b01b-4ca8dca5896e af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Lazy-loading 'resources' on Instance uuid 7f331800-f718-4dc3-b740-1a9574a65fb1 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:19:39 compute-0 nova_compute[192079]: 2025-10-02 12:19:39.150 2 DEBUG nova.virt.libvirt.vif [None req-1dd7c744-1ae5-4786-b01b-4ca8dca5896e af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:19:24Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description=None,display_name='tempest-ServerTagsTestJSON-server-1058268671',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-servertagstestjson-server-1058268671',id=98,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:19:32Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='99286619ca844589aacc016f9c8f009c',ramdisk_id='',reservation_id='r-18awkruy',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-
ServerTagsTestJSON-1903451521',owner_user_name='tempest-ServerTagsTestJSON-1903451521-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:19:32Z,user_data=None,user_id='af12a0d863d849fc869fc92f700cedde',uuid=7f331800-f718-4dc3-b740-1a9574a65fb1,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "f78bfe61-0ef6-4167-a26d-b3ce52b05bba", "address": "fa:16:3e:99:3e:89", "network": {"id": "6745e688-adb3-4658-a494-7169101829e9", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-420012573-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "99286619ca844589aacc016f9c8f009c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf78bfe61-0e", "ovs_interfaceid": "f78bfe61-0ef6-4167-a26d-b3ce52b05bba", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:19:39 compute-0 nova_compute[192079]: 2025-10-02 12:19:39.151 2 DEBUG nova.network.os_vif_util [None req-1dd7c744-1ae5-4786-b01b-4ca8dca5896e af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Converting VIF {"id": "f78bfe61-0ef6-4167-a26d-b3ce52b05bba", "address": "fa:16:3e:99:3e:89", "network": {"id": "6745e688-adb3-4658-a494-7169101829e9", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-420012573-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "99286619ca844589aacc016f9c8f009c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf78bfe61-0e", "ovs_interfaceid": "f78bfe61-0ef6-4167-a26d-b3ce52b05bba", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:19:39 compute-0 nova_compute[192079]: 2025-10-02 12:19:39.151 2 DEBUG nova.network.os_vif_util [None req-1dd7c744-1ae5-4786-b01b-4ca8dca5896e af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:99:3e:89,bridge_name='br-int',has_traffic_filtering=True,id=f78bfe61-0ef6-4167-a26d-b3ce52b05bba,network=Network(6745e688-adb3-4658-a494-7169101829e9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf78bfe61-0e') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:19:39 compute-0 nova_compute[192079]: 2025-10-02 12:19:39.152 2 DEBUG os_vif [None req-1dd7c744-1ae5-4786-b01b-4ca8dca5896e af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:99:3e:89,bridge_name='br-int',has_traffic_filtering=True,id=f78bfe61-0ef6-4167-a26d-b3ce52b05bba,network=Network(6745e688-adb3-4658-a494-7169101829e9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf78bfe61-0e') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:19:39 compute-0 nova_compute[192079]: 2025-10-02 12:19:39.155 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:39 compute-0 nova_compute[192079]: 2025-10-02 12:19:39.155 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapf78bfe61-0e, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:19:39 compute-0 nova_compute[192079]: 2025-10-02 12:19:39.157 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:39 compute-0 nova_compute[192079]: 2025-10-02 12:19:39.159 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:19:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:39.159 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=24, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=23) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:19:39 compute-0 nova_compute[192079]: 2025-10-02 12:19:39.160 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:39 compute-0 nova_compute[192079]: 2025-10-02 12:19:39.162 2 INFO os_vif [None req-1dd7c744-1ae5-4786-b01b-4ca8dca5896e af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:99:3e:89,bridge_name='br-int',has_traffic_filtering=True,id=f78bfe61-0ef6-4167-a26d-b3ce52b05bba,network=Network(6745e688-adb3-4658-a494-7169101829e9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf78bfe61-0e')
Oct 02 12:19:39 compute-0 nova_compute[192079]: 2025-10-02 12:19:39.163 2 INFO nova.virt.libvirt.driver [None req-1dd7c744-1ae5-4786-b01b-4ca8dca5896e af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Deleting instance files /var/lib/nova/instances/7f331800-f718-4dc3-b740-1a9574a65fb1_del
Oct 02 12:19:39 compute-0 nova_compute[192079]: 2025-10-02 12:19:39.163 2 INFO nova.virt.libvirt.driver [None req-1dd7c744-1ae5-4786-b01b-4ca8dca5896e af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Deletion of /var/lib/nova/instances/7f331800-f718-4dc3-b740-1a9574a65fb1_del complete
Oct 02 12:19:39 compute-0 nova_compute[192079]: 2025-10-02 12:19:39.187 2 DEBUG nova.compute.manager [req-77afce80-b10a-4671-825f-1e053e2a5bb6 req-0c342572-6c91-4de4-b921-16571bf29bd4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Received event network-vif-unplugged-f78bfe61-0ef6-4167-a26d-b3ce52b05bba external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:19:39 compute-0 nova_compute[192079]: 2025-10-02 12:19:39.188 2 DEBUG oslo_concurrency.lockutils [req-77afce80-b10a-4671-825f-1e053e2a5bb6 req-0c342572-6c91-4de4-b921-16571bf29bd4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "7f331800-f718-4dc3-b740-1a9574a65fb1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:39 compute-0 nova_compute[192079]: 2025-10-02 12:19:39.188 2 DEBUG oslo_concurrency.lockutils [req-77afce80-b10a-4671-825f-1e053e2a5bb6 req-0c342572-6c91-4de4-b921-16571bf29bd4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "7f331800-f718-4dc3-b740-1a9574a65fb1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:39 compute-0 nova_compute[192079]: 2025-10-02 12:19:39.188 2 DEBUG oslo_concurrency.lockutils [req-77afce80-b10a-4671-825f-1e053e2a5bb6 req-0c342572-6c91-4de4-b921-16571bf29bd4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "7f331800-f718-4dc3-b740-1a9574a65fb1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:39 compute-0 nova_compute[192079]: 2025-10-02 12:19:39.188 2 DEBUG nova.compute.manager [req-77afce80-b10a-4671-825f-1e053e2a5bb6 req-0c342572-6c91-4de4-b921-16571bf29bd4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] No waiting events found dispatching network-vif-unplugged-f78bfe61-0ef6-4167-a26d-b3ce52b05bba pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:19:39 compute-0 nova_compute[192079]: 2025-10-02 12:19:39.188 2 DEBUG nova.compute.manager [req-77afce80-b10a-4671-825f-1e053e2a5bb6 req-0c342572-6c91-4de4-b921-16571bf29bd4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Received event network-vif-unplugged-f78bfe61-0ef6-4167-a26d-b3ce52b05bba for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:19:39 compute-0 podman[235136]: 2025-10-02 12:19:39.195835012 +0000 UTC m=+0.057907010 container remove 793ddf70c1f2df1af6a4c1cc6e56c0fc94e0cd49bac9245816a85d4d849b0bbe (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-6745e688-adb3-4658-a494-7169101829e9, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true)
Oct 02 12:19:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:39.201 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ce80e155-94cc-40b2-ac31-84d4acfbd5a2]: (4, ('Thu Oct  2 12:19:38 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-6745e688-adb3-4658-a494-7169101829e9 (793ddf70c1f2df1af6a4c1cc6e56c0fc94e0cd49bac9245816a85d4d849b0bbe)\n793ddf70c1f2df1af6a4c1cc6e56c0fc94e0cd49bac9245816a85d4d849b0bbe\nThu Oct  2 12:19:39 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-6745e688-adb3-4658-a494-7169101829e9 (793ddf70c1f2df1af6a4c1cc6e56c0fc94e0cd49bac9245816a85d4d849b0bbe)\n793ddf70c1f2df1af6a4c1cc6e56c0fc94e0cd49bac9245816a85d4d849b0bbe\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:39.203 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[40d96dbc-cfcb-493f-a7af-5a02ed7ef565]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:39.204 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap6745e688-a0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:19:39 compute-0 nova_compute[192079]: 2025-10-02 12:19:39.243 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:39 compute-0 kernel: tap6745e688-a0: left promiscuous mode
Oct 02 12:19:39 compute-0 nova_compute[192079]: 2025-10-02 12:19:39.253 2 INFO nova.compute.manager [None req-1dd7c744-1ae5-4786-b01b-4ca8dca5896e af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Took 0.39 seconds to destroy the instance on the hypervisor.
Oct 02 12:19:39 compute-0 nova_compute[192079]: 2025-10-02 12:19:39.253 2 DEBUG oslo.service.loopingcall [None req-1dd7c744-1ae5-4786-b01b-4ca8dca5896e af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:19:39 compute-0 nova_compute[192079]: 2025-10-02 12:19:39.254 2 DEBUG nova.compute.manager [-] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:19:39 compute-0 nova_compute[192079]: 2025-10-02 12:19:39.254 2 DEBUG nova.network.neutron [-] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:19:39 compute-0 nova_compute[192079]: 2025-10-02 12:19:39.260 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:39.263 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ece3638a-bb3c-456b-90f7-cc3fd886f33c]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:39.292 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7639578e-5958-4932-bc6f-08525cfc9d1d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:39.293 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[fd252d78-eb4e-490d-98ea-7225c206681d]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:39.313 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b05f43c5-f70a-46dc-9252-4c8f5018207c]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 556946, 'reachable_time': 37047, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 235154, 'error': None, 'target': 'ovnmeta-6745e688-adb3-4658-a494-7169101829e9', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:39.315 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-6745e688-adb3-4658-a494-7169101829e9 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:19:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:39.315 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[6c4e8297-73e2-466c-a3af-50e82159c6bd]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:39.315 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 4 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:19:39 compute-0 systemd[1]: run-netns-ovnmeta\x2d6745e688\x2dadb3\x2d4658\x2da494\x2d7169101829e9.mount: Deactivated successfully.
Oct 02 12:19:40 compute-0 nova_compute[192079]: 2025-10-02 12:19:40.142 2 DEBUG nova.network.neutron [-] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:19:40 compute-0 nova_compute[192079]: 2025-10-02 12:19:40.166 2 INFO nova.compute.manager [-] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Took 0.91 seconds to deallocate network for instance.
Oct 02 12:19:40 compute-0 nova_compute[192079]: 2025-10-02 12:19:40.248 2 DEBUG nova.compute.manager [req-11add0f4-38c4-4829-abc8-5ef08d001964 req-06750c20-3b2c-4069-998b-630fa1162731 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Received event network-vif-deleted-f78bfe61-0ef6-4167-a26d-b3ce52b05bba external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:19:40 compute-0 nova_compute[192079]: 2025-10-02 12:19:40.249 2 DEBUG oslo_concurrency.lockutils [None req-1dd7c744-1ae5-4786-b01b-4ca8dca5896e af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:40 compute-0 nova_compute[192079]: 2025-10-02 12:19:40.249 2 DEBUG oslo_concurrency.lockutils [None req-1dd7c744-1ae5-4786-b01b-4ca8dca5896e af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:40 compute-0 nova_compute[192079]: 2025-10-02 12:19:40.305 2 DEBUG nova.compute.provider_tree [None req-1dd7c744-1ae5-4786-b01b-4ca8dca5896e af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:19:40 compute-0 nova_compute[192079]: 2025-10-02 12:19:40.319 2 DEBUG nova.scheduler.client.report [None req-1dd7c744-1ae5-4786-b01b-4ca8dca5896e af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:19:40 compute-0 nova_compute[192079]: 2025-10-02 12:19:40.356 2 DEBUG oslo_concurrency.lockutils [None req-1dd7c744-1ae5-4786-b01b-4ca8dca5896e af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.107s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:40 compute-0 nova_compute[192079]: 2025-10-02 12:19:40.388 2 INFO nova.scheduler.client.report [None req-1dd7c744-1ae5-4786-b01b-4ca8dca5896e af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Deleted allocations for instance 7f331800-f718-4dc3-b740-1a9574a65fb1
Oct 02 12:19:40 compute-0 nova_compute[192079]: 2025-10-02 12:19:40.475 2 DEBUG oslo_concurrency.lockutils [None req-1dd7c744-1ae5-4786-b01b-4ca8dca5896e af12a0d863d849fc869fc92f700cedde 99286619ca844589aacc016f9c8f009c - - default default] Lock "7f331800-f718-4dc3-b740-1a9574a65fb1" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.649s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:41 compute-0 nova_compute[192079]: 2025-10-02 12:19:41.353 2 DEBUG nova.compute.manager [req-f944e55c-19a2-46ed-8d10-14b5ad62e448 req-3a04bab7-a958-410b-b944-ed655c83427f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Received event network-vif-plugged-f78bfe61-0ef6-4167-a26d-b3ce52b05bba external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:19:41 compute-0 nova_compute[192079]: 2025-10-02 12:19:41.354 2 DEBUG oslo_concurrency.lockutils [req-f944e55c-19a2-46ed-8d10-14b5ad62e448 req-3a04bab7-a958-410b-b944-ed655c83427f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "7f331800-f718-4dc3-b740-1a9574a65fb1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:41 compute-0 nova_compute[192079]: 2025-10-02 12:19:41.354 2 DEBUG oslo_concurrency.lockutils [req-f944e55c-19a2-46ed-8d10-14b5ad62e448 req-3a04bab7-a958-410b-b944-ed655c83427f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "7f331800-f718-4dc3-b740-1a9574a65fb1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:41 compute-0 nova_compute[192079]: 2025-10-02 12:19:41.354 2 DEBUG oslo_concurrency.lockutils [req-f944e55c-19a2-46ed-8d10-14b5ad62e448 req-3a04bab7-a958-410b-b944-ed655c83427f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "7f331800-f718-4dc3-b740-1a9574a65fb1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:41 compute-0 nova_compute[192079]: 2025-10-02 12:19:41.354 2 DEBUG nova.compute.manager [req-f944e55c-19a2-46ed-8d10-14b5ad62e448 req-3a04bab7-a958-410b-b944-ed655c83427f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] No waiting events found dispatching network-vif-plugged-f78bfe61-0ef6-4167-a26d-b3ce52b05bba pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:19:41 compute-0 nova_compute[192079]: 2025-10-02 12:19:41.354 2 WARNING nova.compute.manager [req-f944e55c-19a2-46ed-8d10-14b5ad62e448 req-3a04bab7-a958-410b-b944-ed655c83427f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Received unexpected event network-vif-plugged-f78bfe61-0ef6-4167-a26d-b3ce52b05bba for instance with vm_state deleted and task_state None.
Oct 02 12:19:41 compute-0 nova_compute[192079]: 2025-10-02 12:19:41.524 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:42 compute-0 nova_compute[192079]: 2025-10-02 12:19:42.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:19:43 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:43.317 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '24'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:19:43 compute-0 nova_compute[192079]: 2025-10-02 12:19:43.715 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:44 compute-0 nova_compute[192079]: 2025-10-02 12:19:44.175 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:44 compute-0 nova_compute[192079]: 2025-10-02 12:19:44.701 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._run_pending_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:19:44 compute-0 nova_compute[192079]: 2025-10-02 12:19:44.701 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Cleaning up deleted instances _run_pending_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:11145
Oct 02 12:19:44 compute-0 nova_compute[192079]: 2025-10-02 12:19:44.717 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] There are 0 instances to clean _run_pending_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:11154
Oct 02 12:19:46 compute-0 nova_compute[192079]: 2025-10-02 12:19:46.526 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:48 compute-0 podman[235155]: 2025-10-02 12:19:48.174394377 +0000 UTC m=+0.072776105 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=ovn_metadata_agent, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:19:48 compute-0 podman[235157]: 2025-10-02 12:19:48.185547431 +0000 UTC m=+0.086329075 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:19:48 compute-0 podman[235156]: 2025-10-02 12:19:48.216371071 +0000 UTC m=+0.118066599 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, tcib_managed=true, config_id=ovn_controller, container_name=ovn_controller, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:19:49 compute-0 nova_compute[192079]: 2025-10-02 12:19:49.207 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:50 compute-0 nova_compute[192079]: 2025-10-02 12:19:50.941 2 DEBUG oslo_concurrency.lockutils [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Acquiring lock "433c3cd4-9f5c-4903-9257-26f4f2c31a78" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:50 compute-0 nova_compute[192079]: 2025-10-02 12:19:50.942 2 DEBUG oslo_concurrency.lockutils [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Lock "433c3cd4-9f5c-4903-9257-26f4f2c31a78" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:50 compute-0 nova_compute[192079]: 2025-10-02 12:19:50.962 2 DEBUG oslo_concurrency.lockutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Acquiring lock "35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:50 compute-0 nova_compute[192079]: 2025-10-02 12:19:50.962 2 DEBUG oslo_concurrency.lockutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Lock "35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:50 compute-0 nova_compute[192079]: 2025-10-02 12:19:50.964 2 DEBUG nova.compute.manager [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:19:50 compute-0 nova_compute[192079]: 2025-10-02 12:19:50.993 2 DEBUG nova.compute.manager [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.195 2 DEBUG oslo_concurrency.lockutils [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.196 2 DEBUG oslo_concurrency.lockutils [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.220 2 DEBUG nova.virt.hardware [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.220 2 INFO nova.compute.claims [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.276 2 DEBUG oslo_concurrency.lockutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.366 2 DEBUG nova.compute.provider_tree [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.387 2 DEBUG nova.scheduler.client.report [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.415 2 DEBUG oslo_concurrency.lockutils [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.219s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.416 2 DEBUG nova.compute.manager [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.419 2 DEBUG oslo_concurrency.lockutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.143s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.427 2 DEBUG nova.virt.hardware [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.428 2 INFO nova.compute.claims [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.522 2 DEBUG nova.compute.manager [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Not allocating networking since 'none' was specified. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1948
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.528 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.540 2 INFO nova.virt.libvirt.driver [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.559 2 DEBUG nova.compute.manager [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.642 2 DEBUG nova.compute.provider_tree [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.662 2 DEBUG nova.scheduler.client.report [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.692 2 DEBUG oslo_concurrency.lockutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.274s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.693 2 DEBUG nova.compute.manager [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.717 2 DEBUG nova.compute.manager [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.718 2 DEBUG nova.virt.libvirt.driver [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.719 2 INFO nova.virt.libvirt.driver [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Creating image(s)
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.719 2 DEBUG oslo_concurrency.lockutils [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Acquiring lock "/var/lib/nova/instances/433c3cd4-9f5c-4903-9257-26f4f2c31a78/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.720 2 DEBUG oslo_concurrency.lockutils [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Lock "/var/lib/nova/instances/433c3cd4-9f5c-4903-9257-26f4f2c31a78/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.720 2 DEBUG oslo_concurrency.lockutils [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Lock "/var/lib/nova/instances/433c3cd4-9f5c-4903-9257-26f4f2c31a78/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.734 2 DEBUG oslo_concurrency.processutils [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.775 2 DEBUG nova.compute.manager [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.776 2 DEBUG nova.network.neutron [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.790 2 DEBUG oslo_concurrency.processutils [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.790 2 DEBUG oslo_concurrency.lockutils [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.791 2 DEBUG oslo_concurrency.lockutils [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.805 2 DEBUG oslo_concurrency.processutils [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.829 2 INFO nova.virt.libvirt.driver [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.861 2 DEBUG nova.compute.manager [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.864 2 DEBUG oslo_concurrency.processutils [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.059s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.865 2 DEBUG oslo_concurrency.processutils [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/433c3cd4-9f5c-4903-9257-26f4f2c31a78/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.910 2 DEBUG oslo_concurrency.processutils [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/433c3cd4-9f5c-4903-9257-26f4f2c31a78/disk 1073741824" returned: 0 in 0.046s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.911 2 DEBUG oslo_concurrency.lockutils [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.120s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:51 compute-0 nova_compute[192079]: 2025-10-02 12:19:51.912 2 DEBUG oslo_concurrency.processutils [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.001 2 DEBUG oslo_concurrency.processutils [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.089s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.003 2 DEBUG nova.virt.disk.api [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Checking if we can resize image /var/lib/nova/instances/433c3cd4-9f5c-4903-9257-26f4f2c31a78/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.003 2 DEBUG oslo_concurrency.processutils [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/433c3cd4-9f5c-4903-9257-26f4f2c31a78/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.047 2 DEBUG nova.policy [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.074 2 DEBUG oslo_concurrency.processutils [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/433c3cd4-9f5c-4903-9257-26f4f2c31a78/disk --force-share --output=json" returned: 0 in 0.071s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.075 2 DEBUG nova.virt.disk.api [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Cannot resize image /var/lib/nova/instances/433c3cd4-9f5c-4903-9257-26f4f2c31a78/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.076 2 DEBUG nova.objects.instance [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Lazy-loading 'migration_context' on Instance uuid 433c3cd4-9f5c-4903-9257-26f4f2c31a78 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.098 2 DEBUG nova.virt.libvirt.driver [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.098 2 DEBUG nova.virt.libvirt.driver [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Ensure instance console log exists: /var/lib/nova/instances/433c3cd4-9f5c-4903-9257-26f4f2c31a78/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.099 2 DEBUG oslo_concurrency.lockutils [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.099 2 DEBUG oslo_concurrency.lockutils [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.100 2 DEBUG oslo_concurrency.lockutils [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.103 2 DEBUG nova.virt.libvirt.driver [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Start _get_guest_xml network_info=[] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.110 2 WARNING nova.virt.libvirt.driver [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.119 2 DEBUG nova.virt.libvirt.host [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.120 2 DEBUG nova.virt.libvirt.host [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.130 2 DEBUG nova.virt.libvirt.host [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.131 2 DEBUG nova.virt.libvirt.host [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.133 2 DEBUG nova.virt.libvirt.driver [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.133 2 DEBUG nova.virt.hardware [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.133 2 DEBUG nova.virt.hardware [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.134 2 DEBUG nova.virt.hardware [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.134 2 DEBUG nova.virt.hardware [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.134 2 DEBUG nova.virt.hardware [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.135 2 DEBUG nova.virt.hardware [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.135 2 DEBUG nova.virt.hardware [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.135 2 DEBUG nova.virt.hardware [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.136 2 DEBUG nova.virt.hardware [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.136 2 DEBUG nova.virt.hardware [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.136 2 DEBUG nova.virt.hardware [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.141 2 DEBUG nova.objects.instance [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Lazy-loading 'pci_devices' on Instance uuid 433c3cd4-9f5c-4903-9257-26f4f2c31a78 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.344 2 DEBUG nova.virt.libvirt.driver [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:19:52 compute-0 nova_compute[192079]:   <uuid>433c3cd4-9f5c-4903-9257-26f4f2c31a78</uuid>
Oct 02 12:19:52 compute-0 nova_compute[192079]:   <name>instance-00000064</name>
Oct 02 12:19:52 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:19:52 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:19:52 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:19:52 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:       <nova:name>tempest-ServerShowV247Test-server-1340135291</nova:name>
Oct 02 12:19:52 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:19:52</nova:creationTime>
Oct 02 12:19:52 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:19:52 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:19:52 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:19:52 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:19:52 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:19:52 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:19:52 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:19:52 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:19:52 compute-0 nova_compute[192079]:         <nova:user uuid="e7efd391ff484c8bb99570302eacb8f4">tempest-ServerShowV247Test-1215164495-project-member</nova:user>
Oct 02 12:19:52 compute-0 nova_compute[192079]:         <nova:project uuid="12599487474040b285ccdd017a8c01b5">tempest-ServerShowV247Test-1215164495</nova:project>
Oct 02 12:19:52 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:19:52 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:       <nova:ports/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:19:52 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:19:52 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <system>
Oct 02 12:19:52 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:19:52 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:19:52 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:19:52 compute-0 nova_compute[192079]:       <entry name="serial">433c3cd4-9f5c-4903-9257-26f4f2c31a78</entry>
Oct 02 12:19:52 compute-0 nova_compute[192079]:       <entry name="uuid">433c3cd4-9f5c-4903-9257-26f4f2c31a78</entry>
Oct 02 12:19:52 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     </system>
Oct 02 12:19:52 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:19:52 compute-0 nova_compute[192079]:   <os>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:   </os>
Oct 02 12:19:52 compute-0 nova_compute[192079]:   <features>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:   </features>
Oct 02 12:19:52 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:19:52 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:19:52 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:19:52 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/433c3cd4-9f5c-4903-9257-26f4f2c31a78/disk"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:19:52 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/433c3cd4-9f5c-4903-9257-26f4f2c31a78/disk.config"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:19:52 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/433c3cd4-9f5c-4903-9257-26f4f2c31a78/console.log" append="off"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <video>
Oct 02 12:19:52 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     </video>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:19:52 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:19:52 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:19:52 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:19:52 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:19:52 compute-0 nova_compute[192079]: </domain>
Oct 02 12:19:52 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.480 2 DEBUG nova.compute.manager [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.481 2 DEBUG nova.virt.libvirt.driver [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.482 2 INFO nova.virt.libvirt.driver [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Creating image(s)
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.482 2 DEBUG oslo_concurrency.lockutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Acquiring lock "/var/lib/nova/instances/35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.482 2 DEBUG oslo_concurrency.lockutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Lock "/var/lib/nova/instances/35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.483 2 DEBUG oslo_concurrency.lockutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Lock "/var/lib/nova/instances/35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.495 2 DEBUG oslo_concurrency.processutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.528 2 DEBUG nova.virt.libvirt.driver [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.528 2 DEBUG nova.virt.libvirt.driver [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.529 2 INFO nova.virt.libvirt.driver [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Using config drive
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.584 2 DEBUG oslo_concurrency.processutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.088s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.586 2 DEBUG oslo_concurrency.lockutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.587 2 DEBUG oslo_concurrency.lockutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.605 2 DEBUG oslo_concurrency.processutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.687 2 DEBUG oslo_concurrency.processutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.081s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.688 2 DEBUG oslo_concurrency.processutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.721 2 DEBUG oslo_concurrency.processutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk 1073741824" returned: 0 in 0.033s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.722 2 DEBUG oslo_concurrency.lockutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.136s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.723 2 DEBUG oslo_concurrency.processutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.778 2 DEBUG oslo_concurrency.processutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.780 2 DEBUG nova.virt.disk.api [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Checking if we can resize image /var/lib/nova/instances/35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.781 2 DEBUG oslo_concurrency.processutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.837 2 DEBUG oslo_concurrency.processutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.839 2 DEBUG nova.virt.disk.api [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Cannot resize image /var/lib/nova/instances/35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.840 2 DEBUG nova.objects.instance [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Lazy-loading 'migration_context' on Instance uuid 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.850 2 INFO nova.virt.libvirt.driver [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Creating config drive at /var/lib/nova/instances/433c3cd4-9f5c-4903-9257-26f4f2c31a78/disk.config
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.860 2 DEBUG oslo_concurrency.processutils [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/433c3cd4-9f5c-4903-9257-26f4f2c31a78/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpydr0p46k execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.901 2 DEBUG nova.virt.libvirt.driver [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.902 2 DEBUG nova.virt.libvirt.driver [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Ensure instance console log exists: /var/lib/nova/instances/35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.903 2 DEBUG oslo_concurrency.lockutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.904 2 DEBUG oslo_concurrency.lockutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.904 2 DEBUG oslo_concurrency.lockutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:52 compute-0 nova_compute[192079]: 2025-10-02 12:19:52.988 2 DEBUG oslo_concurrency.processutils [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/433c3cd4-9f5c-4903-9257-26f4f2c31a78/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpydr0p46k" returned: 0 in 0.128s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:19:53 compute-0 nova_compute[192079]: 2025-10-02 12:19:53.045 2 DEBUG nova.network.neutron [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Successfully created port: 0c328734-ebc6-47bc-b603-2e4af1cae573 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:19:53 compute-0 systemd-machined[152150]: New machine qemu-46-instance-00000064.
Oct 02 12:19:53 compute-0 systemd[1]: Started Virtual Machine qemu-46-instance-00000064.
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.133 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759407579.1325455, 7f331800-f718-4dc3-b740-1a9574a65fb1 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.136 2 INFO nova.compute.manager [-] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] VM Stopped (Lifecycle Event)
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.152 2 DEBUG nova.compute.manager [None req-786dbcab-0eda-475c-8ecb-ea4fc79c3d75 - - - - - -] [instance: 7f331800-f718-4dc3-b740-1a9574a65fb1] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.210 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.311 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407594.3103523, 433c3cd4-9f5c-4903-9257-26f4f2c31a78 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.311 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] VM Resumed (Lifecycle Event)
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.313 2 DEBUG nova.compute.manager [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.313 2 DEBUG nova.virt.libvirt.driver [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.316 2 INFO nova.virt.libvirt.driver [-] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Instance spawned successfully.
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.316 2 DEBUG nova.virt.libvirt.driver [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.338 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.344 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.346 2 DEBUG nova.virt.libvirt.driver [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.346 2 DEBUG nova.virt.libvirt.driver [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.347 2 DEBUG nova.virt.libvirt.driver [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.347 2 DEBUG nova.virt.libvirt.driver [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.348 2 DEBUG nova.virt.libvirt.driver [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.348 2 DEBUG nova.virt.libvirt.driver [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.398 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.399 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407594.310834, 433c3cd4-9f5c-4903-9257-26f4f2c31a78 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.399 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] VM Started (Lifecycle Event)
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.425 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.428 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.451 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.458 2 INFO nova.compute.manager [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Took 2.74 seconds to spawn the instance on the hypervisor.
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.458 2 DEBUG nova.compute.manager [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.468 2 DEBUG nova.network.neutron [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Successfully updated port: 0c328734-ebc6-47bc-b603-2e4af1cae573 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.516 2 DEBUG oslo_concurrency.lockutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Acquiring lock "refresh_cache-35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.516 2 DEBUG oslo_concurrency.lockutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Acquired lock "refresh_cache-35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.517 2 DEBUG nova.network.neutron [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.563 2 INFO nova.compute.manager [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Took 3.52 seconds to build instance.
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.590 2 DEBUG oslo_concurrency.lockutils [None req-d6734b3b-fad8-45eb-be3f-f35ef523f7ff e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Lock "433c3cd4-9f5c-4903-9257-26f4f2c31a78" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 3.648s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:54 compute-0 nova_compute[192079]: 2025-10-02 12:19:54.911 2 DEBUG nova.network.neutron [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.035 2 DEBUG nova.compute.manager [req-6d0b6f53-c9a9-4016-8059-bc84617dc50b req-b606c63f-32ad-4302-905b-219037dc96b3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Received event network-changed-0c328734-ebc6-47bc-b603-2e4af1cae573 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.036 2 DEBUG nova.compute.manager [req-6d0b6f53-c9a9-4016-8059-bc84617dc50b req-b606c63f-32ad-4302-905b-219037dc96b3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Refreshing instance network info cache due to event network-changed-0c328734-ebc6-47bc-b603-2e4af1cae573. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.036 2 DEBUG oslo_concurrency.lockutils [req-6d0b6f53-c9a9-4016-8059-bc84617dc50b req-b606c63f-32ad-4302-905b-219037dc96b3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:19:55 compute-0 podman[235281]: 2025-10-02 12:19:55.142228415 +0000 UTC m=+0.057071806 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=edpm, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, container_name=ceilometer_agent_compute, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, 
org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_managed=true)
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.769 2 DEBUG nova.network.neutron [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Updating instance_info_cache with network_info: [{"id": "0c328734-ebc6-47bc-b603-2e4af1cae573", "address": "fa:16:3e:ef:e3:79", "network": {"id": "8f494075-66bf-4ce0-a765-98fd91c31199", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1553125421-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f0c8c8a8631b4721beed577a99f8bdb7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap0c328734-eb", "ovs_interfaceid": "0c328734-ebc6-47bc-b603-2e4af1cae573", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.790 2 DEBUG oslo_concurrency.lockutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Releasing lock "refresh_cache-35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.791 2 DEBUG nova.compute.manager [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Instance network_info: |[{"id": "0c328734-ebc6-47bc-b603-2e4af1cae573", "address": "fa:16:3e:ef:e3:79", "network": {"id": "8f494075-66bf-4ce0-a765-98fd91c31199", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1553125421-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f0c8c8a8631b4721beed577a99f8bdb7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap0c328734-eb", "ovs_interfaceid": "0c328734-ebc6-47bc-b603-2e4af1cae573", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.791 2 DEBUG oslo_concurrency.lockutils [req-6d0b6f53-c9a9-4016-8059-bc84617dc50b req-b606c63f-32ad-4302-905b-219037dc96b3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.792 2 DEBUG nova.network.neutron [req-6d0b6f53-c9a9-4016-8059-bc84617dc50b req-b606c63f-32ad-4302-905b-219037dc96b3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Refreshing network info cache for port 0c328734-ebc6-47bc-b603-2e4af1cae573 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.795 2 DEBUG nova.virt.libvirt.driver [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Start _get_guest_xml network_info=[{"id": "0c328734-ebc6-47bc-b603-2e4af1cae573", "address": "fa:16:3e:ef:e3:79", "network": {"id": "8f494075-66bf-4ce0-a765-98fd91c31199", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1553125421-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f0c8c8a8631b4721beed577a99f8bdb7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap0c328734-eb", "ovs_interfaceid": "0c328734-ebc6-47bc-b603-2e4af1cae573", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.799 2 WARNING nova.virt.libvirt.driver [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.803 2 DEBUG nova.virt.libvirt.host [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.804 2 DEBUG nova.virt.libvirt.host [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.809 2 DEBUG nova.virt.libvirt.host [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.810 2 DEBUG nova.virt.libvirt.host [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.811 2 DEBUG nova.virt.libvirt.driver [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.811 2 DEBUG nova.virt.hardware [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.812 2 DEBUG nova.virt.hardware [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.812 2 DEBUG nova.virt.hardware [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.812 2 DEBUG nova.virt.hardware [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.812 2 DEBUG nova.virt.hardware [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.813 2 DEBUG nova.virt.hardware [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.813 2 DEBUG nova.virt.hardware [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.813 2 DEBUG nova.virt.hardware [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.814 2 DEBUG nova.virt.hardware [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.814 2 DEBUG nova.virt.hardware [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.814 2 DEBUG nova.virt.hardware [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.817 2 DEBUG nova.virt.libvirt.vif [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:19:49Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ServersNegativeTestJSON-server-1170653470',display_name='tempest-ServersNegativeTestJSON-server-1170653470',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serversnegativetestjson-server-1170653470',id=101,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='f0c8c8a8631b4721beed577a99f8bdb7',ramdisk_id='',reservation_id='r-sfsoaqzs',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServersNegativeTestJSON-114354241',owner_user_name='tempest-ServersNegativeTest
JSON-114354241-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:19:51Z,user_data=None,user_id='a803afe9939346088252c3b944f124f2',uuid=35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "0c328734-ebc6-47bc-b603-2e4af1cae573", "address": "fa:16:3e:ef:e3:79", "network": {"id": "8f494075-66bf-4ce0-a765-98fd91c31199", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1553125421-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f0c8c8a8631b4721beed577a99f8bdb7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap0c328734-eb", "ovs_interfaceid": "0c328734-ebc6-47bc-b603-2e4af1cae573", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.818 2 DEBUG nova.network.os_vif_util [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Converting VIF {"id": "0c328734-ebc6-47bc-b603-2e4af1cae573", "address": "fa:16:3e:ef:e3:79", "network": {"id": "8f494075-66bf-4ce0-a765-98fd91c31199", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1553125421-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f0c8c8a8631b4721beed577a99f8bdb7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap0c328734-eb", "ovs_interfaceid": "0c328734-ebc6-47bc-b603-2e4af1cae573", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.818 2 DEBUG nova.network.os_vif_util [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:ef:e3:79,bridge_name='br-int',has_traffic_filtering=True,id=0c328734-ebc6-47bc-b603-2e4af1cae573,network=Network(8f494075-66bf-4ce0-a765-98fd91c31199),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap0c328734-eb') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.820 2 DEBUG nova.objects.instance [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Lazy-loading 'pci_devices' on Instance uuid 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.846 2 DEBUG nova.virt.libvirt.driver [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:19:55 compute-0 nova_compute[192079]:   <uuid>35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c</uuid>
Oct 02 12:19:55 compute-0 nova_compute[192079]:   <name>instance-00000065</name>
Oct 02 12:19:55 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:19:55 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:19:55 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:19:55 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:       <nova:name>tempest-ServersNegativeTestJSON-server-1170653470</nova:name>
Oct 02 12:19:55 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:19:55</nova:creationTime>
Oct 02 12:19:55 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:19:55 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:19:55 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:19:55 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:19:55 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:19:55 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:19:55 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:19:55 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:19:55 compute-0 nova_compute[192079]:         <nova:user uuid="a803afe9939346088252c3b944f124f2">tempest-ServersNegativeTestJSON-114354241-project-member</nova:user>
Oct 02 12:19:55 compute-0 nova_compute[192079]:         <nova:project uuid="f0c8c8a8631b4721beed577a99f8bdb7">tempest-ServersNegativeTestJSON-114354241</nova:project>
Oct 02 12:19:55 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:19:55 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:19:55 compute-0 nova_compute[192079]:         <nova:port uuid="0c328734-ebc6-47bc-b603-2e4af1cae573">
Oct 02 12:19:55 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.10" ipVersion="4"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:19:55 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:19:55 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:19:55 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <system>
Oct 02 12:19:55 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:19:55 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:19:55 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:19:55 compute-0 nova_compute[192079]:       <entry name="serial">35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c</entry>
Oct 02 12:19:55 compute-0 nova_compute[192079]:       <entry name="uuid">35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c</entry>
Oct 02 12:19:55 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     </system>
Oct 02 12:19:55 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:19:55 compute-0 nova_compute[192079]:   <os>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:   </os>
Oct 02 12:19:55 compute-0 nova_compute[192079]:   <features>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:   </features>
Oct 02 12:19:55 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:19:55 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:19:55 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:19:55 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:19:55 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk.config"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:19:55 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:ef:e3:79"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:       <target dev="tap0c328734-eb"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:19:55 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/console.log" append="off"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <video>
Oct 02 12:19:55 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     </video>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:19:55 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:19:55 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:19:55 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:19:55 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:19:55 compute-0 nova_compute[192079]: </domain>
Oct 02 12:19:55 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.851 2 DEBUG nova.compute.manager [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Preparing to wait for external event network-vif-plugged-0c328734-ebc6-47bc-b603-2e4af1cae573 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.852 2 DEBUG oslo_concurrency.lockutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Acquiring lock "35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.852 2 DEBUG oslo_concurrency.lockutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Lock "35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.852 2 DEBUG oslo_concurrency.lockutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Lock "35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.853 2 DEBUG nova.virt.libvirt.vif [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:19:49Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ServersNegativeTestJSON-server-1170653470',display_name='tempest-ServersNegativeTestJSON-server-1170653470',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serversnegativetestjson-server-1170653470',id=101,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='f0c8c8a8631b4721beed577a99f8bdb7',ramdisk_id='',reservation_id='r-sfsoaqzs',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServersNegativeTestJSON-114354241',owner_user_name='tempest-ServersNe
gativeTestJSON-114354241-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:19:51Z,user_data=None,user_id='a803afe9939346088252c3b944f124f2',uuid=35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "0c328734-ebc6-47bc-b603-2e4af1cae573", "address": "fa:16:3e:ef:e3:79", "network": {"id": "8f494075-66bf-4ce0-a765-98fd91c31199", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1553125421-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f0c8c8a8631b4721beed577a99f8bdb7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap0c328734-eb", "ovs_interfaceid": "0c328734-ebc6-47bc-b603-2e4af1cae573", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.853 2 DEBUG nova.network.os_vif_util [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Converting VIF {"id": "0c328734-ebc6-47bc-b603-2e4af1cae573", "address": "fa:16:3e:ef:e3:79", "network": {"id": "8f494075-66bf-4ce0-a765-98fd91c31199", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1553125421-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f0c8c8a8631b4721beed577a99f8bdb7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap0c328734-eb", "ovs_interfaceid": "0c328734-ebc6-47bc-b603-2e4af1cae573", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.854 2 DEBUG nova.network.os_vif_util [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:ef:e3:79,bridge_name='br-int',has_traffic_filtering=True,id=0c328734-ebc6-47bc-b603-2e4af1cae573,network=Network(8f494075-66bf-4ce0-a765-98fd91c31199),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap0c328734-eb') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.854 2 DEBUG os_vif [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:ef:e3:79,bridge_name='br-int',has_traffic_filtering=True,id=0c328734-ebc6-47bc-b603-2e4af1cae573,network=Network(8f494075-66bf-4ce0-a765-98fd91c31199),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap0c328734-eb') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.855 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.856 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.856 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.859 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.859 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap0c328734-eb, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.859 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap0c328734-eb, col_values=(('external_ids', {'iface-id': '0c328734-ebc6-47bc-b603-2e4af1cae573', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:ef:e3:79', 'vm-uuid': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.861 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:55 compute-0 NetworkManager[51160]: <info>  [1759407595.8628] manager: (tap0c328734-eb): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/169)
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.865 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.867 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.868 2 INFO os_vif [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:ef:e3:79,bridge_name='br-int',has_traffic_filtering=True,id=0c328734-ebc6-47bc-b603-2e4af1cae573,network=Network(8f494075-66bf-4ce0-a765-98fd91c31199),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap0c328734-eb')
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.931 2 DEBUG nova.virt.libvirt.driver [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.932 2 DEBUG nova.virt.libvirt.driver [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.932 2 DEBUG nova.virt.libvirt.driver [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] No VIF found with MAC fa:16:3e:ef:e3:79, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:19:55 compute-0 nova_compute[192079]: 2025-10-02 12:19:55.932 2 INFO nova.virt.libvirt.driver [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Using config drive
Oct 02 12:19:56 compute-0 nova_compute[192079]: 2025-10-02 12:19:56.530 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:56 compute-0 nova_compute[192079]: 2025-10-02 12:19:56.548 2 INFO nova.virt.libvirt.driver [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Creating config drive at /var/lib/nova/instances/35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk.config
Oct 02 12:19:56 compute-0 nova_compute[192079]: 2025-10-02 12:19:56.553 2 DEBUG oslo_concurrency.processutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp08ylrdlj execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:19:56 compute-0 nova_compute[192079]: 2025-10-02 12:19:56.678 2 DEBUG oslo_concurrency.processutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp08ylrdlj" returned: 0 in 0.125s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:19:56 compute-0 kernel: tap0c328734-eb: entered promiscuous mode
Oct 02 12:19:56 compute-0 NetworkManager[51160]: <info>  [1759407596.7397] manager: (tap0c328734-eb): new Tun device (/org/freedesktop/NetworkManager/Devices/170)
Oct 02 12:19:56 compute-0 systemd-udevd[235279]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:19:56 compute-0 ovn_controller[94336]: 2025-10-02T12:19:56Z|00336|binding|INFO|Claiming lport 0c328734-ebc6-47bc-b603-2e4af1cae573 for this chassis.
Oct 02 12:19:56 compute-0 ovn_controller[94336]: 2025-10-02T12:19:56Z|00337|binding|INFO|0c328734-ebc6-47bc-b603-2e4af1cae573: Claiming fa:16:3e:ef:e3:79 10.100.0.10
Oct 02 12:19:56 compute-0 nova_compute[192079]: 2025-10-02 12:19:56.748 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:56 compute-0 nova_compute[192079]: 2025-10-02 12:19:56.751 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:56 compute-0 NetworkManager[51160]: <info>  [1759407596.7556] device (tap0c328734-eb): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:19:56 compute-0 NetworkManager[51160]: <info>  [1759407596.7565] device (tap0c328734-eb): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:19:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:56.759 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:ef:e3:79 10.100.0.10'], port_security=['fa:16:3e:ef:e3:79 10.100.0.10'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.10/28', 'neutron:device_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-8f494075-66bf-4ce0-a765-98fd91c31199', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'neutron:revision_number': '2', 'neutron:security_group_ids': 'eb030dcc-72ea-4850-916a-e1df7c4d9a87', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=e43b5827-85bf-4b83-b921-ec45e12f1f2e, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=0c328734-ebc6-47bc-b603-2e4af1cae573) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:19:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:56.760 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 0c328734-ebc6-47bc-b603-2e4af1cae573 in datapath 8f494075-66bf-4ce0-a765-98fd91c31199 bound to our chassis
Oct 02 12:19:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:56.761 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 8f494075-66bf-4ce0-a765-98fd91c31199
Oct 02 12:19:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:56.773 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[af1ae89a-858f-4b9c-b491-b921dbd0718b]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:56.774 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap8f494075-61 in ovnmeta-8f494075-66bf-4ce0-a765-98fd91c31199 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:19:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:56.776 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap8f494075-60 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:19:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:56.776 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[da9f5fc1-38d0-4cc9-a3e4-d2882618e4cd]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:56.777 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[788b9aa3-9a61-4e51-add1-1db4ff850c89]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:56.787 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[671e3320-4638-4b9f-bed3-f23e633a5e97]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:56 compute-0 systemd-machined[152150]: New machine qemu-47-instance-00000065.
Oct 02 12:19:56 compute-0 nova_compute[192079]: 2025-10-02 12:19:56.805 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:56 compute-0 ovn_controller[94336]: 2025-10-02T12:19:56Z|00338|binding|INFO|Setting lport 0c328734-ebc6-47bc-b603-2e4af1cae573 ovn-installed in OVS
Oct 02 12:19:56 compute-0 ovn_controller[94336]: 2025-10-02T12:19:56Z|00339|binding|INFO|Setting lport 0c328734-ebc6-47bc-b603-2e4af1cae573 up in Southbound
Oct 02 12:19:56 compute-0 nova_compute[192079]: 2025-10-02 12:19:56.809 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:56 compute-0 systemd[1]: Started Virtual Machine qemu-47-instance-00000065.
Oct 02 12:19:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:56.814 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1732f91e-9467-43db-b3b1-6e4b28dc1779]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:56.842 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[a895e864-63fd-43a2-a846-246ae4cbda78]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:56.847 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4bcbf691-9c39-4b64-8471-b6e4544b7f6f]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:56 compute-0 NetworkManager[51160]: <info>  [1759407596.8487] manager: (tap8f494075-60): new Veth device (/org/freedesktop/NetworkManager/Devices/171)
Oct 02 12:19:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:56.896 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[fdb0285b-08df-4be8-a582-9a72adfbb3df]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:56.901 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[faef1bcd-e84b-48f9-9215-df2dfe422c02]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:56 compute-0 systemd-udevd[235336]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:19:56 compute-0 NetworkManager[51160]: <info>  [1759407596.9282] device (tap8f494075-60): carrier: link connected
Oct 02 12:19:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:56.935 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[2a4d34f8-0db4-4d94-be7f-b50498f84845]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:56.955 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8fe6d3e7-9350-4898-99d7-be532cd2d9b2]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap8f494075-61'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:9b:9a:65'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 110], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 559455, 'reachable_time': 30604, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 235355, 'error': None, 'target': 'ovnmeta-8f494075-66bf-4ce0-a765-98fd91c31199', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:56.973 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d66f32f7-ebfe-437e-940a-c0d01b219bca]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe9b:9a65'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 559455, 'tstamp': 559455}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 235356, 'error': None, 'target': 'ovnmeta-8f494075-66bf-4ce0-a765-98fd91c31199', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:56.989 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[42909452-1871-4ecc-a167-9493c20b2479]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap8f494075-61'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:9b:9a:65'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 110], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 559455, 'reachable_time': 30604, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 235357, 'error': None, 'target': 'ovnmeta-8f494075-66bf-4ce0-a765-98fd91c31199', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:57.020 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1784267f-c917-43b1-a316-e635ac3718bb]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:57.078 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[cce8cdaa-73cd-4daa-bd91-cb05713ae550]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:57.079 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap8f494075-60, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:57.079 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:57.079 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap8f494075-60, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.081 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:57 compute-0 NetworkManager[51160]: <info>  [1759407597.0821] manager: (tap8f494075-60): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/172)
Oct 02 12:19:57 compute-0 kernel: tap8f494075-60: entered promiscuous mode
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.083 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:57.084 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap8f494075-60, col_values=(('external_ids', {'iface-id': 'a5eb523a-b004-42b7-a3f6-24b2514f40bf'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.085 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:57 compute-0 ovn_controller[94336]: 2025-10-02T12:19:57Z|00340|binding|INFO|Releasing lport a5eb523a-b004-42b7-a3f6-24b2514f40bf from this chassis (sb_readonly=0)
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.103 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:57.104 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/8f494075-66bf-4ce0-a765-98fd91c31199.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/8f494075-66bf-4ce0-a765-98fd91c31199.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:57.107 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0586d0bb-9b58-43b5-b959-f8f1719e2d82]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:57.108 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-8f494075-66bf-4ce0-a765-98fd91c31199
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/8f494075-66bf-4ce0-a765-98fd91c31199.pid.haproxy
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 8f494075-66bf-4ce0-a765-98fd91c31199
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:19:57 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:19:57.110 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-8f494075-66bf-4ce0-a765-98fd91c31199', 'env', 'PROCESS_TAG=haproxy-8f494075-66bf-4ce0-a765-98fd91c31199', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/8f494075-66bf-4ce0-a765-98fd91c31199.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:19:57 compute-0 podman[235396]: 2025-10-02 12:19:57.524843681 +0000 UTC m=+0.052517963 container create 67e0afa6c28df6ebabd7c7cd1f3fc31079a438c6bc5878aeda19fafd10396845 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-8f494075-66bf-4ce0-a765-98fd91c31199, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001)
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.569 2 DEBUG nova.compute.manager [req-7c289711-e8e1-4046-aad8-4a1f547814c2 req-4d72d72a-e5fa-4899-8b75-862aa484aea8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Received event network-vif-plugged-0c328734-ebc6-47bc-b603-2e4af1cae573 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.570 2 DEBUG oslo_concurrency.lockutils [req-7c289711-e8e1-4046-aad8-4a1f547814c2 req-4d72d72a-e5fa-4899-8b75-862aa484aea8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.570 2 DEBUG oslo_concurrency.lockutils [req-7c289711-e8e1-4046-aad8-4a1f547814c2 req-4d72d72a-e5fa-4899-8b75-862aa484aea8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.570 2 DEBUG oslo_concurrency.lockutils [req-7c289711-e8e1-4046-aad8-4a1f547814c2 req-4d72d72a-e5fa-4899-8b75-862aa484aea8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.570 2 DEBUG nova.compute.manager [req-7c289711-e8e1-4046-aad8-4a1f547814c2 req-4d72d72a-e5fa-4899-8b75-862aa484aea8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Processing event network-vif-plugged-0c328734-ebc6-47bc-b603-2e4af1cae573 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:19:57 compute-0 systemd[1]: Started libpod-conmon-67e0afa6c28df6ebabd7c7cd1f3fc31079a438c6bc5878aeda19fafd10396845.scope.
Oct 02 12:19:57 compute-0 podman[235396]: 2025-10-02 12:19:57.497474744 +0000 UTC m=+0.025149036 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:19:57 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:19:57 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/8bd6bc02ac93b323c28eacd8bd193ea0722529603b0f58262ee053876d0e864a/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:19:57 compute-0 podman[235396]: 2025-10-02 12:19:57.616487519 +0000 UTC m=+0.144161831 container init 67e0afa6c28df6ebabd7c7cd1f3fc31079a438c6bc5878aeda19fafd10396845 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-8f494075-66bf-4ce0-a765-98fd91c31199, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2)
Oct 02 12:19:57 compute-0 podman[235396]: 2025-10-02 12:19:57.621929687 +0000 UTC m=+0.149603969 container start 67e0afa6c28df6ebabd7c7cd1f3fc31079a438c6bc5878aeda19fafd10396845 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-8f494075-66bf-4ce0-a765-98fd91c31199, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, tcib_managed=true)
Oct 02 12:19:57 compute-0 neutron-haproxy-ovnmeta-8f494075-66bf-4ce0-a765-98fd91c31199[235411]: [NOTICE]   (235415) : New worker (235417) forked
Oct 02 12:19:57 compute-0 neutron-haproxy-ovnmeta-8f494075-66bf-4ce0-a765-98fd91c31199[235411]: [NOTICE]   (235415) : Loading success.
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.697 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407597.6967378, 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.697 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] VM Started (Lifecycle Event)
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.699 2 DEBUG nova.compute.manager [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.703 2 DEBUG nova.virt.libvirt.driver [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.706 2 INFO nova.virt.libvirt.driver [-] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Instance spawned successfully.
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.706 2 DEBUG nova.virt.libvirt.driver [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.724 2 DEBUG nova.network.neutron [req-6d0b6f53-c9a9-4016-8059-bc84617dc50b req-b606c63f-32ad-4302-905b-219037dc96b3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Updated VIF entry in instance network info cache for port 0c328734-ebc6-47bc-b603-2e4af1cae573. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.725 2 DEBUG nova.network.neutron [req-6d0b6f53-c9a9-4016-8059-bc84617dc50b req-b606c63f-32ad-4302-905b-219037dc96b3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Updating instance_info_cache with network_info: [{"id": "0c328734-ebc6-47bc-b603-2e4af1cae573", "address": "fa:16:3e:ef:e3:79", "network": {"id": "8f494075-66bf-4ce0-a765-98fd91c31199", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1553125421-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f0c8c8a8631b4721beed577a99f8bdb7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap0c328734-eb", "ovs_interfaceid": "0c328734-ebc6-47bc-b603-2e4af1cae573", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.728 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.734 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.737 2 DEBUG nova.virt.libvirt.driver [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.737 2 DEBUG nova.virt.libvirt.driver [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.737 2 DEBUG nova.virt.libvirt.driver [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.738 2 DEBUG nova.virt.libvirt.driver [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.738 2 DEBUG nova.virt.libvirt.driver [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.739 2 DEBUG nova.virt.libvirt.driver [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.759 2 DEBUG oslo_concurrency.lockutils [req-6d0b6f53-c9a9-4016-8059-bc84617dc50b req-b606c63f-32ad-4302-905b-219037dc96b3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.760 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.761 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407597.6968539, 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.761 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] VM Paused (Lifecycle Event)
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.800 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.803 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407597.7022474, 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.804 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] VM Resumed (Lifecycle Event)
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.825 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.829 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.844 2 INFO nova.compute.manager [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Took 5.36 seconds to spawn the instance on the hypervisor.
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.844 2 DEBUG nova.compute.manager [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.847 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.910 2 INFO nova.compute.manager [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Took 6.67 seconds to build instance.
Oct 02 12:19:57 compute-0 nova_compute[192079]: 2025-10-02 12:19:57.926 2 DEBUG oslo_concurrency.lockutils [None req-2b009af8-f09b-49ba-8845-fa63581ed934 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Lock "35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 6.964s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:59 compute-0 nova_compute[192079]: 2025-10-02 12:19:59.704 2 DEBUG nova.compute.manager [req-6811d89c-260f-4c7d-a46d-1628f15811ca req-23dbc045-f3fa-4448-9ca1-ba506616e87a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Received event network-vif-plugged-0c328734-ebc6-47bc-b603-2e4af1cae573 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:19:59 compute-0 nova_compute[192079]: 2025-10-02 12:19:59.704 2 DEBUG oslo_concurrency.lockutils [req-6811d89c-260f-4c7d-a46d-1628f15811ca req-23dbc045-f3fa-4448-9ca1-ba506616e87a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:19:59 compute-0 nova_compute[192079]: 2025-10-02 12:19:59.704 2 DEBUG oslo_concurrency.lockutils [req-6811d89c-260f-4c7d-a46d-1628f15811ca req-23dbc045-f3fa-4448-9ca1-ba506616e87a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:19:59 compute-0 nova_compute[192079]: 2025-10-02 12:19:59.705 2 DEBUG oslo_concurrency.lockutils [req-6811d89c-260f-4c7d-a46d-1628f15811ca req-23dbc045-f3fa-4448-9ca1-ba506616e87a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:19:59 compute-0 nova_compute[192079]: 2025-10-02 12:19:59.705 2 DEBUG nova.compute.manager [req-6811d89c-260f-4c7d-a46d-1628f15811ca req-23dbc045-f3fa-4448-9ca1-ba506616e87a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] No waiting events found dispatching network-vif-plugged-0c328734-ebc6-47bc-b603-2e4af1cae573 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:19:59 compute-0 nova_compute[192079]: 2025-10-02 12:19:59.705 2 WARNING nova.compute.manager [req-6811d89c-260f-4c7d-a46d-1628f15811ca req-23dbc045-f3fa-4448-9ca1-ba506616e87a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Received unexpected event network-vif-plugged-0c328734-ebc6-47bc-b603-2e4af1cae573 for instance with vm_state active and task_state None.
Oct 02 12:20:00 compute-0 nova_compute[192079]: 2025-10-02 12:20:00.864 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:01 compute-0 nova_compute[192079]: 2025-10-02 12:20:01.531 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:02 compute-0 podman[235427]: 2025-10-02 12:20:02.140659808 +0000 UTC m=+0.051638840 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., name=ubi9-minimal, build-date=2025-08-20T13:12:41, managed_by=edpm_ansible, maintainer=Red Hat, Inc., version=9.6, io.openshift.tags=minimal rhel9, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., config_id=edpm, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. 
This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., com.redhat.component=ubi9-minimal-container, release=1755695350, vcs-type=git, io.openshift.expose-services=, url=https://catalog.redhat.com/en/search?searchType=containers, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, vendor=Red Hat, Inc., distribution-scope=public, architecture=x86_64, container_name=openstack_network_exporter, io.buildah.version=1.33.7)
Oct 02 12:20:02 compute-0 podman[235428]: 2025-10-02 12:20:02.18075263 +0000 UTC m=+0.088432692 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, tcib_managed=true, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, config_id=multipathd, container_name=multipathd, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001)
Oct 02 12:20:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:02.219 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:20:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:02.220 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:20:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:02.220 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:20:05 compute-0 nova_compute[192079]: 2025-10-02 12:20:05.867 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:06 compute-0 nova_compute[192079]: 2025-10-02 12:20:06.532 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:09 compute-0 podman[235487]: 2025-10-02 12:20:09.138382759 +0000 UTC m=+0.051592377 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_managed=true, config_id=iscsid, container_name=iscsid)
Oct 02 12:20:09 compute-0 podman[235486]: 2025-10-02 12:20:09.14096585 +0000 UTC m=+0.048788981 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>)
Oct 02 12:20:10 compute-0 nova_compute[192079]: 2025-10-02 12:20:10.870 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:11 compute-0 nova_compute[192079]: 2025-10-02 12:20:11.563 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:12 compute-0 ovn_controller[94336]: 2025-10-02T12:20:12Z|00033|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:ef:e3:79 10.100.0.10
Oct 02 12:20:12 compute-0 ovn_controller[94336]: 2025-10-02T12:20:12Z|00034|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:ef:e3:79 10.100.0.10
Oct 02 12:20:15 compute-0 nova_compute[192079]: 2025-10-02 12:20:15.872 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:16 compute-0 nova_compute[192079]: 2025-10-02 12:20:16.565 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:19 compute-0 podman[235542]: 2025-10-02 12:20:19.147847746 +0000 UTC m=+0.058246659 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:20:19 compute-0 podman[235540]: 2025-10-02 12:20:19.154163128 +0000 UTC m=+0.070004829 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, container_name=ovn_metadata_agent, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, 
org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_metadata_agent, io.buildah.version=1.41.3, managed_by=edpm_ansible)
Oct 02 12:20:19 compute-0 podman[235541]: 2025-10-02 12:20:19.188786722 +0000 UTC m=+0.104873530 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_managed=true, config_id=ovn_controller, container_name=ovn_controller, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:20:19 compute-0 nova_compute[192079]: 2025-10-02 12:20:19.933 2 DEBUG oslo_concurrency.lockutils [None req-faf58ea9-6043-4134-9c2d-bfeafd8a2df2 e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Acquiring lock "433c3cd4-9f5c-4903-9257-26f4f2c31a78" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:20:19 compute-0 nova_compute[192079]: 2025-10-02 12:20:19.933 2 DEBUG oslo_concurrency.lockutils [None req-faf58ea9-6043-4134-9c2d-bfeafd8a2df2 e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Lock "433c3cd4-9f5c-4903-9257-26f4f2c31a78" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:20:19 compute-0 nova_compute[192079]: 2025-10-02 12:20:19.934 2 DEBUG oslo_concurrency.lockutils [None req-faf58ea9-6043-4134-9c2d-bfeafd8a2df2 e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Acquiring lock "433c3cd4-9f5c-4903-9257-26f4f2c31a78-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:20:19 compute-0 nova_compute[192079]: 2025-10-02 12:20:19.934 2 DEBUG oslo_concurrency.lockutils [None req-faf58ea9-6043-4134-9c2d-bfeafd8a2df2 e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Lock "433c3cd4-9f5c-4903-9257-26f4f2c31a78-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:20:19 compute-0 nova_compute[192079]: 2025-10-02 12:20:19.934 2 DEBUG oslo_concurrency.lockutils [None req-faf58ea9-6043-4134-9c2d-bfeafd8a2df2 e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Lock "433c3cd4-9f5c-4903-9257-26f4f2c31a78-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:20:19 compute-0 nova_compute[192079]: 2025-10-02 12:20:19.944 2 INFO nova.compute.manager [None req-faf58ea9-6043-4134-9c2d-bfeafd8a2df2 e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Terminating instance
Oct 02 12:20:19 compute-0 nova_compute[192079]: 2025-10-02 12:20:19.953 2 DEBUG oslo_concurrency.lockutils [None req-faf58ea9-6043-4134-9c2d-bfeafd8a2df2 e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Acquiring lock "refresh_cache-433c3cd4-9f5c-4903-9257-26f4f2c31a78" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:20:19 compute-0 nova_compute[192079]: 2025-10-02 12:20:19.954 2 DEBUG oslo_concurrency.lockutils [None req-faf58ea9-6043-4134-9c2d-bfeafd8a2df2 e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Acquired lock "refresh_cache-433c3cd4-9f5c-4903-9257-26f4f2c31a78" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:20:19 compute-0 nova_compute[192079]: 2025-10-02 12:20:19.954 2 DEBUG nova.network.neutron [None req-faf58ea9-6043-4134-9c2d-bfeafd8a2df2 e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:20:20 compute-0 nova_compute[192079]: 2025-10-02 12:20:20.110 2 DEBUG nova.network.neutron [None req-faf58ea9-6043-4134-9c2d-bfeafd8a2df2 e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:20:20 compute-0 nova_compute[192079]: 2025-10-02 12:20:20.420 2 DEBUG nova.network.neutron [None req-faf58ea9-6043-4134-9c2d-bfeafd8a2df2 e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:20:20 compute-0 nova_compute[192079]: 2025-10-02 12:20:20.445 2 DEBUG oslo_concurrency.lockutils [None req-faf58ea9-6043-4134-9c2d-bfeafd8a2df2 e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Releasing lock "refresh_cache-433c3cd4-9f5c-4903-9257-26f4f2c31a78" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:20:20 compute-0 nova_compute[192079]: 2025-10-02 12:20:20.447 2 DEBUG nova.compute.manager [None req-faf58ea9-6043-4134-9c2d-bfeafd8a2df2 e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:20:20 compute-0 systemd[1]: machine-qemu\x2d46\x2dinstance\x2d00000064.scope: Deactivated successfully.
Oct 02 12:20:20 compute-0 systemd[1]: machine-qemu\x2d46\x2dinstance\x2d00000064.scope: Consumed 13.509s CPU time.
Oct 02 12:20:20 compute-0 systemd-machined[152150]: Machine qemu-46-instance-00000064 terminated.
Oct 02 12:20:20 compute-0 nova_compute[192079]: 2025-10-02 12:20:20.718 2 INFO nova.virt.libvirt.driver [-] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Instance destroyed successfully.
Oct 02 12:20:20 compute-0 nova_compute[192079]: 2025-10-02 12:20:20.718 2 DEBUG nova.objects.instance [None req-faf58ea9-6043-4134-9c2d-bfeafd8a2df2 e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Lazy-loading 'resources' on Instance uuid 433c3cd4-9f5c-4903-9257-26f4f2c31a78 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:20:20 compute-0 nova_compute[192079]: 2025-10-02 12:20:20.735 2 INFO nova.virt.libvirt.driver [None req-faf58ea9-6043-4134-9c2d-bfeafd8a2df2 e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Deleting instance files /var/lib/nova/instances/433c3cd4-9f5c-4903-9257-26f4f2c31a78_del
Oct 02 12:20:20 compute-0 nova_compute[192079]: 2025-10-02 12:20:20.736 2 INFO nova.virt.libvirt.driver [None req-faf58ea9-6043-4134-9c2d-bfeafd8a2df2 e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Deletion of /var/lib/nova/instances/433c3cd4-9f5c-4903-9257-26f4f2c31a78_del complete
Oct 02 12:20:20 compute-0 nova_compute[192079]: 2025-10-02 12:20:20.839 2 INFO nova.compute.manager [None req-faf58ea9-6043-4134-9c2d-bfeafd8a2df2 e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Took 0.39 seconds to destroy the instance on the hypervisor.
Oct 02 12:20:20 compute-0 nova_compute[192079]: 2025-10-02 12:20:20.840 2 DEBUG oslo.service.loopingcall [None req-faf58ea9-6043-4134-9c2d-bfeafd8a2df2 e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:20:20 compute-0 nova_compute[192079]: 2025-10-02 12:20:20.840 2 DEBUG nova.compute.manager [-] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:20:20 compute-0 nova_compute[192079]: 2025-10-02 12:20:20.840 2 DEBUG nova.network.neutron [-] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:20:20 compute-0 nova_compute[192079]: 2025-10-02 12:20:20.874 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:21 compute-0 nova_compute[192079]: 2025-10-02 12:20:21.021 2 DEBUG nova.network.neutron [-] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:20:21 compute-0 nova_compute[192079]: 2025-10-02 12:20:21.033 2 DEBUG nova.network.neutron [-] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:20:21 compute-0 nova_compute[192079]: 2025-10-02 12:20:21.045 2 INFO nova.compute.manager [-] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Took 0.20 seconds to deallocate network for instance.
Oct 02 12:20:21 compute-0 nova_compute[192079]: 2025-10-02 12:20:21.120 2 DEBUG oslo_concurrency.lockutils [None req-faf58ea9-6043-4134-9c2d-bfeafd8a2df2 e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:20:21 compute-0 nova_compute[192079]: 2025-10-02 12:20:21.121 2 DEBUG oslo_concurrency.lockutils [None req-faf58ea9-6043-4134-9c2d-bfeafd8a2df2 e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:20:21 compute-0 nova_compute[192079]: 2025-10-02 12:20:21.351 2 DEBUG nova.compute.provider_tree [None req-faf58ea9-6043-4134-9c2d-bfeafd8a2df2 e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:20:21 compute-0 nova_compute[192079]: 2025-10-02 12:20:21.374 2 DEBUG nova.scheduler.client.report [None req-faf58ea9-6043-4134-9c2d-bfeafd8a2df2 e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:20:21 compute-0 nova_compute[192079]: 2025-10-02 12:20:21.397 2 DEBUG oslo_concurrency.lockutils [None req-faf58ea9-6043-4134-9c2d-bfeafd8a2df2 e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.277s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:20:21 compute-0 nova_compute[192079]: 2025-10-02 12:20:21.422 2 INFO nova.scheduler.client.report [None req-faf58ea9-6043-4134-9c2d-bfeafd8a2df2 e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Deleted allocations for instance 433c3cd4-9f5c-4903-9257-26f4f2c31a78
Oct 02 12:20:21 compute-0 nova_compute[192079]: 2025-10-02 12:20:21.525 2 DEBUG oslo_concurrency.lockutils [None req-faf58ea9-6043-4134-9c2d-bfeafd8a2df2 e7efd391ff484c8bb99570302eacb8f4 12599487474040b285ccdd017a8c01b5 - - default default] Lock "433c3cd4-9f5c-4903-9257-26f4f2c31a78" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.591s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:20:21 compute-0 nova_compute[192079]: 2025-10-02 12:20:21.567 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:21 compute-0 nova_compute[192079]: 2025-10-02 12:20:21.675 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:20:24 compute-0 nova_compute[192079]: 2025-10-02 12:20:24.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:20:24 compute-0 nova_compute[192079]: 2025-10-02 12:20:24.690 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:20:24 compute-0 nova_compute[192079]: 2025-10-02 12:20:24.690 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:20:24 compute-0 nova_compute[192079]: 2025-10-02 12:20:24.690 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:20:24 compute-0 nova_compute[192079]: 2025-10-02 12:20:24.691 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:20:24 compute-0 nova_compute[192079]: 2025-10-02 12:20:24.771 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:20:24 compute-0 nova_compute[192079]: 2025-10-02 12:20:24.852 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk --force-share --output=json" returned: 0 in 0.082s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:20:24 compute-0 nova_compute[192079]: 2025-10-02 12:20:24.853 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:20:24 compute-0 nova_compute[192079]: 2025-10-02 12:20:24.912 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk --force-share --output=json" returned: 0 in 0.059s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:20:25 compute-0 nova_compute[192079]: 2025-10-02 12:20:25.073 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:20:25 compute-0 nova_compute[192079]: 2025-10-02 12:20:25.074 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5575MB free_disk=73.32029342651367GB free_vcpus=7 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:20:25 compute-0 nova_compute[192079]: 2025-10-02 12:20:25.074 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:20:25 compute-0 nova_compute[192079]: 2025-10-02 12:20:25.075 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:20:25 compute-0 nova_compute[192079]: 2025-10-02 12:20:25.149 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:20:25 compute-0 nova_compute[192079]: 2025-10-02 12:20:25.149 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 1 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:20:25 compute-0 nova_compute[192079]: 2025-10-02 12:20:25.150 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=640MB phys_disk=79GB used_disk=1GB total_vcpus=8 used_vcpus=1 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:20:25 compute-0 nova_compute[192079]: 2025-10-02 12:20:25.202 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:20:25 compute-0 nova_compute[192079]: 2025-10-02 12:20:25.225 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:20:25 compute-0 nova_compute[192079]: 2025-10-02 12:20:25.253 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:20:25 compute-0 nova_compute[192079]: 2025-10-02 12:20:25.253 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.179s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:20:25 compute-0 nova_compute[192079]: 2025-10-02 12:20:25.877 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:26 compute-0 podman[235625]: 2025-10-02 12:20:26.185841146 +0000 UTC m=+0.090422926 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, 
org.label-schema.build-date=20251001, config_id=edpm, org.label-schema.license=GPLv2)
Oct 02 12:20:26 compute-0 nova_compute[192079]: 2025-10-02 12:20:26.254 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:20:26 compute-0 nova_compute[192079]: 2025-10-02 12:20:26.612 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:26 compute-0 nova_compute[192079]: 2025-10-02 12:20:26.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:20:27 compute-0 nova_compute[192079]: 2025-10-02 12:20:27.660 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:20:27 compute-0 nova_compute[192079]: 2025-10-02 12:20:27.693 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:20:27 compute-0 nova_compute[192079]: 2025-10-02 12:20:27.693 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:20:28 compute-0 nova_compute[192079]: 2025-10-02 12:20:28.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:20:28 compute-0 nova_compute[192079]: 2025-10-02 12:20:28.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:20:28 compute-0 nova_compute[192079]: 2025-10-02 12:20:28.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:20:28 compute-0 nova_compute[192079]: 2025-10-02 12:20:28.865 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "refresh_cache-35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:20:28 compute-0 nova_compute[192079]: 2025-10-02 12:20:28.866 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquired lock "refresh_cache-35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:20:28 compute-0 nova_compute[192079]: 2025-10-02 12:20:28.866 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Forcefully refreshing network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2004
Oct 02 12:20:28 compute-0 nova_compute[192079]: 2025-10-02 12:20:28.866 2 DEBUG nova.objects.instance [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lazy-loading 'info_cache' on Instance uuid 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:20:30 compute-0 nova_compute[192079]: 2025-10-02 12:20:30.270 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Updating instance_info_cache with network_info: [{"id": "0c328734-ebc6-47bc-b603-2e4af1cae573", "address": "fa:16:3e:ef:e3:79", "network": {"id": "8f494075-66bf-4ce0-a765-98fd91c31199", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1553125421-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f0c8c8a8631b4721beed577a99f8bdb7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap0c328734-eb", "ovs_interfaceid": "0c328734-ebc6-47bc-b603-2e4af1cae573", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:20:30 compute-0 nova_compute[192079]: 2025-10-02 12:20:30.290 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Releasing lock "refresh_cache-35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:20:30 compute-0 nova_compute[192079]: 2025-10-02 12:20:30.291 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Updated the network info_cache for instance _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9929
Oct 02 12:20:30 compute-0 nova_compute[192079]: 2025-10-02 12:20:30.291 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:20:30 compute-0 nova_compute[192079]: 2025-10-02 12:20:30.291 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:20:30 compute-0 nova_compute[192079]: 2025-10-02 12:20:30.880 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:31 compute-0 nova_compute[192079]: 2025-10-02 12:20:31.612 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:31 compute-0 nova_compute[192079]: 2025-10-02 12:20:31.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:20:33 compute-0 podman[235646]: 2025-10-02 12:20:33.141819592 +0000 UTC m=+0.054107516 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=multipathd, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:20:33 compute-0 podman[235645]: 2025-10-02 12:20:33.150861748 +0000 UTC m=+0.062930886 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, release=1755695350, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., name=ubi9-minimal, io.buildah.version=1.33.7, build-date=2025-08-20T13:12:41, managed_by=edpm_ansible, architecture=x86_64, vcs-type=git, com.redhat.component=ubi9-minimal-container, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., version=9.6, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, distribution-scope=public, io.openshift.expose-services=, vendor=Red Hat, Inc., container_name=openstack_network_exporter, config_id=edpm, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, url=https://catalog.redhat.com/en/search?searchType=containers, maintainer=Red Hat, Inc., io.openshift.tags=minimal rhel9)
Oct 02 12:20:35 compute-0 nova_compute[192079]: 2025-10-02 12:20:35.715 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759407620.7149096, 433c3cd4-9f5c-4903-9257-26f4f2c31a78 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:20:35 compute-0 nova_compute[192079]: 2025-10-02 12:20:35.716 2 INFO nova.compute.manager [-] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] VM Stopped (Lifecycle Event)
Oct 02 12:20:35 compute-0 nova_compute[192079]: 2025-10-02 12:20:35.740 2 DEBUG nova.compute.manager [None req-d5392094-ac24-41ea-9a6b-7e15ff3d64e1 - - - - - -] [instance: 433c3cd4-9f5c-4903-9257-26f4f2c31a78] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:20:35 compute-0 nova_compute[192079]: 2025-10-02 12:20:35.884 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:36 compute-0 nova_compute[192079]: 2025-10-02 12:20:36.614 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:40 compute-0 podman[235684]: 2025-10-02 12:20:40.162149211 +0000 UTC m=+0.062242207 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:20:40 compute-0 podman[235685]: 2025-10-02 12:20:40.19472068 +0000 UTC m=+0.094544619 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, config_id=iscsid, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:20:40 compute-0 nova_compute[192079]: 2025-10-02 12:20:40.886 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:41 compute-0 nova_compute[192079]: 2025-10-02 12:20:41.664 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.081 2 DEBUG oslo_concurrency.lockutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Acquiring lock "3475bff3-7ba6-45a3-b8d5-713279fe6342" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.081 2 DEBUG oslo_concurrency.lockutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Lock "3475bff3-7ba6-45a3-b8d5-713279fe6342" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.103 2 DEBUG nova.compute.manager [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.217 2 DEBUG oslo_concurrency.lockutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.218 2 DEBUG oslo_concurrency.lockutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.226 2 DEBUG nova.virt.hardware [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.226 2 INFO nova.compute.claims [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.356 2 DEBUG nova.compute.provider_tree [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.371 2 DEBUG nova.scheduler.client.report [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.398 2 DEBUG oslo_concurrency.lockutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.181s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.399 2 DEBUG nova.compute.manager [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.457 2 DEBUG nova.compute.manager [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.458 2 DEBUG nova.network.neutron [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.479 2 INFO nova.virt.libvirt.driver [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.497 2 DEBUG nova.compute.manager [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.642 2 DEBUG nova.policy [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '35fdeaf3813244da8a6293d0c73bd4ce', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8f85658eae934a26bd1246a0eecfb714', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.644 2 DEBUG nova.compute.manager [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.646 2 DEBUG nova.virt.libvirt.driver [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.646 2 INFO nova.virt.libvirt.driver [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Creating image(s)
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.647 2 DEBUG oslo_concurrency.lockutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Acquiring lock "/var/lib/nova/instances/3475bff3-7ba6-45a3-b8d5-713279fe6342/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.647 2 DEBUG oslo_concurrency.lockutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Lock "/var/lib/nova/instances/3475bff3-7ba6-45a3-b8d5-713279fe6342/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.648 2 DEBUG oslo_concurrency.lockutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Lock "/var/lib/nova/instances/3475bff3-7ba6-45a3-b8d5-713279fe6342/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.664 2 DEBUG oslo_concurrency.processutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.719 2 DEBUG oslo_concurrency.processutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.719 2 DEBUG oslo_concurrency.lockutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.720 2 DEBUG oslo_concurrency.lockutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.730 2 DEBUG oslo_concurrency.processutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.784 2 DEBUG oslo_concurrency.processutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.785 2 DEBUG oslo_concurrency.processutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/3475bff3-7ba6-45a3-b8d5-713279fe6342/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.824 2 DEBUG oslo_concurrency.processutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/3475bff3-7ba6-45a3-b8d5-713279fe6342/disk 1073741824" returned: 0 in 0.039s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.825 2 DEBUG oslo_concurrency.lockutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.105s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.825 2 DEBUG oslo_concurrency.processutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.877 2 DEBUG oslo_concurrency.processutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.052s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.879 2 DEBUG nova.virt.disk.api [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Checking if we can resize image /var/lib/nova/instances/3475bff3-7ba6-45a3-b8d5-713279fe6342/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.879 2 DEBUG oslo_concurrency.processutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/3475bff3-7ba6-45a3-b8d5-713279fe6342/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.936 2 DEBUG oslo_concurrency.processutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/3475bff3-7ba6-45a3-b8d5-713279fe6342/disk --force-share --output=json" returned: 0 in 0.057s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.937 2 DEBUG nova.virt.disk.api [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Cannot resize image /var/lib/nova/instances/3475bff3-7ba6-45a3-b8d5-713279fe6342/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.938 2 DEBUG nova.objects.instance [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Lazy-loading 'migration_context' on Instance uuid 3475bff3-7ba6-45a3-b8d5-713279fe6342 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.957 2 DEBUG nova.virt.libvirt.driver [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.958 2 DEBUG nova.virt.libvirt.driver [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Ensure instance console log exists: /var/lib/nova/instances/3475bff3-7ba6-45a3-b8d5-713279fe6342/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.958 2 DEBUG oslo_concurrency.lockutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.958 2 DEBUG oslo_concurrency.lockutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:20:43 compute-0 nova_compute[192079]: 2025-10-02 12:20:43.959 2 DEBUG oslo_concurrency.lockutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:20:44 compute-0 nova_compute[192079]: 2025-10-02 12:20:44.710 2 DEBUG nova.network.neutron [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Successfully created port: 775299b3-732f-4714-806b-69284ee838e1 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:20:45 compute-0 nova_compute[192079]: 2025-10-02 12:20:45.889 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:46 compute-0 nova_compute[192079]: 2025-10-02 12:20:46.289 2 DEBUG nova.network.neutron [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Successfully updated port: 775299b3-732f-4714-806b-69284ee838e1 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:20:46 compute-0 nova_compute[192079]: 2025-10-02 12:20:46.319 2 DEBUG oslo_concurrency.lockutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Acquiring lock "refresh_cache-3475bff3-7ba6-45a3-b8d5-713279fe6342" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:20:46 compute-0 nova_compute[192079]: 2025-10-02 12:20:46.320 2 DEBUG oslo_concurrency.lockutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Acquired lock "refresh_cache-3475bff3-7ba6-45a3-b8d5-713279fe6342" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:20:46 compute-0 nova_compute[192079]: 2025-10-02 12:20:46.320 2 DEBUG nova.network.neutron [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:20:46 compute-0 nova_compute[192079]: 2025-10-02 12:20:46.380 2 DEBUG nova.compute.manager [req-0ddf88d4-806c-4406-92b1-79c0615c98d3 req-35bc290a-c591-4c24-9423-c7ec24311b31 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Received event network-changed-775299b3-732f-4714-806b-69284ee838e1 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:20:46 compute-0 nova_compute[192079]: 2025-10-02 12:20:46.380 2 DEBUG nova.compute.manager [req-0ddf88d4-806c-4406-92b1-79c0615c98d3 req-35bc290a-c591-4c24-9423-c7ec24311b31 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Refreshing instance network info cache due to event network-changed-775299b3-732f-4714-806b-69284ee838e1. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:20:46 compute-0 nova_compute[192079]: 2025-10-02 12:20:46.381 2 DEBUG oslo_concurrency.lockutils [req-0ddf88d4-806c-4406-92b1-79c0615c98d3 req-35bc290a-c591-4c24-9423-c7ec24311b31 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-3475bff3-7ba6-45a3-b8d5-713279fe6342" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:20:46 compute-0 nova_compute[192079]: 2025-10-02 12:20:46.504 2 DEBUG nova.network.neutron [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:20:46 compute-0 nova_compute[192079]: 2025-10-02 12:20:46.666 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.566 2 DEBUG nova.network.neutron [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Updating instance_info_cache with network_info: [{"id": "775299b3-732f-4714-806b-69284ee838e1", "address": "fa:16:3e:97:c5:4c", "network": {"id": "4f73fda2-bd7c-40fc-99e9-283e6747b4fc", "bridge": "br-int", "label": "tempest-NoVNCConsoleTestJSON-1966512610-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "8f85658eae934a26bd1246a0eecfb714", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap775299b3-73", "ovs_interfaceid": "775299b3-732f-4714-806b-69284ee838e1", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.584 2 DEBUG oslo_concurrency.lockutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Releasing lock "refresh_cache-3475bff3-7ba6-45a3-b8d5-713279fe6342" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.584 2 DEBUG nova.compute.manager [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Instance network_info: |[{"id": "775299b3-732f-4714-806b-69284ee838e1", "address": "fa:16:3e:97:c5:4c", "network": {"id": "4f73fda2-bd7c-40fc-99e9-283e6747b4fc", "bridge": "br-int", "label": "tempest-NoVNCConsoleTestJSON-1966512610-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "8f85658eae934a26bd1246a0eecfb714", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap775299b3-73", "ovs_interfaceid": "775299b3-732f-4714-806b-69284ee838e1", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.585 2 DEBUG oslo_concurrency.lockutils [req-0ddf88d4-806c-4406-92b1-79c0615c98d3 req-35bc290a-c591-4c24-9423-c7ec24311b31 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-3475bff3-7ba6-45a3-b8d5-713279fe6342" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.585 2 DEBUG nova.network.neutron [req-0ddf88d4-806c-4406-92b1-79c0615c98d3 req-35bc290a-c591-4c24-9423-c7ec24311b31 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Refreshing network info cache for port 775299b3-732f-4714-806b-69284ee838e1 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.589 2 DEBUG nova.virt.libvirt.driver [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Start _get_guest_xml network_info=[{"id": "775299b3-732f-4714-806b-69284ee838e1", "address": "fa:16:3e:97:c5:4c", "network": {"id": "4f73fda2-bd7c-40fc-99e9-283e6747b4fc", "bridge": "br-int", "label": "tempest-NoVNCConsoleTestJSON-1966512610-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "8f85658eae934a26bd1246a0eecfb714", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap775299b3-73", "ovs_interfaceid": "775299b3-732f-4714-806b-69284ee838e1", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.593 2 WARNING nova.virt.libvirt.driver [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.599 2 DEBUG nova.virt.libvirt.host [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.600 2 DEBUG nova.virt.libvirt.host [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.606 2 DEBUG nova.virt.libvirt.host [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.607 2 DEBUG nova.virt.libvirt.host [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.608 2 DEBUG nova.virt.libvirt.driver [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.608 2 DEBUG nova.virt.hardware [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.609 2 DEBUG nova.virt.hardware [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.609 2 DEBUG nova.virt.hardware [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.609 2 DEBUG nova.virt.hardware [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.609 2 DEBUG nova.virt.hardware [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.610 2 DEBUG nova.virt.hardware [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.610 2 DEBUG nova.virt.hardware [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.610 2 DEBUG nova.virt.hardware [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.610 2 DEBUG nova.virt.hardware [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.610 2 DEBUG nova.virt.hardware [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.611 2 DEBUG nova.virt.hardware [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.615 2 DEBUG nova.virt.libvirt.vif [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:20:42Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-NoVNCConsoleTestJSON-server-1020715920',display_name='tempest-NoVNCConsoleTestJSON-server-1020715920',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-novncconsoletestjson-server-1020715920',id=110,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='8f85658eae934a26bd1246a0eecfb714',ramdisk_id='',reservation_id='r-xkm9nct5',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-NoVNCConsoleTestJSON-14614965',owner_user_name='tempest-NoVNCConsoleTestJSON-14614965-pr
oject-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:20:43Z,user_data=None,user_id='35fdeaf3813244da8a6293d0c73bd4ce',uuid=3475bff3-7ba6-45a3-b8d5-713279fe6342,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "775299b3-732f-4714-806b-69284ee838e1", "address": "fa:16:3e:97:c5:4c", "network": {"id": "4f73fda2-bd7c-40fc-99e9-283e6747b4fc", "bridge": "br-int", "label": "tempest-NoVNCConsoleTestJSON-1966512610-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "8f85658eae934a26bd1246a0eecfb714", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap775299b3-73", "ovs_interfaceid": "775299b3-732f-4714-806b-69284ee838e1", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.615 2 DEBUG nova.network.os_vif_util [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Converting VIF {"id": "775299b3-732f-4714-806b-69284ee838e1", "address": "fa:16:3e:97:c5:4c", "network": {"id": "4f73fda2-bd7c-40fc-99e9-283e6747b4fc", "bridge": "br-int", "label": "tempest-NoVNCConsoleTestJSON-1966512610-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "8f85658eae934a26bd1246a0eecfb714", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap775299b3-73", "ovs_interfaceid": "775299b3-732f-4714-806b-69284ee838e1", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.616 2 DEBUG nova.network.os_vif_util [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:97:c5:4c,bridge_name='br-int',has_traffic_filtering=True,id=775299b3-732f-4714-806b-69284ee838e1,network=Network(4f73fda2-bd7c-40fc-99e9-283e6747b4fc),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap775299b3-73') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.617 2 DEBUG nova.objects.instance [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Lazy-loading 'pci_devices' on Instance uuid 3475bff3-7ba6-45a3-b8d5-713279fe6342 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.636 2 DEBUG nova.virt.libvirt.driver [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:20:47 compute-0 nova_compute[192079]:   <uuid>3475bff3-7ba6-45a3-b8d5-713279fe6342</uuid>
Oct 02 12:20:47 compute-0 nova_compute[192079]:   <name>instance-0000006e</name>
Oct 02 12:20:47 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:20:47 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:20:47 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:20:47 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:       <nova:name>tempest-NoVNCConsoleTestJSON-server-1020715920</nova:name>
Oct 02 12:20:47 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:20:47</nova:creationTime>
Oct 02 12:20:47 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:20:47 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:20:47 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:20:47 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:20:47 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:20:47 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:20:47 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:20:47 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:20:47 compute-0 nova_compute[192079]:         <nova:user uuid="35fdeaf3813244da8a6293d0c73bd4ce">tempest-NoVNCConsoleTestJSON-14614965-project-member</nova:user>
Oct 02 12:20:47 compute-0 nova_compute[192079]:         <nova:project uuid="8f85658eae934a26bd1246a0eecfb714">tempest-NoVNCConsoleTestJSON-14614965</nova:project>
Oct 02 12:20:47 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:20:47 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:20:47 compute-0 nova_compute[192079]:         <nova:port uuid="775299b3-732f-4714-806b-69284ee838e1">
Oct 02 12:20:47 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.9" ipVersion="4"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:20:47 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:20:47 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:20:47 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <system>
Oct 02 12:20:47 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:20:47 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:20:47 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:20:47 compute-0 nova_compute[192079]:       <entry name="serial">3475bff3-7ba6-45a3-b8d5-713279fe6342</entry>
Oct 02 12:20:47 compute-0 nova_compute[192079]:       <entry name="uuid">3475bff3-7ba6-45a3-b8d5-713279fe6342</entry>
Oct 02 12:20:47 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     </system>
Oct 02 12:20:47 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:20:47 compute-0 nova_compute[192079]:   <os>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:   </os>
Oct 02 12:20:47 compute-0 nova_compute[192079]:   <features>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:   </features>
Oct 02 12:20:47 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:20:47 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:20:47 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:20:47 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/3475bff3-7ba6-45a3-b8d5-713279fe6342/disk"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:20:47 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/3475bff3-7ba6-45a3-b8d5-713279fe6342/disk.config"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:20:47 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:97:c5:4c"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:       <target dev="tap775299b3-73"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:20:47 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/3475bff3-7ba6-45a3-b8d5-713279fe6342/console.log" append="off"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <video>
Oct 02 12:20:47 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     </video>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:20:47 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:20:47 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:20:47 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:20:47 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:20:47 compute-0 nova_compute[192079]: </domain>
Oct 02 12:20:47 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.637 2 DEBUG nova.compute.manager [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Preparing to wait for external event network-vif-plugged-775299b3-732f-4714-806b-69284ee838e1 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.637 2 DEBUG oslo_concurrency.lockutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Acquiring lock "3475bff3-7ba6-45a3-b8d5-713279fe6342-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.637 2 DEBUG oslo_concurrency.lockutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Lock "3475bff3-7ba6-45a3-b8d5-713279fe6342-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.638 2 DEBUG oslo_concurrency.lockutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Lock "3475bff3-7ba6-45a3-b8d5-713279fe6342-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.638 2 DEBUG nova.virt.libvirt.vif [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:20:42Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-NoVNCConsoleTestJSON-server-1020715920',display_name='tempest-NoVNCConsoleTestJSON-server-1020715920',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-novncconsoletestjson-server-1020715920',id=110,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='8f85658eae934a26bd1246a0eecfb714',ramdisk_id='',reservation_id='r-xkm9nct5',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-NoVNCConsoleTestJSON-14614965',owner_user_name='tempest-NoVNCConsoleTestJSON-1
4614965-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:20:43Z,user_data=None,user_id='35fdeaf3813244da8a6293d0c73bd4ce',uuid=3475bff3-7ba6-45a3-b8d5-713279fe6342,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "775299b3-732f-4714-806b-69284ee838e1", "address": "fa:16:3e:97:c5:4c", "network": {"id": "4f73fda2-bd7c-40fc-99e9-283e6747b4fc", "bridge": "br-int", "label": "tempest-NoVNCConsoleTestJSON-1966512610-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "8f85658eae934a26bd1246a0eecfb714", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap775299b3-73", "ovs_interfaceid": "775299b3-732f-4714-806b-69284ee838e1", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.639 2 DEBUG nova.network.os_vif_util [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Converting VIF {"id": "775299b3-732f-4714-806b-69284ee838e1", "address": "fa:16:3e:97:c5:4c", "network": {"id": "4f73fda2-bd7c-40fc-99e9-283e6747b4fc", "bridge": "br-int", "label": "tempest-NoVNCConsoleTestJSON-1966512610-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "8f85658eae934a26bd1246a0eecfb714", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap775299b3-73", "ovs_interfaceid": "775299b3-732f-4714-806b-69284ee838e1", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.639 2 DEBUG nova.network.os_vif_util [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:97:c5:4c,bridge_name='br-int',has_traffic_filtering=True,id=775299b3-732f-4714-806b-69284ee838e1,network=Network(4f73fda2-bd7c-40fc-99e9-283e6747b4fc),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap775299b3-73') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.640 2 DEBUG os_vif [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:97:c5:4c,bridge_name='br-int',has_traffic_filtering=True,id=775299b3-732f-4714-806b-69284ee838e1,network=Network(4f73fda2-bd7c-40fc-99e9-283e6747b4fc),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap775299b3-73') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.640 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.640 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.641 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.642 2 INFO nova.compute.manager [None req-c385a527-9d8e-4978-90d3-c8b3b2125f09 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Pausing
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.643 2 DEBUG nova.objects.instance [None req-c385a527-9d8e-4978-90d3-c8b3b2125f09 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Lazy-loading 'flavor' on Instance uuid 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.644 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.644 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap775299b3-73, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.645 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap775299b3-73, col_values=(('external_ids', {'iface-id': '775299b3-732f-4714-806b-69284ee838e1', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:97:c5:4c', 'vm-uuid': '3475bff3-7ba6-45a3-b8d5-713279fe6342'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.646 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:47 compute-0 NetworkManager[51160]: <info>  [1759407647.6480] manager: (tap775299b3-73): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/173)
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.648 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.653 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.653 2 INFO os_vif [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:97:c5:4c,bridge_name='br-int',has_traffic_filtering=True,id=775299b3-732f-4714-806b-69284ee838e1,network=Network(4f73fda2-bd7c-40fc-99e9-283e6747b4fc),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap775299b3-73')
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.701 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407647.7007966, 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.702 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] VM Paused (Lifecycle Event)
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.730 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.734 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: active, current task_state: pausing, current DB power_state: 1, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.755 2 DEBUG nova.compute.manager [None req-c385a527-9d8e-4978-90d3-c8b3b2125f09 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.802 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] During sync_power_state the instance has a pending task (pausing). Skip.
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.855 2 DEBUG nova.virt.libvirt.driver [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.855 2 DEBUG nova.virt.libvirt.driver [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.856 2 DEBUG nova.virt.libvirt.driver [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] No VIF found with MAC fa:16:3e:97:c5:4c, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:20:47 compute-0 nova_compute[192079]: 2025-10-02 12:20:47.856 2 INFO nova.virt.libvirt.driver [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Using config drive
Oct 02 12:20:48 compute-0 nova_compute[192079]: 2025-10-02 12:20:48.490 2 INFO nova.virt.libvirt.driver [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Creating config drive at /var/lib/nova/instances/3475bff3-7ba6-45a3-b8d5-713279fe6342/disk.config
Oct 02 12:20:48 compute-0 nova_compute[192079]: 2025-10-02 12:20:48.497 2 DEBUG oslo_concurrency.processutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/3475bff3-7ba6-45a3-b8d5-713279fe6342/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpx7m6uirg execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:20:48 compute-0 nova_compute[192079]: 2025-10-02 12:20:48.624 2 DEBUG oslo_concurrency.processutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/3475bff3-7ba6-45a3-b8d5-713279fe6342/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpx7m6uirg" returned: 0 in 0.127s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:20:48 compute-0 kernel: tap775299b3-73: entered promiscuous mode
Oct 02 12:20:48 compute-0 NetworkManager[51160]: <info>  [1759407648.6927] manager: (tap775299b3-73): new Tun device (/org/freedesktop/NetworkManager/Devices/174)
Oct 02 12:20:48 compute-0 nova_compute[192079]: 2025-10-02 12:20:48.693 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:48 compute-0 ovn_controller[94336]: 2025-10-02T12:20:48Z|00341|binding|INFO|Claiming lport 775299b3-732f-4714-806b-69284ee838e1 for this chassis.
Oct 02 12:20:48 compute-0 ovn_controller[94336]: 2025-10-02T12:20:48Z|00342|binding|INFO|775299b3-732f-4714-806b-69284ee838e1: Claiming fa:16:3e:97:c5:4c 10.100.0.9
Oct 02 12:20:48 compute-0 nova_compute[192079]: 2025-10-02 12:20:48.696 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:48 compute-0 nova_compute[192079]: 2025-10-02 12:20:48.705 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:48.715 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:97:c5:4c 10.100.0.9'], port_security=['fa:16:3e:97:c5:4c 10.100.0.9'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.9/28', 'neutron:device_id': '3475bff3-7ba6-45a3-b8d5-713279fe6342', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-4f73fda2-bd7c-40fc-99e9-283e6747b4fc', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '8f85658eae934a26bd1246a0eecfb714', 'neutron:revision_number': '2', 'neutron:security_group_ids': '6fc7061a-8bc2-4671-ab0e-4eda62624360', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=5dc75b7b-cea7-4052-961e-9277fc067f9a, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=775299b3-732f-4714-806b-69284ee838e1) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:20:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:48.716 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 775299b3-732f-4714-806b-69284ee838e1 in datapath 4f73fda2-bd7c-40fc-99e9-283e6747b4fc bound to our chassis
Oct 02 12:20:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:48.717 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 4f73fda2-bd7c-40fc-99e9-283e6747b4fc
Oct 02 12:20:48 compute-0 systemd-udevd[235762]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:20:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:48.727 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ec4d20c5-e334-4572-a65b-333ca2edb31d]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:20:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:48.728 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap4f73fda2-b1 in ovnmeta-4f73fda2-bd7c-40fc-99e9-283e6747b4fc namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:20:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:48.729 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap4f73fda2-b0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:20:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:48.730 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[fc0c3edd-3965-4d52-ad7f-e8b114462669]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:20:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:48.731 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[733e6a28-988f-49e4-be7a-d80573da9554]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:20:48 compute-0 NetworkManager[51160]: <info>  [1759407648.7422] device (tap775299b3-73): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:20:48 compute-0 NetworkManager[51160]: <info>  [1759407648.7444] device (tap775299b3-73): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:20:48 compute-0 systemd-machined[152150]: New machine qemu-48-instance-0000006e.
Oct 02 12:20:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:48.745 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[24f63aca-03fd-456f-9d6f-737075b34ede]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:20:48 compute-0 ovn_controller[94336]: 2025-10-02T12:20:48Z|00343|binding|INFO|Setting lport 775299b3-732f-4714-806b-69284ee838e1 ovn-installed in OVS
Oct 02 12:20:48 compute-0 ovn_controller[94336]: 2025-10-02T12:20:48Z|00344|binding|INFO|Setting lport 775299b3-732f-4714-806b-69284ee838e1 up in Southbound
Oct 02 12:20:48 compute-0 systemd[1]: Started Virtual Machine qemu-48-instance-0000006e.
Oct 02 12:20:48 compute-0 nova_compute[192079]: 2025-10-02 12:20:48.770 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:48.779 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[575708a3-e4ae-4ece-b224-6f38d2fc06c9]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:20:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:48.804 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[e276b846-3daf-4342-afe8-f030564bc3c3]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:20:48 compute-0 NetworkManager[51160]: <info>  [1759407648.8093] manager: (tap4f73fda2-b0): new Veth device (/org/freedesktop/NetworkManager/Devices/175)
Oct 02 12:20:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:48.808 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b5250e71-049f-4381-a703-ae72c8a6148f]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:20:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:48.840 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[3be2ad2d-d2f0-40fb-8d34-41e881e1429f]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:20:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:48.843 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[4d386226-a981-43dc-a135-bfd89d1ae3b6]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:20:48 compute-0 NetworkManager[51160]: <info>  [1759407648.8624] device (tap4f73fda2-b0): carrier: link connected
Oct 02 12:20:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:48.866 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[6c72005a-b884-4c2b-9145-60535b75c6dd]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:20:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:48.880 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[40ba3e40-38ec-4190-8780-9d7b19eaf3bb]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap4f73fda2-b1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:5f:e9:bb'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 2, 'rx_bytes': 110, 'tx_bytes': 176, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 2, 'rx_bytes': 110, 'tx_bytes': 176, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 112], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 564649, 'reachable_time': 41155, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 2, 'outoctets': 148, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 2, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 148, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 2, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 235796, 'error': None, 'target': 'ovnmeta-4f73fda2-bd7c-40fc-99e9-283e6747b4fc', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:20:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:48.893 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[33b451bb-da90-414e-9d51-0b907eb900e5]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe5f:e9bb'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 564649, 'tstamp': 564649}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 235797, 'error': None, 'target': 'ovnmeta-4f73fda2-bd7c-40fc-99e9-283e6747b4fc', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:20:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:48.906 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[000ca5e7-8c5b-46b9-a3f7-ba99e0d42ec6]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap4f73fda2-b1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:5f:e9:bb'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 2, 'rx_bytes': 110, 'tx_bytes': 176, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 2, 'rx_bytes': 110, 'tx_bytes': 176, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 112], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 564649, 'reachable_time': 41155, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 2, 'outoctets': 148, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 2, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 148, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 2, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 235798, 'error': None, 'target': 'ovnmeta-4f73fda2-bd7c-40fc-99e9-283e6747b4fc', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:20:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:48.929 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0bba354f-2dbc-4d75-a555-60eae52568ad]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:20:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:48.979 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e640dea8-1a59-4204-87b6-c5e00369fa7e]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:20:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:48.980 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap4f73fda2-b0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:20:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:48.981 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:20:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:48.981 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap4f73fda2-b0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:20:48 compute-0 kernel: tap4f73fda2-b0: entered promiscuous mode
Oct 02 12:20:48 compute-0 nova_compute[192079]: 2025-10-02 12:20:48.982 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:48 compute-0 NetworkManager[51160]: <info>  [1759407648.9837] manager: (tap4f73fda2-b0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/176)
Oct 02 12:20:48 compute-0 nova_compute[192079]: 2025-10-02 12:20:48.985 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:48.986 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap4f73fda2-b0, col_values=(('external_ids', {'iface-id': '95e64c07-a1bd-40eb-b885-c800df916a6b'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:20:48 compute-0 nova_compute[192079]: 2025-10-02 12:20:48.987 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:48 compute-0 ovn_controller[94336]: 2025-10-02T12:20:48Z|00345|binding|INFO|Releasing lport 95e64c07-a1bd-40eb-b885-c800df916a6b from this chassis (sb_readonly=0)
Oct 02 12:20:49 compute-0 nova_compute[192079]: 2025-10-02 12:20:49.000 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:49.004 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/4f73fda2-bd7c-40fc-99e9-283e6747b4fc.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/4f73fda2-bd7c-40fc-99e9-283e6747b4fc.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:49.005 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d2675f95-948e-4d3e-9aa1-1b6d1c242898]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:49.006 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-4f73fda2-bd7c-40fc-99e9-283e6747b4fc
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/4f73fda2-bd7c-40fc-99e9-283e6747b4fc.pid.haproxy
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 4f73fda2-bd7c-40fc-99e9-283e6747b4fc
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:20:49 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:49.006 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-4f73fda2-bd7c-40fc-99e9-283e6747b4fc', 'env', 'PROCESS_TAG=haproxy-4f73fda2-bd7c-40fc-99e9-283e6747b4fc', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/4f73fda2-bd7c-40fc-99e9-283e6747b4fc.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:20:49 compute-0 nova_compute[192079]: 2025-10-02 12:20:49.302 2 DEBUG nova.network.neutron [req-0ddf88d4-806c-4406-92b1-79c0615c98d3 req-35bc290a-c591-4c24-9423-c7ec24311b31 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Updated VIF entry in instance network info cache for port 775299b3-732f-4714-806b-69284ee838e1. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:20:49 compute-0 nova_compute[192079]: 2025-10-02 12:20:49.303 2 DEBUG nova.network.neutron [req-0ddf88d4-806c-4406-92b1-79c0615c98d3 req-35bc290a-c591-4c24-9423-c7ec24311b31 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Updating instance_info_cache with network_info: [{"id": "775299b3-732f-4714-806b-69284ee838e1", "address": "fa:16:3e:97:c5:4c", "network": {"id": "4f73fda2-bd7c-40fc-99e9-283e6747b4fc", "bridge": "br-int", "label": "tempest-NoVNCConsoleTestJSON-1966512610-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "8f85658eae934a26bd1246a0eecfb714", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap775299b3-73", "ovs_interfaceid": "775299b3-732f-4714-806b-69284ee838e1", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:20:49 compute-0 nova_compute[192079]: 2025-10-02 12:20:49.333 2 DEBUG oslo_concurrency.lockutils [req-0ddf88d4-806c-4406-92b1-79c0615c98d3 req-35bc290a-c591-4c24-9423-c7ec24311b31 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-3475bff3-7ba6-45a3-b8d5-713279fe6342" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:20:49 compute-0 nova_compute[192079]: 2025-10-02 12:20:49.378 2 DEBUG nova.compute.manager [req-e70af610-f11c-4231-a55c-3e8254604285 req-17f6c83f-17ae-4ff1-8c00-33cdffc5c3a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Received event network-vif-plugged-775299b3-732f-4714-806b-69284ee838e1 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:20:49 compute-0 nova_compute[192079]: 2025-10-02 12:20:49.379 2 DEBUG oslo_concurrency.lockutils [req-e70af610-f11c-4231-a55c-3e8254604285 req-17f6c83f-17ae-4ff1-8c00-33cdffc5c3a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "3475bff3-7ba6-45a3-b8d5-713279fe6342-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:20:49 compute-0 nova_compute[192079]: 2025-10-02 12:20:49.380 2 DEBUG oslo_concurrency.lockutils [req-e70af610-f11c-4231-a55c-3e8254604285 req-17f6c83f-17ae-4ff1-8c00-33cdffc5c3a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "3475bff3-7ba6-45a3-b8d5-713279fe6342-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:20:49 compute-0 nova_compute[192079]: 2025-10-02 12:20:49.381 2 DEBUG oslo_concurrency.lockutils [req-e70af610-f11c-4231-a55c-3e8254604285 req-17f6c83f-17ae-4ff1-8c00-33cdffc5c3a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "3475bff3-7ba6-45a3-b8d5-713279fe6342-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:20:49 compute-0 nova_compute[192079]: 2025-10-02 12:20:49.381 2 DEBUG nova.compute.manager [req-e70af610-f11c-4231-a55c-3e8254604285 req-17f6c83f-17ae-4ff1-8c00-33cdffc5c3a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Processing event network-vif-plugged-775299b3-732f-4714-806b-69284ee838e1 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:20:49 compute-0 podman[235829]: 2025-10-02 12:20:49.418295914 +0000 UTC m=+0.058080655 container create 1382028578ef1043cd7adcc7eaaf5688228db5c1798aee6781f3868985a8e5d2 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f73fda2-bd7c-40fc-99e9-283e6747b4fc, tcib_managed=true, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:20:49 compute-0 podman[235829]: 2025-10-02 12:20:49.385632743 +0000 UTC m=+0.025417494 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:20:49 compute-0 systemd[1]: Started libpod-conmon-1382028578ef1043cd7adcc7eaaf5688228db5c1798aee6781f3868985a8e5d2.scope.
Oct 02 12:20:49 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:20:49 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/d27a52574d11238e3ea13ca862888145b28a07f953380580ccd1a11e5feb0cbe/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:20:49 compute-0 podman[235829]: 2025-10-02 12:20:49.551804384 +0000 UTC m=+0.191589155 container init 1382028578ef1043cd7adcc7eaaf5688228db5c1798aee6781f3868985a8e5d2 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f73fda2-bd7c-40fc-99e9-283e6747b4fc, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_managed=true, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:20:49 compute-0 podman[235842]: 2025-10-02 12:20:49.551810814 +0000 UTC m=+0.089542482 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_id=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, container_name=ovn_metadata_agent, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']})
Oct 02 12:20:49 compute-0 podman[235844]: 2025-10-02 12:20:49.553667944 +0000 UTC m=+0.090427466 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']})
Oct 02 12:20:49 compute-0 podman[235829]: 2025-10-02 12:20:49.559786462 +0000 UTC m=+0.199571203 container start 1382028578ef1043cd7adcc7eaaf5688228db5c1798aee6781f3868985a8e5d2 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f73fda2-bd7c-40fc-99e9-283e6747b4fc, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0)
Oct 02 12:20:49 compute-0 podman[235843]: 2025-10-02 12:20:49.581957085 +0000 UTC m=+0.120308080 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, container_name=ovn_controller, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_id=ovn_controller, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0)
Oct 02 12:20:49 compute-0 neutron-haproxy-ovnmeta-4f73fda2-bd7c-40fc-99e9-283e6747b4fc[235875]: [NOTICE]   (235914) : New worker (235919) forked
Oct 02 12:20:49 compute-0 neutron-haproxy-ovnmeta-4f73fda2-bd7c-40fc-99e9-283e6747b4fc[235875]: [NOTICE]   (235914) : Loading success.
Oct 02 12:20:50 compute-0 nova_compute[192079]: 2025-10-02 12:20:50.279 2 INFO nova.compute.manager [None req-5c74e805-1f30-4aff-bec4-af0ec917e16a a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Unpausing
Oct 02 12:20:50 compute-0 nova_compute[192079]: 2025-10-02 12:20:50.281 2 DEBUG nova.objects.instance [None req-5c74e805-1f30-4aff-bec4-af0ec917e16a a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Lazy-loading 'flavor' on Instance uuid 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:20:50 compute-0 nova_compute[192079]: 2025-10-02 12:20:50.343 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407650.3427835, 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:20:50 compute-0 nova_compute[192079]: 2025-10-02 12:20:50.344 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] VM Resumed (Lifecycle Event)
Oct 02 12:20:50 compute-0 virtqemud[191807]: argument unsupported: QEMU guest agent is not configured
Oct 02 12:20:50 compute-0 nova_compute[192079]: 2025-10-02 12:20:50.350 2 DEBUG nova.virt.libvirt.guest [None req-5c74e805-1f30-4aff-bec4-af0ec917e16a a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Failed to set time: agent not configured sync_guest_time /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:200
Oct 02 12:20:50 compute-0 nova_compute[192079]: 2025-10-02 12:20:50.350 2 DEBUG nova.compute.manager [None req-5c74e805-1f30-4aff-bec4-af0ec917e16a a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:20:50 compute-0 nova_compute[192079]: 2025-10-02 12:20:50.383 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:20:50 compute-0 nova_compute[192079]: 2025-10-02 12:20:50.386 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: paused, current task_state: unpausing, current DB power_state: 3, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:20:50 compute-0 nova_compute[192079]: 2025-10-02 12:20:50.417 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] During sync_power_state the instance has a pending task (unpausing). Skip.
Oct 02 12:20:50 compute-0 nova_compute[192079]: 2025-10-02 12:20:50.974 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407650.9737, 3475bff3-7ba6-45a3-b8d5-713279fe6342 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:20:50 compute-0 nova_compute[192079]: 2025-10-02 12:20:50.974 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] VM Started (Lifecycle Event)
Oct 02 12:20:50 compute-0 nova_compute[192079]: 2025-10-02 12:20:50.976 2 DEBUG nova.compute.manager [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:20:50 compute-0 nova_compute[192079]: 2025-10-02 12:20:50.979 2 DEBUG nova.virt.libvirt.driver [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:20:50 compute-0 nova_compute[192079]: 2025-10-02 12:20:50.983 2 INFO nova.virt.libvirt.driver [-] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Instance spawned successfully.
Oct 02 12:20:50 compute-0 nova_compute[192079]: 2025-10-02 12:20:50.983 2 DEBUG nova.virt.libvirt.driver [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:20:51 compute-0 nova_compute[192079]: 2025-10-02 12:20:51.009 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:20:51 compute-0 nova_compute[192079]: 2025-10-02 12:20:51.018 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:20:51 compute-0 nova_compute[192079]: 2025-10-02 12:20:51.021 2 DEBUG nova.virt.libvirt.driver [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:20:51 compute-0 nova_compute[192079]: 2025-10-02 12:20:51.021 2 DEBUG nova.virt.libvirt.driver [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:20:51 compute-0 nova_compute[192079]: 2025-10-02 12:20:51.021 2 DEBUG nova.virt.libvirt.driver [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:20:51 compute-0 nova_compute[192079]: 2025-10-02 12:20:51.022 2 DEBUG nova.virt.libvirt.driver [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:20:51 compute-0 nova_compute[192079]: 2025-10-02 12:20:51.022 2 DEBUG nova.virt.libvirt.driver [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:20:51 compute-0 nova_compute[192079]: 2025-10-02 12:20:51.022 2 DEBUG nova.virt.libvirt.driver [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:20:51 compute-0 nova_compute[192079]: 2025-10-02 12:20:51.052 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:20:51 compute-0 nova_compute[192079]: 2025-10-02 12:20:51.053 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407650.9737864, 3475bff3-7ba6-45a3-b8d5-713279fe6342 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:20:51 compute-0 nova_compute[192079]: 2025-10-02 12:20:51.053 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] VM Paused (Lifecycle Event)
Oct 02 12:20:51 compute-0 nova_compute[192079]: 2025-10-02 12:20:51.085 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:20:51 compute-0 nova_compute[192079]: 2025-10-02 12:20:51.088 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407650.9790053, 3475bff3-7ba6-45a3-b8d5-713279fe6342 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:20:51 compute-0 nova_compute[192079]: 2025-10-02 12:20:51.089 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] VM Resumed (Lifecycle Event)
Oct 02 12:20:51 compute-0 nova_compute[192079]: 2025-10-02 12:20:51.121 2 INFO nova.compute.manager [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Took 7.48 seconds to spawn the instance on the hypervisor.
Oct 02 12:20:51 compute-0 nova_compute[192079]: 2025-10-02 12:20:51.122 2 DEBUG nova.compute.manager [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:20:51 compute-0 nova_compute[192079]: 2025-10-02 12:20:51.123 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:20:51 compute-0 nova_compute[192079]: 2025-10-02 12:20:51.129 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:20:51 compute-0 nova_compute[192079]: 2025-10-02 12:20:51.168 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:20:51 compute-0 nova_compute[192079]: 2025-10-02 12:20:51.218 2 INFO nova.compute.manager [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Took 8.04 seconds to build instance.
Oct 02 12:20:51 compute-0 nova_compute[192079]: 2025-10-02 12:20:51.240 2 DEBUG oslo_concurrency.lockutils [None req-57a2c78e-5c4f-4dc1-ae46-317f47961d63 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Lock "3475bff3-7ba6-45a3-b8d5-713279fe6342" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 8.159s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:20:51 compute-0 nova_compute[192079]: 2025-10-02 12:20:51.477 2 DEBUG nova.compute.manager [req-68be92a6-7cb6-40d9-bdfa-0321ec830966 req-55ed1819-e3d3-44e6-97ca-c11fe5b400a9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Received event network-vif-plugged-775299b3-732f-4714-806b-69284ee838e1 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:20:51 compute-0 nova_compute[192079]: 2025-10-02 12:20:51.478 2 DEBUG oslo_concurrency.lockutils [req-68be92a6-7cb6-40d9-bdfa-0321ec830966 req-55ed1819-e3d3-44e6-97ca-c11fe5b400a9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "3475bff3-7ba6-45a3-b8d5-713279fe6342-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:20:51 compute-0 nova_compute[192079]: 2025-10-02 12:20:51.478 2 DEBUG oslo_concurrency.lockutils [req-68be92a6-7cb6-40d9-bdfa-0321ec830966 req-55ed1819-e3d3-44e6-97ca-c11fe5b400a9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "3475bff3-7ba6-45a3-b8d5-713279fe6342-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:20:51 compute-0 nova_compute[192079]: 2025-10-02 12:20:51.478 2 DEBUG oslo_concurrency.lockutils [req-68be92a6-7cb6-40d9-bdfa-0321ec830966 req-55ed1819-e3d3-44e6-97ca-c11fe5b400a9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "3475bff3-7ba6-45a3-b8d5-713279fe6342-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:20:51 compute-0 nova_compute[192079]: 2025-10-02 12:20:51.478 2 DEBUG nova.compute.manager [req-68be92a6-7cb6-40d9-bdfa-0321ec830966 req-55ed1819-e3d3-44e6-97ca-c11fe5b400a9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] No waiting events found dispatching network-vif-plugged-775299b3-732f-4714-806b-69284ee838e1 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:20:51 compute-0 nova_compute[192079]: 2025-10-02 12:20:51.479 2 WARNING nova.compute.manager [req-68be92a6-7cb6-40d9-bdfa-0321ec830966 req-55ed1819-e3d3-44e6-97ca-c11fe5b400a9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Received unexpected event network-vif-plugged-775299b3-732f-4714-806b-69284ee838e1 for instance with vm_state active and task_state None.
Oct 02 12:20:51 compute-0 nova_compute[192079]: 2025-10-02 12:20:51.710 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:52 compute-0 nova_compute[192079]: 2025-10-02 12:20:52.426 2 DEBUG nova.compute.manager [None req-ecafaed4-c2e0-43ad-a298-040014234fd3 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Getting vnc console get_vnc_console /usr/lib/python3.9/site-packages/nova/compute/manager.py:7196
Oct 02 12:20:52 compute-0 nova_compute[192079]: 2025-10-02 12:20:52.648 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.049 2 DEBUG nova.compute.manager [None req-2ea986d2-6452-4b77-af7c-fd8b8434655c 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Getting vnc console get_vnc_console /usr/lib/python3.9/site-packages/nova/compute/manager.py:7196
Oct 02 12:20:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:53.469 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=25, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=24) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.472 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:53.472 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 6 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.507 2 DEBUG oslo_concurrency.lockutils [None req-b4bda8f8-37e0-4f39-a9f7-e11b7f31d9a5 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Acquiring lock "3475bff3-7ba6-45a3-b8d5-713279fe6342" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.508 2 DEBUG oslo_concurrency.lockutils [None req-b4bda8f8-37e0-4f39-a9f7-e11b7f31d9a5 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Lock "3475bff3-7ba6-45a3-b8d5-713279fe6342" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.509 2 DEBUG oslo_concurrency.lockutils [None req-b4bda8f8-37e0-4f39-a9f7-e11b7f31d9a5 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Acquiring lock "3475bff3-7ba6-45a3-b8d5-713279fe6342-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.509 2 DEBUG oslo_concurrency.lockutils [None req-b4bda8f8-37e0-4f39-a9f7-e11b7f31d9a5 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Lock "3475bff3-7ba6-45a3-b8d5-713279fe6342-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.510 2 DEBUG oslo_concurrency.lockutils [None req-b4bda8f8-37e0-4f39-a9f7-e11b7f31d9a5 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Lock "3475bff3-7ba6-45a3-b8d5-713279fe6342-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.528 2 INFO nova.compute.manager [None req-b4bda8f8-37e0-4f39-a9f7-e11b7f31d9a5 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Terminating instance
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.547 2 DEBUG nova.compute.manager [None req-b4bda8f8-37e0-4f39-a9f7-e11b7f31d9a5 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:20:53 compute-0 kernel: tap775299b3-73 (unregistering): left promiscuous mode
Oct 02 12:20:53 compute-0 NetworkManager[51160]: <info>  [1759407653.5741] device (tap775299b3-73): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:20:53 compute-0 ovn_controller[94336]: 2025-10-02T12:20:53Z|00346|binding|INFO|Releasing lport 775299b3-732f-4714-806b-69284ee838e1 from this chassis (sb_readonly=0)
Oct 02 12:20:53 compute-0 ovn_controller[94336]: 2025-10-02T12:20:53Z|00347|binding|INFO|Setting lport 775299b3-732f-4714-806b-69284ee838e1 down in Southbound
Oct 02 12:20:53 compute-0 ovn_controller[94336]: 2025-10-02T12:20:53Z|00348|binding|INFO|Removing iface tap775299b3-73 ovn-installed in OVS
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.588 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.589 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:53.597 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:97:c5:4c 10.100.0.9'], port_security=['fa:16:3e:97:c5:4c 10.100.0.9'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.9/28', 'neutron:device_id': '3475bff3-7ba6-45a3-b8d5-713279fe6342', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-4f73fda2-bd7c-40fc-99e9-283e6747b4fc', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '8f85658eae934a26bd1246a0eecfb714', 'neutron:revision_number': '4', 'neutron:security_group_ids': '6fc7061a-8bc2-4671-ab0e-4eda62624360', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=5dc75b7b-cea7-4052-961e-9277fc067f9a, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=775299b3-732f-4714-806b-69284ee838e1) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.599 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:53.600 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 775299b3-732f-4714-806b-69284ee838e1 in datapath 4f73fda2-bd7c-40fc-99e9-283e6747b4fc unbound from our chassis
Oct 02 12:20:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:53.604 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 4f73fda2-bd7c-40fc-99e9-283e6747b4fc, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:20:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:53.608 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[155b6d23-c40b-4ffa-b1e2-91ed387471e8]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:20:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:53.608 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-4f73fda2-bd7c-40fc-99e9-283e6747b4fc namespace which is not needed anymore
Oct 02 12:20:53 compute-0 systemd[1]: machine-qemu\x2d48\x2dinstance\x2d0000006e.scope: Deactivated successfully.
Oct 02 12:20:53 compute-0 systemd[1]: machine-qemu\x2d48\x2dinstance\x2d0000006e.scope: Consumed 4.753s CPU time.
Oct 02 12:20:53 compute-0 systemd-machined[152150]: Machine qemu-48-instance-0000006e terminated.
Oct 02 12:20:53 compute-0 neutron-haproxy-ovnmeta-4f73fda2-bd7c-40fc-99e9-283e6747b4fc[235875]: [NOTICE]   (235914) : haproxy version is 2.8.14-c23fe91
Oct 02 12:20:53 compute-0 neutron-haproxy-ovnmeta-4f73fda2-bd7c-40fc-99e9-283e6747b4fc[235875]: [NOTICE]   (235914) : path to executable is /usr/sbin/haproxy
Oct 02 12:20:53 compute-0 neutron-haproxy-ovnmeta-4f73fda2-bd7c-40fc-99e9-283e6747b4fc[235875]: [WARNING]  (235914) : Exiting Master process...
Oct 02 12:20:53 compute-0 neutron-haproxy-ovnmeta-4f73fda2-bd7c-40fc-99e9-283e6747b4fc[235875]: [ALERT]    (235914) : Current worker (235919) exited with code 143 (Terminated)
Oct 02 12:20:53 compute-0 neutron-haproxy-ovnmeta-4f73fda2-bd7c-40fc-99e9-283e6747b4fc[235875]: [WARNING]  (235914) : All workers exited. Exiting... (0)
Oct 02 12:20:53 compute-0 systemd[1]: libpod-1382028578ef1043cd7adcc7eaaf5688228db5c1798aee6781f3868985a8e5d2.scope: Deactivated successfully.
Oct 02 12:20:53 compute-0 podman[235959]: 2025-10-02 12:20:53.772491738 +0000 UTC m=+0.056541812 container died 1382028578ef1043cd7adcc7eaaf5688228db5c1798aee6781f3868985a8e5d2 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f73fda2-bd7c-40fc-99e9-283e6747b4fc, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0)
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.814 2 DEBUG nova.compute.manager [req-19cb1842-fd98-408e-a7cc-531293c420e4 req-faecbfa7-7ffd-4417-a189-efd81e43cd9c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Received event network-vif-unplugged-775299b3-732f-4714-806b-69284ee838e1 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.815 2 DEBUG oslo_concurrency.lockutils [req-19cb1842-fd98-408e-a7cc-531293c420e4 req-faecbfa7-7ffd-4417-a189-efd81e43cd9c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "3475bff3-7ba6-45a3-b8d5-713279fe6342-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.815 2 DEBUG oslo_concurrency.lockutils [req-19cb1842-fd98-408e-a7cc-531293c420e4 req-faecbfa7-7ffd-4417-a189-efd81e43cd9c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "3475bff3-7ba6-45a3-b8d5-713279fe6342-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.816 2 DEBUG oslo_concurrency.lockutils [req-19cb1842-fd98-408e-a7cc-531293c420e4 req-faecbfa7-7ffd-4417-a189-efd81e43cd9c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "3475bff3-7ba6-45a3-b8d5-713279fe6342-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.816 2 DEBUG nova.compute.manager [req-19cb1842-fd98-408e-a7cc-531293c420e4 req-faecbfa7-7ffd-4417-a189-efd81e43cd9c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] No waiting events found dispatching network-vif-unplugged-775299b3-732f-4714-806b-69284ee838e1 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.816 2 DEBUG nova.compute.manager [req-19cb1842-fd98-408e-a7cc-531293c420e4 req-faecbfa7-7ffd-4417-a189-efd81e43cd9c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Received event network-vif-unplugged-775299b3-732f-4714-806b-69284ee838e1 for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.817 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:53 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-1382028578ef1043cd7adcc7eaaf5688228db5c1798aee6781f3868985a8e5d2-userdata-shm.mount: Deactivated successfully.
Oct 02 12:20:53 compute-0 systemd[1]: var-lib-containers-storage-overlay-d27a52574d11238e3ea13ca862888145b28a07f953380580ccd1a11e5feb0cbe-merged.mount: Deactivated successfully.
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.860 2 INFO nova.virt.libvirt.driver [-] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Instance destroyed successfully.
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.861 2 DEBUG nova.objects.instance [None req-b4bda8f8-37e0-4f39-a9f7-e11b7f31d9a5 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Lazy-loading 'resources' on Instance uuid 3475bff3-7ba6-45a3-b8d5-713279fe6342 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.878 2 DEBUG nova.virt.libvirt.vif [None req-b4bda8f8-37e0-4f39-a9f7-e11b7f31d9a5 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:20:42Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-NoVNCConsoleTestJSON-server-1020715920',display_name='tempest-NoVNCConsoleTestJSON-server-1020715920',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-novncconsoletestjson-server-1020715920',id=110,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:20:51Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='8f85658eae934a26bd1246a0eecfb714',ramdisk_id='',reservation_id='r-xkm9nct5',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='
1',image_min_ram='0',owner_project_name='tempest-NoVNCConsoleTestJSON-14614965',owner_user_name='tempest-NoVNCConsoleTestJSON-14614965-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:20:51Z,user_data=None,user_id='35fdeaf3813244da8a6293d0c73bd4ce',uuid=3475bff3-7ba6-45a3-b8d5-713279fe6342,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "775299b3-732f-4714-806b-69284ee838e1", "address": "fa:16:3e:97:c5:4c", "network": {"id": "4f73fda2-bd7c-40fc-99e9-283e6747b4fc", "bridge": "br-int", "label": "tempest-NoVNCConsoleTestJSON-1966512610-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "8f85658eae934a26bd1246a0eecfb714", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap775299b3-73", "ovs_interfaceid": "775299b3-732f-4714-806b-69284ee838e1", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.878 2 DEBUG nova.network.os_vif_util [None req-b4bda8f8-37e0-4f39-a9f7-e11b7f31d9a5 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Converting VIF {"id": "775299b3-732f-4714-806b-69284ee838e1", "address": "fa:16:3e:97:c5:4c", "network": {"id": "4f73fda2-bd7c-40fc-99e9-283e6747b4fc", "bridge": "br-int", "label": "tempest-NoVNCConsoleTestJSON-1966512610-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "8f85658eae934a26bd1246a0eecfb714", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap775299b3-73", "ovs_interfaceid": "775299b3-732f-4714-806b-69284ee838e1", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.879 2 DEBUG nova.network.os_vif_util [None req-b4bda8f8-37e0-4f39-a9f7-e11b7f31d9a5 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:97:c5:4c,bridge_name='br-int',has_traffic_filtering=True,id=775299b3-732f-4714-806b-69284ee838e1,network=Network(4f73fda2-bd7c-40fc-99e9-283e6747b4fc),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap775299b3-73') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.879 2 DEBUG os_vif [None req-b4bda8f8-37e0-4f39-a9f7-e11b7f31d9a5 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:97:c5:4c,bridge_name='br-int',has_traffic_filtering=True,id=775299b3-732f-4714-806b-69284ee838e1,network=Network(4f73fda2-bd7c-40fc-99e9-283e6747b4fc),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap775299b3-73') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.883 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.883 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap775299b3-73, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.885 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.887 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.887 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:53 compute-0 podman[235959]: 2025-10-02 12:20:53.888944883 +0000 UTC m=+0.172994957 container cleanup 1382028578ef1043cd7adcc7eaaf5688228db5c1798aee6781f3868985a8e5d2 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f73fda2-bd7c-40fc-99e9-283e6747b4fc, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true)
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.890 2 INFO os_vif [None req-b4bda8f8-37e0-4f39-a9f7-e11b7f31d9a5 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:97:c5:4c,bridge_name='br-int',has_traffic_filtering=True,id=775299b3-732f-4714-806b-69284ee838e1,network=Network(4f73fda2-bd7c-40fc-99e9-283e6747b4fc),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap775299b3-73')
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.891 2 INFO nova.virt.libvirt.driver [None req-b4bda8f8-37e0-4f39-a9f7-e11b7f31d9a5 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Deleting instance files /var/lib/nova/instances/3475bff3-7ba6-45a3-b8d5-713279fe6342_del
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.892 2 INFO nova.virt.libvirt.driver [None req-b4bda8f8-37e0-4f39-a9f7-e11b7f31d9a5 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Deletion of /var/lib/nova/instances/3475bff3-7ba6-45a3-b8d5-713279fe6342_del complete
Oct 02 12:20:53 compute-0 systemd[1]: libpod-conmon-1382028578ef1043cd7adcc7eaaf5688228db5c1798aee6781f3868985a8e5d2.scope: Deactivated successfully.
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.968 2 INFO nova.compute.manager [None req-b4bda8f8-37e0-4f39-a9f7-e11b7f31d9a5 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Took 0.42 seconds to destroy the instance on the hypervisor.
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.968 2 DEBUG oslo.service.loopingcall [None req-b4bda8f8-37e0-4f39-a9f7-e11b7f31d9a5 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.968 2 DEBUG nova.compute.manager [-] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.969 2 DEBUG nova.network.neutron [-] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:20:53 compute-0 podman[236003]: 2025-10-02 12:20:53.971330769 +0000 UTC m=+0.053371756 container remove 1382028578ef1043cd7adcc7eaaf5688228db5c1798aee6781f3868985a8e5d2 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-4f73fda2-bd7c-40fc-99e9-283e6747b4fc, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0)
Oct 02 12:20:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:53.975 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[44075332-3426-49bf-80ef-7ea438377c92]: (4, ('Thu Oct  2 12:20:53 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-4f73fda2-bd7c-40fc-99e9-283e6747b4fc (1382028578ef1043cd7adcc7eaaf5688228db5c1798aee6781f3868985a8e5d2)\n1382028578ef1043cd7adcc7eaaf5688228db5c1798aee6781f3868985a8e5d2\nThu Oct  2 12:20:53 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-4f73fda2-bd7c-40fc-99e9-283e6747b4fc (1382028578ef1043cd7adcc7eaaf5688228db5c1798aee6781f3868985a8e5d2)\n1382028578ef1043cd7adcc7eaaf5688228db5c1798aee6781f3868985a8e5d2\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:20:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:53.977 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8c6c9f94-b97a-4471-bb70-b037956a2e77]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:20:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:53.978 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap4f73fda2-b0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.979 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:53 compute-0 kernel: tap4f73fda2-b0: left promiscuous mode
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.981 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:53.987 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6aaf1746-2581-4bec-8d15-8f4334cd49ff]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:20:53 compute-0 nova_compute[192079]: 2025-10-02 12:20:53.994 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:54.020 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1c504267-ace0-4b8b-9ee8-3c1f54dd685a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:20:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:54.021 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4a274272-0ff4-4d83-8055-ba87ac7613b4]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:20:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:54.041 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4053dfa0-59e3-464c-8352-b1adcef8a847]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 564642, 'reachable_time': 26213, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 236018, 'error': None, 'target': 'ovnmeta-4f73fda2-bd7c-40fc-99e9-283e6747b4fc', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:20:54 compute-0 systemd[1]: run-netns-ovnmeta\x2d4f73fda2\x2dbd7c\x2d40fc\x2d99e9\x2d283e6747b4fc.mount: Deactivated successfully.
Oct 02 12:20:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:54.044 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-4f73fda2-bd7c-40fc-99e9-283e6747b4fc deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:20:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:54.044 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[f51e33f8-9696-4e9d-8783-29d09ac00c44]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:20:54 compute-0 nova_compute[192079]: 2025-10-02 12:20:54.831 2 DEBUG nova.network.neutron [-] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:20:54 compute-0 nova_compute[192079]: 2025-10-02 12:20:54.851 2 INFO nova.compute.manager [-] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Took 0.88 seconds to deallocate network for instance.
Oct 02 12:20:54 compute-0 nova_compute[192079]: 2025-10-02 12:20:54.921 2 DEBUG nova.compute.manager [req-46b36775-743c-41a4-be21-7d94bc41f8d0 req-051a52cb-c0ba-4103-8702-e28eeced5b20 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Received event network-vif-deleted-775299b3-732f-4714-806b-69284ee838e1 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:20:54 compute-0 nova_compute[192079]: 2025-10-02 12:20:54.928 2 DEBUG oslo_concurrency.lockutils [None req-b4bda8f8-37e0-4f39-a9f7-e11b7f31d9a5 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:20:54 compute-0 nova_compute[192079]: 2025-10-02 12:20:54.929 2 DEBUG oslo_concurrency.lockutils [None req-b4bda8f8-37e0-4f39-a9f7-e11b7f31d9a5 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:20:55 compute-0 nova_compute[192079]: 2025-10-02 12:20:55.023 2 DEBUG nova.compute.provider_tree [None req-b4bda8f8-37e0-4f39-a9f7-e11b7f31d9a5 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:20:55 compute-0 nova_compute[192079]: 2025-10-02 12:20:55.039 2 DEBUG nova.scheduler.client.report [None req-b4bda8f8-37e0-4f39-a9f7-e11b7f31d9a5 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:20:55 compute-0 nova_compute[192079]: 2025-10-02 12:20:55.056 2 DEBUG oslo_concurrency.lockutils [None req-b4bda8f8-37e0-4f39-a9f7-e11b7f31d9a5 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.127s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:20:55 compute-0 nova_compute[192079]: 2025-10-02 12:20:55.083 2 INFO nova.scheduler.client.report [None req-b4bda8f8-37e0-4f39-a9f7-e11b7f31d9a5 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Deleted allocations for instance 3475bff3-7ba6-45a3-b8d5-713279fe6342
Oct 02 12:20:55 compute-0 nova_compute[192079]: 2025-10-02 12:20:55.186 2 DEBUG oslo_concurrency.lockutils [None req-b4bda8f8-37e0-4f39-a9f7-e11b7f31d9a5 35fdeaf3813244da8a6293d0c73bd4ce 8f85658eae934a26bd1246a0eecfb714 - - default default] Lock "3475bff3-7ba6-45a3-b8d5-713279fe6342" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.677s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:20:55 compute-0 nova_compute[192079]: 2025-10-02 12:20:55.930 2 DEBUG nova.compute.manager [req-30bffa68-36b2-49fb-8b29-092622c8c933 req-69713fb1-6a0d-4245-a4b1-7e8a530e8d24 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Received event network-vif-plugged-775299b3-732f-4714-806b-69284ee838e1 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:20:55 compute-0 nova_compute[192079]: 2025-10-02 12:20:55.930 2 DEBUG oslo_concurrency.lockutils [req-30bffa68-36b2-49fb-8b29-092622c8c933 req-69713fb1-6a0d-4245-a4b1-7e8a530e8d24 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "3475bff3-7ba6-45a3-b8d5-713279fe6342-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:20:55 compute-0 nova_compute[192079]: 2025-10-02 12:20:55.931 2 DEBUG oslo_concurrency.lockutils [req-30bffa68-36b2-49fb-8b29-092622c8c933 req-69713fb1-6a0d-4245-a4b1-7e8a530e8d24 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "3475bff3-7ba6-45a3-b8d5-713279fe6342-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:20:55 compute-0 nova_compute[192079]: 2025-10-02 12:20:55.931 2 DEBUG oslo_concurrency.lockutils [req-30bffa68-36b2-49fb-8b29-092622c8c933 req-69713fb1-6a0d-4245-a4b1-7e8a530e8d24 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "3475bff3-7ba6-45a3-b8d5-713279fe6342-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:20:55 compute-0 nova_compute[192079]: 2025-10-02 12:20:55.931 2 DEBUG nova.compute.manager [req-30bffa68-36b2-49fb-8b29-092622c8c933 req-69713fb1-6a0d-4245-a4b1-7e8a530e8d24 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] No waiting events found dispatching network-vif-plugged-775299b3-732f-4714-806b-69284ee838e1 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:20:55 compute-0 nova_compute[192079]: 2025-10-02 12:20:55.931 2 WARNING nova.compute.manager [req-30bffa68-36b2-49fb-8b29-092622c8c933 req-69713fb1-6a0d-4245-a4b1-7e8a530e8d24 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Received unexpected event network-vif-plugged-775299b3-732f-4714-806b-69284ee838e1 for instance with vm_state deleted and task_state None.
Oct 02 12:20:56 compute-0 nova_compute[192079]: 2025-10-02 12:20:56.711 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:57 compute-0 podman[236020]: 2025-10-02 12:20:57.16187515 +0000 UTC m=+0.071464659 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm, container_name=ceilometer_agent_compute, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, 
io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.schema-version=1.0)
Oct 02 12:20:58 compute-0 nova_compute[192079]: 2025-10-02 12:20:58.886 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:20:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:20:59.475 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '25'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:21:00 compute-0 ovn_controller[94336]: 2025-10-02T12:21:00Z|00349|binding|INFO|Releasing lport a5eb523a-b004-42b7-a3f6-24b2514f40bf from this chassis (sb_readonly=0)
Oct 02 12:21:00 compute-0 nova_compute[192079]: 2025-10-02 12:21:00.704 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:01 compute-0 anacron[1094]: Job `cron.monthly' started
Oct 02 12:21:01 compute-0 anacron[1094]: Job `cron.monthly' terminated
Oct 02 12:21:01 compute-0 anacron[1094]: Normal exit (3 jobs run)
Oct 02 12:21:01 compute-0 nova_compute[192079]: 2025-10-02 12:21:01.741 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:02.220 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:21:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:02.221 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:21:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:02.222 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:21:03 compute-0 nova_compute[192079]: 2025-10-02 12:21:03.925 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:04 compute-0 podman[236045]: 2025-10-02 12:21:04.164890857 +0000 UTC m=+0.067506421 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, container_name=multipathd, managed_by=edpm_ansible, tcib_managed=true, config_id=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0)
Oct 02 12:21:04 compute-0 podman[236044]: 2025-10-02 12:21:04.166027508 +0000 UTC m=+0.064738576 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, build-date=2025-08-20T13:12:41, container_name=openstack_network_exporter, com.redhat.component=ubi9-minimal-container, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., name=ubi9-minimal, maintainer=Red Hat, Inc., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.openshift.tags=minimal rhel9, io.buildah.version=1.33.7, io.openshift.expose-services=, url=https://catalog.redhat.com/en/search?searchType=containers, distribution-scope=public, release=1755695350, vcs-type=git, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vendor=Red Hat, Inc., config_id=edpm, managed_by=edpm_ansible, architecture=x86_64, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, version=9.6, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI)
Oct 02 12:21:06 compute-0 nova_compute[192079]: 2025-10-02 12:21:06.744 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:08 compute-0 nova_compute[192079]: 2025-10-02 12:21:08.858 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759407653.857361, 3475bff3-7ba6-45a3-b8d5-713279fe6342 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:21:08 compute-0 nova_compute[192079]: 2025-10-02 12:21:08.859 2 INFO nova.compute.manager [-] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] VM Stopped (Lifecycle Event)
Oct 02 12:21:08 compute-0 nova_compute[192079]: 2025-10-02 12:21:08.893 2 DEBUG nova.compute.manager [None req-79382d07-d9f1-42a3-b603-916255e7cf7f - - - - - -] [instance: 3475bff3-7ba6-45a3-b8d5-713279fe6342] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:21:08 compute-0 nova_compute[192079]: 2025-10-02 12:21:08.929 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:11 compute-0 podman[236082]: 2025-10-02 12:21:11.153763407 +0000 UTC m=+0.067520191 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>)
Oct 02 12:21:11 compute-0 podman[236083]: 2025-10-02 12:21:11.168496519 +0000 UTC m=+0.070360790 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=iscsid, container_name=iscsid, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:21:11 compute-0 nova_compute[192079]: 2025-10-02 12:21:11.745 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:13 compute-0 nova_compute[192079]: 2025-10-02 12:21:13.932 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:16 compute-0 nova_compute[192079]: 2025-10-02 12:21:16.747 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:16 compute-0 nova_compute[192079]: 2025-10-02 12:21:16.950 2 DEBUG oslo_concurrency.lockutils [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Acquiring lock "35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:21:16 compute-0 nova_compute[192079]: 2025-10-02 12:21:16.951 2 DEBUG oslo_concurrency.lockutils [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Lock "35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c" acquired by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:21:16 compute-0 nova_compute[192079]: 2025-10-02 12:21:16.951 2 INFO nova.compute.manager [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Shelving
Oct 02 12:21:16 compute-0 nova_compute[192079]: 2025-10-02 12:21:16.998 2 DEBUG nova.virt.libvirt.driver [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Shutting down instance from state 1 _clean_shutdown /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4071
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.107 12 DEBUG ceilometer.compute.discovery [-] instance data: {'id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'os_type': 'hvm', 'architecture': 'x86_64', 'OS-EXT-SRV-ATTR:instance_name': 'instance-00000065', 'OS-EXT-SRV-ATTR:host': 'compute-0.ctlplane.example.com', 'OS-EXT-STS:vm_state': 'running', 'tenant_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'user_id': 'a803afe9939346088252c3b944f124f2', 'hostId': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'status': 'active', 'metadata': {}} discover_libvirt_polling /usr/lib/python3.9/site-packages/ceilometer/compute/discovery.py:228
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.108 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes.rate in the context of pollsters
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.108 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for IncomingBytesRatePollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.108 12 ERROR ceilometer.polling.manager [-] Prevent pollster network.incoming.bytes.rate from polling [<NovaLikeServer: tempest-ServersNegativeTestJSON-server-1170653470>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-ServersNegativeTestJSON-server-1170653470>]
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.109 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.bytes in the context of pollsters
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.141 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk.device.read.bytes volume: 30845440 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.142 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk.device.read.bytes volume: 274750 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '56f7f112-9a85-4746-b890-dd89d1dfae30', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 30845440, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': None, 'resource_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-vda', 'timestamp': '2025-10-02T12:21:17.109571', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'instance-00000065', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '4b9dd958-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.796682573, 'message_signature': '64d0a6d2b85c09cce9c9aa8d03c4bb4d9696f7424f200aa466fa5784586aeedf'}, {'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 274750, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': None, 
'resource_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-sda', 'timestamp': '2025-10-02T12:21:17.109571', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'instance-00000065', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '4b9df122-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.796682573, 'message_signature': 'fa0f06e385496f55e9b5d6da4e1043c273c6af95f7e0c4c910e446fe8aecc80a'}]}, 'timestamp': '2025-10-02 12:21:17.143317', '_unique_id': 'a69da7823b2c46588a8c53dc70c2aadb'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.145 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.146 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes.delta in the context of pollsters
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.151 12 DEBUG ceilometer.compute.virt.libvirt.inspector [-] No delta meter predecessor for 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c / tap0c328734-eb inspect_vnics /usr/lib/python3.9/site-packages/ceilometer/compute/virt/libvirt/inspector.py:136
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.151 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/network.incoming.bytes.delta volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '984c70bc-28e9-4f8b-ac8e-217c36498c29', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.bytes.delta', 'counter_type': 'delta', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': None, 'resource_id': 'instance-00000065-35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-tap0c328734-eb', 'timestamp': '2025-10-02T12:21:17.146979', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'tap0c328734-eb', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:ef:e3:79', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap0c328734-eb'}, 'message_id': '4b9f4cde-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.834134694, 'message_signature': 'd6c1a98014f32c91074c69d7bd692a355868bc077ba871955a96d02c3ab41299'}]}, 'timestamp': '2025-10-02 12:21:17.152370', '_unique_id': '3df41718380a4396a1e60de514934088'}: kombu.exceptions.OperationalError: [Errno 111] Connection 
refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.153 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.155 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.capacity in the context of pollsters
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.173 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk.device.capacity volume: 1073741824 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.174 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk.device.capacity volume: 485376 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'fa7ba4f3-1554-4af4-87b9-528355f02cad', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 1073741824, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': None, 'resource_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-vda', 'timestamp': '2025-10-02T12:21:17.155665', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'instance-00000065', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '4ba2a83e-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.842780219, 'message_signature': 'd0c6de6ae8bb17576246cecfa221b55f7b6d0e868b2526638f64ce0a96a7884f'}, {'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 485376, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': None, 'resource_id': 
'35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-sda', 'timestamp': '2025-10-02T12:21:17.155665', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'instance-00000065', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '4ba2b8ce-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.842780219, 'message_signature': 'dc3545b43e662a9197261d3f2f25756b53510835b2b8c024820efed88127c6fa'}]}, 'timestamp': '2025-10-02 12:21:17.174541', '_unique_id': 'c961c4e9b85b4f6e8aefe957e5507b72'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.175 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.176 12 INFO ceilometer.polling.manager [-] Polling pollster cpu in the context of pollsters
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.200 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/cpu volume: 12500000000 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'fd57b044-a553-4918-9720-a754b1524ece', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'cpu', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 12500000000, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': None, 'resource_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'timestamp': '2025-10-02T12:21:17.176739', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'instance-00000065', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'cpu_number': 1}, 'message_id': '4ba6b384-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.887168549, 'message_signature': '7e708fc4e563010ecd4191813f0d713af26cdaf4e52b83f18d0687796bca7c43'}]}, 'timestamp': '2025-10-02 12:21:17.200706', '_unique_id': 'fb2a3b65d1864896bf4a184cfe641faf'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.201 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.202 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets in the context of pollsters
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.202 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/network.incoming.packets volume: 11 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '73a6185a-9c2a-4624-990d-9c97992aef32', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 11, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': None, 'resource_id': 'instance-00000065-35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-tap0c328734-eb', 'timestamp': '2025-10-02T12:21:17.202816', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'tap0c328734-eb', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:ef:e3:79', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap0c328734-eb'}, 'message_id': '4ba7178e-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.834134694, 'message_signature': '98fbb777952e0066855602c64a144e447779b3704256614ce0726992a708293d'}]}, 'timestamp': '2025-10-02 12:21:17.203214', '_unique_id': 'c6bc9fa17e4846bca9b6cb77d47a41d8'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.203 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.204 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.iops in the context of pollsters
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.205 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for PerDeviceDiskIOPSPollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.205 12 ERROR ceilometer.polling.manager [-] Prevent pollster disk.device.iops from polling [<NovaLikeServer: tempest-ServersNegativeTestJSON-server-1170653470>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-ServersNegativeTestJSON-server-1170653470>]
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.205 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes in the context of pollsters
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.205 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/network.incoming.bytes volume: 1652 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '633e9757-7e5c-4d3f-a496-7747b7b09715', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 1652, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': None, 'resource_id': 'instance-00000065-35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-tap0c328734-eb', 'timestamp': '2025-10-02T12:21:17.205615', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'tap0c328734-eb', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:ef:e3:79', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap0c328734-eb'}, 'message_id': '4ba784ee-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.834134694, 'message_signature': '1d59e92ea9771522f481b9bf22ec06870de007959de1cfdadb84c75dcc11b0dc'}]}, 'timestamp': '2025-10-02 12:21:17.206066', '_unique_id': 'cbcb62018e394be88678ab2fd04ef40d'}: kombu.exceptions.OperationalError: [Errno 111] Connection 
refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.206 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.207 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.bytes in the context of pollsters
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.207 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk.device.write.bytes volume: 73105408 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.208 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk.device.write.bytes volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'a92c5151-eee1-4466-9efa-5a2e3ff531ae', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 73105408, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': None, 'resource_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-vda', 'timestamp': '2025-10-02T12:21:17.207715', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'instance-00000065', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '4ba7d4f8-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.796682573, 'message_signature': 'a4b9165c06b1222ce8316402d16888f543420f05821156fcac96c7b7b23f708e'}, {'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': None, 
'resource_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-sda', 'timestamp': '2025-10-02T12:21:17.207715', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'instance-00000065', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '4ba7e0e2-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.796682573, 'message_signature': '2ed3d2eaf8d4041a42f9b121cfcb30e8a8005a4c934a80b0867bb7636ae37a25'}]}, 'timestamp': '2025-10-02 12:21:17.208401', '_unique_id': '2c4b96d35ca64b4e84b4c38c223a93af'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.209 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets in the context of pollsters
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.210 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/network.outgoing.packets volume: 16 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'f1171d91-41bb-452d-9f6e-7bdb5ec5f5d0', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 16, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': None, 'resource_id': 'instance-00000065-35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-tap0c328734-eb', 'timestamp': '2025-10-02T12:21:17.210084', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'tap0c328734-eb', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:ef:e3:79', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap0c328734-eb'}, 'message_id': '4ba831c8-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.834134694, 'message_signature': 'ad8c0cdf67fc375ee1237a6c44fb8f9ba3844fead8f14f9154559f7f52ed77f2'}]}, 'timestamp': '2025-10-02 12:21:17.210412', '_unique_id': '74a3d67adfed443cb66f1aa26ec64b45'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.211 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.212 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.usage in the context of pollsters
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.212 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk.device.usage volume: 29949952 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.212 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk.device.usage volume: 485376 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '280c47bb-67bd-4bca-9413-86c77613edba', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 29949952, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': None, 'resource_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-vda', 'timestamp': '2025-10-02T12:21:17.212166', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'instance-00000065', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '4ba883bc-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.842780219, 'message_signature': '242112aa8ba66614145fd19aa8a441ffaaafee0282331aae64a68caef133de55'}, {'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 485376, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': None, 'resource_id': 
'35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-sda', 'timestamp': '2025-10-02T12:21:17.212166', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'instance-00000065', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '4ba88fd8-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.842780219, 'message_signature': 'a5eb2ce8190903daeacd063cab849db81d555fdc515bdf22915c1e365c2e241a'}]}, 'timestamp': '2025-10-02 12:21:17.212852', '_unique_id': 'bdda3aeddc094902985d9436f440b725'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.213 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.214 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.latency in the context of pollsters
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.214 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk.device.write.latency volume: 93070909774 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.214 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk.device.write.latency volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '82ce1e6a-f98f-463d-966c-7d77db6d40ea', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 93070909774, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': None, 'resource_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-vda', 'timestamp': '2025-10-02T12:21:17.214488', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'instance-00000065', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '4ba8dd30-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.796682573, 'message_signature': '378d60b6c359b093b620567f08e7785f1ace8415c2eb0171d4086012abb271c4'}, {'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 0, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': 
None, 'resource_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-sda', 'timestamp': '2025-10-02T12:21:17.214488', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'instance-00000065', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '4ba8e7f8-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.796682573, 'message_signature': '422c2cd6e99b0d2e96cd055db1b58294dc7f87c42ece9df4bee4b62ac618d37c'}]}, 'timestamp': '2025-10-02 12:21:17.215106', '_unique_id': '9a841636e6484dcbb797657f104961e2'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.215 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.216 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets.error in the context of pollsters
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.216 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/network.outgoing.packets.error volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '3be0bbed-e1ef-4d79-8c82-b6cabe8c09b6', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets.error', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': None, 'resource_id': 'instance-00000065-35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-tap0c328734-eb', 'timestamp': '2025-10-02T12:21:17.216724', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'tap0c328734-eb', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:ef:e3:79', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap0c328734-eb'}, 'message_id': '4ba934ba-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.834134694, 'message_signature': '9fdbde76b89ced3cfa1fa949ce64bcdddc18c77210f2c567cbd5eb8dd9a3a5c6'}]}, 'timestamp': '2025-10-02 12:21:17.217062', '_unique_id': '3417e2fa1d834e9488bbe371b8a96c57'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.217 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.218 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.allocation in the context of pollsters
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.218 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk.device.allocation volume: 30154752 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.218 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk.device.allocation volume: 487424 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '9798c453-8b55-4216-b9a1-14b675c371d2', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 30154752, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': None, 'resource_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-vda', 'timestamp': '2025-10-02T12:21:17.218504', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'instance-00000065', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '4ba979fc-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.842780219, 'message_signature': '79edc58426f13373aa4dd67f397efdafb61d9c6895a3f774cd14065157f64521'}, {'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 487424, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': None, 'resource_id': 
'35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-sda', 'timestamp': '2025-10-02T12:21:17.218504', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'instance-00000065', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '4ba98488-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.842780219, 'message_signature': 'a43a5d05ff9b4a6470c0c6aa6d46b177f609ad0d2f086b91ddee4fe32394a262'}]}, 'timestamp': '2025-10-02 12:21:17.219083', '_unique_id': 'd0f00ae20fba4bdc93023cbc74ac97c9'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.219 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.220 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.requests in the context of pollsters
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.220 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk.device.write.requests volume: 347 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.221 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk.device.write.requests volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '6b03c2b2-87ee-48cf-99ee-6edbf7564436', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 347, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': None, 'resource_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-vda', 'timestamp': '2025-10-02T12:21:17.220685', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'instance-00000065', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '4ba9cf56-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.796682573, 'message_signature': 'a83e1f02d59d2b40d5a949857044348e071084d66ab146de86379c7ac128016b'}, {'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 0, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': 
None, 'resource_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-sda', 'timestamp': '2025-10-02T12:21:17.220685', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'instance-00000065', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '4ba9ddca-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.796682573, 'message_signature': '8d21acc3dd088d9fb27b7e1441f3d7bd755c21a75fce57eeb654861b32f0480f'}]}, 'timestamp': '2025-10-02 12:21:17.221358', '_unique_id': 'dafe2611093549c2a368a06049f8d2c3'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 INFO ceilometer.polling.manager [-] Polling pollster memory.usage in the context of pollsters
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.222 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/memory.usage volume: 41.921875 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'c05e92f9-edd5-48e9-b1e7-4314b4e6b513', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'memory.usage', 'counter_type': 'gauge', 'counter_unit': 'MB', 'counter_volume': 41.921875, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': None, 'resource_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'timestamp': '2025-10-02T12:21:17.222870', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'instance-00000065', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1}, 'message_id': '4baa2578-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.887168549, 'message_signature': '663eb7206697522cb080175d37b6997c13d48b53c3c96a062a38e7141114dbf8'}]}, 'timestamp': '2025-10-02 12:21:17.223188', '_unique_id': '4d420c46b9864793b3de49bb8d785b9a'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.223 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.224 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets.error in the context of pollsters
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.224 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/network.incoming.packets.error volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'bfa4d517-0c5c-4dec-9fff-bc547dae3f3f', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets.error', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': None, 'resource_id': 'instance-00000065-35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-tap0c328734-eb', 'timestamp': '2025-10-02T12:21:17.224802', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'tap0c328734-eb', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:ef:e3:79', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap0c328734-eb'}, 'message_id': '4baa7032-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.834134694, 'message_signature': '97a4092bc7795b1cc28a04cf08e771a66bfbac61e1ddc52a91af5b057c605ac6'}]}, 'timestamp': '2025-10-02 12:21:17.225142', '_unique_id': '50df3501036e430b9b4ed225d56e988f'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.225 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.226 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.requests in the context of pollsters
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.226 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk.device.read.requests volume: 1118 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.227 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk.device.read.requests volume: 108 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '232bd492-cfce-4a84-929d-35c122d487c1', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 1118, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': None, 'resource_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-vda', 'timestamp': '2025-10-02T12:21:17.226759', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'instance-00000065', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '4baabc7c-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.796682573, 'message_signature': '0cf081da525c98de78a89247f3e6b484efe39d0f0266596036e5c9c5686b5b9f'}, {'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 108, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': 
None, 'resource_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-sda', 'timestamp': '2025-10-02T12:21:17.226759', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'instance-00000065', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '4baac83e-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.796682573, 'message_signature': '154ce3e4652e39751d3f5fda7f30eacf81bd522af0b8df3d704052fb46cc1832'}]}, 'timestamp': '2025-10-02 12:21:17.227350', '_unique_id': '0099ed450c7b4ebdbe8eb01d3ed60731'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes.rate in the context of pollsters
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.228 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for OutgoingBytesRatePollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.229 12 ERROR ceilometer.polling.manager [-] Prevent pollster network.outgoing.bytes.rate from polling [<NovaLikeServer: tempest-ServersNegativeTestJSON-server-1170653470>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-ServersNegativeTestJSON-server-1170653470>]
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.229 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.latency in the context of pollsters
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.229 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for PerDeviceDiskLatencyPollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.229 12 ERROR ceilometer.polling.manager [-] Prevent pollster disk.device.latency from polling [<NovaLikeServer: tempest-ServersNegativeTestJSON-server-1170653470>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-ServersNegativeTestJSON-server-1170653470>]
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.229 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes in the context of pollsters
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.229 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/network.outgoing.bytes volume: 1620 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '4bfc5484-9c8f-45f6-8cbc-422d1b2b40b0', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 1620, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': None, 'resource_id': 'instance-00000065-35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-tap0c328734-eb', 'timestamp': '2025-10-02T12:21:17.229706', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'tap0c328734-eb', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:ef:e3:79', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap0c328734-eb'}, 'message_id': '4bab2fc2-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.834134694, 'message_signature': 'fb874f9e482bb6a5208238b225b16030e598d97a3f2c5e8bb2818d25d1a50255'}]}, 'timestamp': '2025-10-02 12:21:17.230083', '_unique_id': '6cc0c39e5ea74910b8f4265bb9f3803c'}: kombu.exceptions.OperationalError: [Errno 111] Connection 
refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.230 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.231 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets.drop in the context of pollsters
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.231 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/network.outgoing.packets.drop volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '37294ee5-95f0-4e05-ad89-f52f29318f13', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets.drop', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': None, 'resource_id': 'instance-00000065-35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-tap0c328734-eb', 'timestamp': '2025-10-02T12:21:17.231623', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'tap0c328734-eb', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:ef:e3:79', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap0c328734-eb'}, 'message_id': '4bab7a9a-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.834134694, 'message_signature': 'b9d146f4b9519ab29dd4cc20b94e01868bcff5d40d32a1ec8784f494407be7c7'}]}, 'timestamp': '2025-10-02 12:21:17.231936', '_unique_id': 'a307e1584a774fdead41d74634cf18ab'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.232 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.233 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.latency in the context of pollsters
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.233 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk.device.read.latency volume: 1346222043 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.233 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk.device.read.latency volume: 345521828 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '2f40a3d4-81b3-4f25-9a98-3154524cde38', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 1346222043, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': None, 'resource_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-vda', 'timestamp': '2025-10-02T12:21:17.233564', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'instance-00000065', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '4babc644-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.796682573, 'message_signature': 'a2b2ce9d93c93bde62e7277b256f258f6c4bd6d304e9bfab09d9fb52829d35a0'}, {'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 345521828, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': 
None, 'resource_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-sda', 'timestamp': '2025-10-02T12:21:17.233564', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'instance-00000065', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '4babd45e-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.796682573, 'message_signature': '80f4b9e89ba22feb48460d27dd03918ecbd043f6f35e398a9467e45b2dbf92a3'}]}, 'timestamp': '2025-10-02 12:21:17.234221', '_unique_id': '84454670f6db4b31b8c1e943681af1eb'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.234 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.235 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets.drop in the context of pollsters
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.235 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/network.incoming.packets.drop volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '55830a79-bf2a-46b6-b7ef-878275ffe836', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets.drop', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': None, 'resource_id': 'instance-00000065-35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-tap0c328734-eb', 'timestamp': '2025-10-02T12:21:17.235775', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'tap0c328734-eb', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:ef:e3:79', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap0c328734-eb'}, 'message_id': '4bac1fd6-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.834134694, 'message_signature': '70d71297a115b10678fd6b48031a3f382707a450219104be5be651e82c2ef032'}]}, 'timestamp': '2025-10-02 12:21:17.236224', '_unique_id': 'cdfecf4afbe84ce8aa8c0dd7ebf73397'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.236 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.237 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes.delta in the context of pollsters
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.237 12 DEBUG ceilometer.compute.pollsters [-] 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/network.outgoing.bytes.delta volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '618ea7f9-9e3c-466a-aba7-907bc0b8968f', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.bytes.delta', 'counter_type': 'delta', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': 'a803afe9939346088252c3b944f124f2', 'user_name': None, 'project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'project_name': None, 'resource_id': 'instance-00000065-35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-tap0c328734-eb', 'timestamp': '2025-10-02T12:21:17.237757', 'resource_metadata': {'display_name': 'tempest-ServersNegativeTestJSON-server-1170653470', 'name': 'tap0c328734-eb', 'instance_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'instance_type': 'm1.nano', 'host': 'b3eed5a32dd235c4fcd29faa5b49d9a42b4b81e2d1a7315a4fec2e77', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:ef:e3:79', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap0c328734-eb'}, 'message_id': '4bac6a18-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5674.834134694, 'message_signature': '3ec7983ddb5a7b795dbb286b2d49966f722ed7405afd65ecc3e36c777b562b03'}]}, 'timestamp': '2025-10-02 12:21:17.238094', '_unique_id': 'fcc2d5b322184eb2873589ebaee4ba31'}: kombu.exceptions.OperationalError: [Errno 111] Connection 
refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:21:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:21:17.238 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:21:18 compute-0 nova_compute[192079]: 2025-10-02 12:21:18.936 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:19 compute-0 kernel: tap0c328734-eb (unregistering): left promiscuous mode
Oct 02 12:21:19 compute-0 NetworkManager[51160]: <info>  [1759407679.1803] device (tap0c328734-eb): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:21:19 compute-0 ovn_controller[94336]: 2025-10-02T12:21:19Z|00350|binding|INFO|Releasing lport 0c328734-ebc6-47bc-b603-2e4af1cae573 from this chassis (sb_readonly=0)
Oct 02 12:21:19 compute-0 ovn_controller[94336]: 2025-10-02T12:21:19Z|00351|binding|INFO|Setting lport 0c328734-ebc6-47bc-b603-2e4af1cae573 down in Southbound
Oct 02 12:21:19 compute-0 nova_compute[192079]: 2025-10-02 12:21:19.188 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:19 compute-0 ovn_controller[94336]: 2025-10-02T12:21:19Z|00352|binding|INFO|Removing iface tap0c328734-eb ovn-installed in OVS
Oct 02 12:21:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:19.198 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:ef:e3:79 10.100.0.10'], port_security=['fa:16:3e:ef:e3:79 10.100.0.10'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.10/28', 'neutron:device_id': '35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-8f494075-66bf-4ce0-a765-98fd91c31199', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'f0c8c8a8631b4721beed577a99f8bdb7', 'neutron:revision_number': '4', 'neutron:security_group_ids': 'eb030dcc-72ea-4850-916a-e1df7c4d9a87', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=e43b5827-85bf-4b83-b921-ec45e12f1f2e, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=0c328734-ebc6-47bc-b603-2e4af1cae573) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:21:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:19.199 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 0c328734-ebc6-47bc-b603-2e4af1cae573 in datapath 8f494075-66bf-4ce0-a765-98fd91c31199 unbound from our chassis
Oct 02 12:21:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:19.201 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 8f494075-66bf-4ce0-a765-98fd91c31199, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:21:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:19.202 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[372717d3-c129-46a8-a201-2ec53a731a9b]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:21:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:19.203 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-8f494075-66bf-4ce0-a765-98fd91c31199 namespace which is not needed anymore
Oct 02 12:21:19 compute-0 nova_compute[192079]: 2025-10-02 12:21:19.206 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:19 compute-0 systemd[1]: machine-qemu\x2d47\x2dinstance\x2d00000065.scope: Deactivated successfully.
Oct 02 12:21:19 compute-0 systemd[1]: machine-qemu\x2d47\x2dinstance\x2d00000065.scope: Consumed 16.506s CPU time.
Oct 02 12:21:19 compute-0 systemd-machined[152150]: Machine qemu-47-instance-00000065 terminated.
Oct 02 12:21:19 compute-0 neutron-haproxy-ovnmeta-8f494075-66bf-4ce0-a765-98fd91c31199[235411]: [NOTICE]   (235415) : haproxy version is 2.8.14-c23fe91
Oct 02 12:21:19 compute-0 neutron-haproxy-ovnmeta-8f494075-66bf-4ce0-a765-98fd91c31199[235411]: [NOTICE]   (235415) : path to executable is /usr/sbin/haproxy
Oct 02 12:21:19 compute-0 neutron-haproxy-ovnmeta-8f494075-66bf-4ce0-a765-98fd91c31199[235411]: [WARNING]  (235415) : Exiting Master process...
Oct 02 12:21:19 compute-0 neutron-haproxy-ovnmeta-8f494075-66bf-4ce0-a765-98fd91c31199[235411]: [WARNING]  (235415) : Exiting Master process...
Oct 02 12:21:19 compute-0 neutron-haproxy-ovnmeta-8f494075-66bf-4ce0-a765-98fd91c31199[235411]: [ALERT]    (235415) : Current worker (235417) exited with code 143 (Terminated)
Oct 02 12:21:19 compute-0 neutron-haproxy-ovnmeta-8f494075-66bf-4ce0-a765-98fd91c31199[235411]: [WARNING]  (235415) : All workers exited. Exiting... (0)
Oct 02 12:21:19 compute-0 systemd[1]: libpod-67e0afa6c28df6ebabd7c7cd1f3fc31079a438c6bc5878aeda19fafd10396845.scope: Deactivated successfully.
Oct 02 12:21:19 compute-0 podman[236152]: 2025-10-02 12:21:19.353870709 +0000 UTC m=+0.045193243 container died 67e0afa6c28df6ebabd7c7cd1f3fc31079a438c6bc5878aeda19fafd10396845 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-8f494075-66bf-4ce0-a765-98fd91c31199, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_managed=true)
Oct 02 12:21:19 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-67e0afa6c28df6ebabd7c7cd1f3fc31079a438c6bc5878aeda19fafd10396845-userdata-shm.mount: Deactivated successfully.
Oct 02 12:21:19 compute-0 systemd[1]: var-lib-containers-storage-overlay-8bd6bc02ac93b323c28eacd8bd193ea0722529603b0f58262ee053876d0e864a-merged.mount: Deactivated successfully.
Oct 02 12:21:19 compute-0 podman[236152]: 2025-10-02 12:21:19.388960706 +0000 UTC m=+0.080283240 container cleanup 67e0afa6c28df6ebabd7c7cd1f3fc31079a438c6bc5878aeda19fafd10396845 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-8f494075-66bf-4ce0-a765-98fd91c31199, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0)
Oct 02 12:21:19 compute-0 systemd[1]: libpod-conmon-67e0afa6c28df6ebabd7c7cd1f3fc31079a438c6bc5878aeda19fafd10396845.scope: Deactivated successfully.
Oct 02 12:21:19 compute-0 podman[236180]: 2025-10-02 12:21:19.446135734 +0000 UTC m=+0.039713343 container remove 67e0afa6c28df6ebabd7c7cd1f3fc31079a438c6bc5878aeda19fafd10396845 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-8f494075-66bf-4ce0-a765-98fd91c31199, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true)
Oct 02 12:21:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:19.452 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[feceae25-4522-4442-839e-71e2852211d6]: (4, ('Thu Oct  2 12:21:19 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-8f494075-66bf-4ce0-a765-98fd91c31199 (67e0afa6c28df6ebabd7c7cd1f3fc31079a438c6bc5878aeda19fafd10396845)\n67e0afa6c28df6ebabd7c7cd1f3fc31079a438c6bc5878aeda19fafd10396845\nThu Oct  2 12:21:19 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-8f494075-66bf-4ce0-a765-98fd91c31199 (67e0afa6c28df6ebabd7c7cd1f3fc31079a438c6bc5878aeda19fafd10396845)\n67e0afa6c28df6ebabd7c7cd1f3fc31079a438c6bc5878aeda19fafd10396845\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:21:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:19.454 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[587c1d38-4b3f-463e-a118-6936a3519034]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:21:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:19.454 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap8f494075-60, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:21:19 compute-0 nova_compute[192079]: 2025-10-02 12:21:19.456 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:19 compute-0 kernel: tap8f494075-60: left promiscuous mode
Oct 02 12:21:19 compute-0 nova_compute[192079]: 2025-10-02 12:21:19.472 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:19.475 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f03fad7d-a079-4254-959e-29b701cc6e82]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:21:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:19.510 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a7004d84-ddc2-416b-9fea-0bb6145c6aa4]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:21:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:19.511 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5ad81087-7ec6-426f-be20-98ae51fb80fb]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:21:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:19.526 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c82c09e4-fd40-4774-9e4a-0f4aa644b525]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 559446, 'reachable_time': 24332, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 236216, 'error': None, 'target': 'ovnmeta-8f494075-66bf-4ce0-a765-98fd91c31199', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:21:19 compute-0 systemd[1]: run-netns-ovnmeta\x2d8f494075\x2d66bf\x2d4ce0\x2da765\x2d98fd91c31199.mount: Deactivated successfully.
Oct 02 12:21:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:19.529 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-8f494075-66bf-4ce0-a765-98fd91c31199 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:21:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:19.530 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[63af7d2e-f748-4d67-acec-9aad31fdcfe4]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:21:19 compute-0 nova_compute[192079]: 2025-10-02 12:21:19.753 2 DEBUG nova.compute.manager [req-294bf03f-ff81-4632-9f4f-b5ea8b0fa430 req-d4f2a536-ecc3-4e53-bfd6-1774f4e45f26 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Received event network-vif-unplugged-0c328734-ebc6-47bc-b603-2e4af1cae573 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:21:19 compute-0 nova_compute[192079]: 2025-10-02 12:21:19.754 2 DEBUG oslo_concurrency.lockutils [req-294bf03f-ff81-4632-9f4f-b5ea8b0fa430 req-d4f2a536-ecc3-4e53-bfd6-1774f4e45f26 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:21:19 compute-0 nova_compute[192079]: 2025-10-02 12:21:19.754 2 DEBUG oslo_concurrency.lockutils [req-294bf03f-ff81-4632-9f4f-b5ea8b0fa430 req-d4f2a536-ecc3-4e53-bfd6-1774f4e45f26 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:21:19 compute-0 nova_compute[192079]: 2025-10-02 12:21:19.754 2 DEBUG oslo_concurrency.lockutils [req-294bf03f-ff81-4632-9f4f-b5ea8b0fa430 req-d4f2a536-ecc3-4e53-bfd6-1774f4e45f26 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:21:19 compute-0 nova_compute[192079]: 2025-10-02 12:21:19.755 2 DEBUG nova.compute.manager [req-294bf03f-ff81-4632-9f4f-b5ea8b0fa430 req-d4f2a536-ecc3-4e53-bfd6-1774f4e45f26 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] No waiting events found dispatching network-vif-unplugged-0c328734-ebc6-47bc-b603-2e4af1cae573 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:21:19 compute-0 nova_compute[192079]: 2025-10-02 12:21:19.755 2 WARNING nova.compute.manager [req-294bf03f-ff81-4632-9f4f-b5ea8b0fa430 req-d4f2a536-ecc3-4e53-bfd6-1774f4e45f26 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Received unexpected event network-vif-unplugged-0c328734-ebc6-47bc-b603-2e4af1cae573 for instance with vm_state active and task_state shelving.
Oct 02 12:21:19 compute-0 podman[236219]: 2025-10-02 12:21:19.855283949 +0000 UTC m=+0.065659201 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 12:21:19 compute-0 podman[236217]: 2025-10-02 12:21:19.882409958 +0000 UTC m=+0.095580156 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_metadata_agent, container_name=ovn_metadata_agent, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_managed=true, managed_by=edpm_ansible)
Oct 02 12:21:19 compute-0 podman[236218]: 2025-10-02 12:21:19.905706103 +0000 UTC m=+0.118654996 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, container_name=ovn_controller, config_id=ovn_controller, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0)
Oct 02 12:21:20 compute-0 nova_compute[192079]: 2025-10-02 12:21:20.018 2 INFO nova.virt.libvirt.driver [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Instance shutdown successfully after 3 seconds.
Oct 02 12:21:20 compute-0 nova_compute[192079]: 2025-10-02 12:21:20.025 2 INFO nova.virt.libvirt.driver [-] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Instance destroyed successfully.
Oct 02 12:21:20 compute-0 nova_compute[192079]: 2025-10-02 12:21:20.025 2 DEBUG nova.objects.instance [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Lazy-loading 'numa_topology' on Instance uuid 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:21:20 compute-0 nova_compute[192079]: 2025-10-02 12:21:20.240 2 INFO nova.virt.libvirt.driver [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Beginning cold snapshot process
Oct 02 12:21:20 compute-0 nova_compute[192079]: 2025-10-02 12:21:20.466 2 DEBUG nova.privsep.utils [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Path '/var/lib/nova/instances' supports direct I/O supports_direct_io /usr/lib/python3.9/site-packages/nova/privsep/utils.py:63
Oct 02 12:21:20 compute-0 nova_compute[192079]: 2025-10-02 12:21:20.466 2 DEBUG oslo_concurrency.processutils [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Running cmd (subprocess): qemu-img convert -t none -O qcow2 -f qcow2 /var/lib/nova/instances/35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk /var/lib/nova/instances/snapshots/tmpr5qu0cw9/530810efbcd4441eb4262c08a657bd35 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:21:20 compute-0 nova_compute[192079]: 2025-10-02 12:21:20.951 2 DEBUG oslo_concurrency.processutils [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] CMD "qemu-img convert -t none -O qcow2 -f qcow2 /var/lib/nova/instances/35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk /var/lib/nova/instances/snapshots/tmpr5qu0cw9/530810efbcd4441eb4262c08a657bd35" returned: 0 in 0.485s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:21:20 compute-0 nova_compute[192079]: 2025-10-02 12:21:20.952 2 INFO nova.virt.libvirt.driver [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Snapshot extracted, beginning image upload
Oct 02 12:21:21 compute-0 nova_compute[192079]: 2025-10-02 12:21:21.660 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:21:21 compute-0 nova_compute[192079]: 2025-10-02 12:21:21.749 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:21 compute-0 nova_compute[192079]: 2025-10-02 12:21:21.848 2 DEBUG nova.compute.manager [req-4c7c2010-575e-48c9-8be6-b85219421467 req-b8a7af9c-06a7-4147-8f6e-9030ce59dce0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Received event network-vif-plugged-0c328734-ebc6-47bc-b603-2e4af1cae573 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:21:21 compute-0 nova_compute[192079]: 2025-10-02 12:21:21.849 2 DEBUG oslo_concurrency.lockutils [req-4c7c2010-575e-48c9-8be6-b85219421467 req-b8a7af9c-06a7-4147-8f6e-9030ce59dce0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:21:21 compute-0 nova_compute[192079]: 2025-10-02 12:21:21.850 2 DEBUG oslo_concurrency.lockutils [req-4c7c2010-575e-48c9-8be6-b85219421467 req-b8a7af9c-06a7-4147-8f6e-9030ce59dce0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:21:21 compute-0 nova_compute[192079]: 2025-10-02 12:21:21.850 2 DEBUG oslo_concurrency.lockutils [req-4c7c2010-575e-48c9-8be6-b85219421467 req-b8a7af9c-06a7-4147-8f6e-9030ce59dce0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:21:21 compute-0 nova_compute[192079]: 2025-10-02 12:21:21.851 2 DEBUG nova.compute.manager [req-4c7c2010-575e-48c9-8be6-b85219421467 req-b8a7af9c-06a7-4147-8f6e-9030ce59dce0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] No waiting events found dispatching network-vif-plugged-0c328734-ebc6-47bc-b603-2e4af1cae573 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:21:21 compute-0 nova_compute[192079]: 2025-10-02 12:21:21.851 2 WARNING nova.compute.manager [req-4c7c2010-575e-48c9-8be6-b85219421467 req-b8a7af9c-06a7-4147-8f6e-9030ce59dce0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Received unexpected event network-vif-plugged-0c328734-ebc6-47bc-b603-2e4af1cae573 for instance with vm_state active and task_state shelving_image_uploading.
Oct 02 12:21:23 compute-0 nova_compute[192079]: 2025-10-02 12:21:23.458 2 INFO nova.virt.libvirt.driver [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Snapshot image upload complete
Oct 02 12:21:23 compute-0 nova_compute[192079]: 2025-10-02 12:21:23.459 2 DEBUG nova.compute.manager [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:21:23 compute-0 nova_compute[192079]: 2025-10-02 12:21:23.536 2 INFO nova.compute.manager [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Shelve offloading
Oct 02 12:21:23 compute-0 nova_compute[192079]: 2025-10-02 12:21:23.556 2 INFO nova.virt.libvirt.driver [-] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Instance destroyed successfully.
Oct 02 12:21:23 compute-0 nova_compute[192079]: 2025-10-02 12:21:23.557 2 DEBUG nova.compute.manager [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:21:23 compute-0 nova_compute[192079]: 2025-10-02 12:21:23.560 2 DEBUG oslo_concurrency.lockutils [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Acquiring lock "refresh_cache-35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:21:23 compute-0 nova_compute[192079]: 2025-10-02 12:21:23.560 2 DEBUG oslo_concurrency.lockutils [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Acquired lock "refresh_cache-35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:21:23 compute-0 nova_compute[192079]: 2025-10-02 12:21:23.560 2 DEBUG nova.network.neutron [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:21:23 compute-0 nova_compute[192079]: 2025-10-02 12:21:23.939 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:25 compute-0 nova_compute[192079]: 2025-10-02 12:21:25.088 2 DEBUG nova.network.neutron [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Updating instance_info_cache with network_info: [{"id": "0c328734-ebc6-47bc-b603-2e4af1cae573", "address": "fa:16:3e:ef:e3:79", "network": {"id": "8f494075-66bf-4ce0-a765-98fd91c31199", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1553125421-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f0c8c8a8631b4721beed577a99f8bdb7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap0c328734-eb", "ovs_interfaceid": "0c328734-ebc6-47bc-b603-2e4af1cae573", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:21:25 compute-0 nova_compute[192079]: 2025-10-02 12:21:25.192 2 DEBUG oslo_concurrency.lockutils [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Releasing lock "refresh_cache-35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:21:25 compute-0 nova_compute[192079]: 2025-10-02 12:21:25.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:21:25 compute-0 nova_compute[192079]: 2025-10-02 12:21:25.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:21:25 compute-0 nova_compute[192079]: 2025-10-02 12:21:25.719 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:21:25 compute-0 nova_compute[192079]: 2025-10-02 12:21:25.720 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:21:25 compute-0 nova_compute[192079]: 2025-10-02 12:21:25.720 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:21:25 compute-0 nova_compute[192079]: 2025-10-02 12:21:25.720 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:21:25 compute-0 nova_compute[192079]: 2025-10-02 12:21:25.831 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:21:25 compute-0 nova_compute[192079]: 2025-10-02 12:21:25.887 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:21:25 compute-0 nova_compute[192079]: 2025-10-02 12:21:25.888 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:21:25 compute-0 nova_compute[192079]: 2025-10-02 12:21:25.963 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c/disk --force-share --output=json" returned: 0 in 0.074s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.104 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.105 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5755MB free_disk=73.32049942016602GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.105 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.106 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.361 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.362 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 1 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.363 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=640MB phys_disk=79GB used_disk=1GB total_vcpus=8 used_vcpus=1 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.383 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing inventories for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708 _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:804
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.399 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Updating ProviderTree inventory for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 from _refresh_and_get_inventory using data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} _refresh_and_get_inventory /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:768
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.399 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Updating inventory in ProviderTree for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 with inventory: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:176
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.643 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing aggregate associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, aggregates: None _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:813
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.715 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing trait associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, traits: COMPUTE_SECURITY_UEFI_SECURE_BOOT,COMPUTE_VIOMMU_MODEL_VIRTIO,COMPUTE_VIOMMU_MODEL_AUTO,COMPUTE_IMAGE_TYPE_AKI,COMPUTE_GRAPHICS_MODEL_VIRTIO,COMPUTE_NET_VIF_MODEL_PCNET,HW_CPU_X86_SSE42,COMPUTE_RESCUE_BFV,COMPUTE_VOLUME_EXTEND,COMPUTE_IMAGE_TYPE_QCOW2,COMPUTE_TRUSTED_CERTS,COMPUTE_SOCKET_PCI_NUMA_AFFINITY,COMPUTE_GRAPHICS_MODEL_CIRRUS,HW_CPU_X86_MMX,COMPUTE_STORAGE_BUS_VIRTIO,COMPUTE_NET_ATTACH_INTERFACE_WITH_TAG,COMPUTE_STORAGE_BUS_FDC,COMPUTE_STORAGE_BUS_USB,COMPUTE_NODE,HW_CPU_X86_SSSE3,HW_CPU_X86_SSE2,COMPUTE_GRAPHICS_MODEL_BOCHS,COMPUTE_NET_VIF_MODEL_E1000E,COMPUTE_IMAGE_TYPE_RAW,COMPUTE_NET_VIF_MODEL_NE2K_PCI,COMPUTE_IMAGE_TYPE_AMI,COMPUTE_VIOMMU_MODEL_INTEL,COMPUTE_SECURITY_TPM_2_0,COMPUTE_STORAGE_BUS_SCSI,COMPUTE_IMAGE_TYPE_ARI,COMPUTE_NET_VIF_MODEL_VMXNET3,COMPUTE_SECURITY_TPM_1_2,COMPUTE_NET_VIF_MODEL_E1000,HW_CPU_X86_SSE,COMPUTE_VOLUME_MULTI_ATTACH,COMPUTE_STORAGE_BUS_IDE,COMPUTE_GRAPHICS_MODEL_NONE,COMPUTE_VOLUME_ATTACH_WITH_TAG,COMPUTE_NET_VIF_MODEL_VIRTIO,HW_CPU_X86_SSE41,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_DEVICE_TAGGING,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_ACCELERATORS,COMPUTE_NET_VIF_MODEL_RTL8139,COMPUTE_GRAPHICS_MODEL_VGA,COMPUTE_STORAGE_BUS_SATA,COMPUTE_NET_VIF_MODEL_SPAPR_VLAN _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:825
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.751 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.823 2 INFO nova.virt.libvirt.driver [-] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Instance destroyed successfully.
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.824 2 DEBUG nova.objects.instance [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Lazy-loading 'resources' on Instance uuid 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.846 2 DEBUG nova.virt.libvirt.vif [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:19:49Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServersNegativeTestJSON-server-1170653470',display_name='tempest-ServersNegativeTestJSON-server-1170653470',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serversnegativetestjson-server-1170653470',id=101,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:19:57Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=4,progress=0,project_id='f0c8c8a8631b4721beed577a99f8bdb7',ramdisk_id='',reservation_id='r-sfsoaqzs',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_m
in_disk='1',image_min_ram='0',owner_project_name='tempest-ServersNegativeTestJSON-114354241',owner_user_name='tempest-ServersNegativeTestJSON-114354241-project-member',shelved_at='2025-10-02T12:21:23.459143',shelved_host='compute-0.ctlplane.example.com',shelved_image_id='e54e42ff-f245-4c1b-a659-20ba701a4194'},tags=<?>,task_state='shelving_offloading',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:21:20Z,user_data=None,user_id='a803afe9939346088252c3b944f124f2',uuid=35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='shelved') vif={"id": "0c328734-ebc6-47bc-b603-2e4af1cae573", "address": "fa:16:3e:ef:e3:79", "network": {"id": "8f494075-66bf-4ce0-a765-98fd91c31199", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1553125421-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f0c8c8a8631b4721beed577a99f8bdb7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap0c328734-eb", "ovs_interfaceid": "0c328734-ebc6-47bc-b603-2e4af1cae573", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.847 2 DEBUG nova.network.os_vif_util [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Converting VIF {"id": "0c328734-ebc6-47bc-b603-2e4af1cae573", "address": "fa:16:3e:ef:e3:79", "network": {"id": "8f494075-66bf-4ce0-a765-98fd91c31199", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1553125421-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f0c8c8a8631b4721beed577a99f8bdb7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap0c328734-eb", "ovs_interfaceid": "0c328734-ebc6-47bc-b603-2e4af1cae573", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.848 2 DEBUG nova.network.os_vif_util [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:ef:e3:79,bridge_name='br-int',has_traffic_filtering=True,id=0c328734-ebc6-47bc-b603-2e4af1cae573,network=Network(8f494075-66bf-4ce0-a765-98fd91c31199),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap0c328734-eb') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.848 2 DEBUG os_vif [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:ef:e3:79,bridge_name='br-int',has_traffic_filtering=True,id=0c328734-ebc6-47bc-b603-2e4af1cae573,network=Network(8f494075-66bf-4ce0-a765-98fd91c31199),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap0c328734-eb') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.849 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.850 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap0c328734-eb, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.851 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.853 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.856 2 INFO os_vif [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:ef:e3:79,bridge_name='br-int',has_traffic_filtering=True,id=0c328734-ebc6-47bc-b603-2e4af1cae573,network=Network(8f494075-66bf-4ce0-a765-98fd91c31199),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap0c328734-eb')
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.856 2 INFO nova.virt.libvirt.driver [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Deleting instance files /var/lib/nova/instances/35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c_del
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.863 2 INFO nova.virt.libvirt.driver [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Deletion of /var/lib/nova/instances/35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c_del complete
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.916 2 DEBUG nova.compute.manager [req-afcd5b69-7870-461b-93c8-0c7229813db5 req-16f5548f-9b03-493f-861f-71bbe5be05b6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Received event network-changed-0c328734-ebc6-47bc-b603-2e4af1cae573 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.916 2 DEBUG nova.compute.manager [req-afcd5b69-7870-461b-93c8-0c7229813db5 req-16f5548f-9b03-493f-861f-71bbe5be05b6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Refreshing instance network info cache due to event network-changed-0c328734-ebc6-47bc-b603-2e4af1cae573. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.917 2 DEBUG oslo_concurrency.lockutils [req-afcd5b69-7870-461b-93c8-0c7229813db5 req-16f5548f-9b03-493f-861f-71bbe5be05b6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.917 2 DEBUG oslo_concurrency.lockutils [req-afcd5b69-7870-461b-93c8-0c7229813db5 req-16f5548f-9b03-493f-861f-71bbe5be05b6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.917 2 DEBUG nova.network.neutron [req-afcd5b69-7870-461b-93c8-0c7229813db5 req-16f5548f-9b03-493f-861f-71bbe5be05b6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Refreshing network info cache for port 0c328734-ebc6-47bc-b603-2e4af1cae573 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.938 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:21:26 compute-0 nova_compute[192079]: 2025-10-02 12:21:26.990 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:21:27 compute-0 nova_compute[192079]: 2025-10-02 12:21:27.048 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:21:27 compute-0 nova_compute[192079]: 2025-10-02 12:21:27.049 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.943s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:21:27 compute-0 nova_compute[192079]: 2025-10-02 12:21:27.079 2 INFO nova.scheduler.client.report [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Deleted allocations for instance 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c
Oct 02 12:21:27 compute-0 nova_compute[192079]: 2025-10-02 12:21:27.082 2 DEBUG oslo_concurrency.lockutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:21:27 compute-0 nova_compute[192079]: 2025-10-02 12:21:27.083 2 DEBUG oslo_concurrency.lockutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:21:27 compute-0 nova_compute[192079]: 2025-10-02 12:21:27.205 2 DEBUG nova.compute.manager [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:21:27 compute-0 nova_compute[192079]: 2025-10-02 12:21:27.331 2 DEBUG oslo_concurrency.lockutils [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:21:27 compute-0 nova_compute[192079]: 2025-10-02 12:21:27.332 2 DEBUG oslo_concurrency.lockutils [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:21:27 compute-0 nova_compute[192079]: 2025-10-02 12:21:27.410 2 DEBUG nova.compute.provider_tree [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:21:27 compute-0 nova_compute[192079]: 2025-10-02 12:21:27.434 2 DEBUG oslo_concurrency.lockutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:21:27 compute-0 nova_compute[192079]: 2025-10-02 12:21:27.442 2 DEBUG nova.scheduler.client.report [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:21:27 compute-0 nova_compute[192079]: 2025-10-02 12:21:27.517 2 DEBUG oslo_concurrency.lockutils [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.186s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:21:27 compute-0 nova_compute[192079]: 2025-10-02 12:21:27.520 2 DEBUG oslo_concurrency.lockutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.087s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:21:27 compute-0 nova_compute[192079]: 2025-10-02 12:21:27.527 2 DEBUG nova.virt.hardware [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:21:27 compute-0 nova_compute[192079]: 2025-10-02 12:21:27.528 2 INFO nova.compute.claims [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:21:27 compute-0 nova_compute[192079]: 2025-10-02 12:21:27.684 2 DEBUG oslo_concurrency.lockutils [None req-b612ca75-d334-4cae-90e3-a083e26304f4 a803afe9939346088252c3b944f124f2 f0c8c8a8631b4721beed577a99f8bdb7 - - default default] Lock "35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c" "released" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: held 10.733s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:21:27 compute-0 nova_compute[192079]: 2025-10-02 12:21:27.765 2 DEBUG nova.compute.provider_tree [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:21:27 compute-0 nova_compute[192079]: 2025-10-02 12:21:27.782 2 DEBUG nova.scheduler.client.report [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:21:27 compute-0 nova_compute[192079]: 2025-10-02 12:21:27.811 2 DEBUG oslo_concurrency.lockutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.291s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:21:27 compute-0 nova_compute[192079]: 2025-10-02 12:21:27.812 2 DEBUG nova.compute.manager [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:21:27 compute-0 nova_compute[192079]: 2025-10-02 12:21:27.878 2 DEBUG nova.compute.manager [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:21:27 compute-0 nova_compute[192079]: 2025-10-02 12:21:27.879 2 DEBUG nova.network.neutron [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:21:27 compute-0 nova_compute[192079]: 2025-10-02 12:21:27.904 2 INFO nova.virt.libvirt.driver [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:21:27 compute-0 nova_compute[192079]: 2025-10-02 12:21:27.923 2 DEBUG nova.compute.manager [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.058 2 DEBUG nova.compute.manager [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.059 2 DEBUG nova.virt.libvirt.driver [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.060 2 INFO nova.virt.libvirt.driver [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Creating image(s)
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.061 2 DEBUG oslo_concurrency.lockutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "/var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.061 2 DEBUG oslo_concurrency.lockutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "/var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.062 2 DEBUG oslo_concurrency.lockutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "/var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.086 2 DEBUG nova.policy [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1faa7e121a0e43ad8cb4ae5b2cfcc6a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '76c7dd40d83e4e3ca71abbebf57921b6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.090 2 DEBUG oslo_concurrency.processutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.148 2 DEBUG oslo_concurrency.processutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.058s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.150 2 DEBUG oslo_concurrency.lockutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.152 2 DEBUG oslo_concurrency.lockutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.002s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:21:28 compute-0 podman[236301]: 2025-10-02 12:21:28.160888807 +0000 UTC m=+0.070423341 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, 
org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_id=edpm)
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.174 2 DEBUG oslo_concurrency.processutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.227 2 DEBUG oslo_concurrency.processutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.053s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.228 2 DEBUG oslo_concurrency.processutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.394 2 DEBUG oslo_concurrency.processutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/disk 1073741824" returned: 0 in 0.165s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.395 2 DEBUG oslo_concurrency.lockutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.243s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.396 2 DEBUG oslo_concurrency.processutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.432 2 DEBUG nova.network.neutron [req-afcd5b69-7870-461b-93c8-0c7229813db5 req-16f5548f-9b03-493f-861f-71bbe5be05b6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Updated VIF entry in instance network info cache for port 0c328734-ebc6-47bc-b603-2e4af1cae573. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.434 2 DEBUG nova.network.neutron [req-afcd5b69-7870-461b-93c8-0c7229813db5 req-16f5548f-9b03-493f-861f-71bbe5be05b6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Updating instance_info_cache with network_info: [{"id": "0c328734-ebc6-47bc-b603-2e4af1cae573", "address": "fa:16:3e:ef:e3:79", "network": {"id": "8f494075-66bf-4ce0-a765-98fd91c31199", "bridge": null, "label": "tempest-ServersNegativeTestJSON-1553125421-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f0c8c8a8631b4721beed577a99f8bdb7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "unbound", "details": {}, "devname": "tap0c328734-eb", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.468 2 DEBUG oslo_concurrency.lockutils [req-afcd5b69-7870-461b-93c8-0c7229813db5 req-16f5548f-9b03-493f-861f-71bbe5be05b6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.485 2 DEBUG oslo_concurrency.processutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.089s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.486 2 DEBUG nova.virt.disk.api [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Checking if we can resize image /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.486 2 DEBUG oslo_concurrency.processutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.567 2 DEBUG oslo_concurrency.processutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/disk --force-share --output=json" returned: 0 in 0.081s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.568 2 DEBUG nova.virt.disk.api [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Cannot resize image /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.569 2 DEBUG nova.objects.instance [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'migration_context' on Instance uuid 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.625 2 DEBUG nova.virt.libvirt.driver [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.626 2 DEBUG nova.virt.libvirt.driver [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Ensure instance console log exists: /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.627 2 DEBUG oslo_concurrency.lockutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.627 2 DEBUG oslo_concurrency.lockutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.627 2 DEBUG oslo_concurrency.lockutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:21:28 compute-0 nova_compute[192079]: 2025-10-02 12:21:28.886 2 DEBUG nova.network.neutron [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Successfully created port: fd508257-51ca-4c61-9340-029f9a9e7a75 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:21:29 compute-0 nova_compute[192079]: 2025-10-02 12:21:29.049 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:21:29 compute-0 nova_compute[192079]: 2025-10-02 12:21:29.050 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:21:29 compute-0 nova_compute[192079]: 2025-10-02 12:21:29.050 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:21:29 compute-0 nova_compute[192079]: 2025-10-02 12:21:29.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:21:29 compute-0 nova_compute[192079]: 2025-10-02 12:21:29.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:21:29 compute-0 nova_compute[192079]: 2025-10-02 12:21:29.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:21:29 compute-0 nova_compute[192079]: 2025-10-02 12:21:29.696 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Skipping network cache update for instance because it is Building. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9871
Oct 02 12:21:29 compute-0 nova_compute[192079]: 2025-10-02 12:21:29.696 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:21:29 compute-0 nova_compute[192079]: 2025-10-02 12:21:29.698 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:21:29 compute-0 nova_compute[192079]: 2025-10-02 12:21:29.698 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:21:30 compute-0 nova_compute[192079]: 2025-10-02 12:21:30.196 2 DEBUG nova.network.neutron [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Successfully updated port: fd508257-51ca-4c61-9340-029f9a9e7a75 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:21:30 compute-0 nova_compute[192079]: 2025-10-02 12:21:30.214 2 DEBUG oslo_concurrency.lockutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "refresh_cache-2eb08e64-4af9-4c5f-9817-b24d5e5ccce2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:21:30 compute-0 nova_compute[192079]: 2025-10-02 12:21:30.214 2 DEBUG oslo_concurrency.lockutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquired lock "refresh_cache-2eb08e64-4af9-4c5f-9817-b24d5e5ccce2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:21:30 compute-0 nova_compute[192079]: 2025-10-02 12:21:30.215 2 DEBUG nova.network.neutron [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:21:30 compute-0 nova_compute[192079]: 2025-10-02 12:21:30.304 2 DEBUG nova.compute.manager [req-d6077910-decc-47b8-8bab-70315168949d req-730a5fd2-dc65-4036-8091-b7dc99417938 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Received event network-changed-fd508257-51ca-4c61-9340-029f9a9e7a75 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:21:30 compute-0 nova_compute[192079]: 2025-10-02 12:21:30.305 2 DEBUG nova.compute.manager [req-d6077910-decc-47b8-8bab-70315168949d req-730a5fd2-dc65-4036-8091-b7dc99417938 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Refreshing instance network info cache due to event network-changed-fd508257-51ca-4c61-9340-029f9a9e7a75. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:21:30 compute-0 nova_compute[192079]: 2025-10-02 12:21:30.305 2 DEBUG oslo_concurrency.lockutils [req-d6077910-decc-47b8-8bab-70315168949d req-730a5fd2-dc65-4036-8091-b7dc99417938 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-2eb08e64-4af9-4c5f-9817-b24d5e5ccce2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:21:30 compute-0 nova_compute[192079]: 2025-10-02 12:21:30.766 2 DEBUG nova.network.neutron [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.753 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.755 2 DEBUG nova.network.neutron [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Updating instance_info_cache with network_info: [{"id": "fd508257-51ca-4c61-9340-029f9a9e7a75", "address": "fa:16:3e:5e:8b:77", "network": {"id": "043fc82b-ca25-47f8-a78d-d7118d064ecd", "bridge": "br-int", "label": "tempest-network-smoke--1375280567", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapfd508257-51", "ovs_interfaceid": "fd508257-51ca-4c61-9340-029f9a9e7a75", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.779 2 DEBUG oslo_concurrency.lockutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Releasing lock "refresh_cache-2eb08e64-4af9-4c5f-9817-b24d5e5ccce2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.780 2 DEBUG nova.compute.manager [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Instance network_info: |[{"id": "fd508257-51ca-4c61-9340-029f9a9e7a75", "address": "fa:16:3e:5e:8b:77", "network": {"id": "043fc82b-ca25-47f8-a78d-d7118d064ecd", "bridge": "br-int", "label": "tempest-network-smoke--1375280567", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapfd508257-51", "ovs_interfaceid": "fd508257-51ca-4c61-9340-029f9a9e7a75", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.780 2 DEBUG oslo_concurrency.lockutils [req-d6077910-decc-47b8-8bab-70315168949d req-730a5fd2-dc65-4036-8091-b7dc99417938 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-2eb08e64-4af9-4c5f-9817-b24d5e5ccce2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.780 2 DEBUG nova.network.neutron [req-d6077910-decc-47b8-8bab-70315168949d req-730a5fd2-dc65-4036-8091-b7dc99417938 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Refreshing network info cache for port fd508257-51ca-4c61-9340-029f9a9e7a75 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.782 2 DEBUG nova.virt.libvirt.driver [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Start _get_guest_xml network_info=[{"id": "fd508257-51ca-4c61-9340-029f9a9e7a75", "address": "fa:16:3e:5e:8b:77", "network": {"id": "043fc82b-ca25-47f8-a78d-d7118d064ecd", "bridge": "br-int", "label": "tempest-network-smoke--1375280567", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapfd508257-51", "ovs_interfaceid": "fd508257-51ca-4c61-9340-029f9a9e7a75", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.786 2 WARNING nova.virt.libvirt.driver [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.790 2 DEBUG nova.virt.libvirt.host [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.791 2 DEBUG nova.virt.libvirt.host [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.793 2 DEBUG nova.virt.libvirt.host [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.793 2 DEBUG nova.virt.libvirt.host [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.794 2 DEBUG nova.virt.libvirt.driver [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.794 2 DEBUG nova.virt.hardware [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.795 2 DEBUG nova.virt.hardware [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.795 2 DEBUG nova.virt.hardware [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.795 2 DEBUG nova.virt.hardware [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.795 2 DEBUG nova.virt.hardware [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.795 2 DEBUG nova.virt.hardware [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.796 2 DEBUG nova.virt.hardware [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.796 2 DEBUG nova.virt.hardware [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.796 2 DEBUG nova.virt.hardware [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.796 2 DEBUG nova.virt.hardware [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.796 2 DEBUG nova.virt.hardware [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.799 2 DEBUG nova.virt.libvirt.vif [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:21:25Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestNetworkAdvancedServerOps-server-1275898317',display_name='tempest-TestNetworkAdvancedServerOps-server-1275898317',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkadvancedserverops-server-1275898317',id=112,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBOCz+7JmNyQy7JdP1IjSwu02/HePNAJvzHsZBcv8XH13dMGPNzBUuwrRU02GRGGFMvEIz5Lu1u/RVTlkdJCGXW3q1BcgXBVQzMFZYW+dEdgXTOuU2vWkRuKj+JzgzmR88A==',key_name='tempest-TestNetworkAdvancedServerOps-217202803',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='76c7dd40d83e4e3ca71abbebf57921b6',ramdisk_id='',reservation_id='r-x0vz0bnp',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestNetworkAdvancedServerOps-597114071',owner_user_name='tempest-TestNetworkAdvancedServerOps-597114071-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:21:27Z,user_data=None,user_id='1faa7e121a0e43ad8cb4ae5b2cfcc6a2',uuid=2eb08e64-4af9-4c5f-9817-b24d5e5ccce2,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "fd508257-51ca-4c61-9340-029f9a9e7a75", "address": "fa:16:3e:5e:8b:77", "network": {"id": "043fc82b-ca25-47f8-a78d-d7118d064ecd", "bridge": "br-int", "label": "tempest-network-smoke--1375280567", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": 
true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapfd508257-51", "ovs_interfaceid": "fd508257-51ca-4c61-9340-029f9a9e7a75", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.800 2 DEBUG nova.network.os_vif_util [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converting VIF {"id": "fd508257-51ca-4c61-9340-029f9a9e7a75", "address": "fa:16:3e:5e:8b:77", "network": {"id": "043fc82b-ca25-47f8-a78d-d7118d064ecd", "bridge": "br-int", "label": "tempest-network-smoke--1375280567", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapfd508257-51", "ovs_interfaceid": "fd508257-51ca-4c61-9340-029f9a9e7a75", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.800 2 DEBUG nova.network.os_vif_util [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:5e:8b:77,bridge_name='br-int',has_traffic_filtering=True,id=fd508257-51ca-4c61-9340-029f9a9e7a75,network=Network(043fc82b-ca25-47f8-a78d-d7118d064ecd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapfd508257-51') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.802 2 DEBUG nova.objects.instance [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'pci_devices' on Instance uuid 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.822 2 DEBUG nova.virt.libvirt.driver [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:21:31 compute-0 nova_compute[192079]:   <uuid>2eb08e64-4af9-4c5f-9817-b24d5e5ccce2</uuid>
Oct 02 12:21:31 compute-0 nova_compute[192079]:   <name>instance-00000070</name>
Oct 02 12:21:31 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:21:31 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:21:31 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:21:31 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:       <nova:name>tempest-TestNetworkAdvancedServerOps-server-1275898317</nova:name>
Oct 02 12:21:31 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:21:31</nova:creationTime>
Oct 02 12:21:31 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:21:31 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:21:31 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:21:31 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:21:31 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:21:31 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:21:31 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:21:31 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:21:31 compute-0 nova_compute[192079]:         <nova:user uuid="1faa7e121a0e43ad8cb4ae5b2cfcc6a2">tempest-TestNetworkAdvancedServerOps-597114071-project-member</nova:user>
Oct 02 12:21:31 compute-0 nova_compute[192079]:         <nova:project uuid="76c7dd40d83e4e3ca71abbebf57921b6">tempest-TestNetworkAdvancedServerOps-597114071</nova:project>
Oct 02 12:21:31 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:21:31 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:21:31 compute-0 nova_compute[192079]:         <nova:port uuid="fd508257-51ca-4c61-9340-029f9a9e7a75">
Oct 02 12:21:31 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.10" ipVersion="4"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:21:31 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:21:31 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:21:31 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <system>
Oct 02 12:21:31 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:21:31 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:21:31 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:21:31 compute-0 nova_compute[192079]:       <entry name="serial">2eb08e64-4af9-4c5f-9817-b24d5e5ccce2</entry>
Oct 02 12:21:31 compute-0 nova_compute[192079]:       <entry name="uuid">2eb08e64-4af9-4c5f-9817-b24d5e5ccce2</entry>
Oct 02 12:21:31 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     </system>
Oct 02 12:21:31 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:21:31 compute-0 nova_compute[192079]:   <os>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:   </os>
Oct 02 12:21:31 compute-0 nova_compute[192079]:   <features>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:   </features>
Oct 02 12:21:31 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:21:31 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:21:31 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:21:31 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/disk"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:21:31 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/disk.config"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:21:31 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:5e:8b:77"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:       <target dev="tapfd508257-51"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:21:31 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/console.log" append="off"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <video>
Oct 02 12:21:31 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     </video>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:21:31 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:21:31 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:21:31 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:21:31 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:21:31 compute-0 nova_compute[192079]: </domain>
Oct 02 12:21:31 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.823 2 DEBUG nova.compute.manager [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Preparing to wait for external event network-vif-plugged-fd508257-51ca-4c61-9340-029f9a9e7a75 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.824 2 DEBUG oslo_concurrency.lockutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.824 2 DEBUG oslo_concurrency.lockutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.824 2 DEBUG oslo_concurrency.lockutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.825 2 DEBUG nova.virt.libvirt.vif [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:21:25Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestNetworkAdvancedServerOps-server-1275898317',display_name='tempest-TestNetworkAdvancedServerOps-server-1275898317',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkadvancedserverops-server-1275898317',id=112,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBOCz+7JmNyQy7JdP1IjSwu02/HePNAJvzHsZBcv8XH13dMGPNzBUuwrRU02GRGGFMvEIz5Lu1u/RVTlkdJCGXW3q1BcgXBVQzMFZYW+dEdgXTOuU2vWkRuKj+JzgzmR88A==',key_name='tempest-TestNetworkAdvancedServerOps-217202803',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='76c7dd40d83e4e3ca71abbebf57921b6',ramdisk_id='',reservation_id='r-x0vz0bnp',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestNetworkAdvancedServerOps-597114071',owner_user_name='tempest-TestNetworkAdvancedServerOps-597114071-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:21:27Z,user_data=None,user_id='1faa7e121a0e43ad8cb4ae5b2cfcc6a2',uuid=2eb08e64-4af9-4c5f-9817-b24d5e5ccce2,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "fd508257-51ca-4c61-9340-029f9a9e7a75", "address": "fa:16:3e:5e:8b:77", "network": {"id": "043fc82b-ca25-47f8-a78d-d7118d064ecd", "bridge": "br-int", "label": "tempest-network-smoke--1375280567", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapfd508257-51", "ovs_interfaceid": "fd508257-51ca-4c61-9340-029f9a9e7a75", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.825 2 DEBUG nova.network.os_vif_util [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converting VIF {"id": "fd508257-51ca-4c61-9340-029f9a9e7a75", "address": "fa:16:3e:5e:8b:77", "network": {"id": "043fc82b-ca25-47f8-a78d-d7118d064ecd", "bridge": "br-int", "label": "tempest-network-smoke--1375280567", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapfd508257-51", "ovs_interfaceid": "fd508257-51ca-4c61-9340-029f9a9e7a75", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.825 2 DEBUG nova.network.os_vif_util [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:5e:8b:77,bridge_name='br-int',has_traffic_filtering=True,id=fd508257-51ca-4c61-9340-029f9a9e7a75,network=Network(043fc82b-ca25-47f8-a78d-d7118d064ecd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapfd508257-51') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.826 2 DEBUG os_vif [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:5e:8b:77,bridge_name='br-int',has_traffic_filtering=True,id=fd508257-51ca-4c61-9340-029f9a9e7a75,network=Network(043fc82b-ca25-47f8-a78d-d7118d064ecd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapfd508257-51') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.826 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.826 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.827 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.829 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.829 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapfd508257-51, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.829 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapfd508257-51, col_values=(('external_ids', {'iface-id': 'fd508257-51ca-4c61-9340-029f9a9e7a75', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:5e:8b:77', 'vm-uuid': '2eb08e64-4af9-4c5f-9817-b24d5e5ccce2'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.830 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:31 compute-0 NetworkManager[51160]: <info>  [1759407691.8315] manager: (tapfd508257-51): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/177)
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.833 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.835 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.836 2 INFO os_vif [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:5e:8b:77,bridge_name='br-int',has_traffic_filtering=True,id=fd508257-51ca-4c61-9340-029f9a9e7a75,network=Network(043fc82b-ca25-47f8-a78d-d7118d064ecd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapfd508257-51')
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.891 2 DEBUG nova.virt.libvirt.driver [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.891 2 DEBUG nova.virt.libvirt.driver [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.891 2 DEBUG nova.virt.libvirt.driver [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] No VIF found with MAC fa:16:3e:5e:8b:77, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:21:31 compute-0 nova_compute[192079]: 2025-10-02 12:21:31.892 2 INFO nova.virt.libvirt.driver [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Using config drive
Oct 02 12:21:33 compute-0 nova_compute[192079]: 2025-10-02 12:21:33.392 2 INFO nova.virt.libvirt.driver [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Creating config drive at /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/disk.config
Oct 02 12:21:33 compute-0 nova_compute[192079]: 2025-10-02 12:21:33.397 2 DEBUG oslo_concurrency.processutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpwi9ltgay execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:21:33 compute-0 nova_compute[192079]: 2025-10-02 12:21:33.524 2 DEBUG oslo_concurrency.processutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpwi9ltgay" returned: 0 in 0.127s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:21:33 compute-0 kernel: tapfd508257-51: entered promiscuous mode
Oct 02 12:21:33 compute-0 NetworkManager[51160]: <info>  [1759407693.5902] manager: (tapfd508257-51): new Tun device (/org/freedesktop/NetworkManager/Devices/178)
Oct 02 12:21:33 compute-0 ovn_controller[94336]: 2025-10-02T12:21:33Z|00353|binding|INFO|Claiming lport fd508257-51ca-4c61-9340-029f9a9e7a75 for this chassis.
Oct 02 12:21:33 compute-0 ovn_controller[94336]: 2025-10-02T12:21:33Z|00354|binding|INFO|fd508257-51ca-4c61-9340-029f9a9e7a75: Claiming fa:16:3e:5e:8b:77 10.100.0.10
Oct 02 12:21:33 compute-0 nova_compute[192079]: 2025-10-02 12:21:33.594 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:33.610 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:5e:8b:77 10.100.0.10'], port_security=['fa:16:3e:5e:8b:77 10.100.0.10'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.10/28', 'neutron:device_id': '2eb08e64-4af9-4c5f-9817-b24d5e5ccce2', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-043fc82b-ca25-47f8-a78d-d7118d064ecd', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '76c7dd40d83e4e3ca71abbebf57921b6', 'neutron:revision_number': '2', 'neutron:security_group_ids': 'cc787597-8604-4a47-984f-e7d594779894', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=007edb9e-bf02-4e5b-b812-8540d6b44a38, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=fd508257-51ca-4c61-9340-029f9a9e7a75) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:33.611 103294 INFO neutron.agent.ovn.metadata.agent [-] Port fd508257-51ca-4c61-9340-029f9a9e7a75 in datapath 043fc82b-ca25-47f8-a78d-d7118d064ecd bound to our chassis
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:33.613 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 043fc82b-ca25-47f8-a78d-d7118d064ecd
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:33.625 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d0f347af-7b24-4e82-9020-333f062f998d]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:33.627 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap043fc82b-c1 in ovnmeta-043fc82b-ca25-47f8-a78d-d7118d064ecd namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:21:33 compute-0 systemd-udevd[236356]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:33.629 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap043fc82b-c0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:33.629 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[36026c96-4eb5-4e48-862b-68812f429eb1]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:33.630 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[16b31522-0a09-4225-bf0a-d7bbb51e9532]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:21:33 compute-0 NetworkManager[51160]: <info>  [1759407693.6440] device (tapfd508257-51): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:33.642 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[1b8bafd8-0b76-4af1-b603-97d3b16ebf78]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:21:33 compute-0 systemd-machined[152150]: New machine qemu-49-instance-00000070.
Oct 02 12:21:33 compute-0 NetworkManager[51160]: <info>  [1759407693.6460] device (tapfd508257-51): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:21:33 compute-0 nova_compute[192079]: 2025-10-02 12:21:33.648 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:33 compute-0 nova_compute[192079]: 2025-10-02 12:21:33.653 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:33 compute-0 ovn_controller[94336]: 2025-10-02T12:21:33Z|00355|binding|INFO|Setting lport fd508257-51ca-4c61-9340-029f9a9e7a75 ovn-installed in OVS
Oct 02 12:21:33 compute-0 ovn_controller[94336]: 2025-10-02T12:21:33Z|00356|binding|INFO|Setting lport fd508257-51ca-4c61-9340-029f9a9e7a75 up in Southbound
Oct 02 12:21:33 compute-0 nova_compute[192079]: 2025-10-02 12:21:33.657 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:33 compute-0 systemd[1]: Started Virtual Machine qemu-49-instance-00000070.
Oct 02 12:21:33 compute-0 nova_compute[192079]: 2025-10-02 12:21:33.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:33.667 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3fcb4a88-0ceb-4f63-ac11-08ce7c3c817d]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:33.694 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[046904c6-6c5a-4622-b226-7e19962f22d6]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:33.697 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b2da0d0a-ee38-4eaa-9c4b-6e9daeb3a7c4]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:21:33 compute-0 NetworkManager[51160]: <info>  [1759407693.6994] manager: (tap043fc82b-c0): new Veth device (/org/freedesktop/NetworkManager/Devices/179)
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:33.726 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[005a28f6-3eae-477e-91db-e1034661f388]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:33.728 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[7d32b67a-9bb5-4719-aba5-4ec4820a8cd9]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:21:33 compute-0 NetworkManager[51160]: <info>  [1759407693.7473] device (tap043fc82b-c0): carrier: link connected
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:33.751 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[1c454c26-3a5b-4f6a-a957-8eb5581e0211]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:33.765 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[67189a54-4e1f-499f-938e-a2370d514fc8]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap043fc82b-c1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:db:ff:f1'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 116], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 569137, 'reachable_time': 23232, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 236389, 'error': None, 'target': 'ovnmeta-043fc82b-ca25-47f8-a78d-d7118d064ecd', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:33.779 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ab36b492-f40a-49bf-8cfa-2ca8fb6e111a]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fedb:fff1'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 569137, 'tstamp': 569137}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 236390, 'error': None, 'target': 'ovnmeta-043fc82b-ca25-47f8-a78d-d7118d064ecd', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:33.797 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b3cb37b8-1a85-43a5-ac0d-56dd3da29d5a]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap043fc82b-c1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:db:ff:f1'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 116], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 569137, 'reachable_time': 23232, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 236391, 'error': None, 'target': 'ovnmeta-043fc82b-ca25-47f8-a78d-d7118d064ecd', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:33.822 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4e96d4ae-b5e2-4ddc-b78b-8ac8373819e0]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:33.876 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[61315457-094a-488f-b5f8-b4a85854d5fe]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:21:33 compute-0 kernel: tap043fc82b-c0: entered promiscuous mode
Oct 02 12:21:33 compute-0 nova_compute[192079]: 2025-10-02 12:21:33.879 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:33.877 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap043fc82b-c0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:33.877 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:33.877 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap043fc82b-c0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:21:33 compute-0 NetworkManager[51160]: <info>  [1759407693.8802] manager: (tap043fc82b-c0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/180)
Oct 02 12:21:33 compute-0 nova_compute[192079]: 2025-10-02 12:21:33.881 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:33.884 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap043fc82b-c0, col_values=(('external_ids', {'iface-id': 'b26dbb45-d584-4e58-871b-0b97c246a793'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:21:33 compute-0 nova_compute[192079]: 2025-10-02 12:21:33.885 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:33 compute-0 ovn_controller[94336]: 2025-10-02T12:21:33Z|00357|binding|INFO|Releasing lport b26dbb45-d584-4e58-871b-0b97c246a793 from this chassis (sb_readonly=0)
Oct 02 12:21:33 compute-0 nova_compute[192079]: 2025-10-02 12:21:33.896 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:33 compute-0 nova_compute[192079]: 2025-10-02 12:21:33.896 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:33.897 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/043fc82b-ca25-47f8-a78d-d7118d064ecd.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/043fc82b-ca25-47f8-a78d-d7118d064ecd.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:33.898 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[15d4387a-2dd5-403b-a905-887ca6f23b5f]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:33.899 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-043fc82b-ca25-47f8-a78d-d7118d064ecd
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/043fc82b-ca25-47f8-a78d-d7118d064ecd.pid.haproxy
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 043fc82b-ca25-47f8-a78d-d7118d064ecd
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:21:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:33.900 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-043fc82b-ca25-47f8-a78d-d7118d064ecd', 'env', 'PROCESS_TAG=haproxy-043fc82b-ca25-47f8-a78d-d7118d064ecd', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/043fc82b-ca25-47f8-a78d-d7118d064ecd.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:21:34 compute-0 podman[236430]: 2025-10-02 12:21:34.302269633 +0000 UTC m=+0.099710188 container create 0650993395e2be181bd41136cac6ef61aee714a7064feca18b9ae6a5febecd5e (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-043fc82b-ca25-47f8-a78d-d7118d064ecd, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS)
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.322 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407694.3223212, 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:21:34 compute-0 podman[236430]: 2025-10-02 12:21:34.227755673 +0000 UTC m=+0.025196258 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.323 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] VM Started (Lifecycle Event)
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.354 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.357 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407694.323211, 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.357 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] VM Paused (Lifecycle Event)
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.380 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.383 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.403 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:21:34 compute-0 systemd[1]: Started libpod-conmon-0650993395e2be181bd41136cac6ef61aee714a7064feca18b9ae6a5febecd5e.scope.
Oct 02 12:21:34 compute-0 podman[236443]: 2025-10-02 12:21:34.433055829 +0000 UTC m=+0.108746366 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, build-date=2025-08-20T13:12:41, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.buildah.version=1.33.7, version=9.6, container_name=openstack_network_exporter, com.redhat.component=ubi9-minimal-container, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.openshift.tags=minimal rhel9, maintainer=Red Hat, Inc., managed_by=edpm_ansible, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. 
This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., vendor=Red Hat, Inc., architecture=x86_64, release=1755695350, vcs-type=git, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., url=https://catalog.redhat.com/en/search?searchType=containers, distribution-scope=public, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, io.openshift.expose-services=, name=ubi9-minimal, config_id=edpm)
Oct 02 12:21:34 compute-0 podman[236444]: 2025-10-02 12:21:34.433415959 +0000 UTC m=+0.106220117 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=multipathd, io.buildah.version=1.41.3, managed_by=edpm_ansible, config_id=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS)
Oct 02 12:21:34 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:21:34 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/28381737b1b92a0b1de43bc79d7b4f79582cbab7dda09e1044b75ec240406fb3/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.475 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759407679.4746058, 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.476 2 INFO nova.compute.manager [-] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] VM Stopped (Lifecycle Event)
Oct 02 12:21:34 compute-0 podman[236430]: 2025-10-02 12:21:34.490539766 +0000 UTC m=+0.287980341 container init 0650993395e2be181bd41136cac6ef61aee714a7064feca18b9ae6a5febecd5e (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-043fc82b-ca25-47f8-a78d-d7118d064ecd, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001)
Oct 02 12:21:34 compute-0 podman[236430]: 2025-10-02 12:21:34.499225392 +0000 UTC m=+0.296665947 container start 0650993395e2be181bd41136cac6ef61aee714a7064feca18b9ae6a5febecd5e (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-043fc82b-ca25-47f8-a78d-d7118d064ecd, org.label-schema.license=GPLv2, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.500 2 DEBUG nova.compute.manager [None req-4ed36359-5033-4029-99ad-b5c90e35146e - - - - - -] [instance: 35c6bb03-2e70-4705-bfc3-78bdeeaf6c9c] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:21:34 compute-0 neutron-haproxy-ovnmeta-043fc82b-ca25-47f8-a78d-d7118d064ecd[236480]: [NOTICE]   (236489) : New worker (236491) forked
Oct 02 12:21:34 compute-0 neutron-haproxy-ovnmeta-043fc82b-ca25-47f8-a78d-d7118d064ecd[236480]: [NOTICE]   (236489) : Loading success.
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.641 2 DEBUG nova.network.neutron [req-d6077910-decc-47b8-8bab-70315168949d req-730a5fd2-dc65-4036-8091-b7dc99417938 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Updated VIF entry in instance network info cache for port fd508257-51ca-4c61-9340-029f9a9e7a75. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.642 2 DEBUG nova.network.neutron [req-d6077910-decc-47b8-8bab-70315168949d req-730a5fd2-dc65-4036-8091-b7dc99417938 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Updating instance_info_cache with network_info: [{"id": "fd508257-51ca-4c61-9340-029f9a9e7a75", "address": "fa:16:3e:5e:8b:77", "network": {"id": "043fc82b-ca25-47f8-a78d-d7118d064ecd", "bridge": "br-int", "label": "tempest-network-smoke--1375280567", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapfd508257-51", "ovs_interfaceid": "fd508257-51ca-4c61-9340-029f9a9e7a75", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.681 2 DEBUG oslo_concurrency.lockutils [req-d6077910-decc-47b8-8bab-70315168949d req-730a5fd2-dc65-4036-8091-b7dc99417938 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-2eb08e64-4af9-4c5f-9817-b24d5e5ccce2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.958 2 DEBUG nova.compute.manager [req-8932be4f-60fd-4708-accb-3adebce44ea9 req-da4dae3c-821b-43d3-9d6d-841bfc5e1adc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Received event network-vif-plugged-fd508257-51ca-4c61-9340-029f9a9e7a75 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.958 2 DEBUG oslo_concurrency.lockutils [req-8932be4f-60fd-4708-accb-3adebce44ea9 req-da4dae3c-821b-43d3-9d6d-841bfc5e1adc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.959 2 DEBUG oslo_concurrency.lockutils [req-8932be4f-60fd-4708-accb-3adebce44ea9 req-da4dae3c-821b-43d3-9d6d-841bfc5e1adc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.959 2 DEBUG oslo_concurrency.lockutils [req-8932be4f-60fd-4708-accb-3adebce44ea9 req-da4dae3c-821b-43d3-9d6d-841bfc5e1adc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.959 2 DEBUG nova.compute.manager [req-8932be4f-60fd-4708-accb-3adebce44ea9 req-da4dae3c-821b-43d3-9d6d-841bfc5e1adc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Processing event network-vif-plugged-fd508257-51ca-4c61-9340-029f9a9e7a75 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.960 2 DEBUG nova.compute.manager [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.963 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407694.9634569, 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.963 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] VM Resumed (Lifecycle Event)
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.965 2 DEBUG nova.virt.libvirt.driver [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.968 2 INFO nova.virt.libvirt.driver [-] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Instance spawned successfully.
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.969 2 DEBUG nova.virt.libvirt.driver [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.988 2 DEBUG nova.virt.libvirt.driver [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.989 2 DEBUG nova.virt.libvirt.driver [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.989 2 DEBUG nova.virt.libvirt.driver [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.990 2 DEBUG nova.virt.libvirt.driver [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.990 2 DEBUG nova.virt.libvirt.driver [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.991 2 DEBUG nova.virt.libvirt.driver [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.994 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:21:34 compute-0 nova_compute[192079]: 2025-10-02 12:21:34.997 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:21:35 compute-0 nova_compute[192079]: 2025-10-02 12:21:35.026 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:21:35 compute-0 nova_compute[192079]: 2025-10-02 12:21:35.061 2 INFO nova.compute.manager [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Took 7.00 seconds to spawn the instance on the hypervisor.
Oct 02 12:21:35 compute-0 nova_compute[192079]: 2025-10-02 12:21:35.061 2 DEBUG nova.compute.manager [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:21:35 compute-0 nova_compute[192079]: 2025-10-02 12:21:35.157 2 INFO nova.compute.manager [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Took 7.80 seconds to build instance.
Oct 02 12:21:35 compute-0 nova_compute[192079]: 2025-10-02 12:21:35.193 2 DEBUG oslo_concurrency.lockutils [None req-92a8f697-2393-44b3-98f0-1517ebf423fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 8.110s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:21:36 compute-0 nova_compute[192079]: 2025-10-02 12:21:36.756 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:36 compute-0 nova_compute[192079]: 2025-10-02 12:21:36.831 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:37 compute-0 nova_compute[192079]: 2025-10-02 12:21:37.417 2 DEBUG nova.compute.manager [req-85c0f868-37df-4178-9c5a-ec22929baf7a req-1db44e15-83d4-453e-935b-721fd6cf6918 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Received event network-vif-plugged-fd508257-51ca-4c61-9340-029f9a9e7a75 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:21:37 compute-0 nova_compute[192079]: 2025-10-02 12:21:37.418 2 DEBUG oslo_concurrency.lockutils [req-85c0f868-37df-4178-9c5a-ec22929baf7a req-1db44e15-83d4-453e-935b-721fd6cf6918 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:21:37 compute-0 nova_compute[192079]: 2025-10-02 12:21:37.418 2 DEBUG oslo_concurrency.lockutils [req-85c0f868-37df-4178-9c5a-ec22929baf7a req-1db44e15-83d4-453e-935b-721fd6cf6918 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:21:37 compute-0 nova_compute[192079]: 2025-10-02 12:21:37.418 2 DEBUG oslo_concurrency.lockutils [req-85c0f868-37df-4178-9c5a-ec22929baf7a req-1db44e15-83d4-453e-935b-721fd6cf6918 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:21:37 compute-0 nova_compute[192079]: 2025-10-02 12:21:37.418 2 DEBUG nova.compute.manager [req-85c0f868-37df-4178-9c5a-ec22929baf7a req-1db44e15-83d4-453e-935b-721fd6cf6918 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] No waiting events found dispatching network-vif-plugged-fd508257-51ca-4c61-9340-029f9a9e7a75 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:21:37 compute-0 nova_compute[192079]: 2025-10-02 12:21:37.419 2 WARNING nova.compute.manager [req-85c0f868-37df-4178-9c5a-ec22929baf7a req-1db44e15-83d4-453e-935b-721fd6cf6918 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Received unexpected event network-vif-plugged-fd508257-51ca-4c61-9340-029f9a9e7a75 for instance with vm_state active and task_state None.
Oct 02 12:21:39 compute-0 NetworkManager[51160]: <info>  [1759407699.9296] manager: (patch-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d-to-br-int): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/181)
Oct 02 12:21:39 compute-0 NetworkManager[51160]: <info>  [1759407699.9318] manager: (patch-br-int-to-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/182)
Oct 02 12:21:39 compute-0 nova_compute[192079]: 2025-10-02 12:21:39.928 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:40 compute-0 nova_compute[192079]: 2025-10-02 12:21:40.043 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:40 compute-0 ovn_controller[94336]: 2025-10-02T12:21:40Z|00358|binding|INFO|Releasing lport b26dbb45-d584-4e58-871b-0b97c246a793 from this chassis (sb_readonly=0)
Oct 02 12:21:40 compute-0 nova_compute[192079]: 2025-10-02 12:21:40.059 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:40 compute-0 nova_compute[192079]: 2025-10-02 12:21:40.176 2 DEBUG nova.compute.manager [req-b2a5a566-eae8-4c2e-b2b0-7e3a87ca6174 req-3a6eb0cb-52f7-4966-b973-0bc41cd2d9b3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Received event network-changed-fd508257-51ca-4c61-9340-029f9a9e7a75 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:21:40 compute-0 nova_compute[192079]: 2025-10-02 12:21:40.177 2 DEBUG nova.compute.manager [req-b2a5a566-eae8-4c2e-b2b0-7e3a87ca6174 req-3a6eb0cb-52f7-4966-b973-0bc41cd2d9b3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Refreshing instance network info cache due to event network-changed-fd508257-51ca-4c61-9340-029f9a9e7a75. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:21:40 compute-0 nova_compute[192079]: 2025-10-02 12:21:40.177 2 DEBUG oslo_concurrency.lockutils [req-b2a5a566-eae8-4c2e-b2b0-7e3a87ca6174 req-3a6eb0cb-52f7-4966-b973-0bc41cd2d9b3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-2eb08e64-4af9-4c5f-9817-b24d5e5ccce2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:21:40 compute-0 nova_compute[192079]: 2025-10-02 12:21:40.178 2 DEBUG oslo_concurrency.lockutils [req-b2a5a566-eae8-4c2e-b2b0-7e3a87ca6174 req-3a6eb0cb-52f7-4966-b973-0bc41cd2d9b3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-2eb08e64-4af9-4c5f-9817-b24d5e5ccce2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:21:40 compute-0 nova_compute[192079]: 2025-10-02 12:21:40.178 2 DEBUG nova.network.neutron [req-b2a5a566-eae8-4c2e-b2b0-7e3a87ca6174 req-3a6eb0cb-52f7-4966-b973-0bc41cd2d9b3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Refreshing network info cache for port fd508257-51ca-4c61-9340-029f9a9e7a75 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:21:41 compute-0 nova_compute[192079]: 2025-10-02 12:21:41.758 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:41 compute-0 nova_compute[192079]: 2025-10-02 12:21:41.833 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:42 compute-0 podman[236502]: 2025-10-02 12:21:42.17353584 +0000 UTC m=+0.079220921 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=iscsid, container_name=iscsid, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, tcib_managed=true)
Oct 02 12:21:42 compute-0 podman[236501]: 2025-10-02 12:21:42.174217368 +0000 UTC m=+0.077929456 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>)
Oct 02 12:21:42 compute-0 nova_compute[192079]: 2025-10-02 12:21:42.485 2 DEBUG nova.network.neutron [req-b2a5a566-eae8-4c2e-b2b0-7e3a87ca6174 req-3a6eb0cb-52f7-4966-b973-0bc41cd2d9b3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Updated VIF entry in instance network info cache for port fd508257-51ca-4c61-9340-029f9a9e7a75. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:21:42 compute-0 nova_compute[192079]: 2025-10-02 12:21:42.486 2 DEBUG nova.network.neutron [req-b2a5a566-eae8-4c2e-b2b0-7e3a87ca6174 req-3a6eb0cb-52f7-4966-b973-0bc41cd2d9b3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Updating instance_info_cache with network_info: [{"id": "fd508257-51ca-4c61-9340-029f9a9e7a75", "address": "fa:16:3e:5e:8b:77", "network": {"id": "043fc82b-ca25-47f8-a78d-d7118d064ecd", "bridge": "br-int", "label": "tempest-network-smoke--1375280567", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.242", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapfd508257-51", "ovs_interfaceid": "fd508257-51ca-4c61-9340-029f9a9e7a75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:21:42 compute-0 nova_compute[192079]: 2025-10-02 12:21:42.509 2 DEBUG oslo_concurrency.lockutils [req-b2a5a566-eae8-4c2e-b2b0-7e3a87ca6174 req-3a6eb0cb-52f7-4966-b973-0bc41cd2d9b3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-2eb08e64-4af9-4c5f-9817-b24d5e5ccce2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:21:46 compute-0 nova_compute[192079]: 2025-10-02 12:21:46.762 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:46 compute-0 nova_compute[192079]: 2025-10-02 12:21:46.835 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:50 compute-0 podman[236560]: 2025-10-02 12:21:50.133934035 +0000 UTC m=+0.046527218 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_id=ovn_metadata_agent, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, container_name=ovn_metadata_agent, 
org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:21:50 compute-0 podman[236562]: 2025-10-02 12:21:50.144562205 +0000 UTC m=+0.048094012 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 12:21:50 compute-0 podman[236561]: 2025-10-02 12:21:50.210955416 +0000 UTC m=+0.108601132 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_controller, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, managed_by=edpm_ansible, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:21:50 compute-0 ovn_controller[94336]: 2025-10-02T12:21:50Z|00035|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:5e:8b:77 10.100.0.10
Oct 02 12:21:50 compute-0 ovn_controller[94336]: 2025-10-02T12:21:50Z|00036|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:5e:8b:77 10.100.0.10
Oct 02 12:21:51 compute-0 nova_compute[192079]: 2025-10-02 12:21:51.765 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:51 compute-0 nova_compute[192079]: 2025-10-02 12:21:51.837 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:56 compute-0 nova_compute[192079]: 2025-10-02 12:21:56.767 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:56 compute-0 nova_compute[192079]: 2025-10-02 12:21:56.838 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:58 compute-0 nova_compute[192079]: 2025-10-02 12:21:58.066 2 INFO nova.compute.manager [None req-6cda5671-df23-4291-a5be-6e2dbb528df2 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Get console output
Oct 02 12:21:58 compute-0 nova_compute[192079]: 2025-10-02 12:21:58.162 55 INFO nova.privsep.libvirt [-] Ignored error while reading from instance console pty: can't concat NoneType to bytes
Oct 02 12:21:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:58.554 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=26, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=25) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:21:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:21:58.555 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 5 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:21:58 compute-0 nova_compute[192079]: 2025-10-02 12:21:58.606 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:21:59 compute-0 podman[236623]: 2025-10-02 12:21:59.135941999 +0000 UTC m=+0.055125254 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, 
org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:22:00 compute-0 nova_compute[192079]: 2025-10-02 12:22:00.220 2 INFO nova.compute.manager [None req-e2455d4e-70f4-4018-9250-1089dc1d59e9 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Get console output
Oct 02 12:22:00 compute-0 nova_compute[192079]: 2025-10-02 12:22:00.225 55 INFO nova.privsep.libvirt [-] Ignored error while reading from instance console pty: can't concat NoneType to bytes
Oct 02 12:22:01 compute-0 nova_compute[192079]: 2025-10-02 12:22:01.769 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:01 compute-0 nova_compute[192079]: 2025-10-02 12:22:01.840 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:02.221 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:22:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:02.222 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:22:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:02.222 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:22:03 compute-0 nova_compute[192079]: 2025-10-02 12:22:03.042 2 DEBUG oslo_concurrency.lockutils [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Acquiring lock "refresh_cache-2eb08e64-4af9-4c5f-9817-b24d5e5ccce2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:22:03 compute-0 nova_compute[192079]: 2025-10-02 12:22:03.043 2 DEBUG oslo_concurrency.lockutils [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Acquired lock "refresh_cache-2eb08e64-4af9-4c5f-9817-b24d5e5ccce2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:22:03 compute-0 nova_compute[192079]: 2025-10-02 12:22:03.043 2 DEBUG nova.network.neutron [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:22:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:03.556 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '26'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:22:05 compute-0 podman[236645]: 2025-10-02 12:22:05.141981887 +0000 UTC m=+0.057496989 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, com.redhat.component=ubi9-minimal-container, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., url=https://catalog.redhat.com/en/search?searchType=containers, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., managed_by=edpm_ansible, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, distribution-scope=public, io.openshift.tags=minimal rhel9, container_name=openstack_network_exporter, description=The Universal Base Image Minimal is a stripped down image that 
uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., config_id=edpm, maintainer=Red Hat, Inc., com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.buildah.version=1.33.7, release=1755695350, architecture=x86_64, name=ubi9-minimal, version=9.6, io.openshift.expose-services=, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, vcs-type=git, build-date=2025-08-20T13:12:41, vendor=Red Hat, Inc.)
Oct 02 12:22:05 compute-0 podman[236646]: 2025-10-02 12:22:05.157787627 +0000 UTC m=+0.066651467 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_id=multipathd, container_name=multipathd, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, tcib_managed=true)
Oct 02 12:22:05 compute-0 nova_compute[192079]: 2025-10-02 12:22:05.374 2 DEBUG nova.network.neutron [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Updating instance_info_cache with network_info: [{"id": "fd508257-51ca-4c61-9340-029f9a9e7a75", "address": "fa:16:3e:5e:8b:77", "network": {"id": "043fc82b-ca25-47f8-a78d-d7118d064ecd", "bridge": "br-int", "label": "tempest-network-smoke--1375280567", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.242", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapfd508257-51", "ovs_interfaceid": "fd508257-51ca-4c61-9340-029f9a9e7a75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:22:05 compute-0 nova_compute[192079]: 2025-10-02 12:22:05.393 2 DEBUG oslo_concurrency.lockutils [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Releasing lock "refresh_cache-2eb08e64-4af9-4c5f-9817-b24d5e5ccce2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:22:05 compute-0 nova_compute[192079]: 2025-10-02 12:22:05.528 2 DEBUG nova.virt.libvirt.driver [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Starting migrate_disk_and_power_off migrate_disk_and_power_off /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:11511
Oct 02 12:22:05 compute-0 nova_compute[192079]: 2025-10-02 12:22:05.529 2 DEBUG nova.virt.libvirt.volume.remotefs [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Creating file /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/e6f6056887c8483a8e0e5d0bd4b3dde8.tmp on remote host 192.168.122.101 create_file /usr/lib/python3.9/site-packages/nova/virt/libvirt/volume/remotefs.py:79
Oct 02 12:22:05 compute-0 nova_compute[192079]: 2025-10-02 12:22:05.529 2 DEBUG oslo_concurrency.processutils [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Running cmd (subprocess): ssh -o BatchMode=yes 192.168.122.101 touch /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/e6f6056887c8483a8e0e5d0bd4b3dde8.tmp execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:22:05 compute-0 nova_compute[192079]: 2025-10-02 12:22:05.952 2 DEBUG oslo_concurrency.processutils [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] CMD "ssh -o BatchMode=yes 192.168.122.101 touch /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/e6f6056887c8483a8e0e5d0bd4b3dde8.tmp" returned: 1 in 0.423s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:22:05 compute-0 nova_compute[192079]: 2025-10-02 12:22:05.952 2 DEBUG oslo_concurrency.processutils [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] 'ssh -o BatchMode=yes 192.168.122.101 touch /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/e6f6056887c8483a8e0e5d0bd4b3dde8.tmp' failed. Not Retrying. execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:473
Oct 02 12:22:05 compute-0 nova_compute[192079]: 2025-10-02 12:22:05.953 2 DEBUG nova.virt.libvirt.volume.remotefs [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Creating directory /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2 on remote host 192.168.122.101 create_dir /usr/lib/python3.9/site-packages/nova/virt/libvirt/volume/remotefs.py:91
Oct 02 12:22:05 compute-0 nova_compute[192079]: 2025-10-02 12:22:05.953 2 DEBUG oslo_concurrency.processutils [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Running cmd (subprocess): ssh -o BatchMode=yes 192.168.122.101 mkdir -p /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:22:06 compute-0 nova_compute[192079]: 2025-10-02 12:22:06.154 2 DEBUG oslo_concurrency.processutils [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] CMD "ssh -o BatchMode=yes 192.168.122.101 mkdir -p /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2" returned: 0 in 0.200s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:22:06 compute-0 nova_compute[192079]: 2025-10-02 12:22:06.157 2 DEBUG nova.virt.libvirt.driver [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Shutting down instance from state 1 _clean_shutdown /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4071
Oct 02 12:22:06 compute-0 nova_compute[192079]: 2025-10-02 12:22:06.771 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:06 compute-0 nova_compute[192079]: 2025-10-02 12:22:06.842 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:07 compute-0 nova_compute[192079]: 2025-10-02 12:22:07.604 2 DEBUG nova.compute.manager [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Stashing vm_state: stopped _prep_resize /usr/lib/python3.9/site-packages/nova/compute/manager.py:5560
Oct 02 12:22:07 compute-0 nova_compute[192079]: 2025-10-02 12:22:07.835 2 DEBUG oslo_concurrency.lockutils [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:22:07 compute-0 nova_compute[192079]: 2025-10-02 12:22:07.836 2 DEBUG oslo_concurrency.lockutils [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.002s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:22:07 compute-0 nova_compute[192079]: 2025-10-02 12:22:07.871 2 DEBUG nova.objects.instance [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lazy-loading 'pci_requests' on Instance uuid ad2d69bb-3aa9-4c11-b9de-29996574cfa2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:22:07 compute-0 nova_compute[192079]: 2025-10-02 12:22:07.897 2 DEBUG nova.virt.hardware [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:22:07 compute-0 nova_compute[192079]: 2025-10-02 12:22:07.898 2 INFO nova.compute.claims [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:22:07 compute-0 nova_compute[192079]: 2025-10-02 12:22:07.898 2 DEBUG nova.objects.instance [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lazy-loading 'resources' on Instance uuid ad2d69bb-3aa9-4c11-b9de-29996574cfa2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:22:07 compute-0 nova_compute[192079]: 2025-10-02 12:22:07.909 2 DEBUG nova.objects.instance [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lazy-loading 'pci_devices' on Instance uuid ad2d69bb-3aa9-4c11-b9de-29996574cfa2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:22:07 compute-0 nova_compute[192079]: 2025-10-02 12:22:07.958 2 INFO nova.compute.resource_tracker [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Updating resource usage from migration 630aba62-6e03-48d5-8063-37553a2a143a
Oct 02 12:22:07 compute-0 nova_compute[192079]: 2025-10-02 12:22:07.959 2 DEBUG nova.compute.resource_tracker [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Starting to track incoming migration 630aba62-6e03-48d5-8063-37553a2a143a with flavor 9949d9da-6314-4ede-8797-6f2f0a6a64fc _update_usage_from_migration /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1431
Oct 02 12:22:08 compute-0 nova_compute[192079]: 2025-10-02 12:22:08.045 2 DEBUG nova.compute.provider_tree [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:22:08 compute-0 nova_compute[192079]: 2025-10-02 12:22:08.064 2 DEBUG nova.scheduler.client.report [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:22:08 compute-0 nova_compute[192079]: 2025-10-02 12:22:08.083 2 DEBUG oslo_concurrency.lockutils [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 0.247s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:22:08 compute-0 nova_compute[192079]: 2025-10-02 12:22:08.084 2 INFO nova.compute.manager [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Migrating
Oct 02 12:22:08 compute-0 kernel: tapfd508257-51 (unregistering): left promiscuous mode
Oct 02 12:22:08 compute-0 NetworkManager[51160]: <info>  [1759407728.3454] device (tapfd508257-51): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:22:08 compute-0 ovn_controller[94336]: 2025-10-02T12:22:08Z|00359|binding|INFO|Releasing lport fd508257-51ca-4c61-9340-029f9a9e7a75 from this chassis (sb_readonly=0)
Oct 02 12:22:08 compute-0 ovn_controller[94336]: 2025-10-02T12:22:08Z|00360|binding|INFO|Setting lport fd508257-51ca-4c61-9340-029f9a9e7a75 down in Southbound
Oct 02 12:22:08 compute-0 ovn_controller[94336]: 2025-10-02T12:22:08Z|00361|binding|INFO|Removing iface tapfd508257-51 ovn-installed in OVS
Oct 02 12:22:08 compute-0 nova_compute[192079]: 2025-10-02 12:22:08.358 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:08 compute-0 nova_compute[192079]: 2025-10-02 12:22:08.375 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:08 compute-0 systemd[1]: machine-qemu\x2d49\x2dinstance\x2d00000070.scope: Deactivated successfully.
Oct 02 12:22:08 compute-0 systemd[1]: machine-qemu\x2d49\x2dinstance\x2d00000070.scope: Consumed 14.975s CPU time.
Oct 02 12:22:08 compute-0 systemd-machined[152150]: Machine qemu-49-instance-00000070 terminated.
Oct 02 12:22:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:08.427 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:5e:8b:77 10.100.0.10'], port_security=['fa:16:3e:5e:8b:77 10.100.0.10'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.10/28', 'neutron:device_id': '2eb08e64-4af9-4c5f-9817-b24d5e5ccce2', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-043fc82b-ca25-47f8-a78d-d7118d064ecd', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '76c7dd40d83e4e3ca71abbebf57921b6', 'neutron:revision_number': '4', 'neutron:security_group_ids': 'cc787597-8604-4a47-984f-e7d594779894', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com', 'neutron:port_fip': '192.168.122.242'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=007edb9e-bf02-4e5b-b812-8540d6b44a38, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=fd508257-51ca-4c61-9340-029f9a9e7a75) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:22:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:08.429 103294 INFO neutron.agent.ovn.metadata.agent [-] Port fd508257-51ca-4c61-9340-029f9a9e7a75 in datapath 043fc82b-ca25-47f8-a78d-d7118d064ecd unbound from our chassis
Oct 02 12:22:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:08.430 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 043fc82b-ca25-47f8-a78d-d7118d064ecd, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:22:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:08.430 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[90e832b0-d5e2-4521-8537-50fa96c5ccab]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:08.431 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-043fc82b-ca25-47f8-a78d-d7118d064ecd namespace which is not needed anymore
Oct 02 12:22:08 compute-0 neutron-haproxy-ovnmeta-043fc82b-ca25-47f8-a78d-d7118d064ecd[236480]: [NOTICE]   (236489) : haproxy version is 2.8.14-c23fe91
Oct 02 12:22:08 compute-0 neutron-haproxy-ovnmeta-043fc82b-ca25-47f8-a78d-d7118d064ecd[236480]: [NOTICE]   (236489) : path to executable is /usr/sbin/haproxy
Oct 02 12:22:08 compute-0 neutron-haproxy-ovnmeta-043fc82b-ca25-47f8-a78d-d7118d064ecd[236480]: [ALERT]    (236489) : Current worker (236491) exited with code 143 (Terminated)
Oct 02 12:22:08 compute-0 neutron-haproxy-ovnmeta-043fc82b-ca25-47f8-a78d-d7118d064ecd[236480]: [WARNING]  (236489) : All workers exited. Exiting... (0)
Oct 02 12:22:08 compute-0 systemd[1]: libpod-0650993395e2be181bd41136cac6ef61aee714a7064feca18b9ae6a5febecd5e.scope: Deactivated successfully.
Oct 02 12:22:08 compute-0 podman[236711]: 2025-10-02 12:22:08.635396704 +0000 UTC m=+0.065510907 container died 0650993395e2be181bd41136cac6ef61aee714a7064feca18b9ae6a5febecd5e (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-043fc82b-ca25-47f8-a78d-d7118d064ecd, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true)
Oct 02 12:22:08 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-0650993395e2be181bd41136cac6ef61aee714a7064feca18b9ae6a5febecd5e-userdata-shm.mount: Deactivated successfully.
Oct 02 12:22:08 compute-0 systemd[1]: var-lib-containers-storage-overlay-28381737b1b92a0b1de43bc79d7b4f79582cbab7dda09e1044b75ec240406fb3-merged.mount: Deactivated successfully.
Oct 02 12:22:08 compute-0 podman[236711]: 2025-10-02 12:22:08.69611656 +0000 UTC m=+0.126230743 container cleanup 0650993395e2be181bd41136cac6ef61aee714a7064feca18b9ae6a5febecd5e (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-043fc82b-ca25-47f8-a78d-d7118d064ecd, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.schema-version=1.0)
Oct 02 12:22:08 compute-0 systemd[1]: libpod-conmon-0650993395e2be181bd41136cac6ef61aee714a7064feca18b9ae6a5febecd5e.scope: Deactivated successfully.
Oct 02 12:22:08 compute-0 podman[236757]: 2025-10-02 12:22:08.768504552 +0000 UTC m=+0.050754054 container remove 0650993395e2be181bd41136cac6ef61aee714a7064feca18b9ae6a5febecd5e (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-043fc82b-ca25-47f8-a78d-d7118d064ecd, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, tcib_managed=true)
Oct 02 12:22:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:08.774 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[66d2f4e0-40f3-4d48-89c7-afe6a2a8ab0b]: (4, ('Thu Oct  2 12:22:08 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-043fc82b-ca25-47f8-a78d-d7118d064ecd (0650993395e2be181bd41136cac6ef61aee714a7064feca18b9ae6a5febecd5e)\n0650993395e2be181bd41136cac6ef61aee714a7064feca18b9ae6a5febecd5e\nThu Oct  2 12:22:08 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-043fc82b-ca25-47f8-a78d-d7118d064ecd (0650993395e2be181bd41136cac6ef61aee714a7064feca18b9ae6a5febecd5e)\n0650993395e2be181bd41136cac6ef61aee714a7064feca18b9ae6a5febecd5e\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:08.776 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7fa827fc-eaac-44c1-8edf-a6845c5c7561]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:08.777 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap043fc82b-c0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:22:08 compute-0 nova_compute[192079]: 2025-10-02 12:22:08.779 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:08 compute-0 kernel: tap043fc82b-c0: left promiscuous mode
Oct 02 12:22:08 compute-0 nova_compute[192079]: 2025-10-02 12:22:08.799 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:08.802 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[18f4e309-49eb-4b40-b41c-09d4935ed902]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:08.828 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[393708b9-c1ea-4c83-9cbd-c4b04454eae6]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:08.830 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[418d4db5-309a-4b17-9ce5-5fa7a75f45df]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:08.845 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3b9405a4-c1fa-4ffe-8b0e-35ff8b062d5b]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 569131, 'reachable_time': 35772, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 236775, 'error': None, 'target': 'ovnmeta-043fc82b-ca25-47f8-a78d-d7118d064ecd', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:08 compute-0 systemd[1]: run-netns-ovnmeta\x2d043fc82b\x2dca25\x2d47f8\x2da78d\x2dd7118d064ecd.mount: Deactivated successfully.
Oct 02 12:22:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:08.848 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-043fc82b-ca25-47f8-a78d-d7118d064ecd deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:22:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:08.848 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[f951ef3e-7854-4d5d-9223-868c3bf38798]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:08 compute-0 nova_compute[192079]: 2025-10-02 12:22:08.938 2 DEBUG nova.compute.manager [req-3f92c72d-98e6-4610-9983-ab48f180ca80 req-3cd30478-b8fd-4a39-aa1a-c6b4583f7a45 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Received event network-vif-unplugged-fd508257-51ca-4c61-9340-029f9a9e7a75 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:22:08 compute-0 nova_compute[192079]: 2025-10-02 12:22:08.939 2 DEBUG oslo_concurrency.lockutils [req-3f92c72d-98e6-4610-9983-ab48f180ca80 req-3cd30478-b8fd-4a39-aa1a-c6b4583f7a45 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:22:08 compute-0 nova_compute[192079]: 2025-10-02 12:22:08.939 2 DEBUG oslo_concurrency.lockutils [req-3f92c72d-98e6-4610-9983-ab48f180ca80 req-3cd30478-b8fd-4a39-aa1a-c6b4583f7a45 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:22:08 compute-0 nova_compute[192079]: 2025-10-02 12:22:08.939 2 DEBUG oslo_concurrency.lockutils [req-3f92c72d-98e6-4610-9983-ab48f180ca80 req-3cd30478-b8fd-4a39-aa1a-c6b4583f7a45 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:22:08 compute-0 nova_compute[192079]: 2025-10-02 12:22:08.940 2 DEBUG nova.compute.manager [req-3f92c72d-98e6-4610-9983-ab48f180ca80 req-3cd30478-b8fd-4a39-aa1a-c6b4583f7a45 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] No waiting events found dispatching network-vif-unplugged-fd508257-51ca-4c61-9340-029f9a9e7a75 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:22:08 compute-0 nova_compute[192079]: 2025-10-02 12:22:08.940 2 WARNING nova.compute.manager [req-3f92c72d-98e6-4610-9983-ab48f180ca80 req-3cd30478-b8fd-4a39-aa1a-c6b4583f7a45 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Received unexpected event network-vif-unplugged-fd508257-51ca-4c61-9340-029f9a9e7a75 for instance with vm_state active and task_state resize_migrating.
Oct 02 12:22:09 compute-0 nova_compute[192079]: 2025-10-02 12:22:09.175 2 INFO nova.virt.libvirt.driver [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Instance shutdown successfully after 3 seconds.
Oct 02 12:22:09 compute-0 nova_compute[192079]: 2025-10-02 12:22:09.181 2 INFO nova.virt.libvirt.driver [-] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Instance destroyed successfully.
Oct 02 12:22:09 compute-0 nova_compute[192079]: 2025-10-02 12:22:09.182 2 DEBUG nova.virt.libvirt.vif [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:21:25Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestNetworkAdvancedServerOps-server-1275898317',display_name='tempest-TestNetworkAdvancedServerOps-server-1275898317',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkadvancedserverops-server-1275898317',id=112,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBOCz+7JmNyQy7JdP1IjSwu02/HePNAJvzHsZBcv8XH13dMGPNzBUuwrRU02GRGGFMvEIz5Lu1u/RVTlkdJCGXW3q1BcgXBVQzMFZYW+dEdgXTOuU2vWkRuKj+JzgzmR88A==',key_name='tempest-TestNetworkAdvancedServerOps-217202803',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:21:35Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=MigrationContext,new_flavor=Flavor(1),node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='76c7dd40d83e4e3ca71abbebf57921b6',ramdisk_id='',reservation_id='r-x0vz0bnp',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=ServiceList,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',old_vm_state='active',owner_project_name='tempest-TestNetworkAdvancedServerOps-597114071',owner_user_name='tempest-TestNetworkAdvancedServerOps-597114071-project-member'},tags=<?>,task_state='resize_migrating',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:22:02Z,user_data=None,user_id='1faa7e121a0e43ad8cb4ae5b2cfcc6a2',uuid=2eb08e64-4af9-4c5f-9817-b24d5e5ccce2,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "fd508257-51ca-4c61-9340-029f9a9e7a75", "address": "fa:16:3e:5e:8b:77", "network": {"id": "043fc82b-ca25-47f8-a78d-d7118d064ecd", "bridge": "br-int", "label": "tempest-network-smoke--1375280567", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": 
"10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.242", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-network-smoke--1375280567", "vif_mac": "fa:16:3e:5e:8b:77"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapfd508257-51", "ovs_interfaceid": "fd508257-51ca-4c61-9340-029f9a9e7a75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:22:09 compute-0 nova_compute[192079]: 2025-10-02 12:22:09.182 2 DEBUG nova.network.os_vif_util [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Converting VIF {"id": "fd508257-51ca-4c61-9340-029f9a9e7a75", "address": "fa:16:3e:5e:8b:77", "network": {"id": "043fc82b-ca25-47f8-a78d-d7118d064ecd", "bridge": "br-int", "label": "tempest-network-smoke--1375280567", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.242", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-network-smoke--1375280567", "vif_mac": "fa:16:3e:5e:8b:77"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapfd508257-51", "ovs_interfaceid": "fd508257-51ca-4c61-9340-029f9a9e7a75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:22:09 compute-0 nova_compute[192079]: 2025-10-02 12:22:09.183 2 DEBUG nova.network.os_vif_util [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:5e:8b:77,bridge_name='br-int',has_traffic_filtering=True,id=fd508257-51ca-4c61-9340-029f9a9e7a75,network=Network(043fc82b-ca25-47f8-a78d-d7118d064ecd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapfd508257-51') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:22:09 compute-0 nova_compute[192079]: 2025-10-02 12:22:09.183 2 DEBUG os_vif [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:5e:8b:77,bridge_name='br-int',has_traffic_filtering=True,id=fd508257-51ca-4c61-9340-029f9a9e7a75,network=Network(043fc82b-ca25-47f8-a78d-d7118d064ecd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapfd508257-51') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:22:09 compute-0 nova_compute[192079]: 2025-10-02 12:22:09.185 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:09 compute-0 nova_compute[192079]: 2025-10-02 12:22:09.185 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapfd508257-51, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:22:09 compute-0 nova_compute[192079]: 2025-10-02 12:22:09.186 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:09 compute-0 nova_compute[192079]: 2025-10-02 12:22:09.188 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:09 compute-0 nova_compute[192079]: 2025-10-02 12:22:09.192 2 INFO os_vif [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:5e:8b:77,bridge_name='br-int',has_traffic_filtering=True,id=fd508257-51ca-4c61-9340-029f9a9e7a75,network=Network(043fc82b-ca25-47f8-a78d-d7118d064ecd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapfd508257-51')
Oct 02 12:22:09 compute-0 nova_compute[192079]: 2025-10-02 12:22:09.196 2 DEBUG oslo_concurrency.processutils [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:22:09 compute-0 nova_compute[192079]: 2025-10-02 12:22:09.278 2 DEBUG oslo_concurrency.processutils [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/disk --force-share --output=json" returned: 0 in 0.081s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:22:09 compute-0 nova_compute[192079]: 2025-10-02 12:22:09.279 2 DEBUG oslo_concurrency.processutils [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:22:09 compute-0 nova_compute[192079]: 2025-10-02 12:22:09.330 2 DEBUG oslo_concurrency.processutils [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/disk --force-share --output=json" returned: 0 in 0.051s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:22:09 compute-0 nova_compute[192079]: 2025-10-02 12:22:09.331 2 DEBUG nova.virt.libvirt.volume.remotefs [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Copying file /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2_resize/disk to 192.168.122.101:/var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/disk copy_file /usr/lib/python3.9/site-packages/nova/virt/libvirt/volume/remotefs.py:103
Oct 02 12:22:09 compute-0 nova_compute[192079]: 2025-10-02 12:22:09.332 2 DEBUG oslo_concurrency.processutils [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Running cmd (subprocess): scp -r /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2_resize/disk 192.168.122.101:/var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/disk execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:22:10 compute-0 nova_compute[192079]: 2025-10-02 12:22:10.026 2 DEBUG oslo_concurrency.processutils [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] CMD "scp -r /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2_resize/disk 192.168.122.101:/var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/disk" returned: 0 in 0.695s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:22:10 compute-0 nova_compute[192079]: 2025-10-02 12:22:10.027 2 DEBUG nova.virt.libvirt.volume.remotefs [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Copying file /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2_resize/disk.config to 192.168.122.101:/var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/disk.config copy_file /usr/lib/python3.9/site-packages/nova/virt/libvirt/volume/remotefs.py:103
Oct 02 12:22:10 compute-0 nova_compute[192079]: 2025-10-02 12:22:10.028 2 DEBUG oslo_concurrency.processutils [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Running cmd (subprocess): scp -C -r /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2_resize/disk.config 192.168.122.101:/var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/disk.config execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:22:10 compute-0 nova_compute[192079]: 2025-10-02 12:22:10.289 2 DEBUG oslo_concurrency.processutils [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] CMD "scp -C -r /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2_resize/disk.config 192.168.122.101:/var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/disk.config" returned: 0 in 0.261s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:22:10 compute-0 nova_compute[192079]: 2025-10-02 12:22:10.290 2 DEBUG nova.virt.libvirt.volume.remotefs [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Copying file /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2_resize/disk.info to 192.168.122.101:/var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/disk.info copy_file /usr/lib/python3.9/site-packages/nova/virt/libvirt/volume/remotefs.py:103
Oct 02 12:22:10 compute-0 nova_compute[192079]: 2025-10-02 12:22:10.290 2 DEBUG oslo_concurrency.processutils [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Running cmd (subprocess): scp -C -r /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2_resize/disk.info 192.168.122.101:/var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/disk.info execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:22:10 compute-0 sshd-session[236787]: Accepted publickey for nova from 192.168.122.102 port 39696 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:22:10 compute-0 systemd-logind[827]: New session 58 of user nova.
Oct 02 12:22:10 compute-0 systemd[1]: Created slice User Slice of UID 42436.
Oct 02 12:22:10 compute-0 systemd[1]: Starting User Runtime Directory /run/user/42436...
Oct 02 12:22:10 compute-0 systemd[1]: Finished User Runtime Directory /run/user/42436.
Oct 02 12:22:10 compute-0 systemd[1]: Starting User Manager for UID 42436...
Oct 02 12:22:10 compute-0 systemd[236792]: pam_unix(systemd-user:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:22:10 compute-0 nova_compute[192079]: 2025-10-02 12:22:10.541 2 DEBUG oslo_concurrency.processutils [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] CMD "scp -C -r /var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2_resize/disk.info 192.168.122.101:/var/lib/nova/instances/2eb08e64-4af9-4c5f-9817-b24d5e5ccce2/disk.info" returned: 0 in 0.251s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:22:10 compute-0 systemd[236792]: Queued start job for default target Main User Target.
Oct 02 12:22:10 compute-0 systemd[236792]: Created slice User Application Slice.
Oct 02 12:22:10 compute-0 systemd[236792]: Started Mark boot as successful after the user session has run 2 minutes.
Oct 02 12:22:10 compute-0 systemd[236792]: Started Daily Cleanup of User's Temporary Directories.
Oct 02 12:22:10 compute-0 systemd[236792]: Reached target Paths.
Oct 02 12:22:10 compute-0 systemd[236792]: Reached target Timers.
Oct 02 12:22:10 compute-0 systemd[236792]: Starting D-Bus User Message Bus Socket...
Oct 02 12:22:10 compute-0 systemd[236792]: Starting Create User's Volatile Files and Directories...
Oct 02 12:22:10 compute-0 systemd[236792]: Finished Create User's Volatile Files and Directories.
Oct 02 12:22:10 compute-0 systemd[236792]: Listening on D-Bus User Message Bus Socket.
Oct 02 12:22:10 compute-0 systemd[236792]: Reached target Sockets.
Oct 02 12:22:10 compute-0 systemd[236792]: Reached target Basic System.
Oct 02 12:22:10 compute-0 systemd[236792]: Reached target Main User Target.
Oct 02 12:22:10 compute-0 systemd[236792]: Startup finished in 163ms.
Oct 02 12:22:10 compute-0 systemd[1]: Started User Manager for UID 42436.
Oct 02 12:22:10 compute-0 systemd[1]: Started Session 58 of User nova.
Oct 02 12:22:10 compute-0 sshd-session[236787]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:22:10 compute-0 sshd-session[236807]: Received disconnect from 192.168.122.102 port 39696:11: disconnected by user
Oct 02 12:22:10 compute-0 sshd-session[236807]: Disconnected from user nova 192.168.122.102 port 39696
Oct 02 12:22:10 compute-0 sshd-session[236787]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:22:10 compute-0 systemd[1]: session-58.scope: Deactivated successfully.
Oct 02 12:22:10 compute-0 systemd-logind[827]: Session 58 logged out. Waiting for processes to exit.
Oct 02 12:22:10 compute-0 systemd-logind[827]: Removed session 58.
Oct 02 12:22:10 compute-0 sshd-session[236809]: Accepted publickey for nova from 192.168.122.102 port 39706 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:22:10 compute-0 systemd-logind[827]: New session 60 of user nova.
Oct 02 12:22:10 compute-0 systemd[1]: Started Session 60 of User nova.
Oct 02 12:22:10 compute-0 sshd-session[236809]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:22:10 compute-0 sshd-session[236812]: Received disconnect from 192.168.122.102 port 39706:11: disconnected by user
Oct 02 12:22:10 compute-0 sshd-session[236812]: Disconnected from user nova 192.168.122.102 port 39706
Oct 02 12:22:10 compute-0 sshd-session[236809]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:22:10 compute-0 systemd[1]: session-60.scope: Deactivated successfully.
Oct 02 12:22:10 compute-0 systemd-logind[827]: Session 60 logged out. Waiting for processes to exit.
Oct 02 12:22:10 compute-0 systemd-logind[827]: Removed session 60.
Oct 02 12:22:11 compute-0 nova_compute[192079]: 2025-10-02 12:22:11.185 2 DEBUG nova.compute.manager [req-98e498b8-c3e5-4a33-ab5d-2ea8583e4528 req-c620dde8-50cb-44be-a5e6-1bf88305e933 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Received event network-vif-plugged-fd508257-51ca-4c61-9340-029f9a9e7a75 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:22:11 compute-0 nova_compute[192079]: 2025-10-02 12:22:11.187 2 DEBUG oslo_concurrency.lockutils [req-98e498b8-c3e5-4a33-ab5d-2ea8583e4528 req-c620dde8-50cb-44be-a5e6-1bf88305e933 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:22:11 compute-0 nova_compute[192079]: 2025-10-02 12:22:11.187 2 DEBUG oslo_concurrency.lockutils [req-98e498b8-c3e5-4a33-ab5d-2ea8583e4528 req-c620dde8-50cb-44be-a5e6-1bf88305e933 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:22:11 compute-0 nova_compute[192079]: 2025-10-02 12:22:11.188 2 DEBUG oslo_concurrency.lockutils [req-98e498b8-c3e5-4a33-ab5d-2ea8583e4528 req-c620dde8-50cb-44be-a5e6-1bf88305e933 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:22:11 compute-0 nova_compute[192079]: 2025-10-02 12:22:11.188 2 DEBUG nova.compute.manager [req-98e498b8-c3e5-4a33-ab5d-2ea8583e4528 req-c620dde8-50cb-44be-a5e6-1bf88305e933 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] No waiting events found dispatching network-vif-plugged-fd508257-51ca-4c61-9340-029f9a9e7a75 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:22:11 compute-0 nova_compute[192079]: 2025-10-02 12:22:11.188 2 WARNING nova.compute.manager [req-98e498b8-c3e5-4a33-ab5d-2ea8583e4528 req-c620dde8-50cb-44be-a5e6-1bf88305e933 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Received unexpected event network-vif-plugged-fd508257-51ca-4c61-9340-029f9a9e7a75 for instance with vm_state active and task_state resize_migrating.
Oct 02 12:22:11 compute-0 sshd-session[236814]: Accepted publickey for nova from 192.168.122.102 port 49544 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:22:11 compute-0 systemd-logind[827]: New session 61 of user nova.
Oct 02 12:22:11 compute-0 systemd[1]: Started Session 61 of User nova.
Oct 02 12:22:11 compute-0 sshd-session[236814]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:22:11 compute-0 nova_compute[192079]: 2025-10-02 12:22:11.774 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:11 compute-0 sshd-session[236817]: Received disconnect from 192.168.122.102 port 49544:11: disconnected by user
Oct 02 12:22:11 compute-0 sshd-session[236817]: Disconnected from user nova 192.168.122.102 port 49544
Oct 02 12:22:11 compute-0 sshd-session[236814]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:22:11 compute-0 nova_compute[192079]: 2025-10-02 12:22:11.882 2 DEBUG neutronclient.v2_0.client [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Error message: {"NeutronError": {"type": "PortBindingNotFound", "message": "Binding for port fd508257-51ca-4c61-9340-029f9a9e7a75 for host compute-1.ctlplane.example.com could not be found.", "detail": ""}} _handle_fault_response /usr/lib/python3.9/site-packages/neutronclient/v2_0/client.py:262
Oct 02 12:22:11 compute-0 systemd[1]: session-61.scope: Deactivated successfully.
Oct 02 12:22:11 compute-0 systemd-logind[827]: Session 61 logged out. Waiting for processes to exit.
Oct 02 12:22:11 compute-0 systemd-logind[827]: Removed session 61.
Oct 02 12:22:12 compute-0 sshd-session[236819]: Accepted publickey for nova from 192.168.122.102 port 49552 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:22:12 compute-0 nova_compute[192079]: 2025-10-02 12:22:12.040 2 DEBUG oslo_concurrency.lockutils [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Acquiring lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:22:12 compute-0 nova_compute[192079]: 2025-10-02 12:22:12.040 2 DEBUG oslo_concurrency.lockutils [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:22:12 compute-0 nova_compute[192079]: 2025-10-02 12:22:12.041 2 DEBUG oslo_concurrency.lockutils [None req-a68f2946-0a48-4eec-9d14-00714ea30922 cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:22:12 compute-0 systemd-logind[827]: New session 62 of user nova.
Oct 02 12:22:12 compute-0 systemd[1]: Started Session 62 of User nova.
Oct 02 12:22:12 compute-0 sshd-session[236819]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:22:12 compute-0 sshd-session[236822]: Received disconnect from 192.168.122.102 port 49552:11: disconnected by user
Oct 02 12:22:12 compute-0 sshd-session[236822]: Disconnected from user nova 192.168.122.102 port 49552
Oct 02 12:22:12 compute-0 sshd-session[236819]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:22:12 compute-0 systemd[1]: session-62.scope: Deactivated successfully.
Oct 02 12:22:12 compute-0 systemd-logind[827]: Session 62 logged out. Waiting for processes to exit.
Oct 02 12:22:12 compute-0 systemd-logind[827]: Removed session 62.
Oct 02 12:22:12 compute-0 sshd-session[236824]: Accepted publickey for nova from 192.168.122.102 port 49556 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:22:12 compute-0 systemd-logind[827]: New session 63 of user nova.
Oct 02 12:22:12 compute-0 systemd[1]: Started Session 63 of User nova.
Oct 02 12:22:12 compute-0 sshd-session[236824]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:22:12 compute-0 podman[236826]: 2025-10-02 12:22:12.355193154 +0000 UTC m=+0.059379049 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:22:12 compute-0 sshd-session[236839]: Received disconnect from 192.168.122.102 port 49556:11: disconnected by user
Oct 02 12:22:12 compute-0 sshd-session[236839]: Disconnected from user nova 192.168.122.102 port 49556
Oct 02 12:22:12 compute-0 sshd-session[236824]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:22:12 compute-0 systemd[1]: session-63.scope: Deactivated successfully.
Oct 02 12:22:12 compute-0 systemd-logind[827]: Session 63 logged out. Waiting for processes to exit.
Oct 02 12:22:12 compute-0 systemd-logind[827]: Removed session 63.
Oct 02 12:22:12 compute-0 podman[236828]: 2025-10-02 12:22:12.390842235 +0000 UTC m=+0.094901047 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_managed=true, io.buildah.version=1.41.3, config_id=iscsid, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=iscsid, managed_by=edpm_ansible, org.label-schema.build-date=20251001)
Oct 02 12:22:13 compute-0 nova_compute[192079]: 2025-10-02 12:22:13.790 2 INFO nova.network.neutron [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Updating port cae13af9-8175-4eab-b9ec-18019b521d0b with attributes {'binding:host_id': 'compute-0.ctlplane.example.com', 'device_owner': 'compute:nova'}
Oct 02 12:22:14 compute-0 nova_compute[192079]: 2025-10-02 12:22:14.087 2 DEBUG nova.compute.manager [req-bd77332c-e66e-4baa-9c97-938ff65fdcdc req-2d328774-423b-467f-89ea-f8eac94d4b06 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Received event network-changed-fd508257-51ca-4c61-9340-029f9a9e7a75 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:22:14 compute-0 nova_compute[192079]: 2025-10-02 12:22:14.088 2 DEBUG nova.compute.manager [req-bd77332c-e66e-4baa-9c97-938ff65fdcdc req-2d328774-423b-467f-89ea-f8eac94d4b06 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Refreshing instance network info cache due to event network-changed-fd508257-51ca-4c61-9340-029f9a9e7a75. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:22:14 compute-0 nova_compute[192079]: 2025-10-02 12:22:14.088 2 DEBUG oslo_concurrency.lockutils [req-bd77332c-e66e-4baa-9c97-938ff65fdcdc req-2d328774-423b-467f-89ea-f8eac94d4b06 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-2eb08e64-4af9-4c5f-9817-b24d5e5ccce2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:22:14 compute-0 nova_compute[192079]: 2025-10-02 12:22:14.088 2 DEBUG oslo_concurrency.lockutils [req-bd77332c-e66e-4baa-9c97-938ff65fdcdc req-2d328774-423b-467f-89ea-f8eac94d4b06 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-2eb08e64-4af9-4c5f-9817-b24d5e5ccce2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:22:14 compute-0 nova_compute[192079]: 2025-10-02 12:22:14.089 2 DEBUG nova.network.neutron [req-bd77332c-e66e-4baa-9c97-938ff65fdcdc req-2d328774-423b-467f-89ea-f8eac94d4b06 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Refreshing network info cache for port fd508257-51ca-4c61-9340-029f9a9e7a75 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:22:14 compute-0 nova_compute[192079]: 2025-10-02 12:22:14.187 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:14 compute-0 nova_compute[192079]: 2025-10-02 12:22:14.983 2 DEBUG oslo_concurrency.lockutils [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Acquiring lock "refresh_cache-ad2d69bb-3aa9-4c11-b9de-29996574cfa2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:22:14 compute-0 nova_compute[192079]: 2025-10-02 12:22:14.984 2 DEBUG oslo_concurrency.lockutils [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Acquired lock "refresh_cache-ad2d69bb-3aa9-4c11-b9de-29996574cfa2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:22:14 compute-0 nova_compute[192079]: 2025-10-02 12:22:14.985 2 DEBUG nova.network.neutron [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:22:15 compute-0 nova_compute[192079]: 2025-10-02 12:22:15.337 2 DEBUG nova.compute.manager [req-82b53fa2-c503-45e6-b5b4-8c9fa0be65c2 req-d1da8e55-d34d-422e-a6ad-a7015753f2d1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Received event network-changed-cae13af9-8175-4eab-b9ec-18019b521d0b external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:22:15 compute-0 nova_compute[192079]: 2025-10-02 12:22:15.337 2 DEBUG nova.compute.manager [req-82b53fa2-c503-45e6-b5b4-8c9fa0be65c2 req-d1da8e55-d34d-422e-a6ad-a7015753f2d1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Refreshing instance network info cache due to event network-changed-cae13af9-8175-4eab-b9ec-18019b521d0b. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:22:15 compute-0 nova_compute[192079]: 2025-10-02 12:22:15.337 2 DEBUG oslo_concurrency.lockutils [req-82b53fa2-c503-45e6-b5b4-8c9fa0be65c2 req-d1da8e55-d34d-422e-a6ad-a7015753f2d1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-ad2d69bb-3aa9-4c11-b9de-29996574cfa2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:22:16 compute-0 nova_compute[192079]: 2025-10-02 12:22:16.593 2 DEBUG nova.network.neutron [req-bd77332c-e66e-4baa-9c97-938ff65fdcdc req-2d328774-423b-467f-89ea-f8eac94d4b06 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Updated VIF entry in instance network info cache for port fd508257-51ca-4c61-9340-029f9a9e7a75. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:22:16 compute-0 nova_compute[192079]: 2025-10-02 12:22:16.594 2 DEBUG nova.network.neutron [req-bd77332c-e66e-4baa-9c97-938ff65fdcdc req-2d328774-423b-467f-89ea-f8eac94d4b06 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Updating instance_info_cache with network_info: [{"id": "fd508257-51ca-4c61-9340-029f9a9e7a75", "address": "fa:16:3e:5e:8b:77", "network": {"id": "043fc82b-ca25-47f8-a78d-d7118d064ecd", "bridge": "br-int", "label": "tempest-network-smoke--1375280567", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.242", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapfd508257-51", "ovs_interfaceid": "fd508257-51ca-4c61-9340-029f9a9e7a75", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:22:16 compute-0 nova_compute[192079]: 2025-10-02 12:22:16.692 2 DEBUG oslo_concurrency.lockutils [req-bd77332c-e66e-4baa-9c97-938ff65fdcdc req-2d328774-423b-467f-89ea-f8eac94d4b06 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-2eb08e64-4af9-4c5f-9817-b24d5e5ccce2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:22:16 compute-0 nova_compute[192079]: 2025-10-02 12:22:16.777 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:17 compute-0 nova_compute[192079]: 2025-10-02 12:22:17.599 2 DEBUG nova.network.neutron [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Updating instance_info_cache with network_info: [{"id": "cae13af9-8175-4eab-b9ec-18019b521d0b", "address": "fa:16:3e:35:d3:eb", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapcae13af9-81", "ovs_interfaceid": "cae13af9-8175-4eab-b9ec-18019b521d0b", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:22:17 compute-0 nova_compute[192079]: 2025-10-02 12:22:17.623 2 DEBUG oslo_concurrency.lockutils [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Releasing lock "refresh_cache-ad2d69bb-3aa9-4c11-b9de-29996574cfa2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:22:17 compute-0 nova_compute[192079]: 2025-10-02 12:22:17.626 2 DEBUG oslo_concurrency.lockutils [req-82b53fa2-c503-45e6-b5b4-8c9fa0be65c2 req-d1da8e55-d34d-422e-a6ad-a7015753f2d1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-ad2d69bb-3aa9-4c11-b9de-29996574cfa2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:22:17 compute-0 nova_compute[192079]: 2025-10-02 12:22:17.627 2 DEBUG nova.network.neutron [req-82b53fa2-c503-45e6-b5b4-8c9fa0be65c2 req-d1da8e55-d34d-422e-a6ad-a7015753f2d1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Refreshing network info cache for port cae13af9-8175-4eab-b9ec-18019b521d0b _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.019 2 DEBUG nova.virt.libvirt.driver [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Starting finish_migration finish_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:11698
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.021 2 DEBUG nova.virt.libvirt.driver [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Instance directory exists: not creating _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4719
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.021 2 INFO nova.virt.libvirt.driver [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Creating image(s)
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.023 2 DEBUG nova.objects.instance [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lazy-loading 'trusted_certs' on Instance uuid ad2d69bb-3aa9-4c11-b9de-29996574cfa2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.039 2 DEBUG oslo_concurrency.processutils [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.103 2 DEBUG oslo_concurrency.processutils [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.064s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.104 2 DEBUG nova.virt.disk.api [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Checking if we can resize image /var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.104 2 DEBUG oslo_concurrency.processutils [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.159 2 DEBUG oslo_concurrency.processutils [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2/disk --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.160 2 DEBUG nova.virt.disk.api [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Cannot resize image /var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.187 2 DEBUG nova.virt.libvirt.driver [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Did not create local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4859
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.190 2 DEBUG nova.virt.libvirt.driver [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Ensure instance console log exists: /var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.191 2 DEBUG oslo_concurrency.lockutils [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.191 2 DEBUG oslo_concurrency.lockutils [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.191 2 DEBUG oslo_concurrency.lockutils [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.194 2 DEBUG nova.virt.libvirt.driver [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Start _get_guest_xml network_info=[{"id": "cae13af9-8175-4eab-b9ec-18019b521d0b", "address": "fa:16:3e:35:d3:eb", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.185", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-ServerActionsTestOtherB-370285634-network", "vif_mac": "fa:16:3e:35:d3:eb"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapcae13af9-81", "ovs_interfaceid": "cae13af9-8175-4eab-b9ec-18019b521d0b", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.202 2 WARNING nova.virt.libvirt.driver [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.208 2 DEBUG nova.virt.libvirt.host [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.209 2 DEBUG nova.virt.libvirt.host [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.212 2 DEBUG nova.virt.libvirt.host [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.213 2 DEBUG nova.virt.libvirt.host [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.214 2 DEBUG nova.virt.libvirt.driver [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.214 2 DEBUG nova.virt.hardware [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:25Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9949d9da-6314-4ede-8797-6f2f0a6a64fc',id=2,is_public=True,memory_mb=192,name='m1.micro',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.215 2 DEBUG nova.virt.hardware [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.215 2 DEBUG nova.virt.hardware [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.215 2 DEBUG nova.virt.hardware [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.216 2 DEBUG nova.virt.hardware [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.216 2 DEBUG nova.virt.hardware [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.216 2 DEBUG nova.virt.hardware [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.216 2 DEBUG nova.virt.hardware [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.217 2 DEBUG nova.virt.hardware [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.217 2 DEBUG nova.virt.hardware [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.217 2 DEBUG nova.virt.hardware [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.217 2 DEBUG nova.objects.instance [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lazy-loading 'vcpu_model' on Instance uuid ad2d69bb-3aa9-4c11-b9de-29996574cfa2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.263 2 DEBUG oslo_concurrency.processutils [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2/disk.config --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.329 2 DEBUG oslo_concurrency.processutils [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2/disk.config --force-share --output=json" returned: 0 in 0.066s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.330 2 DEBUG oslo_concurrency.lockutils [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Acquiring lock "/var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.330 2 DEBUG oslo_concurrency.lockutils [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lock "/var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.331 2 DEBUG oslo_concurrency.lockutils [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lock "/var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.332 2 DEBUG nova.virt.libvirt.vif [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:20:21Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerActionsTestOtherB-server-1629207280',display_name='tempest-ServerActionsTestOtherB-server-1629207280',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(2),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serveractionstestotherb-server-1629207280',id=108,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=2,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBG+aqSe4de2VLtRAXN5xeLQn4S/3X8QrNMy2M5WdQ5hviVyEOgqK+m+uWmzPaUSUgE38sEdkytfwUHD32CBZajBt4q3OEf9i3yPJUQGuqp42pAUD+A3EoBIyeptNeSxGdA==',key_name='tempest-keypair-1900171990',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:20:28Z,launched_on='compute-2.ctlplane.example.com',locked=False,locked_by=None,memory_mb=192,metadata={},migration_context=MigrationContext,new_flavor=Flavor(2),node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=Flavor(1),os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=4,progress=0,project_id='ffce7d629aa24a7f970d93b2a79045f1',ramdisk_id='',reservation_id='r-jtzab0yc',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=ServiceList,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',old_vm_state='stopped',owner_project_name='tempest-ServerActionsTestOtherB-263921372',owner_user_name='tempest-ServerActionsTestOtherB-263921372-project-member'},tags=<?>,task_state='resize_finish',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:22:13Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='0ea122e2fff94f2ba7c78bf30b04029c',uuid=ad2d69bb-3aa9-4c11-b9de-29996574cfa2,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='stopped') vif={"id": "cae13af9-8175-4eab-b9ec-18019b521d0b", "address": "fa:16:3e:35:d3:eb", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": 
"tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.185", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-ServerActionsTestOtherB-370285634-network", "vif_mac": "fa:16:3e:35:d3:eb"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapcae13af9-81", "ovs_interfaceid": "cae13af9-8175-4eab-b9ec-18019b521d0b", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.332 2 DEBUG nova.network.os_vif_util [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Converting VIF {"id": "cae13af9-8175-4eab-b9ec-18019b521d0b", "address": "fa:16:3e:35:d3:eb", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.185", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-ServerActionsTestOtherB-370285634-network", "vif_mac": "fa:16:3e:35:d3:eb"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapcae13af9-81", "ovs_interfaceid": "cae13af9-8175-4eab-b9ec-18019b521d0b", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.333 2 DEBUG nova.network.os_vif_util [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:35:d3:eb,bridge_name='br-int',has_traffic_filtering=True,id=cae13af9-8175-4eab-b9ec-18019b521d0b,network=Network(20eb29be-ee23-463b-85af-bfc2388e9f77),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapcae13af9-81') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.335 2 DEBUG nova.virt.libvirt.driver [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:22:18 compute-0 nova_compute[192079]:   <uuid>ad2d69bb-3aa9-4c11-b9de-29996574cfa2</uuid>
Oct 02 12:22:18 compute-0 nova_compute[192079]:   <name>instance-0000006c</name>
Oct 02 12:22:18 compute-0 nova_compute[192079]:   <memory>196608</memory>
Oct 02 12:22:18 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:22:18 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:22:18 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:       <nova:name>tempest-ServerActionsTestOtherB-server-1629207280</nova:name>
Oct 02 12:22:18 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:22:18</nova:creationTime>
Oct 02 12:22:18 compute-0 nova_compute[192079]:       <nova:flavor name="m1.micro">
Oct 02 12:22:18 compute-0 nova_compute[192079]:         <nova:memory>192</nova:memory>
Oct 02 12:22:18 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:22:18 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:22:18 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:22:18 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:22:18 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:22:18 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:22:18 compute-0 nova_compute[192079]:         <nova:user uuid="0ea122e2fff94f2ba7c78bf30b04029c">tempest-ServerActionsTestOtherB-263921372-project-member</nova:user>
Oct 02 12:22:18 compute-0 nova_compute[192079]:         <nova:project uuid="ffce7d629aa24a7f970d93b2a79045f1">tempest-ServerActionsTestOtherB-263921372</nova:project>
Oct 02 12:22:18 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:22:18 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:22:18 compute-0 nova_compute[192079]:         <nova:port uuid="cae13af9-8175-4eab-b9ec-18019b521d0b">
Oct 02 12:22:18 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.14" ipVersion="4"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:22:18 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:22:18 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:22:18 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <system>
Oct 02 12:22:18 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:22:18 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:22:18 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:22:18 compute-0 nova_compute[192079]:       <entry name="serial">ad2d69bb-3aa9-4c11-b9de-29996574cfa2</entry>
Oct 02 12:22:18 compute-0 nova_compute[192079]:       <entry name="uuid">ad2d69bb-3aa9-4c11-b9de-29996574cfa2</entry>
Oct 02 12:22:18 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     </system>
Oct 02 12:22:18 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:22:18 compute-0 nova_compute[192079]:   <os>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:   </os>
Oct 02 12:22:18 compute-0 nova_compute[192079]:   <features>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:   </features>
Oct 02 12:22:18 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:22:18 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:22:18 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:22:18 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2/disk"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:22:18 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2/disk.config"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:22:18 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:35:d3:eb"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:       <target dev="tapcae13af9-81"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:22:18 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2/console.log" append="off"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <video>
Oct 02 12:22:18 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     </video>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:22:18 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:22:18 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:22:18 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:22:18 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:22:18 compute-0 nova_compute[192079]: </domain>
Oct 02 12:22:18 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.336 2 DEBUG nova.virt.libvirt.vif [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:20:21Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerActionsTestOtherB-server-1629207280',display_name='tempest-ServerActionsTestOtherB-server-1629207280',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(2),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serveractionstestotherb-server-1629207280',id=108,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=2,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBG+aqSe4de2VLtRAXN5xeLQn4S/3X8QrNMy2M5WdQ5hviVyEOgqK+m+uWmzPaUSUgE38sEdkytfwUHD32CBZajBt4q3OEf9i3yPJUQGuqp42pAUD+A3EoBIyeptNeSxGdA==',key_name='tempest-keypair-1900171990',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:20:28Z,launched_on='compute-2.ctlplane.example.com',locked=False,locked_by=None,memory_mb=192,metadata={},migration_context=MigrationContext,new_flavor=Flavor(2),node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=Flavor(1),os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=4,progress=0,project_id='ffce7d629aa24a7f970d93b2a79045f1',ramdisk_id='',reservation_id='r-jtzab0yc',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=ServiceList,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',old_vm_state='stopped',owner_project_name='tempest-ServerActionsTestOtherB-263921372',owner_user_name='tempest-ServerActionsTestOtherB-263921372-project-member'},tags=<?>,task_state='resize_finish',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:22:13Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='0ea122e2fff94f2ba7c78bf30b04029c',uuid=ad2d69bb-3aa9-4c11-b9de-29996574cfa2,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='stopped') vif={"id": "cae13af9-8175-4eab-b9ec-18019b521d0b", "address": "fa:16:3e:35:d3:eb", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": 
"tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.185", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-ServerActionsTestOtherB-370285634-network", "vif_mac": "fa:16:3e:35:d3:eb"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapcae13af9-81", "ovs_interfaceid": "cae13af9-8175-4eab-b9ec-18019b521d0b", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.337 2 DEBUG nova.network.os_vif_util [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Converting VIF {"id": "cae13af9-8175-4eab-b9ec-18019b521d0b", "address": "fa:16:3e:35:d3:eb", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.185", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-ServerActionsTestOtherB-370285634-network", "vif_mac": "fa:16:3e:35:d3:eb"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapcae13af9-81", "ovs_interfaceid": "cae13af9-8175-4eab-b9ec-18019b521d0b", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.337 2 DEBUG nova.network.os_vif_util [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:35:d3:eb,bridge_name='br-int',has_traffic_filtering=True,id=cae13af9-8175-4eab-b9ec-18019b521d0b,network=Network(20eb29be-ee23-463b-85af-bfc2388e9f77),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapcae13af9-81') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.338 2 DEBUG os_vif [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:35:d3:eb,bridge_name='br-int',has_traffic_filtering=True,id=cae13af9-8175-4eab-b9ec-18019b521d0b,network=Network(20eb29be-ee23-463b-85af-bfc2388e9f77),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapcae13af9-81') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.338 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.338 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.339 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.341 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.341 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapcae13af9-81, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.342 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapcae13af9-81, col_values=(('external_ids', {'iface-id': 'cae13af9-8175-4eab-b9ec-18019b521d0b', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:35:d3:eb', 'vm-uuid': 'ad2d69bb-3aa9-4c11-b9de-29996574cfa2'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.343 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:18 compute-0 NetworkManager[51160]: <info>  [1759407738.3445] manager: (tapcae13af9-81): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/183)
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.347 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.350 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.352 2 INFO os_vif [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:35:d3:eb,bridge_name='br-int',has_traffic_filtering=True,id=cae13af9-8175-4eab-b9ec-18019b521d0b,network=Network(20eb29be-ee23-463b-85af-bfc2388e9f77),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapcae13af9-81')
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.594 2 DEBUG nova.compute.manager [req-a175e590-5865-4302-b18d-1f463f233927 req-c7e392aa-efc4-450e-8761-096b6d121a14 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Received event network-vif-plugged-fd508257-51ca-4c61-9340-029f9a9e7a75 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.595 2 DEBUG oslo_concurrency.lockutils [req-a175e590-5865-4302-b18d-1f463f233927 req-c7e392aa-efc4-450e-8761-096b6d121a14 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.595 2 DEBUG oslo_concurrency.lockutils [req-a175e590-5865-4302-b18d-1f463f233927 req-c7e392aa-efc4-450e-8761-096b6d121a14 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.595 2 DEBUG oslo_concurrency.lockutils [req-a175e590-5865-4302-b18d-1f463f233927 req-c7e392aa-efc4-450e-8761-096b6d121a14 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.595 2 DEBUG nova.compute.manager [req-a175e590-5865-4302-b18d-1f463f233927 req-c7e392aa-efc4-450e-8761-096b6d121a14 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] No waiting events found dispatching network-vif-plugged-fd508257-51ca-4c61-9340-029f9a9e7a75 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.595 2 WARNING nova.compute.manager [req-a175e590-5865-4302-b18d-1f463f233927 req-c7e392aa-efc4-450e-8761-096b6d121a14 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Received unexpected event network-vif-plugged-fd508257-51ca-4c61-9340-029f9a9e7a75 for instance with vm_state active and task_state resize_finish.
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.677 2 DEBUG nova.virt.libvirt.driver [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.677 2 DEBUG nova.virt.libvirt.driver [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.677 2 DEBUG nova.virt.libvirt.driver [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] No VIF found with MAC fa:16:3e:35:d3:eb, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.678 2 INFO nova.virt.libvirt.driver [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Using config drive
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.678 2 DEBUG nova.compute.manager [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:22:18 compute-0 nova_compute[192079]: 2025-10-02 12:22:18.678 2 DEBUG nova.virt.libvirt.driver [None req-47fa1911-b268-4829-9880-033aaa9cbef2 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] finish_migration finished successfully. finish_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:11793
Oct 02 12:22:20 compute-0 nova_compute[192079]: 2025-10-02 12:22:20.256 2 DEBUG oslo_concurrency.lockutils [None req-e0ca94b1-20df-41e2-836b-f503c2f1147c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:22:20 compute-0 nova_compute[192079]: 2025-10-02 12:22:20.256 2 DEBUG oslo_concurrency.lockutils [None req-e0ca94b1-20df-41e2-836b-f503c2f1147c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:22:20 compute-0 nova_compute[192079]: 2025-10-02 12:22:20.257 2 DEBUG nova.compute.manager [None req-e0ca94b1-20df-41e2-836b-f503c2f1147c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Going to confirm migration 14 do_confirm_resize /usr/lib/python3.9/site-packages/nova/compute/manager.py:4679
Oct 02 12:22:20 compute-0 nova_compute[192079]: 2025-10-02 12:22:20.261 2 DEBUG nova.network.neutron [req-82b53fa2-c503-45e6-b5b4-8c9fa0be65c2 req-d1da8e55-d34d-422e-a6ad-a7015753f2d1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Updated VIF entry in instance network info cache for port cae13af9-8175-4eab-b9ec-18019b521d0b. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:22:20 compute-0 nova_compute[192079]: 2025-10-02 12:22:20.261 2 DEBUG nova.network.neutron [req-82b53fa2-c503-45e6-b5b4-8c9fa0be65c2 req-d1da8e55-d34d-422e-a6ad-a7015753f2d1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Updating instance_info_cache with network_info: [{"id": "cae13af9-8175-4eab-b9ec-18019b521d0b", "address": "fa:16:3e:35:d3:eb", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapcae13af9-81", "ovs_interfaceid": "cae13af9-8175-4eab-b9ec-18019b521d0b", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:22:20 compute-0 nova_compute[192079]: 2025-10-02 12:22:20.287 2 DEBUG oslo_concurrency.lockutils [req-82b53fa2-c503-45e6-b5b4-8c9fa0be65c2 req-d1da8e55-d34d-422e-a6ad-a7015753f2d1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-ad2d69bb-3aa9-4c11-b9de-29996574cfa2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:22:20 compute-0 nova_compute[192079]: 2025-10-02 12:22:20.295 2 DEBUG nova.objects.instance [None req-e0ca94b1-20df-41e2-836b-f503c2f1147c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'info_cache' on Instance uuid 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:22:20 compute-0 nova_compute[192079]: 2025-10-02 12:22:20.744 2 DEBUG neutronclient.v2_0.client [None req-e0ca94b1-20df-41e2-836b-f503c2f1147c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Error message: {"NeutronError": {"type": "PortBindingNotFound", "message": "Binding for port fd508257-51ca-4c61-9340-029f9a9e7a75 for host compute-0.ctlplane.example.com could not be found.", "detail": ""}} _handle_fault_response /usr/lib/python3.9/site-packages/neutronclient/v2_0/client.py:262
Oct 02 12:22:20 compute-0 nova_compute[192079]: 2025-10-02 12:22:20.745 2 DEBUG oslo_concurrency.lockutils [None req-e0ca94b1-20df-41e2-836b-f503c2f1147c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "refresh_cache-2eb08e64-4af9-4c5f-9817-b24d5e5ccce2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:22:20 compute-0 nova_compute[192079]: 2025-10-02 12:22:20.745 2 DEBUG oslo_concurrency.lockutils [None req-e0ca94b1-20df-41e2-836b-f503c2f1147c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquired lock "refresh_cache-2eb08e64-4af9-4c5f-9817-b24d5e5ccce2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:22:20 compute-0 nova_compute[192079]: 2025-10-02 12:22:20.746 2 DEBUG nova.network.neutron [None req-e0ca94b1-20df-41e2-836b-f503c2f1147c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:22:20 compute-0 nova_compute[192079]: 2025-10-02 12:22:20.758 2 DEBUG nova.compute.manager [req-b64f7b32-ff29-4631-992d-5cd9742bafc7 req-6d10e4a0-7508-4187-8a53-dfb10c4a695e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Received event network-vif-plugged-fd508257-51ca-4c61-9340-029f9a9e7a75 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:22:20 compute-0 nova_compute[192079]: 2025-10-02 12:22:20.758 2 DEBUG oslo_concurrency.lockutils [req-b64f7b32-ff29-4631-992d-5cd9742bafc7 req-6d10e4a0-7508-4187-8a53-dfb10c4a695e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:22:20 compute-0 nova_compute[192079]: 2025-10-02 12:22:20.759 2 DEBUG oslo_concurrency.lockutils [req-b64f7b32-ff29-4631-992d-5cd9742bafc7 req-6d10e4a0-7508-4187-8a53-dfb10c4a695e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:22:20 compute-0 nova_compute[192079]: 2025-10-02 12:22:20.759 2 DEBUG oslo_concurrency.lockutils [req-b64f7b32-ff29-4631-992d-5cd9742bafc7 req-6d10e4a0-7508-4187-8a53-dfb10c4a695e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:22:20 compute-0 nova_compute[192079]: 2025-10-02 12:22:20.759 2 DEBUG nova.compute.manager [req-b64f7b32-ff29-4631-992d-5cd9742bafc7 req-6d10e4a0-7508-4187-8a53-dfb10c4a695e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] No waiting events found dispatching network-vif-plugged-fd508257-51ca-4c61-9340-029f9a9e7a75 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:22:20 compute-0 nova_compute[192079]: 2025-10-02 12:22:20.759 2 WARNING nova.compute.manager [req-b64f7b32-ff29-4631-992d-5cd9742bafc7 req-6d10e4a0-7508-4187-8a53-dfb10c4a695e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Received unexpected event network-vif-plugged-fd508257-51ca-4c61-9340-029f9a9e7a75 for instance with vm_state resized and task_state None.
Oct 02 12:22:21 compute-0 podman[236885]: 2025-10-02 12:22:21.147108029 +0000 UTC m=+0.054932229 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, config_id=ovn_metadata_agent, managed_by=edpm_ansible, org.label-schema.build-date=20251001, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, container_name=ovn_metadata_agent, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 12:22:21 compute-0 podman[236886]: 2025-10-02 12:22:21.220753726 +0000 UTC m=+0.133947812 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, tcib_managed=true, container_name=ovn_controller, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, config_id=ovn_controller)
Oct 02 12:22:21 compute-0 podman[236887]: 2025-10-02 12:22:21.233969727 +0000 UTC m=+0.137822309 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:22:21 compute-0 nova_compute[192079]: 2025-10-02 12:22:21.659 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:22:21 compute-0 nova_compute[192079]: 2025-10-02 12:22:21.779 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:22 compute-0 systemd[1]: Stopping User Manager for UID 42436...
Oct 02 12:22:22 compute-0 systemd[236792]: Activating special unit Exit the Session...
Oct 02 12:22:22 compute-0 systemd[236792]: Stopped target Main User Target.
Oct 02 12:22:22 compute-0 systemd[236792]: Stopped target Basic System.
Oct 02 12:22:22 compute-0 systemd[236792]: Stopped target Paths.
Oct 02 12:22:22 compute-0 systemd[236792]: Stopped target Sockets.
Oct 02 12:22:22 compute-0 systemd[236792]: Stopped target Timers.
Oct 02 12:22:22 compute-0 systemd[236792]: Stopped Mark boot as successful after the user session has run 2 minutes.
Oct 02 12:22:22 compute-0 systemd[236792]: Stopped Daily Cleanup of User's Temporary Directories.
Oct 02 12:22:22 compute-0 systemd[236792]: Closed D-Bus User Message Bus Socket.
Oct 02 12:22:22 compute-0 systemd[236792]: Stopped Create User's Volatile Files and Directories.
Oct 02 12:22:22 compute-0 systemd[236792]: Removed slice User Application Slice.
Oct 02 12:22:22 compute-0 systemd[236792]: Reached target Shutdown.
Oct 02 12:22:22 compute-0 systemd[236792]: Finished Exit the Session.
Oct 02 12:22:22 compute-0 systemd[236792]: Reached target Exit the Session.
Oct 02 12:22:22 compute-0 systemd[1]: user@42436.service: Deactivated successfully.
Oct 02 12:22:22 compute-0 systemd[1]: Stopped User Manager for UID 42436.
Oct 02 12:22:22 compute-0 systemd[1]: Stopping User Runtime Directory /run/user/42436...
Oct 02 12:22:22 compute-0 nova_compute[192079]: 2025-10-02 12:22:22.587 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:22 compute-0 systemd[1]: run-user-42436.mount: Deactivated successfully.
Oct 02 12:22:22 compute-0 systemd[1]: user-runtime-dir@42436.service: Deactivated successfully.
Oct 02 12:22:22 compute-0 systemd[1]: Stopped User Runtime Directory /run/user/42436.
Oct 02 12:22:22 compute-0 systemd[1]: Removed slice User Slice of UID 42436.
Oct 02 12:22:22 compute-0 nova_compute[192079]: 2025-10-02 12:22:22.677 2 DEBUG nova.network.neutron [None req-e0ca94b1-20df-41e2-836b-f503c2f1147c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Updating instance_info_cache with network_info: [{"id": "fd508257-51ca-4c61-9340-029f9a9e7a75", "address": "fa:16:3e:5e:8b:77", "network": {"id": "043fc82b-ca25-47f8-a78d-d7118d064ecd", "bridge": "br-int", "label": "tempest-network-smoke--1375280567", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.242", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapfd508257-51", "ovs_interfaceid": "fd508257-51ca-4c61-9340-029f9a9e7a75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:22:22 compute-0 nova_compute[192079]: 2025-10-02 12:22:22.734 2 DEBUG oslo_concurrency.lockutils [None req-e0ca94b1-20df-41e2-836b-f503c2f1147c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Releasing lock "refresh_cache-2eb08e64-4af9-4c5f-9817-b24d5e5ccce2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:22:22 compute-0 nova_compute[192079]: 2025-10-02 12:22:22.735 2 DEBUG nova.objects.instance [None req-e0ca94b1-20df-41e2-836b-f503c2f1147c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'migration_context' on Instance uuid 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:22:22 compute-0 nova_compute[192079]: 2025-10-02 12:22:22.774 2 DEBUG nova.virt.libvirt.vif [None req-e0ca94b1-20df-41e2-836b-f503c2f1147c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:21:25Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestNetworkAdvancedServerOps-server-1275898317',display_name='tempest-TestNetworkAdvancedServerOps-server-1275898317',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-1.ctlplane.example.com',hostname='tempest-testnetworkadvancedserverops-server-1275898317',id=112,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBOCz+7JmNyQy7JdP1IjSwu02/HePNAJvzHsZBcv8XH13dMGPNzBUuwrRU02GRGGFMvEIz5Lu1u/RVTlkdJCGXW3q1BcgXBVQzMFZYW+dEdgXTOuU2vWkRuKj+JzgzmR88A==',key_name='tempest-TestNetworkAdvancedServerOps-217202803',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:22:18Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=MigrationContext,new_flavor=Flavor(1),node='compute-1.ctlplane.example.com',numa_topology=<?>,old_flavor=Flavor(1),os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='76c7dd40d83e4e3ca71abbebf57921b6',ramdisk_id='',reservation_id='r-x0vz0bnp',resources=<?>,root_device_name='/dev/vda',root_gb=1,security_groups=<?>,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',old_vm_state='active',owner_project_name='tempest-TestNetworkAdvancedServerOps-597114071',owner_user_name='tempest-TestNetworkAdvancedServerOps-597114071-project-member'},tags=<?>,task_state=None,terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:22:18Z,user_data=None,user_id='1faa7e121a0e43ad8cb4ae5b2cfcc6a2',uuid=2eb08e64-4af9-4c5f-9817-b24d5e5ccce2,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='resized') vif={"id": "fd508257-51ca-4c61-9340-029f9a9e7a75", "address": "fa:16:3e:5e:8b:77", "network": {"id": "043fc82b-ca25-47f8-a78d-d7118d064ecd", "bridge": "br-int", "label": "tempest-network-smoke--1375280567", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.242", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapfd508257-51", "ovs_interfaceid": "fd508257-51ca-4c61-9340-029f9a9e7a75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:22:22 compute-0 nova_compute[192079]: 2025-10-02 12:22:22.775 2 DEBUG nova.network.os_vif_util [None req-e0ca94b1-20df-41e2-836b-f503c2f1147c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converting VIF {"id": "fd508257-51ca-4c61-9340-029f9a9e7a75", "address": "fa:16:3e:5e:8b:77", "network": {"id": "043fc82b-ca25-47f8-a78d-d7118d064ecd", "bridge": "br-int", "label": "tempest-network-smoke--1375280567", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.242", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapfd508257-51", "ovs_interfaceid": "fd508257-51ca-4c61-9340-029f9a9e7a75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:22:22 compute-0 nova_compute[192079]: 2025-10-02 12:22:22.775 2 DEBUG nova.network.os_vif_util [None req-e0ca94b1-20df-41e2-836b-f503c2f1147c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:5e:8b:77,bridge_name='br-int',has_traffic_filtering=True,id=fd508257-51ca-4c61-9340-029f9a9e7a75,network=Network(043fc82b-ca25-47f8-a78d-d7118d064ecd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapfd508257-51') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:22:22 compute-0 nova_compute[192079]: 2025-10-02 12:22:22.776 2 DEBUG os_vif [None req-e0ca94b1-20df-41e2-836b-f503c2f1147c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:5e:8b:77,bridge_name='br-int',has_traffic_filtering=True,id=fd508257-51ca-4c61-9340-029f9a9e7a75,network=Network(043fc82b-ca25-47f8-a78d-d7118d064ecd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapfd508257-51') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:22:22 compute-0 nova_compute[192079]: 2025-10-02 12:22:22.778 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:22 compute-0 nova_compute[192079]: 2025-10-02 12:22:22.778 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapfd508257-51, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:22:22 compute-0 nova_compute[192079]: 2025-10-02 12:22:22.778 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:22:22 compute-0 nova_compute[192079]: 2025-10-02 12:22:22.780 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:22 compute-0 nova_compute[192079]: 2025-10-02 12:22:22.781 2 INFO os_vif [None req-e0ca94b1-20df-41e2-836b-f503c2f1147c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:5e:8b:77,bridge_name='br-int',has_traffic_filtering=True,id=fd508257-51ca-4c61-9340-029f9a9e7a75,network=Network(043fc82b-ca25-47f8-a78d-d7118d064ecd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapfd508257-51')
Oct 02 12:22:22 compute-0 nova_compute[192079]: 2025-10-02 12:22:22.781 2 DEBUG oslo_concurrency.lockutils [None req-e0ca94b1-20df-41e2-836b-f503c2f1147c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:22:22 compute-0 nova_compute[192079]: 2025-10-02 12:22:22.782 2 DEBUG oslo_concurrency.lockutils [None req-e0ca94b1-20df-41e2-836b-f503c2f1147c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:22:22 compute-0 nova_compute[192079]: 2025-10-02 12:22:22.869 2 DEBUG nova.compute.provider_tree [None req-e0ca94b1-20df-41e2-836b-f503c2f1147c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:22:22 compute-0 nova_compute[192079]: 2025-10-02 12:22:22.885 2 DEBUG nova.scheduler.client.report [None req-e0ca94b1-20df-41e2-836b-f503c2f1147c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:22:22 compute-0 nova_compute[192079]: 2025-10-02 12:22:22.923 2 DEBUG oslo_concurrency.lockutils [None req-e0ca94b1-20df-41e2-836b-f503c2f1147c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 0.141s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:22:23 compute-0 nova_compute[192079]: 2025-10-02 12:22:23.045 2 INFO nova.scheduler.client.report [None req-e0ca94b1-20df-41e2-836b-f503c2f1147c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Deleted allocation for migration f797b564-4367-4f96-bd5b-f7913b36cb65
Oct 02 12:22:23 compute-0 nova_compute[192079]: 2025-10-02 12:22:23.118 2 DEBUG oslo_concurrency.lockutils [None req-e0ca94b1-20df-41e2-836b-f503c2f1147c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "2eb08e64-4af9-4c5f-9817-b24d5e5ccce2" "released" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: held 2.862s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:22:23 compute-0 nova_compute[192079]: 2025-10-02 12:22:23.344 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:23 compute-0 nova_compute[192079]: 2025-10-02 12:22:23.642 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759407728.6407547, 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:22:23 compute-0 nova_compute[192079]: 2025-10-02 12:22:23.643 2 INFO nova.compute.manager [-] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] VM Stopped (Lifecycle Event)
Oct 02 12:22:23 compute-0 nova_compute[192079]: 2025-10-02 12:22:23.683 2 DEBUG nova.compute.manager [None req-03fe1916-e472-4cae-b7bb-ce3abb72e0b4 - - - - - -] [instance: 2eb08e64-4af9-4c5f-9817-b24d5e5ccce2] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:22:25 compute-0 nova_compute[192079]: 2025-10-02 12:22:25.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:22:26 compute-0 nova_compute[192079]: 2025-10-02 12:22:26.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:22:26 compute-0 nova_compute[192079]: 2025-10-02 12:22:26.690 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:22:26 compute-0 nova_compute[192079]: 2025-10-02 12:22:26.691 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:22:26 compute-0 nova_compute[192079]: 2025-10-02 12:22:26.691 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:22:26 compute-0 nova_compute[192079]: 2025-10-02 12:22:26.692 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:22:26 compute-0 nova_compute[192079]: 2025-10-02 12:22:26.770 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:22:26 compute-0 nova_compute[192079]: 2025-10-02 12:22:26.787 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:26 compute-0 nova_compute[192079]: 2025-10-02 12:22:26.825 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2/disk --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:22:26 compute-0 nova_compute[192079]: 2025-10-02 12:22:26.826 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:22:26 compute-0 nova_compute[192079]: 2025-10-02 12:22:26.879 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2/disk --force-share --output=json" returned: 0 in 0.053s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:22:26 compute-0 nova_compute[192079]: 2025-10-02 12:22:26.997 2 DEBUG nova.objects.instance [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lazy-loading 'flavor' on Instance uuid ad2d69bb-3aa9-4c11-b9de-29996574cfa2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:22:27 compute-0 nova_compute[192079]: 2025-10-02 12:22:27.022 2 DEBUG nova.objects.instance [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lazy-loading 'info_cache' on Instance uuid ad2d69bb-3aa9-4c11-b9de-29996574cfa2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:22:27 compute-0 nova_compute[192079]: 2025-10-02 12:22:27.043 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:22:27 compute-0 nova_compute[192079]: 2025-10-02 12:22:27.044 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5681MB free_disk=73.31989669799805GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:22:27 compute-0 nova_compute[192079]: 2025-10-02 12:22:27.044 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:22:27 compute-0 nova_compute[192079]: 2025-10-02 12:22:27.044 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:22:27 compute-0 nova_compute[192079]: 2025-10-02 12:22:27.046 2 DEBUG oslo_concurrency.lockutils [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Acquiring lock "refresh_cache-ad2d69bb-3aa9-4c11-b9de-29996574cfa2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:22:27 compute-0 nova_compute[192079]: 2025-10-02 12:22:27.046 2 DEBUG oslo_concurrency.lockutils [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Acquired lock "refresh_cache-ad2d69bb-3aa9-4c11-b9de-29996574cfa2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:22:27 compute-0 nova_compute[192079]: 2025-10-02 12:22:27.046 2 DEBUG nova.network.neutron [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:22:27 compute-0 nova_compute[192079]: 2025-10-02 12:22:27.199 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance ad2d69bb-3aa9-4c11-b9de-29996574cfa2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:22:27 compute-0 nova_compute[192079]: 2025-10-02 12:22:27.200 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 1 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:22:27 compute-0 nova_compute[192079]: 2025-10-02 12:22:27.200 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=704MB phys_disk=79GB used_disk=1GB total_vcpus=8 used_vcpus=1 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:22:27 compute-0 nova_compute[192079]: 2025-10-02 12:22:27.258 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:22:27 compute-0 nova_compute[192079]: 2025-10-02 12:22:27.276 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:22:27 compute-0 nova_compute[192079]: 2025-10-02 12:22:27.302 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:22:27 compute-0 nova_compute[192079]: 2025-10-02 12:22:27.302 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.258s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.304 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.389 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.587 2 DEBUG nova.network.neutron [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Updating instance_info_cache with network_info: [{"id": "cae13af9-8175-4eab-b9ec-18019b521d0b", "address": "fa:16:3e:35:d3:eb", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapcae13af9-81", "ovs_interfaceid": "cae13af9-8175-4eab-b9ec-18019b521d0b", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.624 2 DEBUG oslo_concurrency.lockutils [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Releasing lock "refresh_cache-ad2d69bb-3aa9-4c11-b9de-29996574cfa2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.647 2 INFO nova.virt.libvirt.driver [-] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Instance destroyed successfully.
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.648 2 DEBUG nova.objects.instance [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lazy-loading 'numa_topology' on Instance uuid ad2d69bb-3aa9-4c11-b9de-29996574cfa2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.684 2 DEBUG nova.objects.instance [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lazy-loading 'resources' on Instance uuid ad2d69bb-3aa9-4c11-b9de-29996574cfa2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.878 2 DEBUG nova.virt.libvirt.vif [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:20:21Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerActionsTestOtherB-server-1629207280',display_name='tempest-ServerActionsTestOtherB-server-1629207280',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(2),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serveractionstestotherb-server-1629207280',id=108,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=2,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBG+aqSe4de2VLtRAXN5xeLQn4S/3X8QrNMy2M5WdQ5hviVyEOgqK+m+uWmzPaUSUgE38sEdkytfwUHD32CBZajBt4q3OEf9i3yPJUQGuqp42pAUD+A3EoBIyeptNeSxGdA==',key_name='tempest-keypair-1900171990',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:22:18Z,launched_on='compute-2.ctlplane.example.com',locked=False,locked_by=None,memory_mb=192,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=4,progress=0,project_id='ffce7d629aa24a7f970d93b2a79045f1',ramdisk_id='',reservation_id='r-jtzab0yc',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=<?>,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-ServerActionsTestOtherB-263921372',owner_user_name='tempest-ServerActionsTestOtherB-263921372-project-member'},tags=<?>,task_state='powering-on',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:22:24Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='0ea122e2fff94f2ba7c78bf30b04029c',uuid=ad2d69bb-3aa9-4c11-b9de-29996574cfa2,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='stopped') vif={"id": "cae13af9-8175-4eab-b9ec-18019b521d0b", "address": "fa:16:3e:35:d3:eb", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": 
{"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapcae13af9-81", "ovs_interfaceid": "cae13af9-8175-4eab-b9ec-18019b521d0b", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.878 2 DEBUG nova.network.os_vif_util [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Converting VIF {"id": "cae13af9-8175-4eab-b9ec-18019b521d0b", "address": "fa:16:3e:35:d3:eb", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapcae13af9-81", "ovs_interfaceid": "cae13af9-8175-4eab-b9ec-18019b521d0b", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.879 2 DEBUG nova.network.os_vif_util [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:35:d3:eb,bridge_name='br-int',has_traffic_filtering=True,id=cae13af9-8175-4eab-b9ec-18019b521d0b,network=Network(20eb29be-ee23-463b-85af-bfc2388e9f77),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapcae13af9-81') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.880 2 DEBUG os_vif [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:35:d3:eb,bridge_name='br-int',has_traffic_filtering=True,id=cae13af9-8175-4eab-b9ec-18019b521d0b,network=Network(20eb29be-ee23-463b-85af-bfc2388e9f77),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapcae13af9-81') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.881 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.882 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapcae13af9-81, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.883 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.885 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.887 2 INFO os_vif [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:35:d3:eb,bridge_name='br-int',has_traffic_filtering=True,id=cae13af9-8175-4eab-b9ec-18019b521d0b,network=Network(20eb29be-ee23-463b-85af-bfc2388e9f77),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapcae13af9-81')
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.893 2 DEBUG nova.virt.libvirt.driver [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Start _get_guest_xml network_info=[{"id": "cae13af9-8175-4eab-b9ec-18019b521d0b", "address": "fa:16:3e:35:d3:eb", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapcae13af9-81", "ovs_interfaceid": "cae13af9-8175-4eab-b9ec-18019b521d0b", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum=<?>,container_format='bare',created_at=<?>,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=1,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=<?>,status=<?>,tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.896 2 WARNING nova.virt.libvirt.driver [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.902 2 DEBUG nova.virt.libvirt.host [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.903 2 DEBUG nova.virt.libvirt.host [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.908 2 DEBUG nova.virt.libvirt.host [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.909 2 DEBUG nova.virt.libvirt.host [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.910 2 DEBUG nova.virt.libvirt.driver [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.910 2 DEBUG nova.virt.hardware [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:25Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9949d9da-6314-4ede-8797-6f2f0a6a64fc',id=2,is_public=True,memory_mb=192,name='m1.micro',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=<?>,container_format='bare',created_at=<?>,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=1,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=<?>,status=<?>,tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.911 2 DEBUG nova.virt.hardware [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.911 2 DEBUG nova.virt.hardware [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.911 2 DEBUG nova.virt.hardware [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.912 2 DEBUG nova.virt.hardware [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.912 2 DEBUG nova.virt.hardware [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.912 2 DEBUG nova.virt.hardware [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.912 2 DEBUG nova.virt.hardware [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.913 2 DEBUG nova.virt.hardware [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.913 2 DEBUG nova.virt.hardware [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.913 2 DEBUG nova.virt.hardware [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.914 2 DEBUG nova.objects.instance [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lazy-loading 'vcpu_model' on Instance uuid ad2d69bb-3aa9-4c11-b9de-29996574cfa2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.932 2 DEBUG nova.virt.libvirt.vif [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:20:21Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerActionsTestOtherB-server-1629207280',display_name='tempest-ServerActionsTestOtherB-server-1629207280',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(2),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serveractionstestotherb-server-1629207280',id=108,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=2,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBG+aqSe4de2VLtRAXN5xeLQn4S/3X8QrNMy2M5WdQ5hviVyEOgqK+m+uWmzPaUSUgE38sEdkytfwUHD32CBZajBt4q3OEf9i3yPJUQGuqp42pAUD+A3EoBIyeptNeSxGdA==',key_name='tempest-keypair-1900171990',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:22:18Z,launched_on='compute-2.ctlplane.example.com',locked=False,locked_by=None,memory_mb=192,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=4,progress=0,project_id='ffce7d629aa24a7f970d93b2a79045f1',ramdisk_id='',reservation_id='r-jtzab0yc',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=<?>,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-ServerActionsTestOtherB-263921372',owner_user_name='tempest-ServerActionsTestOtherB-263921372-project-member'},tags=<?>,task_state='powering-on',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:22:24Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='0ea122e2fff94f2ba7c78bf30b04029c',uuid=ad2d69bb-3aa9-4c11-b9de-29996574cfa2,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='stopped') vif={"id": "cae13af9-8175-4eab-b9ec-18019b521d0b", "address": "fa:16:3e:35:d3:eb", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], 
"gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapcae13af9-81", "ovs_interfaceid": "cae13af9-8175-4eab-b9ec-18019b521d0b", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.932 2 DEBUG nova.network.os_vif_util [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Converting VIF {"id": "cae13af9-8175-4eab-b9ec-18019b521d0b", "address": "fa:16:3e:35:d3:eb", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapcae13af9-81", "ovs_interfaceid": "cae13af9-8175-4eab-b9ec-18019b521d0b", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.933 2 DEBUG nova.network.os_vif_util [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:35:d3:eb,bridge_name='br-int',has_traffic_filtering=True,id=cae13af9-8175-4eab-b9ec-18019b521d0b,network=Network(20eb29be-ee23-463b-85af-bfc2388e9f77),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapcae13af9-81') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.934 2 DEBUG nova.objects.instance [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lazy-loading 'pci_devices' on Instance uuid ad2d69bb-3aa9-4c11-b9de-29996574cfa2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.957 2 DEBUG nova.virt.libvirt.driver [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:22:28 compute-0 nova_compute[192079]:   <uuid>ad2d69bb-3aa9-4c11-b9de-29996574cfa2</uuid>
Oct 02 12:22:28 compute-0 nova_compute[192079]:   <name>instance-0000006c</name>
Oct 02 12:22:28 compute-0 nova_compute[192079]:   <memory>196608</memory>
Oct 02 12:22:28 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:22:28 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:22:28 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:       <nova:name>tempest-ServerActionsTestOtherB-server-1629207280</nova:name>
Oct 02 12:22:28 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:22:28</nova:creationTime>
Oct 02 12:22:28 compute-0 nova_compute[192079]:       <nova:flavor name="m1.micro">
Oct 02 12:22:28 compute-0 nova_compute[192079]:         <nova:memory>192</nova:memory>
Oct 02 12:22:28 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:22:28 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:22:28 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:22:28 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:22:28 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:22:28 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:22:28 compute-0 nova_compute[192079]:         <nova:user uuid="0ea122e2fff94f2ba7c78bf30b04029c">tempest-ServerActionsTestOtherB-263921372-project-member</nova:user>
Oct 02 12:22:28 compute-0 nova_compute[192079]:         <nova:project uuid="ffce7d629aa24a7f970d93b2a79045f1">tempest-ServerActionsTestOtherB-263921372</nova:project>
Oct 02 12:22:28 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:22:28 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:22:28 compute-0 nova_compute[192079]:         <nova:port uuid="cae13af9-8175-4eab-b9ec-18019b521d0b">
Oct 02 12:22:28 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.14" ipVersion="4"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:22:28 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:22:28 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:22:28 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <system>
Oct 02 12:22:28 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:22:28 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:22:28 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:22:28 compute-0 nova_compute[192079]:       <entry name="serial">ad2d69bb-3aa9-4c11-b9de-29996574cfa2</entry>
Oct 02 12:22:28 compute-0 nova_compute[192079]:       <entry name="uuid">ad2d69bb-3aa9-4c11-b9de-29996574cfa2</entry>
Oct 02 12:22:28 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     </system>
Oct 02 12:22:28 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:22:28 compute-0 nova_compute[192079]:   <os>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:   </os>
Oct 02 12:22:28 compute-0 nova_compute[192079]:   <features>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:   </features>
Oct 02 12:22:28 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:22:28 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:22:28 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:22:28 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2/disk"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:22:28 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2/disk.config"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:22:28 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:35:d3:eb"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:       <target dev="tapcae13af9-81"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:22:28 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2/console.log" append="off"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <video>
Oct 02 12:22:28 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     </video>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <input type="keyboard" bus="usb"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:22:28 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:22:28 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:22:28 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:22:28 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:22:28 compute-0 nova_compute[192079]: </domain>
Oct 02 12:22:28 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:22:28 compute-0 nova_compute[192079]: 2025-10-02 12:22:28.959 2 DEBUG oslo_concurrency.processutils [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.036 2 DEBUG oslo_concurrency.processutils [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2/disk --force-share --output=json" returned: 0 in 0.078s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.038 2 DEBUG oslo_concurrency.processutils [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.104 2 DEBUG oslo_concurrency.processutils [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2/disk --force-share --output=json" returned: 0 in 0.065s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.105 2 DEBUG nova.objects.instance [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lazy-loading 'trusted_certs' on Instance uuid ad2d69bb-3aa9-4c11-b9de-29996574cfa2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.148 2 DEBUG oslo_concurrency.processutils [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.210 2 DEBUG oslo_concurrency.processutils [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.063s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.211 2 DEBUG nova.virt.disk.api [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Checking if we can resize image /var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.212 2 DEBUG oslo_concurrency.processutils [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.267 2 DEBUG oslo_concurrency.processutils [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2/disk --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.268 2 DEBUG nova.virt.disk.api [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Cannot resize image /var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.268 2 DEBUG nova.objects.instance [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lazy-loading 'migration_context' on Instance uuid ad2d69bb-3aa9-4c11-b9de-29996574cfa2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.285 2 DEBUG nova.virt.libvirt.vif [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:20:21Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerActionsTestOtherB-server-1629207280',display_name='tempest-ServerActionsTestOtherB-server-1629207280',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(2),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serveractionstestotherb-server-1629207280',id=108,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=2,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBG+aqSe4de2VLtRAXN5xeLQn4S/3X8QrNMy2M5WdQ5hviVyEOgqK+m+uWmzPaUSUgE38sEdkytfwUHD32CBZajBt4q3OEf9i3yPJUQGuqp42pAUD+A3EoBIyeptNeSxGdA==',key_name='tempest-keypair-1900171990',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:22:18Z,launched_on='compute-2.ctlplane.example.com',locked=False,locked_by=None,memory_mb=192,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=<?>,power_state=4,progress=0,project_id='ffce7d629aa24a7f970d93b2a79045f1',ramdisk_id='',reservation_id='r-jtzab0yc',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=<?>,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-ServerActionsTestOtherB-263921372',owner_user_name='tempest-ServerActionsTestOtherB-263921372-project-member'},tags=<?>,task_state='powering-on',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:22:24Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='0ea122e2fff94f2ba7c78bf30b04029c',uuid=ad2d69bb-3aa9-4c11-b9de-29996574cfa2,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='stopped') vif={"id": "cae13af9-8175-4eab-b9ec-18019b521d0b", "address": "fa:16:3e:35:d3:eb", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", 
"dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapcae13af9-81", "ovs_interfaceid": "cae13af9-8175-4eab-b9ec-18019b521d0b", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.285 2 DEBUG nova.network.os_vif_util [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Converting VIF {"id": "cae13af9-8175-4eab-b9ec-18019b521d0b", "address": "fa:16:3e:35:d3:eb", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapcae13af9-81", "ovs_interfaceid": "cae13af9-8175-4eab-b9ec-18019b521d0b", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.286 2 DEBUG nova.network.os_vif_util [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:35:d3:eb,bridge_name='br-int',has_traffic_filtering=True,id=cae13af9-8175-4eab-b9ec-18019b521d0b,network=Network(20eb29be-ee23-463b-85af-bfc2388e9f77),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapcae13af9-81') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.286 2 DEBUG os_vif [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:35:d3:eb,bridge_name='br-int',has_traffic_filtering=True,id=cae13af9-8175-4eab-b9ec-18019b521d0b,network=Network(20eb29be-ee23-463b-85af-bfc2388e9f77),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapcae13af9-81') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.287 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.288 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.288 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.290 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.290 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapcae13af9-81, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.291 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapcae13af9-81, col_values=(('external_ids', {'iface-id': 'cae13af9-8175-4eab-b9ec-18019b521d0b', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:35:d3:eb', 'vm-uuid': 'ad2d69bb-3aa9-4c11-b9de-29996574cfa2'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.292 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:29 compute-0 NetworkManager[51160]: <info>  [1759407749.2932] manager: (tapcae13af9-81): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/184)
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.295 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.296 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.297 2 INFO os_vif [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:35:d3:eb,bridge_name='br-int',has_traffic_filtering=True,id=cae13af9-8175-4eab-b9ec-18019b521d0b,network=Network(20eb29be-ee23-463b-85af-bfc2388e9f77),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapcae13af9-81')
Oct 02 12:22:29 compute-0 kernel: tapcae13af9-81: entered promiscuous mode
Oct 02 12:22:29 compute-0 NetworkManager[51160]: <info>  [1759407749.3749] manager: (tapcae13af9-81): new Tun device (/org/freedesktop/NetworkManager/Devices/185)
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.375 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:29 compute-0 ovn_controller[94336]: 2025-10-02T12:22:29Z|00362|binding|INFO|Claiming lport cae13af9-8175-4eab-b9ec-18019b521d0b for this chassis.
Oct 02 12:22:29 compute-0 ovn_controller[94336]: 2025-10-02T12:22:29Z|00363|binding|INFO|cae13af9-8175-4eab-b9ec-18019b521d0b: Claiming fa:16:3e:35:d3:eb 10.100.0.14
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.379 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.383 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:29 compute-0 NetworkManager[51160]: <info>  [1759407749.3916] manager: (patch-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d-to-br-int): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/186)
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.390 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:29 compute-0 NetworkManager[51160]: <info>  [1759407749.3928] manager: (patch-br-int-to-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/187)
Oct 02 12:22:29 compute-0 systemd-udevd[236997]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:22:29 compute-0 systemd-machined[152150]: New machine qemu-50-instance-0000006c.
Oct 02 12:22:29 compute-0 NetworkManager[51160]: <info>  [1759407749.4191] device (tapcae13af9-81): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:22:29 compute-0 NetworkManager[51160]: <info>  [1759407749.4198] device (tapcae13af9-81): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:29.419 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:35:d3:eb 10.100.0.14'], port_security=['fa:16:3e:35:d3:eb 10.100.0.14'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.14/28', 'neutron:device_id': 'ad2d69bb-3aa9-4c11-b9de-29996574cfa2', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-20eb29be-ee23-463b-85af-bfc2388e9f77', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'neutron:revision_number': '6', 'neutron:security_group_ids': '12e9168a-be86-462f-a658-971f38e3430f', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:port_fip': '192.168.122.185'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=e183e2c6-21dc-48e3-ae47-279bc8b32eeb, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=cae13af9-8175-4eab-b9ec-18019b521d0b) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:29.421 103294 INFO neutron.agent.ovn.metadata.agent [-] Port cae13af9-8175-4eab-b9ec-18019b521d0b in datapath 20eb29be-ee23-463b-85af-bfc2388e9f77 bound to our chassis
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:29.422 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 20eb29be-ee23-463b-85af-bfc2388e9f77
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:29.432 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[077f0f77-5f89-4229-a23f-a104cff0235e]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:29.433 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap20eb29be-e1 in ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:29.435 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap20eb29be-e0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:29.435 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[fb65dc04-5459-43aa-9293-fc691ade9723]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:29.435 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7aee754b-bb1f-472e-8711-331e74cc68cf]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:29.446 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[69630fcc-dbb0-406b-a785-0bb55a301507]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:29 compute-0 systemd[1]: Started Virtual Machine qemu-50-instance-0000006c.
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:29.482 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[125bd21e-ccea-4ca9-b422-ea73fdd465ac]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:29 compute-0 podman[236981]: 2025-10-02 12:22:29.490392254 +0000 UTC m=+0.119687104 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', 
'/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm, org.label-schema.vendor=CentOS, tcib_managed=true)
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:29.515 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[1f896f5b-6582-443e-8cd5-c61ada511576]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:29.536 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[fdb79160-9c95-474c-8fd2-41b20f2b5684]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:29 compute-0 NetworkManager[51160]: <info>  [1759407749.5388] manager: (tap20eb29be-e0): new Veth device (/org/freedesktop/NetworkManager/Devices/188)
Oct 02 12:22:29 compute-0 systemd-udevd[237007]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.538 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.540 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.562 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:29.567 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[3110c018-b7ef-4ab1-8b5a-91eb34afa14a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:29.570 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[9d3851d3-6f9b-4f37-9816-af9924a72fe0]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:29 compute-0 ovn_controller[94336]: 2025-10-02T12:22:29Z|00364|binding|INFO|Setting lport cae13af9-8175-4eab-b9ec-18019b521d0b ovn-installed in OVS
Oct 02 12:22:29 compute-0 ovn_controller[94336]: 2025-10-02T12:22:29Z|00365|binding|INFO|Setting lport cae13af9-8175-4eab-b9ec-18019b521d0b up in Southbound
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.573 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:29 compute-0 NetworkManager[51160]: <info>  [1759407749.5905] device (tap20eb29be-e0): carrier: link connected
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:29.595 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[891e5dc6-2ec9-4cae-b6e4-519712707191]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:29.610 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ab450268-ff1d-41ea-ad03-849222e897a7]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap20eb29be-e1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:77:55:96'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 119], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 574721, 'reachable_time': 27477, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 237041, 'error': None, 'target': 'ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:29.624 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d54decc7-4ac3-44fb-9d0a-ef0df2267a91]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe77:5596'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 574721, 'tstamp': 574721}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 237042, 'error': None, 'target': 'ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:29.647 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f17fc1bd-b234-46c9-954a-9923c2927a97]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap20eb29be-e1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:77:55:96'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 119], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 574721, 'reachable_time': 27477, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 237043, 'error': None, 'target': 'ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:29.681 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[52e075cb-c621-4bc4-ab3e-1340a7891af9]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:29.727 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8c5da32c-4022-4edd-a2ce-4e84f4190810]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:29.728 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap20eb29be-e0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:29.729 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:29.729 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap20eb29be-e0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:22:29 compute-0 kernel: tap20eb29be-e0: entered promiscuous mode
Oct 02 12:22:29 compute-0 NetworkManager[51160]: <info>  [1759407749.7313] manager: (tap20eb29be-e0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/189)
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.730 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.733 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:29.739 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap20eb29be-e0, col_values=(('external_ids', {'iface-id': 'e533861f-45cb-4843-b071-0b628ca25128'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:22:29 compute-0 ovn_controller[94336]: 2025-10-02T12:22:29Z|00366|binding|INFO|Releasing lport e533861f-45cb-4843-b071-0b628ca25128 from this chassis (sb_readonly=0)
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.740 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.740 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.752 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:29.752 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/20eb29be-ee23-463b-85af-bfc2388e9f77.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/20eb29be-ee23-463b-85af-bfc2388e9f77.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:29.753 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c1c668bc-e03f-4812-908d-8573f9237e05]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:29.753 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-20eb29be-ee23-463b-85af-bfc2388e9f77
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/20eb29be-ee23-463b-85af-bfc2388e9f77.pid.haproxy
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 20eb29be-ee23-463b-85af-bfc2388e9f77
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:22:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:29.754 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77', 'env', 'PROCESS_TAG=haproxy-20eb29be-ee23-463b-85af-bfc2388e9f77', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/20eb29be-ee23-463b-85af-bfc2388e9f77.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.910 2 DEBUG nova.compute.manager [req-15cdde6d-f9b9-4e49-8417-9bb3357f7efb req-e34f26d8-24e4-4bfe-8681-e416e9bae166 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Received event network-vif-plugged-cae13af9-8175-4eab-b9ec-18019b521d0b external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.911 2 DEBUG oslo_concurrency.lockutils [req-15cdde6d-f9b9-4e49-8417-9bb3357f7efb req-e34f26d8-24e4-4bfe-8681-e416e9bae166 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "ad2d69bb-3aa9-4c11-b9de-29996574cfa2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.911 2 DEBUG oslo_concurrency.lockutils [req-15cdde6d-f9b9-4e49-8417-9bb3357f7efb req-e34f26d8-24e4-4bfe-8681-e416e9bae166 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ad2d69bb-3aa9-4c11-b9de-29996574cfa2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.911 2 DEBUG oslo_concurrency.lockutils [req-15cdde6d-f9b9-4e49-8417-9bb3357f7efb req-e34f26d8-24e4-4bfe-8681-e416e9bae166 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ad2d69bb-3aa9-4c11-b9de-29996574cfa2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.912 2 DEBUG nova.compute.manager [req-15cdde6d-f9b9-4e49-8417-9bb3357f7efb req-e34f26d8-24e4-4bfe-8681-e416e9bae166 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] No waiting events found dispatching network-vif-plugged-cae13af9-8175-4eab-b9ec-18019b521d0b pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:22:29 compute-0 nova_compute[192079]: 2025-10-02 12:22:29.912 2 WARNING nova.compute.manager [req-15cdde6d-f9b9-4e49-8417-9bb3357f7efb req-e34f26d8-24e4-4bfe-8681-e416e9bae166 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Received unexpected event network-vif-plugged-cae13af9-8175-4eab-b9ec-18019b521d0b for instance with vm_state stopped and task_state powering-on.
Oct 02 12:22:30 compute-0 podman[237075]: 2025-10-02 12:22:30.132853109 +0000 UTC m=+0.053053807 container create 60c210515c7e8ab8845e5ca7d5f285849b2a4bd6147a57bf75ee5bc9a5fc1bb7 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:22:30 compute-0 systemd[1]: Started libpod-conmon-60c210515c7e8ab8845e5ca7d5f285849b2a4bd6147a57bf75ee5bc9a5fc1bb7.scope.
Oct 02 12:22:30 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:22:30 compute-0 podman[237075]: 2025-10-02 12:22:30.107108537 +0000 UTC m=+0.027309255 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:22:30 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/d3302a2a2fdc878722715071084d9a912b9352d96ae79ac4f84c52d50653af21/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:22:30 compute-0 podman[237075]: 2025-10-02 12:22:30.22530898 +0000 UTC m=+0.145509698 container init 60c210515c7e8ab8845e5ca7d5f285849b2a4bd6147a57bf75ee5bc9a5fc1bb7 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0)
Oct 02 12:22:30 compute-0 podman[237075]: 2025-10-02 12:22:30.240857883 +0000 UTC m=+0.161058581 container start 60c210515c7e8ab8845e5ca7d5f285849b2a4bd6147a57bf75ee5bc9a5fc1bb7 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS)
Oct 02 12:22:30 compute-0 neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77[237097]: [NOTICE]   (237101) : New worker (237103) forked
Oct 02 12:22:30 compute-0 neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77[237097]: [NOTICE]   (237101) : Loading success.
Oct 02 12:22:30 compute-0 nova_compute[192079]: 2025-10-02 12:22:30.588 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407750.5879583, ad2d69bb-3aa9-4c11-b9de-29996574cfa2 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:22:30 compute-0 nova_compute[192079]: 2025-10-02 12:22:30.589 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] VM Resumed (Lifecycle Event)
Oct 02 12:22:30 compute-0 nova_compute[192079]: 2025-10-02 12:22:30.591 2 DEBUG nova.compute.manager [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:22:30 compute-0 nova_compute[192079]: 2025-10-02 12:22:30.596 2 INFO nova.virt.libvirt.driver [-] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Instance rebooted successfully.
Oct 02 12:22:30 compute-0 nova_compute[192079]: 2025-10-02 12:22:30.596 2 DEBUG nova.compute.manager [None req-9eb221af-e5b9-4f5c-9f20-15c7e58e4e3a 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:22:30 compute-0 nova_compute[192079]: 2025-10-02 12:22:30.623 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:22:30 compute-0 nova_compute[192079]: 2025-10-02 12:22:30.627 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: stopped, current task_state: powering-on, current DB power_state: 4, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:22:30 compute-0 nova_compute[192079]: 2025-10-02 12:22:30.660 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:22:30 compute-0 nova_compute[192079]: 2025-10-02 12:22:30.683 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] During sync_power_state the instance has a pending task (powering-on). Skip.
Oct 02 12:22:30 compute-0 nova_compute[192079]: 2025-10-02 12:22:30.684 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407750.5895524, ad2d69bb-3aa9-4c11-b9de-29996574cfa2 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:22:30 compute-0 nova_compute[192079]: 2025-10-02 12:22:30.684 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] VM Started (Lifecycle Event)
Oct 02 12:22:30 compute-0 nova_compute[192079]: 2025-10-02 12:22:30.713 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:22:30 compute-0 nova_compute[192079]: 2025-10-02 12:22:30.719 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Synchronizing instance power state after lifecycle event "Started"; current vm_state: active, current task_state: None, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:22:30 compute-0 nova_compute[192079]: 2025-10-02 12:22:30.816 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:31 compute-0 nova_compute[192079]: 2025-10-02 12:22:31.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:22:31 compute-0 nova_compute[192079]: 2025-10-02 12:22:31.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:22:31 compute-0 nova_compute[192079]: 2025-10-02 12:22:31.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:22:31 compute-0 nova_compute[192079]: 2025-10-02 12:22:31.813 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:31 compute-0 nova_compute[192079]: 2025-10-02 12:22:31.829 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "refresh_cache-ad2d69bb-3aa9-4c11-b9de-29996574cfa2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:22:31 compute-0 nova_compute[192079]: 2025-10-02 12:22:31.830 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquired lock "refresh_cache-ad2d69bb-3aa9-4c11-b9de-29996574cfa2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:22:31 compute-0 nova_compute[192079]: 2025-10-02 12:22:31.830 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Forcefully refreshing network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2004
Oct 02 12:22:31 compute-0 nova_compute[192079]: 2025-10-02 12:22:31.830 2 DEBUG nova.objects.instance [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lazy-loading 'info_cache' on Instance uuid ad2d69bb-3aa9-4c11-b9de-29996574cfa2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:22:32 compute-0 nova_compute[192079]: 2025-10-02 12:22:32.030 2 DEBUG nova.compute.manager [req-db950f42-6c91-44ec-9e1b-1fc0b41649f0 req-2a500422-21c2-4268-9783-18335234db8d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Received event network-vif-plugged-cae13af9-8175-4eab-b9ec-18019b521d0b external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:22:32 compute-0 nova_compute[192079]: 2025-10-02 12:22:32.031 2 DEBUG oslo_concurrency.lockutils [req-db950f42-6c91-44ec-9e1b-1fc0b41649f0 req-2a500422-21c2-4268-9783-18335234db8d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "ad2d69bb-3aa9-4c11-b9de-29996574cfa2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:22:32 compute-0 nova_compute[192079]: 2025-10-02 12:22:32.031 2 DEBUG oslo_concurrency.lockutils [req-db950f42-6c91-44ec-9e1b-1fc0b41649f0 req-2a500422-21c2-4268-9783-18335234db8d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ad2d69bb-3aa9-4c11-b9de-29996574cfa2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:22:32 compute-0 nova_compute[192079]: 2025-10-02 12:22:32.031 2 DEBUG oslo_concurrency.lockutils [req-db950f42-6c91-44ec-9e1b-1fc0b41649f0 req-2a500422-21c2-4268-9783-18335234db8d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ad2d69bb-3aa9-4c11-b9de-29996574cfa2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:22:32 compute-0 nova_compute[192079]: 2025-10-02 12:22:32.031 2 DEBUG nova.compute.manager [req-db950f42-6c91-44ec-9e1b-1fc0b41649f0 req-2a500422-21c2-4268-9783-18335234db8d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] No waiting events found dispatching network-vif-plugged-cae13af9-8175-4eab-b9ec-18019b521d0b pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:22:32 compute-0 nova_compute[192079]: 2025-10-02 12:22:32.032 2 WARNING nova.compute.manager [req-db950f42-6c91-44ec-9e1b-1fc0b41649f0 req-2a500422-21c2-4268-9783-18335234db8d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Received unexpected event network-vif-plugged-cae13af9-8175-4eab-b9ec-18019b521d0b for instance with vm_state active and task_state None.
Oct 02 12:22:32 compute-0 nova_compute[192079]: 2025-10-02 12:22:32.846 2 DEBUG oslo_concurrency.lockutils [None req-406b9612-15cc-4c16-8c4a-861abcbe9566 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Acquiring lock "ad2d69bb-3aa9-4c11-b9de-29996574cfa2" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:22:32 compute-0 nova_compute[192079]: 2025-10-02 12:22:32.848 2 DEBUG oslo_concurrency.lockutils [None req-406b9612-15cc-4c16-8c4a-861abcbe9566 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lock "ad2d69bb-3aa9-4c11-b9de-29996574cfa2" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:22:32 compute-0 nova_compute[192079]: 2025-10-02 12:22:32.848 2 DEBUG oslo_concurrency.lockutils [None req-406b9612-15cc-4c16-8c4a-861abcbe9566 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Acquiring lock "ad2d69bb-3aa9-4c11-b9de-29996574cfa2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:22:32 compute-0 nova_compute[192079]: 2025-10-02 12:22:32.849 2 DEBUG oslo_concurrency.lockutils [None req-406b9612-15cc-4c16-8c4a-861abcbe9566 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lock "ad2d69bb-3aa9-4c11-b9de-29996574cfa2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:22:32 compute-0 nova_compute[192079]: 2025-10-02 12:22:32.849 2 DEBUG oslo_concurrency.lockutils [None req-406b9612-15cc-4c16-8c4a-861abcbe9566 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lock "ad2d69bb-3aa9-4c11-b9de-29996574cfa2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:22:32 compute-0 nova_compute[192079]: 2025-10-02 12:22:32.861 2 INFO nova.compute.manager [None req-406b9612-15cc-4c16-8c4a-861abcbe9566 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Terminating instance
Oct 02 12:22:32 compute-0 nova_compute[192079]: 2025-10-02 12:22:32.874 2 DEBUG nova.compute.manager [None req-406b9612-15cc-4c16-8c4a-861abcbe9566 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:22:32 compute-0 kernel: tapcae13af9-81 (unregistering): left promiscuous mode
Oct 02 12:22:32 compute-0 NetworkManager[51160]: <info>  [1759407752.9012] device (tapcae13af9-81): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:22:32 compute-0 nova_compute[192079]: 2025-10-02 12:22:32.911 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:32 compute-0 ovn_controller[94336]: 2025-10-02T12:22:32Z|00367|binding|INFO|Releasing lport cae13af9-8175-4eab-b9ec-18019b521d0b from this chassis (sb_readonly=0)
Oct 02 12:22:32 compute-0 ovn_controller[94336]: 2025-10-02T12:22:32Z|00368|binding|INFO|Setting lport cae13af9-8175-4eab-b9ec-18019b521d0b down in Southbound
Oct 02 12:22:32 compute-0 ovn_controller[94336]: 2025-10-02T12:22:32Z|00369|binding|INFO|Removing iface tapcae13af9-81 ovn-installed in OVS
Oct 02 12:22:32 compute-0 nova_compute[192079]: 2025-10-02 12:22:32.915 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:32 compute-0 nova_compute[192079]: 2025-10-02 12:22:32.939 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:32 compute-0 systemd[1]: machine-qemu\x2d50\x2dinstance\x2d0000006c.scope: Deactivated successfully.
Oct 02 12:22:32 compute-0 systemd[1]: machine-qemu\x2d50\x2dinstance\x2d0000006c.scope: Consumed 3.387s CPU time.
Oct 02 12:22:32 compute-0 systemd-machined[152150]: Machine qemu-50-instance-0000006c terminated.
Oct 02 12:22:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:33.065 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:35:d3:eb 10.100.0.14'], port_security=['fa:16:3e:35:d3:eb 10.100.0.14'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.14/28', 'neutron:device_id': 'ad2d69bb-3aa9-4c11-b9de-29996574cfa2', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-20eb29be-ee23-463b-85af-bfc2388e9f77', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'neutron:revision_number': '8', 'neutron:security_group_ids': '12e9168a-be86-462f-a658-971f38e3430f', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:port_fip': '192.168.122.185', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=e183e2c6-21dc-48e3-ae47-279bc8b32eeb, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=cae13af9-8175-4eab-b9ec-18019b521d0b) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:22:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:33.066 103294 INFO neutron.agent.ovn.metadata.agent [-] Port cae13af9-8175-4eab-b9ec-18019b521d0b in datapath 20eb29be-ee23-463b-85af-bfc2388e9f77 unbound from our chassis
Oct 02 12:22:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:33.068 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 20eb29be-ee23-463b-85af-bfc2388e9f77, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:22:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:33.069 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[484f0399-90ab-4a58-9b25-68871e6390b3]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:33.069 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77 namespace which is not needed anymore
Oct 02 12:22:33 compute-0 nova_compute[192079]: 2025-10-02 12:22:33.137 2 INFO nova.virt.libvirt.driver [-] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Instance destroyed successfully.
Oct 02 12:22:33 compute-0 nova_compute[192079]: 2025-10-02 12:22:33.138 2 DEBUG nova.objects.instance [None req-406b9612-15cc-4c16-8c4a-861abcbe9566 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lazy-loading 'resources' on Instance uuid ad2d69bb-3aa9-4c11-b9de-29996574cfa2 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:22:33 compute-0 nova_compute[192079]: 2025-10-02 12:22:33.152 2 DEBUG nova.virt.libvirt.vif [None req-406b9612-15cc-4c16-8c4a-861abcbe9566 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:20:21Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerActionsTestOtherB-server-1629207280',display_name='tempest-ServerActionsTestOtherB-server-1629207280',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(2),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serveractionstestotherb-server-1629207280',id=108,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=2,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBG+aqSe4de2VLtRAXN5xeLQn4S/3X8QrNMy2M5WdQ5hviVyEOgqK+m+uWmzPaUSUgE38sEdkytfwUHD32CBZajBt4q3OEf9i3yPJUQGuqp42pAUD+A3EoBIyeptNeSxGdA==',key_name='tempest-keypair-1900171990',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:22:18Z,launched_on='compute-2.ctlplane.example.com',locked=False,locked_by=None,memory_mb=192,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='ffce7d629aa24a7f970d93b2a79045f1',ramdisk_id='',reservation_id='r-jtzab0yc',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-ServerActionsTestOtherB-263921372',owner_user_name='tempest-ServerActionsTestOtherB-263921372-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:22:30Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='0ea122e2fff94f2ba7c78bf30b04029c',uuid=ad2d69bb-3aa9-4c11-b9de-29996574cfa2,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "cae13af9-8175-4eab-b9ec-18019b521d0b", "address": "fa:16:3e:35:d3:eb", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], 
"gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapcae13af9-81", "ovs_interfaceid": "cae13af9-8175-4eab-b9ec-18019b521d0b", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:22:33 compute-0 nova_compute[192079]: 2025-10-02 12:22:33.153 2 DEBUG nova.network.os_vif_util [None req-406b9612-15cc-4c16-8c4a-861abcbe9566 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Converting VIF {"id": "cae13af9-8175-4eab-b9ec-18019b521d0b", "address": "fa:16:3e:35:d3:eb", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapcae13af9-81", "ovs_interfaceid": "cae13af9-8175-4eab-b9ec-18019b521d0b", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:22:33 compute-0 nova_compute[192079]: 2025-10-02 12:22:33.153 2 DEBUG nova.network.os_vif_util [None req-406b9612-15cc-4c16-8c4a-861abcbe9566 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:35:d3:eb,bridge_name='br-int',has_traffic_filtering=True,id=cae13af9-8175-4eab-b9ec-18019b521d0b,network=Network(20eb29be-ee23-463b-85af-bfc2388e9f77),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapcae13af9-81') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:22:33 compute-0 nova_compute[192079]: 2025-10-02 12:22:33.154 2 DEBUG os_vif [None req-406b9612-15cc-4c16-8c4a-861abcbe9566 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:35:d3:eb,bridge_name='br-int',has_traffic_filtering=True,id=cae13af9-8175-4eab-b9ec-18019b521d0b,network=Network(20eb29be-ee23-463b-85af-bfc2388e9f77),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapcae13af9-81') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:22:33 compute-0 nova_compute[192079]: 2025-10-02 12:22:33.155 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:33 compute-0 nova_compute[192079]: 2025-10-02 12:22:33.155 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapcae13af9-81, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:22:33 compute-0 nova_compute[192079]: 2025-10-02 12:22:33.156 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:33 compute-0 nova_compute[192079]: 2025-10-02 12:22:33.158 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:33 compute-0 nova_compute[192079]: 2025-10-02 12:22:33.160 2 INFO os_vif [None req-406b9612-15cc-4c16-8c4a-861abcbe9566 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:35:d3:eb,bridge_name='br-int',has_traffic_filtering=True,id=cae13af9-8175-4eab-b9ec-18019b521d0b,network=Network(20eb29be-ee23-463b-85af-bfc2388e9f77),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapcae13af9-81')
Oct 02 12:22:33 compute-0 nova_compute[192079]: 2025-10-02 12:22:33.161 2 INFO nova.virt.libvirt.driver [None req-406b9612-15cc-4c16-8c4a-861abcbe9566 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Deleting instance files /var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2_del
Oct 02 12:22:33 compute-0 nova_compute[192079]: 2025-10-02 12:22:33.169 2 INFO nova.virt.libvirt.driver [None req-406b9612-15cc-4c16-8c4a-861abcbe9566 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Deletion of /var/lib/nova/instances/ad2d69bb-3aa9-4c11-b9de-29996574cfa2_del complete
Oct 02 12:22:33 compute-0 nova_compute[192079]: 2025-10-02 12:22:33.240 2 INFO nova.compute.manager [None req-406b9612-15cc-4c16-8c4a-861abcbe9566 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Took 0.37 seconds to destroy the instance on the hypervisor.
Oct 02 12:22:33 compute-0 nova_compute[192079]: 2025-10-02 12:22:33.241 2 DEBUG oslo.service.loopingcall [None req-406b9612-15cc-4c16-8c4a-861abcbe9566 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:22:33 compute-0 nova_compute[192079]: 2025-10-02 12:22:33.241 2 DEBUG nova.compute.manager [-] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:22:33 compute-0 nova_compute[192079]: 2025-10-02 12:22:33.241 2 DEBUG nova.network.neutron [-] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:22:33 compute-0 neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77[237097]: [NOTICE]   (237101) : haproxy version is 2.8.14-c23fe91
Oct 02 12:22:33 compute-0 neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77[237097]: [NOTICE]   (237101) : path to executable is /usr/sbin/haproxy
Oct 02 12:22:33 compute-0 neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77[237097]: [WARNING]  (237101) : Exiting Master process...
Oct 02 12:22:33 compute-0 neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77[237097]: [ALERT]    (237101) : Current worker (237103) exited with code 143 (Terminated)
Oct 02 12:22:33 compute-0 neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77[237097]: [WARNING]  (237101) : All workers exited. Exiting... (0)
Oct 02 12:22:33 compute-0 systemd[1]: libpod-60c210515c7e8ab8845e5ca7d5f285849b2a4bd6147a57bf75ee5bc9a5fc1bb7.scope: Deactivated successfully.
Oct 02 12:22:33 compute-0 conmon[237097]: conmon 60c210515c7e8ab8845e <nwarn>: Failed to open cgroups file: /sys/fs/cgroup/machine.slice/libpod-60c210515c7e8ab8845e5ca7d5f285849b2a4bd6147a57bf75ee5bc9a5fc1bb7.scope/container/memory.events
Oct 02 12:22:33 compute-0 podman[237153]: 2025-10-02 12:22:33.380745393 +0000 UTC m=+0.218684563 container died 60c210515c7e8ab8845e5ca7d5f285849b2a4bd6147a57bf75ee5bc9a5fc1bb7 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0)
Oct 02 12:22:33 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-60c210515c7e8ab8845e5ca7d5f285849b2a4bd6147a57bf75ee5bc9a5fc1bb7-userdata-shm.mount: Deactivated successfully.
Oct 02 12:22:33 compute-0 systemd[1]: var-lib-containers-storage-overlay-d3302a2a2fdc878722715071084d9a912b9352d96ae79ac4f84c52d50653af21-merged.mount: Deactivated successfully.
Oct 02 12:22:33 compute-0 podman[237153]: 2025-10-02 12:22:33.469214375 +0000 UTC m=+0.307153545 container cleanup 60c210515c7e8ab8845e5ca7d5f285849b2a4bd6147a57bf75ee5bc9a5fc1bb7 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:22:33 compute-0 systemd[1]: libpod-conmon-60c210515c7e8ab8845e5ca7d5f285849b2a4bd6147a57bf75ee5bc9a5fc1bb7.scope: Deactivated successfully.
Oct 02 12:22:33 compute-0 podman[237185]: 2025-10-02 12:22:33.525837908 +0000 UTC m=+0.037499112 container remove 60c210515c7e8ab8845e5ca7d5f285849b2a4bd6147a57bf75ee5bc9a5fc1bb7 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3)
Oct 02 12:22:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:33.532 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6fc6eb78-d62a-4c19-937b-248b194ae719]: (4, ('Thu Oct  2 12:22:33 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77 (60c210515c7e8ab8845e5ca7d5f285849b2a4bd6147a57bf75ee5bc9a5fc1bb7)\n60c210515c7e8ab8845e5ca7d5f285849b2a4bd6147a57bf75ee5bc9a5fc1bb7\nThu Oct  2 12:22:33 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77 (60c210515c7e8ab8845e5ca7d5f285849b2a4bd6147a57bf75ee5bc9a5fc1bb7)\n60c210515c7e8ab8845e5ca7d5f285849b2a4bd6147a57bf75ee5bc9a5fc1bb7\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:33.533 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7ba6a28a-7949-4fa7-900b-877ee4ad3712]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:33.534 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap20eb29be-e0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:22:33 compute-0 nova_compute[192079]: 2025-10-02 12:22:33.535 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:33 compute-0 kernel: tap20eb29be-e0: left promiscuous mode
Oct 02 12:22:33 compute-0 nova_compute[192079]: 2025-10-02 12:22:33.546 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:33 compute-0 nova_compute[192079]: 2025-10-02 12:22:33.547 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:33.550 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d3a1a7e8-799f-4ee5-b6b7-f8877ddc4713]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:33 compute-0 nova_compute[192079]: 2025-10-02 12:22:33.556 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Updating instance_info_cache with network_info: [{"id": "cae13af9-8175-4eab-b9ec-18019b521d0b", "address": "fa:16:3e:35:d3:eb", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapcae13af9-81", "ovs_interfaceid": "cae13af9-8175-4eab-b9ec-18019b521d0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:22:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:33.575 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b955d419-d360-493b-bd27-2cc14cd7ccbf]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:33.576 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9b99bc4e-12c8-48b9-a150-238852512b0a]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:33 compute-0 nova_compute[192079]: 2025-10-02 12:22:33.581 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Releasing lock "refresh_cache-ad2d69bb-3aa9-4c11-b9de-29996574cfa2" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:22:33 compute-0 nova_compute[192079]: 2025-10-02 12:22:33.581 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Updated the network info_cache for instance _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9929
Oct 02 12:22:33 compute-0 nova_compute[192079]: 2025-10-02 12:22:33.582 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:22:33 compute-0 nova_compute[192079]: 2025-10-02 12:22:33.582 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:22:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:33.591 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c03689ad-6661-40f2-9b45-225cc991896a]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 574713, 'reachable_time': 43847, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 237200, 'error': None, 'target': 'ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:33.594 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:22:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:33.594 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[43d7751a-c9e6-46a1-bd2c-f316bd203734]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:33 compute-0 systemd[1]: run-netns-ovnmeta\x2d20eb29be\x2dee23\x2d463b\x2d85af\x2dbfc2388e9f77.mount: Deactivated successfully.
Oct 02 12:22:33 compute-0 nova_compute[192079]: 2025-10-02 12:22:33.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:22:34 compute-0 nova_compute[192079]: 2025-10-02 12:22:34.153 2 DEBUG nova.compute.manager [req-8f80def5-4d4c-4d29-93b3-224aa03966e0 req-a7aee980-2e04-4491-95d1-faab331b4c28 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Received event network-vif-unplugged-cae13af9-8175-4eab-b9ec-18019b521d0b external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:22:34 compute-0 nova_compute[192079]: 2025-10-02 12:22:34.153 2 DEBUG oslo_concurrency.lockutils [req-8f80def5-4d4c-4d29-93b3-224aa03966e0 req-a7aee980-2e04-4491-95d1-faab331b4c28 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "ad2d69bb-3aa9-4c11-b9de-29996574cfa2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:22:34 compute-0 nova_compute[192079]: 2025-10-02 12:22:34.154 2 DEBUG oslo_concurrency.lockutils [req-8f80def5-4d4c-4d29-93b3-224aa03966e0 req-a7aee980-2e04-4491-95d1-faab331b4c28 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ad2d69bb-3aa9-4c11-b9de-29996574cfa2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:22:34 compute-0 nova_compute[192079]: 2025-10-02 12:22:34.154 2 DEBUG oslo_concurrency.lockutils [req-8f80def5-4d4c-4d29-93b3-224aa03966e0 req-a7aee980-2e04-4491-95d1-faab331b4c28 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ad2d69bb-3aa9-4c11-b9de-29996574cfa2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:22:34 compute-0 nova_compute[192079]: 2025-10-02 12:22:34.155 2 DEBUG nova.compute.manager [req-8f80def5-4d4c-4d29-93b3-224aa03966e0 req-a7aee980-2e04-4491-95d1-faab331b4c28 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] No waiting events found dispatching network-vif-unplugged-cae13af9-8175-4eab-b9ec-18019b521d0b pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:22:34 compute-0 nova_compute[192079]: 2025-10-02 12:22:34.155 2 DEBUG nova.compute.manager [req-8f80def5-4d4c-4d29-93b3-224aa03966e0 req-a7aee980-2e04-4491-95d1-faab331b4c28 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Received event network-vif-unplugged-cae13af9-8175-4eab-b9ec-18019b521d0b for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:22:34 compute-0 nova_compute[192079]: 2025-10-02 12:22:34.155 2 DEBUG nova.compute.manager [req-8f80def5-4d4c-4d29-93b3-224aa03966e0 req-a7aee980-2e04-4491-95d1-faab331b4c28 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Received event network-vif-plugged-cae13af9-8175-4eab-b9ec-18019b521d0b external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:22:34 compute-0 nova_compute[192079]: 2025-10-02 12:22:34.156 2 DEBUG oslo_concurrency.lockutils [req-8f80def5-4d4c-4d29-93b3-224aa03966e0 req-a7aee980-2e04-4491-95d1-faab331b4c28 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "ad2d69bb-3aa9-4c11-b9de-29996574cfa2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:22:34 compute-0 nova_compute[192079]: 2025-10-02 12:22:34.156 2 DEBUG oslo_concurrency.lockutils [req-8f80def5-4d4c-4d29-93b3-224aa03966e0 req-a7aee980-2e04-4491-95d1-faab331b4c28 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ad2d69bb-3aa9-4c11-b9de-29996574cfa2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:22:34 compute-0 nova_compute[192079]: 2025-10-02 12:22:34.156 2 DEBUG oslo_concurrency.lockutils [req-8f80def5-4d4c-4d29-93b3-224aa03966e0 req-a7aee980-2e04-4491-95d1-faab331b4c28 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ad2d69bb-3aa9-4c11-b9de-29996574cfa2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:22:34 compute-0 nova_compute[192079]: 2025-10-02 12:22:34.157 2 DEBUG nova.compute.manager [req-8f80def5-4d4c-4d29-93b3-224aa03966e0 req-a7aee980-2e04-4491-95d1-faab331b4c28 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] No waiting events found dispatching network-vif-plugged-cae13af9-8175-4eab-b9ec-18019b521d0b pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:22:34 compute-0 nova_compute[192079]: 2025-10-02 12:22:34.157 2 WARNING nova.compute.manager [req-8f80def5-4d4c-4d29-93b3-224aa03966e0 req-a7aee980-2e04-4491-95d1-faab331b4c28 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Received unexpected event network-vif-plugged-cae13af9-8175-4eab-b9ec-18019b521d0b for instance with vm_state active and task_state deleting.
Oct 02 12:22:34 compute-0 nova_compute[192079]: 2025-10-02 12:22:34.182 2 DEBUG nova.network.neutron [-] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:22:34 compute-0 nova_compute[192079]: 2025-10-02 12:22:34.200 2 INFO nova.compute.manager [-] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Took 0.96 seconds to deallocate network for instance.
Oct 02 12:22:34 compute-0 nova_compute[192079]: 2025-10-02 12:22:34.220 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:34 compute-0 nova_compute[192079]: 2025-10-02 12:22:34.278 2 DEBUG oslo_concurrency.lockutils [None req-406b9612-15cc-4c16-8c4a-861abcbe9566 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:22:34 compute-0 nova_compute[192079]: 2025-10-02 12:22:34.279 2 DEBUG oslo_concurrency.lockutils [None req-406b9612-15cc-4c16-8c4a-861abcbe9566 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:22:34 compute-0 nova_compute[192079]: 2025-10-02 12:22:34.333 2 DEBUG nova.compute.provider_tree [None req-406b9612-15cc-4c16-8c4a-861abcbe9566 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:22:34 compute-0 nova_compute[192079]: 2025-10-02 12:22:34.349 2 DEBUG nova.scheduler.client.report [None req-406b9612-15cc-4c16-8c4a-861abcbe9566 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:22:34 compute-0 nova_compute[192079]: 2025-10-02 12:22:34.372 2 DEBUG oslo_concurrency.lockutils [None req-406b9612-15cc-4c16-8c4a-861abcbe9566 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.093s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:22:34 compute-0 nova_compute[192079]: 2025-10-02 12:22:34.409 2 INFO nova.scheduler.client.report [None req-406b9612-15cc-4c16-8c4a-861abcbe9566 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Deleted allocations for instance ad2d69bb-3aa9-4c11-b9de-29996574cfa2
Oct 02 12:22:34 compute-0 nova_compute[192079]: 2025-10-02 12:22:34.497 2 DEBUG oslo_concurrency.lockutils [None req-406b9612-15cc-4c16-8c4a-861abcbe9566 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lock "ad2d69bb-3aa9-4c11-b9de-29996574cfa2" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.650s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:22:34 compute-0 nova_compute[192079]: 2025-10-02 12:22:34.880 2 DEBUG nova.compute.manager [req-a494a5f4-67b8-449d-80ea-c5dab68c1430 req-9fb93c12-b56f-478b-b7af-f78502e9dca1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Received event network-vif-deleted-cae13af9-8175-4eab-b9ec-18019b521d0b external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:22:36 compute-0 podman[237201]: 2025-10-02 12:22:36.144809538 +0000 UTC m=+0.058775074 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=openstack_network_exporter, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., release=1755695350, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, com.redhat.component=ubi9-minimal-container, io.openshift.tags=minimal rhel9, distribution-scope=public, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.buildah.version=1.33.7, managed_by=edpm_ansible, name=ubi9-minimal, url=https://catalog.redhat.com/en/search?searchType=containers, io.openshift.expose-services=, 
com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, config_id=edpm, architecture=x86_64, vendor=Red Hat, Inc., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., build-date=2025-08-20T13:12:41, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., maintainer=Red Hat, Inc., vcs-type=git, version=9.6)
Oct 02 12:22:36 compute-0 podman[237202]: 2025-10-02 12:22:36.177721685 +0000 UTC m=+0.079072347 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, org.label-schema.schema-version=1.0, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3)
Oct 02 12:22:36 compute-0 nova_compute[192079]: 2025-10-02 12:22:36.815 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:38 compute-0 nova_compute[192079]: 2025-10-02 12:22:38.158 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:38 compute-0 nova_compute[192079]: 2025-10-02 12:22:38.833 2 DEBUG oslo_concurrency.lockutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Acquiring lock "ae6bf863-8cca-48ab-a98f-065f8382fa99" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:22:38 compute-0 nova_compute[192079]: 2025-10-02 12:22:38.833 2 DEBUG oslo_concurrency.lockutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lock "ae6bf863-8cca-48ab-a98f-065f8382fa99" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:22:38 compute-0 nova_compute[192079]: 2025-10-02 12:22:38.851 2 DEBUG nova.compute.manager [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.026 2 DEBUG oslo_concurrency.lockutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.027 2 DEBUG oslo_concurrency.lockutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.035 2 DEBUG nova.virt.hardware [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.035 2 INFO nova.compute.claims [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.148 2 DEBUG nova.compute.provider_tree [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.162 2 DEBUG nova.scheduler.client.report [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.184 2 DEBUG oslo_concurrency.lockutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.158s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.185 2 DEBUG nova.compute.manager [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.231 2 DEBUG nova.compute.manager [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.232 2 DEBUG nova.network.neutron [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.248 2 INFO nova.virt.libvirt.driver [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.262 2 DEBUG nova.compute.manager [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.359 2 DEBUG nova.compute.manager [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.360 2 DEBUG nova.virt.libvirt.driver [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.360 2 INFO nova.virt.libvirt.driver [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Creating image(s)
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.361 2 DEBUG oslo_concurrency.lockutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Acquiring lock "/var/lib/nova/instances/ae6bf863-8cca-48ab-a98f-065f8382fa99/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.361 2 DEBUG oslo_concurrency.lockutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lock "/var/lib/nova/instances/ae6bf863-8cca-48ab-a98f-065f8382fa99/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.361 2 DEBUG oslo_concurrency.lockutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lock "/var/lib/nova/instances/ae6bf863-8cca-48ab-a98f-065f8382fa99/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.373 2 DEBUG oslo_concurrency.processutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.398 2 DEBUG nova.policy [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.447 2 DEBUG oslo_concurrency.processutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.074s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.448 2 DEBUG oslo_concurrency.lockutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.448 2 DEBUG oslo_concurrency.lockutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.464 2 DEBUG oslo_concurrency.processutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.516 2 DEBUG oslo_concurrency.processutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.052s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.517 2 DEBUG oslo_concurrency.processutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/ae6bf863-8cca-48ab-a98f-065f8382fa99/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.552 2 DEBUG oslo_concurrency.processutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/ae6bf863-8cca-48ab-a98f-065f8382fa99/disk 1073741824" returned: 0 in 0.035s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.554 2 DEBUG oslo_concurrency.lockutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.105s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.554 2 DEBUG oslo_concurrency.processutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.618 2 DEBUG oslo_concurrency.processutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.063s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.618 2 DEBUG nova.virt.disk.api [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Checking if we can resize image /var/lib/nova/instances/ae6bf863-8cca-48ab-a98f-065f8382fa99/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.619 2 DEBUG oslo_concurrency.processutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ae6bf863-8cca-48ab-a98f-065f8382fa99/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.678 2 DEBUG oslo_concurrency.processutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ae6bf863-8cca-48ab-a98f-065f8382fa99/disk --force-share --output=json" returned: 0 in 0.059s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.679 2 DEBUG nova.virt.disk.api [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Cannot resize image /var/lib/nova/instances/ae6bf863-8cca-48ab-a98f-065f8382fa99/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.680 2 DEBUG nova.objects.instance [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lazy-loading 'migration_context' on Instance uuid ae6bf863-8cca-48ab-a98f-065f8382fa99 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.699 2 DEBUG nova.virt.libvirt.driver [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.700 2 DEBUG nova.virt.libvirt.driver [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Ensure instance console log exists: /var/lib/nova/instances/ae6bf863-8cca-48ab-a98f-065f8382fa99/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.700 2 DEBUG oslo_concurrency.lockutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.700 2 DEBUG oslo_concurrency.lockutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:22:39 compute-0 nova_compute[192079]: 2025-10-02 12:22:39.701 2 DEBUG oslo_concurrency.lockutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:22:40 compute-0 nova_compute[192079]: 2025-10-02 12:22:40.464 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:40 compute-0 nova_compute[192079]: 2025-10-02 12:22:40.775 2 DEBUG nova.network.neutron [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Successfully created port: f1306fa9-9429-43db-a3f4-48a2399611d7 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:22:41 compute-0 nova_compute[192079]: 2025-10-02 12:22:41.817 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:43 compute-0 podman[237254]: 2025-10-02 12:22:43.160427947 +0000 UTC m=+0.066902115 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:22:43 compute-0 nova_compute[192079]: 2025-10-02 12:22:43.160 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:43 compute-0 podman[237255]: 2025-10-02 12:22:43.161927199 +0000 UTC m=+0.071966924 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, container_name=iscsid, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_managed=true, config_id=iscsid, managed_by=edpm_ansible, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:22:43 compute-0 nova_compute[192079]: 2025-10-02 12:22:43.175 2 DEBUG nova.network.neutron [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Successfully updated port: f1306fa9-9429-43db-a3f4-48a2399611d7 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:22:43 compute-0 nova_compute[192079]: 2025-10-02 12:22:43.192 2 DEBUG oslo_concurrency.lockutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Acquiring lock "refresh_cache-ae6bf863-8cca-48ab-a98f-065f8382fa99" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:22:43 compute-0 nova_compute[192079]: 2025-10-02 12:22:43.192 2 DEBUG oslo_concurrency.lockutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Acquired lock "refresh_cache-ae6bf863-8cca-48ab-a98f-065f8382fa99" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:22:43 compute-0 nova_compute[192079]: 2025-10-02 12:22:43.192 2 DEBUG nova.network.neutron [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:22:43 compute-0 nova_compute[192079]: 2025-10-02 12:22:43.373 2 DEBUG nova.network.neutron [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.530 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.706 2 DEBUG nova.network.neutron [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Updating instance_info_cache with network_info: [{"id": "f1306fa9-9429-43db-a3f4-48a2399611d7", "address": "fa:16:3e:41:ec:88", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf1306fa9-94", "ovs_interfaceid": "f1306fa9-9429-43db-a3f4-48a2399611d7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.731 2 DEBUG oslo_concurrency.lockutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Releasing lock "refresh_cache-ae6bf863-8cca-48ab-a98f-065f8382fa99" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.731 2 DEBUG nova.compute.manager [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Instance network_info: |[{"id": "f1306fa9-9429-43db-a3f4-48a2399611d7", "address": "fa:16:3e:41:ec:88", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf1306fa9-94", "ovs_interfaceid": "f1306fa9-9429-43db-a3f4-48a2399611d7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.733 2 DEBUG nova.virt.libvirt.driver [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Start _get_guest_xml network_info=[{"id": "f1306fa9-9429-43db-a3f4-48a2399611d7", "address": "fa:16:3e:41:ec:88", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf1306fa9-94", "ovs_interfaceid": "f1306fa9-9429-43db-a3f4-48a2399611d7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.738 2 WARNING nova.virt.libvirt.driver [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.742 2 DEBUG nova.virt.libvirt.host [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.742 2 DEBUG nova.virt.libvirt.host [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.745 2 DEBUG nova.virt.libvirt.host [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.745 2 DEBUG nova.virt.libvirt.host [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.746 2 DEBUG nova.virt.libvirt.driver [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.747 2 DEBUG nova.virt.hardware [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.747 2 DEBUG nova.virt.hardware [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.747 2 DEBUG nova.virt.hardware [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.747 2 DEBUG nova.virt.hardware [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.748 2 DEBUG nova.virt.hardware [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.748 2 DEBUG nova.virt.hardware [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.748 2 DEBUG nova.virt.hardware [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.748 2 DEBUG nova.virt.hardware [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.749 2 DEBUG nova.virt.hardware [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.749 2 DEBUG nova.virt.hardware [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.749 2 DEBUG nova.virt.hardware [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.753 2 DEBUG nova.virt.libvirt.vif [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:22:37Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ServerActionsTestOtherB-server-1769053978',display_name='tempest-ServerActionsTestOtherB-server-1769053978',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serveractionstestotherb-server-1769053978',id=114,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBG+aqSe4de2VLtRAXN5xeLQn4S/3X8QrNMy2M5WdQ5hviVyEOgqK+m+uWmzPaUSUgE38sEdkytfwUHD32CBZajBt4q3OEf9i3yPJUQGuqp42pAUD+A3EoBIyeptNeSxGdA==',key_name='tempest-keypair-1900171990',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='ffce7d629aa24a7f970d93b2a79045f1',ramdisk_id='',reservation_id='r-flcxdim8',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServerActionsTestOtherB-263921372',owner_user_name='tempest-ServerActionsTestOtherB-263921372-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:22:39Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='0ea122e2fff94f2ba7c78bf30b04029c',uuid=ae6bf863-8cca-48ab-a98f-065f8382fa99,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "f1306fa9-9429-43db-a3f4-48a2399611d7", "address": "fa:16:3e:41:ec:88", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": 
"fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf1306fa9-94", "ovs_interfaceid": "f1306fa9-9429-43db-a3f4-48a2399611d7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.753 2 DEBUG nova.network.os_vif_util [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Converting VIF {"id": "f1306fa9-9429-43db-a3f4-48a2399611d7", "address": "fa:16:3e:41:ec:88", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf1306fa9-94", "ovs_interfaceid": "f1306fa9-9429-43db-a3f4-48a2399611d7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.754 2 DEBUG nova.network.os_vif_util [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:41:ec:88,bridge_name='br-int',has_traffic_filtering=True,id=f1306fa9-9429-43db-a3f4-48a2399611d7,network=Network(20eb29be-ee23-463b-85af-bfc2388e9f77),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf1306fa9-94') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.755 2 DEBUG nova.objects.instance [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lazy-loading 'pci_devices' on Instance uuid ae6bf863-8cca-48ab-a98f-065f8382fa99 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.766 2 DEBUG nova.virt.libvirt.driver [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:22:44 compute-0 nova_compute[192079]:   <uuid>ae6bf863-8cca-48ab-a98f-065f8382fa99</uuid>
Oct 02 12:22:44 compute-0 nova_compute[192079]:   <name>instance-00000072</name>
Oct 02 12:22:44 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:22:44 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:22:44 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:22:44 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:       <nova:name>tempest-ServerActionsTestOtherB-server-1769053978</nova:name>
Oct 02 12:22:44 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:22:44</nova:creationTime>
Oct 02 12:22:44 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:22:44 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:22:44 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:22:44 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:22:44 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:22:44 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:22:44 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:22:44 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:22:44 compute-0 nova_compute[192079]:         <nova:user uuid="0ea122e2fff94f2ba7c78bf30b04029c">tempest-ServerActionsTestOtherB-263921372-project-member</nova:user>
Oct 02 12:22:44 compute-0 nova_compute[192079]:         <nova:project uuid="ffce7d629aa24a7f970d93b2a79045f1">tempest-ServerActionsTestOtherB-263921372</nova:project>
Oct 02 12:22:44 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:22:44 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:22:44 compute-0 nova_compute[192079]:         <nova:port uuid="f1306fa9-9429-43db-a3f4-48a2399611d7">
Oct 02 12:22:44 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.8" ipVersion="4"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:22:44 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:22:44 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:22:44 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <system>
Oct 02 12:22:44 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:22:44 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:22:44 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:22:44 compute-0 nova_compute[192079]:       <entry name="serial">ae6bf863-8cca-48ab-a98f-065f8382fa99</entry>
Oct 02 12:22:44 compute-0 nova_compute[192079]:       <entry name="uuid">ae6bf863-8cca-48ab-a98f-065f8382fa99</entry>
Oct 02 12:22:44 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     </system>
Oct 02 12:22:44 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:22:44 compute-0 nova_compute[192079]:   <os>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:   </os>
Oct 02 12:22:44 compute-0 nova_compute[192079]:   <features>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:   </features>
Oct 02 12:22:44 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:22:44 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:22:44 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:22:44 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/ae6bf863-8cca-48ab-a98f-065f8382fa99/disk"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:22:44 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/ae6bf863-8cca-48ab-a98f-065f8382fa99/disk.config"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:22:44 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:41:ec:88"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:       <target dev="tapf1306fa9-94"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:22:44 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/ae6bf863-8cca-48ab-a98f-065f8382fa99/console.log" append="off"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <video>
Oct 02 12:22:44 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     </video>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:22:44 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:22:44 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:22:44 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:22:44 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:22:44 compute-0 nova_compute[192079]: </domain>
Oct 02 12:22:44 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.767 2 DEBUG nova.compute.manager [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Preparing to wait for external event network-vif-plugged-f1306fa9-9429-43db-a3f4-48a2399611d7 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.767 2 DEBUG oslo_concurrency.lockutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Acquiring lock "ae6bf863-8cca-48ab-a98f-065f8382fa99-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.768 2 DEBUG oslo_concurrency.lockutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lock "ae6bf863-8cca-48ab-a98f-065f8382fa99-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.768 2 DEBUG oslo_concurrency.lockutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lock "ae6bf863-8cca-48ab-a98f-065f8382fa99-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.769 2 DEBUG nova.virt.libvirt.vif [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:22:37Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ServerActionsTestOtherB-server-1769053978',display_name='tempest-ServerActionsTestOtherB-server-1769053978',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serveractionstestotherb-server-1769053978',id=114,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBG+aqSe4de2VLtRAXN5xeLQn4S/3X8QrNMy2M5WdQ5hviVyEOgqK+m+uWmzPaUSUgE38sEdkytfwUHD32CBZajBt4q3OEf9i3yPJUQGuqp42pAUD+A3EoBIyeptNeSxGdA==',key_name='tempest-keypair-1900171990',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='ffce7d629aa24a7f970d93b2a79045f1',ramdisk_id='',reservation_id='r-flcxdim8',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServerActionsTestOtherB-263921372',owner_user_name='tempest-ServerActionsTestOtherB-263921372-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:22:39Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='0ea122e2fff94f2ba7c78bf30b04029c',uuid=ae6bf863-8cca-48ab-a98f-065f8382fa99,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "f1306fa9-9429-43db-a3f4-48a2399611d7", "address": "fa:16:3e:41:ec:88", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf1306fa9-94", "ovs_interfaceid": "f1306fa9-9429-43db-a3f4-48a2399611d7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.769 2 DEBUG nova.network.os_vif_util [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Converting VIF {"id": "f1306fa9-9429-43db-a3f4-48a2399611d7", "address": "fa:16:3e:41:ec:88", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf1306fa9-94", "ovs_interfaceid": "f1306fa9-9429-43db-a3f4-48a2399611d7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.770 2 DEBUG nova.network.os_vif_util [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:41:ec:88,bridge_name='br-int',has_traffic_filtering=True,id=f1306fa9-9429-43db-a3f4-48a2399611d7,network=Network(20eb29be-ee23-463b-85af-bfc2388e9f77),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf1306fa9-94') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.770 2 DEBUG os_vif [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:41:ec:88,bridge_name='br-int',has_traffic_filtering=True,id=f1306fa9-9429-43db-a3f4-48a2399611d7,network=Network(20eb29be-ee23-463b-85af-bfc2388e9f77),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf1306fa9-94') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.771 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.771 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.771 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.774 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.774 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapf1306fa9-94, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.775 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapf1306fa9-94, col_values=(('external_ids', {'iface-id': 'f1306fa9-9429-43db-a3f4-48a2399611d7', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:41:ec:88', 'vm-uuid': 'ae6bf863-8cca-48ab-a98f-065f8382fa99'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.776 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:44 compute-0 NetworkManager[51160]: <info>  [1759407764.7777] manager: (tapf1306fa9-94): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/190)
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.779 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.784 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.785 2 INFO os_vif [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:41:ec:88,bridge_name='br-int',has_traffic_filtering=True,id=f1306fa9-9429-43db-a3f4-48a2399611d7,network=Network(20eb29be-ee23-463b-85af-bfc2388e9f77),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf1306fa9-94')
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.834 2 DEBUG nova.virt.libvirt.driver [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.834 2 DEBUG nova.virt.libvirt.driver [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.835 2 DEBUG nova.virt.libvirt.driver [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] No VIF found with MAC fa:16:3e:41:ec:88, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.835 2 INFO nova.virt.libvirt.driver [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Using config drive
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.954 2 DEBUG nova.compute.manager [req-98e93fa3-190e-4e91-b33f-60b2362c9dae req-42d65517-79d7-4042-83b0-f2d602d69f47 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Received event network-changed-f1306fa9-9429-43db-a3f4-48a2399611d7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.955 2 DEBUG nova.compute.manager [req-98e93fa3-190e-4e91-b33f-60b2362c9dae req-42d65517-79d7-4042-83b0-f2d602d69f47 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Refreshing instance network info cache due to event network-changed-f1306fa9-9429-43db-a3f4-48a2399611d7. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.955 2 DEBUG oslo_concurrency.lockutils [req-98e93fa3-190e-4e91-b33f-60b2362c9dae req-42d65517-79d7-4042-83b0-f2d602d69f47 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-ae6bf863-8cca-48ab-a98f-065f8382fa99" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.955 2 DEBUG oslo_concurrency.lockutils [req-98e93fa3-190e-4e91-b33f-60b2362c9dae req-42d65517-79d7-4042-83b0-f2d602d69f47 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-ae6bf863-8cca-48ab-a98f-065f8382fa99" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:22:44 compute-0 nova_compute[192079]: 2025-10-02 12:22:44.955 2 DEBUG nova.network.neutron [req-98e93fa3-190e-4e91-b33f-60b2362c9dae req-42d65517-79d7-4042-83b0-f2d602d69f47 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Refreshing network info cache for port f1306fa9-9429-43db-a3f4-48a2399611d7 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:22:45 compute-0 nova_compute[192079]: 2025-10-02 12:22:45.210 2 INFO nova.virt.libvirt.driver [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Creating config drive at /var/lib/nova/instances/ae6bf863-8cca-48ab-a98f-065f8382fa99/disk.config
Oct 02 12:22:45 compute-0 nova_compute[192079]: 2025-10-02 12:22:45.216 2 DEBUG oslo_concurrency.processutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/ae6bf863-8cca-48ab-a98f-065f8382fa99/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp42nv_gjj execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:22:45 compute-0 nova_compute[192079]: 2025-10-02 12:22:45.340 2 DEBUG oslo_concurrency.processutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/ae6bf863-8cca-48ab-a98f-065f8382fa99/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp42nv_gjj" returned: 0 in 0.124s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:22:45 compute-0 kernel: tapf1306fa9-94: entered promiscuous mode
Oct 02 12:22:45 compute-0 NetworkManager[51160]: <info>  [1759407765.4074] manager: (tapf1306fa9-94): new Tun device (/org/freedesktop/NetworkManager/Devices/191)
Oct 02 12:22:45 compute-0 ovn_controller[94336]: 2025-10-02T12:22:45Z|00370|binding|INFO|Claiming lport f1306fa9-9429-43db-a3f4-48a2399611d7 for this chassis.
Oct 02 12:22:45 compute-0 ovn_controller[94336]: 2025-10-02T12:22:45Z|00371|binding|INFO|f1306fa9-9429-43db-a3f4-48a2399611d7: Claiming fa:16:3e:41:ec:88 10.100.0.8
Oct 02 12:22:45 compute-0 nova_compute[192079]: 2025-10-02 12:22:45.408 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:45.415 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:41:ec:88 10.100.0.8'], port_security=['fa:16:3e:41:ec:88 10.100.0.8'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.8/28', 'neutron:device_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-20eb29be-ee23-463b-85af-bfc2388e9f77', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'neutron:revision_number': '2', 'neutron:security_group_ids': '12e9168a-be86-462f-a658-971f38e3430f', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=e183e2c6-21dc-48e3-ae47-279bc8b32eeb, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=5, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=f1306fa9-9429-43db-a3f4-48a2399611d7) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:45.418 103294 INFO neutron.agent.ovn.metadata.agent [-] Port f1306fa9-9429-43db-a3f4-48a2399611d7 in datapath 20eb29be-ee23-463b-85af-bfc2388e9f77 bound to our chassis
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:45.420 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 20eb29be-ee23-463b-85af-bfc2388e9f77
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:45.434 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f54358a3-365e-460e-bc69-37481a7e9f6d]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:45.435 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap20eb29be-e1 in ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:22:45 compute-0 ovn_controller[94336]: 2025-10-02T12:22:45Z|00372|binding|INFO|Setting lport f1306fa9-9429-43db-a3f4-48a2399611d7 ovn-installed in OVS
Oct 02 12:22:45 compute-0 ovn_controller[94336]: 2025-10-02T12:22:45Z|00373|binding|INFO|Setting lport f1306fa9-9429-43db-a3f4-48a2399611d7 up in Southbound
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:45.438 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap20eb29be-e0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:45.438 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2deecda4-1c1b-4fb7-bba0-d2dc15831d15]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:45 compute-0 nova_compute[192079]: 2025-10-02 12:22:45.437 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:45.439 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[fb83a140-1c4b-4e05-a33f-8382481e2420]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:45 compute-0 nova_compute[192079]: 2025-10-02 12:22:45.442 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:45.453 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[af9584df-127b-4813-ad53-6ad029780506]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:45 compute-0 systemd-machined[152150]: New machine qemu-51-instance-00000072.
Oct 02 12:22:45 compute-0 systemd-udevd[237318]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:22:45 compute-0 systemd[1]: Started Virtual Machine qemu-51-instance-00000072.
Oct 02 12:22:45 compute-0 NetworkManager[51160]: <info>  [1759407765.4789] device (tapf1306fa9-94): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:22:45 compute-0 NetworkManager[51160]: <info>  [1759407765.4796] device (tapf1306fa9-94): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:45.476 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[18bc6435-3612-4465-a2db-9faef86b5287]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:45.515 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[168e5e6a-f61d-420d-9dde-36cdb2d92f8c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:45 compute-0 NetworkManager[51160]: <info>  [1759407765.5211] manager: (tap20eb29be-e0): new Veth device (/org/freedesktop/NetworkManager/Devices/192)
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:45.520 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[866fb51a-a7a2-4ece-8cdd-d7185cda646b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:45.553 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[644c9575-47ea-44ce-a489-53cddf0ee6db]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:45.557 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[7317e626-21c3-43f2-89e4-7ff1e397a43b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:45 compute-0 NetworkManager[51160]: <info>  [1759407765.5764] device (tap20eb29be-e0): carrier: link connected
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:45.581 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[4d2c2de2-8e1d-48b8-918f-dc72b66a287d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:45.600 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d6b36882-80c8-44ac-8a29-d19cb9df1b8d]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap20eb29be-e1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:77:55:96'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 122], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 576320, 'reachable_time': 22862, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 237348, 'error': None, 'target': 'ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:45.613 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[50759e2d-7c8b-4e50-b003-bf1d5538d24c]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe77:5596'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 576320, 'tstamp': 576320}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 237349, 'error': None, 'target': 'ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:45.628 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[652019bd-65d3-41e4-9077-6e0ea2cd77da]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap20eb29be-e1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:77:55:96'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 122], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 576320, 'reachable_time': 22862, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 237350, 'error': None, 'target': 'ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:45.653 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a47d4b81-8b72-472f-9db4-8f1a982db07c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:45.703 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5e167bcc-854b-471a-b4f9-4973f38a1614]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:45.705 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap20eb29be-e0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:45.705 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:45.705 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap20eb29be-e0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:22:45 compute-0 NetworkManager[51160]: <info>  [1759407765.7185] manager: (tap20eb29be-e0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/193)
Oct 02 12:22:45 compute-0 nova_compute[192079]: 2025-10-02 12:22:45.717 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:45 compute-0 kernel: tap20eb29be-e0: entered promiscuous mode
Oct 02 12:22:45 compute-0 nova_compute[192079]: 2025-10-02 12:22:45.721 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:45.723 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap20eb29be-e0, col_values=(('external_ids', {'iface-id': 'e533861f-45cb-4843-b071-0b628ca25128'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:22:45 compute-0 ovn_controller[94336]: 2025-10-02T12:22:45Z|00374|binding|INFO|Releasing lport e533861f-45cb-4843-b071-0b628ca25128 from this chassis (sb_readonly=0)
Oct 02 12:22:45 compute-0 nova_compute[192079]: 2025-10-02 12:22:45.724 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:45 compute-0 nova_compute[192079]: 2025-10-02 12:22:45.725 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:45.727 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/20eb29be-ee23-463b-85af-bfc2388e9f77.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/20eb29be-ee23-463b-85af-bfc2388e9f77.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:45.728 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[81fb047c-4a39-45a1-9759-1199550b8e58]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:45.728 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-20eb29be-ee23-463b-85af-bfc2388e9f77
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/20eb29be-ee23-463b-85af-bfc2388e9f77.pid.haproxy
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 20eb29be-ee23-463b-85af-bfc2388e9f77
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:22:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:22:45.729 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77', 'env', 'PROCESS_TAG=haproxy-20eb29be-ee23-463b-85af-bfc2388e9f77', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/20eb29be-ee23-463b-85af-bfc2388e9f77.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:22:45 compute-0 nova_compute[192079]: 2025-10-02 12:22:45.738 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:45 compute-0 nova_compute[192079]: 2025-10-02 12:22:45.800 2 DEBUG nova.compute.manager [req-417f0839-a29a-43c8-9b51-6e0d9c8900e5 req-b749c589-d917-4158-8d64-1271ca548589 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Received event network-vif-plugged-f1306fa9-9429-43db-a3f4-48a2399611d7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:22:45 compute-0 nova_compute[192079]: 2025-10-02 12:22:45.802 2 DEBUG oslo_concurrency.lockutils [req-417f0839-a29a-43c8-9b51-6e0d9c8900e5 req-b749c589-d917-4158-8d64-1271ca548589 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "ae6bf863-8cca-48ab-a98f-065f8382fa99-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:22:45 compute-0 nova_compute[192079]: 2025-10-02 12:22:45.802 2 DEBUG oslo_concurrency.lockutils [req-417f0839-a29a-43c8-9b51-6e0d9c8900e5 req-b749c589-d917-4158-8d64-1271ca548589 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ae6bf863-8cca-48ab-a98f-065f8382fa99-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:22:45 compute-0 nova_compute[192079]: 2025-10-02 12:22:45.803 2 DEBUG oslo_concurrency.lockutils [req-417f0839-a29a-43c8-9b51-6e0d9c8900e5 req-b749c589-d917-4158-8d64-1271ca548589 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ae6bf863-8cca-48ab-a98f-065f8382fa99-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:22:45 compute-0 nova_compute[192079]: 2025-10-02 12:22:45.803 2 DEBUG nova.compute.manager [req-417f0839-a29a-43c8-9b51-6e0d9c8900e5 req-b749c589-d917-4158-8d64-1271ca548589 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Processing event network-vif-plugged-f1306fa9-9429-43db-a3f4-48a2399611d7 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:22:46 compute-0 podman[237388]: 2025-10-02 12:22:46.060574952 +0000 UTC m=+0.047978079 container create ca3817d11e0e246b53557f9e569be0d1309be36876f6b30348e1c21110486e12 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2)
Oct 02 12:22:46 compute-0 systemd[1]: Started libpod-conmon-ca3817d11e0e246b53557f9e569be0d1309be36876f6b30348e1c21110486e12.scope.
Oct 02 12:22:46 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:22:46 compute-0 podman[237388]: 2025-10-02 12:22:46.033450873 +0000 UTC m=+0.020854020 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:22:46 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/66ee260b173b294694994146d44ca567983b816b29f68e5c0542cb20eafec7d3/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.140 2 DEBUG nova.compute.manager [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:22:46 compute-0 podman[237388]: 2025-10-02 12:22:46.141074136 +0000 UTC m=+0.128477263 container init ca3817d11e0e246b53557f9e569be0d1309be36876f6b30348e1c21110486e12 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true)
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.141 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407766.139828, ae6bf863-8cca-48ab-a98f-065f8382fa99 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.141 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] VM Started (Lifecycle Event)
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.144 2 DEBUG nova.virt.libvirt.driver [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:22:46 compute-0 podman[237388]: 2025-10-02 12:22:46.147166062 +0000 UTC m=+0.134569189 container start ca3817d11e0e246b53557f9e569be0d1309be36876f6b30348e1c21110486e12 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001)
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.148 2 INFO nova.virt.libvirt.driver [-] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Instance spawned successfully.
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.149 2 DEBUG nova.virt.libvirt.driver [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:22:46 compute-0 neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77[237403]: [NOTICE]   (237407) : New worker (237409) forked
Oct 02 12:22:46 compute-0 neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77[237403]: [NOTICE]   (237407) : Loading success.
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.175 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.180 2 DEBUG nova.virt.libvirt.driver [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.180 2 DEBUG nova.virt.libvirt.driver [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.181 2 DEBUG nova.virt.libvirt.driver [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.181 2 DEBUG nova.virt.libvirt.driver [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.182 2 DEBUG nova.virt.libvirt.driver [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.182 2 DEBUG nova.virt.libvirt.driver [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.186 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.230 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.230 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407766.1401324, ae6bf863-8cca-48ab-a98f-065f8382fa99 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.231 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] VM Paused (Lifecycle Event)
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.252 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.255 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407766.1437464, ae6bf863-8cca-48ab-a98f-065f8382fa99 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.256 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] VM Resumed (Lifecycle Event)
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.264 2 INFO nova.compute.manager [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Took 6.90 seconds to spawn the instance on the hypervisor.
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.264 2 DEBUG nova.compute.manager [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.275 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.278 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.311 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.351 2 INFO nova.compute.manager [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Took 7.36 seconds to build instance.
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.369 2 DEBUG oslo_concurrency.lockutils [None req-08c5a693-f9b2-4495-9a95-53b922308273 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lock "ae6bf863-8cca-48ab-a98f-065f8382fa99" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 7.536s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.701 2 DEBUG nova.network.neutron [req-98e93fa3-190e-4e91-b33f-60b2362c9dae req-42d65517-79d7-4042-83b0-f2d602d69f47 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Updated VIF entry in instance network info cache for port f1306fa9-9429-43db-a3f4-48a2399611d7. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.702 2 DEBUG nova.network.neutron [req-98e93fa3-190e-4e91-b33f-60b2362c9dae req-42d65517-79d7-4042-83b0-f2d602d69f47 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Updating instance_info_cache with network_info: [{"id": "f1306fa9-9429-43db-a3f4-48a2399611d7", "address": "fa:16:3e:41:ec:88", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf1306fa9-94", "ovs_interfaceid": "f1306fa9-9429-43db-a3f4-48a2399611d7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.719 2 DEBUG oslo_concurrency.lockutils [req-98e93fa3-190e-4e91-b33f-60b2362c9dae req-42d65517-79d7-4042-83b0-f2d602d69f47 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-ae6bf863-8cca-48ab-a98f-065f8382fa99" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:22:46 compute-0 nova_compute[192079]: 2025-10-02 12:22:46.864 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:47 compute-0 ovn_controller[94336]: 2025-10-02T12:22:47Z|00375|binding|INFO|Releasing lport e533861f-45cb-4843-b071-0b628ca25128 from this chassis (sb_readonly=0)
Oct 02 12:22:47 compute-0 nova_compute[192079]: 2025-10-02 12:22:47.416 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:47 compute-0 ovn_controller[94336]: 2025-10-02T12:22:47Z|00376|binding|INFO|Releasing lport e533861f-45cb-4843-b071-0b628ca25128 from this chassis (sb_readonly=0)
Oct 02 12:22:47 compute-0 nova_compute[192079]: 2025-10-02 12:22:47.618 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:47 compute-0 nova_compute[192079]: 2025-10-02 12:22:47.935 2 DEBUG nova.compute.manager [req-4b490ab9-21ed-448a-9358-8f76e003c0bd req-20844fb4-7ff9-482c-8661-25ff1f154357 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Received event network-vif-plugged-f1306fa9-9429-43db-a3f4-48a2399611d7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:22:47 compute-0 nova_compute[192079]: 2025-10-02 12:22:47.936 2 DEBUG oslo_concurrency.lockutils [req-4b490ab9-21ed-448a-9358-8f76e003c0bd req-20844fb4-7ff9-482c-8661-25ff1f154357 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "ae6bf863-8cca-48ab-a98f-065f8382fa99-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:22:47 compute-0 nova_compute[192079]: 2025-10-02 12:22:47.936 2 DEBUG oslo_concurrency.lockutils [req-4b490ab9-21ed-448a-9358-8f76e003c0bd req-20844fb4-7ff9-482c-8661-25ff1f154357 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ae6bf863-8cca-48ab-a98f-065f8382fa99-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:22:47 compute-0 nova_compute[192079]: 2025-10-02 12:22:47.937 2 DEBUG oslo_concurrency.lockutils [req-4b490ab9-21ed-448a-9358-8f76e003c0bd req-20844fb4-7ff9-482c-8661-25ff1f154357 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ae6bf863-8cca-48ab-a98f-065f8382fa99-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:22:47 compute-0 nova_compute[192079]: 2025-10-02 12:22:47.937 2 DEBUG nova.compute.manager [req-4b490ab9-21ed-448a-9358-8f76e003c0bd req-20844fb4-7ff9-482c-8661-25ff1f154357 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] No waiting events found dispatching network-vif-plugged-f1306fa9-9429-43db-a3f4-48a2399611d7 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:22:47 compute-0 nova_compute[192079]: 2025-10-02 12:22:47.937 2 WARNING nova.compute.manager [req-4b490ab9-21ed-448a-9358-8f76e003c0bd req-20844fb4-7ff9-482c-8661-25ff1f154357 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Received unexpected event network-vif-plugged-f1306fa9-9429-43db-a3f4-48a2399611d7 for instance with vm_state active and task_state None.
Oct 02 12:22:48 compute-0 nova_compute[192079]: 2025-10-02 12:22:48.136 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759407753.1350253, ad2d69bb-3aa9-4c11-b9de-29996574cfa2 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:22:48 compute-0 nova_compute[192079]: 2025-10-02 12:22:48.136 2 INFO nova.compute.manager [-] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] VM Stopped (Lifecycle Event)
Oct 02 12:22:48 compute-0 nova_compute[192079]: 2025-10-02 12:22:48.156 2 DEBUG nova.compute.manager [None req-c8ed43fc-0a8a-42ab-ada0-4d2e8f91f3e2 - - - - - -] [instance: ad2d69bb-3aa9-4c11-b9de-29996574cfa2] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:22:48 compute-0 NetworkManager[51160]: <info>  [1759407768.7473] manager: (patch-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d-to-br-int): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/194)
Oct 02 12:22:48 compute-0 NetworkManager[51160]: <info>  [1759407768.7484] manager: (patch-br-int-to-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/195)
Oct 02 12:22:48 compute-0 nova_compute[192079]: 2025-10-02 12:22:48.749 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:48 compute-0 nova_compute[192079]: 2025-10-02 12:22:48.886 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:48 compute-0 ovn_controller[94336]: 2025-10-02T12:22:48Z|00377|binding|INFO|Releasing lport e533861f-45cb-4843-b071-0b628ca25128 from this chassis (sb_readonly=0)
Oct 02 12:22:48 compute-0 nova_compute[192079]: 2025-10-02 12:22:48.903 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:49 compute-0 nova_compute[192079]: 2025-10-02 12:22:49.777 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:50 compute-0 nova_compute[192079]: 2025-10-02 12:22:50.022 2 DEBUG nova.compute.manager [req-90ba48ab-6099-4719-a882-d9b8c181d8c3 req-58443527-0754-4f2b-a46e-82a255576525 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Received event network-changed-f1306fa9-9429-43db-a3f4-48a2399611d7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:22:50 compute-0 nova_compute[192079]: 2025-10-02 12:22:50.023 2 DEBUG nova.compute.manager [req-90ba48ab-6099-4719-a882-d9b8c181d8c3 req-58443527-0754-4f2b-a46e-82a255576525 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Refreshing instance network info cache due to event network-changed-f1306fa9-9429-43db-a3f4-48a2399611d7. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:22:50 compute-0 nova_compute[192079]: 2025-10-02 12:22:50.023 2 DEBUG oslo_concurrency.lockutils [req-90ba48ab-6099-4719-a882-d9b8c181d8c3 req-58443527-0754-4f2b-a46e-82a255576525 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-ae6bf863-8cca-48ab-a98f-065f8382fa99" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:22:50 compute-0 nova_compute[192079]: 2025-10-02 12:22:50.024 2 DEBUG oslo_concurrency.lockutils [req-90ba48ab-6099-4719-a882-d9b8c181d8c3 req-58443527-0754-4f2b-a46e-82a255576525 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-ae6bf863-8cca-48ab-a98f-065f8382fa99" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:22:50 compute-0 nova_compute[192079]: 2025-10-02 12:22:50.024 2 DEBUG nova.network.neutron [req-90ba48ab-6099-4719-a882-d9b8c181d8c3 req-58443527-0754-4f2b-a46e-82a255576525 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Refreshing network info cache for port f1306fa9-9429-43db-a3f4-48a2399611d7 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:22:51 compute-0 nova_compute[192079]: 2025-10-02 12:22:51.621 2 DEBUG nova.network.neutron [req-90ba48ab-6099-4719-a882-d9b8c181d8c3 req-58443527-0754-4f2b-a46e-82a255576525 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Updated VIF entry in instance network info cache for port f1306fa9-9429-43db-a3f4-48a2399611d7. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:22:51 compute-0 nova_compute[192079]: 2025-10-02 12:22:51.623 2 DEBUG nova.network.neutron [req-90ba48ab-6099-4719-a882-d9b8c181d8c3 req-58443527-0754-4f2b-a46e-82a255576525 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Updating instance_info_cache with network_info: [{"id": "f1306fa9-9429-43db-a3f4-48a2399611d7", "address": "fa:16:3e:41:ec:88", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf1306fa9-94", "ovs_interfaceid": "f1306fa9-9429-43db-a3f4-48a2399611d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:22:51 compute-0 nova_compute[192079]: 2025-10-02 12:22:51.640 2 DEBUG oslo_concurrency.lockutils [req-90ba48ab-6099-4719-a882-d9b8c181d8c3 req-58443527-0754-4f2b-a46e-82a255576525 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-ae6bf863-8cca-48ab-a98f-065f8382fa99" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:22:51 compute-0 nova_compute[192079]: 2025-10-02 12:22:51.865 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:52 compute-0 podman[237420]: 2025-10-02 12:22:52.175773115 +0000 UTC m=+0.073871055 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_managed=true, config_id=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS)
Oct 02 12:22:52 compute-0 podman[237422]: 2025-10-02 12:22:52.206829041 +0000 UTC m=+0.096963023 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']})
Oct 02 12:22:52 compute-0 podman[237421]: 2025-10-02 12:22:52.220783722 +0000 UTC m=+0.112550269 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, config_id=ovn_controller, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_managed=true, container_name=ovn_controller, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, managed_by=edpm_ansible)
Oct 02 12:22:54 compute-0 nova_compute[192079]: 2025-10-02 12:22:54.818 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:55 compute-0 nova_compute[192079]: 2025-10-02 12:22:55.304 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:56 compute-0 nova_compute[192079]: 2025-10-02 12:22:56.867 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:22:58 compute-0 ovn_controller[94336]: 2025-10-02T12:22:58Z|00037|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:41:ec:88 10.100.0.8
Oct 02 12:22:58 compute-0 ovn_controller[94336]: 2025-10-02T12:22:58Z|00038|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:41:ec:88 10.100.0.8
Oct 02 12:22:59 compute-0 nova_compute[192079]: 2025-10-02 12:22:59.820 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:00 compute-0 podman[237507]: 2025-10-02 12:23:00.151797027 +0000 UTC m=+0.062384611 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=ceilometer_agent_compute, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, managed_by=edpm_ansible, 
org.label-schema.license=GPLv2, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, tcib_managed=true)
Oct 02 12:23:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:23:01.174 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=27, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=26) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:23:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:23:01.176 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 4 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:23:01 compute-0 nova_compute[192079]: 2025-10-02 12:23:01.228 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:01 compute-0 nova_compute[192079]: 2025-10-02 12:23:01.871 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:23:02.222 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:23:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:23:02.223 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:23:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:23:02.224 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:23:04 compute-0 nova_compute[192079]: 2025-10-02 12:23:04.821 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:05 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:23:05.178 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '27'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:23:06 compute-0 nova_compute[192079]: 2025-10-02 12:23:06.873 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:07 compute-0 podman[237527]: 2025-10-02 12:23:07.19022029 +0000 UTC m=+0.081413970 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, url=https://catalog.redhat.com/en/search?searchType=containers, version=9.6, io.openshift.tags=minimal rhel9, vendor=Red Hat, Inc., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, distribution-scope=public, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, build-date=2025-08-20T13:12:41, io.buildah.version=1.33.7, maintainer=Red Hat, Inc., container_name=openstack_network_exporter, io.openshift.expose-services=, architecture=x86_64, config_id=edpm, release=1755695350, name=ubi9-minimal, managed_by=edpm_ansible, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, com.redhat.component=ubi9-minimal-container, io.k8s.description=The Universal Base 
Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vcs-type=git, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly.)
Oct 02 12:23:07 compute-0 podman[237528]: 2025-10-02 12:23:07.197658303 +0000 UTC m=+0.086214811 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=multipathd, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001)
Oct 02 12:23:08 compute-0 nova_compute[192079]: 2025-10-02 12:23:08.990 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:09 compute-0 nova_compute[192079]: 2025-10-02 12:23:09.872 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:11 compute-0 nova_compute[192079]: 2025-10-02 12:23:11.874 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:14 compute-0 podman[237565]: 2025-10-02 12:23:14.156933749 +0000 UTC m=+0.065023694 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:23:14 compute-0 podman[237566]: 2025-10-02 12:23:14.163876538 +0000 UTC m=+0.061427626 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, config_id=iscsid, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:23:14 compute-0 nova_compute[192079]: 2025-10-02 12:23:14.925 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:16 compute-0 nova_compute[192079]: 2025-10-02 12:23:16.876 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.106 12 DEBUG ceilometer.compute.discovery [-] instance data: {'id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'os_type': 'hvm', 'architecture': 'x86_64', 'OS-EXT-SRV-ATTR:instance_name': 'instance-00000072', 'OS-EXT-SRV-ATTR:host': 'compute-0.ctlplane.example.com', 'OS-EXT-STS:vm_state': 'running', 'tenant_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'hostId': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'status': 'active', 'metadata': {}} discover_libvirt_polling /usr/lib/python3.9/site-packages/ceilometer/compute/discovery.py:228
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.106 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets.drop in the context of pollsters
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.109 12 DEBUG ceilometer.compute.virt.libvirt.inspector [-] No delta meter predecessor for ae6bf863-8cca-48ab-a98f-065f8382fa99 / tapf1306fa9-94 inspect_vnics /usr/lib/python3.9/site-packages/ceilometer/compute/virt/libvirt/inspector.py:136
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.110 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/network.incoming.packets.drop volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'fbcfad12-8ca4-421d-8a78-a23daee05470', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets.drop', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': None, 'resource_id': 'instance-00000072-ae6bf863-8cca-48ab-a98f-065f8382fa99-tapf1306fa9-94', 'timestamp': '2025-10-02T12:23:17.106868', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'tapf1306fa9-94', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:41:ec:88', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapf1306fa9-94'}, 'message_id': '931f848e-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.793931297, 'message_signature': '9697ff495bc832a72f17a416add528bfddfcbb1b01634117ff0072dd3b990448'}]}, 'timestamp': '2025-10-02 12:23:17.110742', '_unique_id': '0d73451eb0224ea9b68bf528d427d06a'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.112 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.113 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets in the context of pollsters
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.113 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/network.incoming.packets volume: 28 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'a71ae48e-1094-43ba-b781-a837345dfc3e', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 28, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': None, 'resource_id': 'instance-00000072-ae6bf863-8cca-48ab-a98f-065f8382fa99-tapf1306fa9-94', 'timestamp': '2025-10-02T12:23:17.113730', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'tapf1306fa9-94', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:41:ec:88', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapf1306fa9-94'}, 'message_id': '93200b84-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.793931297, 'message_signature': '555414c468d51a1d68589a911b89bd718858c973cc00f0fe1b5c262b15c874bb'}]}, 'timestamp': '2025-10-02 12:23:17.114139', '_unique_id': 'f6a924d35ec648cdadf9073a356efc82'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.114 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.115 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets.error in the context of pollsters
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.115 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/network.incoming.packets.error volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '013f0444-59f1-49ef-83d1-3ec3b3c45caf', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets.error', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': None, 'resource_id': 'instance-00000072-ae6bf863-8cca-48ab-a98f-065f8382fa99-tapf1306fa9-94', 'timestamp': '2025-10-02T12:23:17.115943', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'tapf1306fa9-94', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:41:ec:88', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapf1306fa9-94'}, 'message_id': '93206174-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.793931297, 'message_signature': '541910756cf05bb33ed382a9920419887e53591ea150cbab188cd2b140cc0daf'}]}, 'timestamp': '2025-10-02 12:23:17.116311', '_unique_id': 'aeef7f4cfc9747f394e9d6023f9ee387'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.117 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.latency in the context of pollsters
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.137 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/disk.device.write.latency volume: 1465872837 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.138 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/disk.device.write.latency volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '914b60e1-b50f-45bf-83d7-3a19a7a91f9a', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 1465872837, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': None, 'resource_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99-vda', 'timestamp': '2025-10-02T12:23:17.117883', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'instance-00000072', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '9323b8d8-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.805022159, 'message_signature': 'c96eeda14b985f470180137632c40be8728410319fbd40df3eb9685f192fa1e6'}, {'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 0, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': None, 
'resource_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99-sda', 'timestamp': '2025-10-02T12:23:17.117883', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'instance-00000072', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '9323c5da-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.805022159, 'message_signature': 'd6e0c012711f1988672ee448852f334d41278ac29d9bdc8d856d3b9cc2786d10'}]}, 'timestamp': '2025-10-02 12:23:17.138826', '_unique_id': 'ab7c656ba761438d85b59638e9bbe8ca'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.141 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.143 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes in the context of pollsters
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.144 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/network.incoming.bytes volume: 4495 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'f22f30d7-eb6a-4686-bd83-136beb575fb4', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 4495, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': None, 'resource_id': 'instance-00000072-ae6bf863-8cca-48ab-a98f-065f8382fa99-tapf1306fa9-94', 'timestamp': '2025-10-02T12:23:17.144010', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'tapf1306fa9-94', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:41:ec:88', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapf1306fa9-94'}, 'message_id': '9324b594-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.793931297, 'message_signature': '1cb0f6b5b12e35281d9a83e53db667cddc8024e315fb7399635a2888b4215546'}]}, 'timestamp': '2025-10-02 12:23:17.144906', '_unique_id': '56eb016521d84cc4af92e2ebd31a0734'}: kombu.exceptions.OperationalError: [Errno 111] Connection 
refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.146 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.148 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.latency in the context of pollsters
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.149 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/disk.device.read.latency volume: 601229545 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.149 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/disk.device.read.latency volume: 136387906 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'd12513cb-8621-4481-88a7-e54b3f2b6436', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 601229545, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': None, 'resource_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99-vda', 'timestamp': '2025-10-02T12:23:17.148930', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'instance-00000072', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '93257650-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.805022159, 'message_signature': 'c8d94a7e24932c35c417c31b8f801edbd252e70b53101d87a09be9b7caa0199a'}, {'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 136387906, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': 
None, 'resource_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99-sda', 'timestamp': '2025-10-02T12:23:17.148930', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'instance-00000072', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '9325a4c2-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.805022159, 'message_signature': '4f6de07c065852151dbfb17e623b3acd138f3d9eb144bcdadedf7829795db28a'}]}, 'timestamp': '2025-10-02 12:23:17.150836', '_unique_id': 'ac75530628d44d998e579f8fb69af332'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.153 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.156 12 INFO ceilometer.polling.manager [-] Polling pollster memory.usage in the context of pollsters
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.178 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/memory.usage volume: 42.69140625 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '83089060-648b-403d-86c2-384bbacb9d43', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'memory.usage', 'counter_type': 'gauge', 'counter_unit': 'MB', 'counter_volume': 42.69140625, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': None, 'resource_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'timestamp': '2025-10-02T12:23:17.156950', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'instance-00000072', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1}, 'message_id': '932a0418-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.865847768, 'message_signature': '9a1d02a1eba7c2a8fa4f6b8c27cff9d94f8dbe421246ae4b5f24198d3713b2d6'}]}, 'timestamp': '2025-10-02 12:23:17.179496', '_unique_id': '224231c372084ca790e66a4bc31c5db6'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.180 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.181 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes.delta in the context of pollsters
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.181 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/network.incoming.bytes.delta volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '40ba8d24-2bb9-4998-9e77-a93c7fab70bd', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.bytes.delta', 'counter_type': 'delta', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': None, 'resource_id': 'instance-00000072-ae6bf863-8cca-48ab-a98f-065f8382fa99-tapf1306fa9-94', 'timestamp': '2025-10-02T12:23:17.181533', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'tapf1306fa9-94', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:41:ec:88', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapf1306fa9-94'}, 'message_id': '932a6430-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.793931297, 'message_signature': 'c2973f0c22aa10376350721c6def920689ec2a922be5227a76e98c8d0196c8d7'}]}, 'timestamp': '2025-10-02 12:23:17.181911', '_unique_id': '2180ae83b01547cb82c01ade182a587f'}: kombu.exceptions.OperationalError: [Errno 111] Connection 
refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.182 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.183 12 INFO ceilometer.polling.manager [-] Polling pollster cpu in the context of pollsters
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.183 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/cpu volume: 11940000000 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'd67c26d3-8cbe-4f78-9362-59e4063b931b', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'cpu', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 11940000000, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': None, 'resource_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'timestamp': '2025-10-02T12:23:17.183810', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'instance-00000072', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'cpu_number': 1}, 'message_id': '932abb24-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.865847768, 'message_signature': 'd43998d9905066b8c380b1d68fbcbcb2ab01ca6de5908c455e78dca8ed075ddb'}]}, 'timestamp': '2025-10-02 12:23:17.184161', '_unique_id': '600c1cfc25c045a1ac16add493968ee7'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.184 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.185 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.requests in the context of pollsters
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.185 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/disk.device.write.requests volume: 298 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.186 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/disk.device.write.requests volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '43bcfe45-3869-462a-8438-c6dc8eec596f', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 298, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': None, 'resource_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99-vda', 'timestamp': '2025-10-02T12:23:17.185704', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'instance-00000072', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '932b0534-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.805022159, 'message_signature': 'd4f5fd959279e30de1412c65f7f102f2c0d4261efa664b5b208644835dbac717'}, {'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 0, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': 
None, 'resource_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99-sda', 'timestamp': '2025-10-02T12:23:17.185704', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'instance-00000072', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '932b11dc-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.805022159, 'message_signature': 'eae5f53d31f33c680aa79f3f94050ba54dd5db865eef802e1af102f06dfe4408'}]}, 'timestamp': '2025-10-02 12:23:17.186337', '_unique_id': '906788a199534bdeaaf351876d167242'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes.rate in the context of pollsters
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.187 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for OutgoingBytesRatePollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.188 12 ERROR ceilometer.polling.manager [-] Prevent pollster network.outgoing.bytes.rate from polling [<NovaLikeServer: tempest-ServerActionsTestOtherB-server-1769053978>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-ServerActionsTestOtherB-server-1769053978>]
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.188 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.bytes in the context of pollsters
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.188 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/disk.device.write.bytes volume: 72953856 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.188 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/disk.device.write.bytes volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'd85bc76e-ca99-47ca-9bc1-146f81e2a0d9', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 72953856, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': None, 'resource_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99-vda', 'timestamp': '2025-10-02T12:23:17.188439', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'instance-00000072', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '932b7032-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.805022159, 'message_signature': '987173661e2e283ccad29a342b95f524f21ccfbfc04f37b544a336413a8ff6c6'}, {'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': None, 
'resource_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99-sda', 'timestamp': '2025-10-02T12:23:17.188439', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'instance-00000072', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '932b7c08-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.805022159, 'message_signature': '4442bdb8fae563fac8049e00887e6f37b33c4298347e6751d3e103b3a4a9228b'}]}, 'timestamp': '2025-10-02 12:23:17.189082', '_unique_id': 'e5560923b1a34567a1403fddaaba98d6'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.189 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.190 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.bytes in the context of pollsters
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.190 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/disk.device.read.bytes volume: 29538816 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.191 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/disk.device.read.bytes volume: 299326 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '565dff0c-32b0-4b34-8140-6b5dbd4a5437', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 29538816, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': None, 'resource_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99-vda', 'timestamp': '2025-10-02T12:23:17.190732', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'instance-00000072', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '932bc97e-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.805022159, 'message_signature': '89f94bbcdf35fc9800b47f618fa77cbca669fdf3e26fa641580ec2cd6abbbbd6'}, {'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 299326, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': None, 
'resource_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99-sda', 'timestamp': '2025-10-02T12:23:17.190732', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'instance-00000072', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '932bd7ca-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.805022159, 'message_signature': 'baeb84f8c3dc627397b6cb1e1824df171af68064e69cefefccf3edc6dc41977a'}]}, 'timestamp': '2025-10-02 12:23:17.191433', '_unique_id': '09df92b676f2439088509dfefe6216d7'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.192 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets in the context of pollsters
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.193 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/network.outgoing.packets volume: 28 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '333eed80-a07f-403a-ad45-73ed71207f45', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 28, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': None, 'resource_id': 'instance-00000072-ae6bf863-8cca-48ab-a98f-065f8382fa99-tapf1306fa9-94', 'timestamp': '2025-10-02T12:23:17.193011', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'tapf1306fa9-94', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:41:ec:88', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapf1306fa9-94'}, 'message_id': '932c22f2-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.793931297, 'message_signature': '2eef8d1ae601deca4a0e1a116e170318a1d006790282e12f98332de7498e14eb'}]}, 'timestamp': '2025-10-02 12:23:17.193352', '_unique_id': '59ff038c3c3e4b0dbd9b7c28c3f32527'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes.rate in the context of pollsters
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.194 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for IncomingBytesRatePollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.195 12 ERROR ceilometer.polling.manager [-] Prevent pollster network.incoming.bytes.rate from polling [<NovaLikeServer: tempest-ServerActionsTestOtherB-server-1769053978>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-ServerActionsTestOtherB-server-1769053978>]
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.195 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes in the context of pollsters
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.195 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/network.outgoing.bytes volume: 3390 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'c3ab081c-757c-44a0-a7dc-2cbac27f1455', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 3390, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': None, 'resource_id': 'instance-00000072-ae6bf863-8cca-48ab-a98f-065f8382fa99-tapf1306fa9-94', 'timestamp': '2025-10-02T12:23:17.195339', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'tapf1306fa9-94', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:41:ec:88', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapf1306fa9-94'}, 'message_id': '932c7da6-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.793931297, 'message_signature': 'beb4388dee33a9001500f306c6df35c43ea5d491b42bc50072b8f753cbd7e14a'}]}, 'timestamp': '2025-10-02 12:23:17.195671', '_unique_id': '902d9a5c48cf4981b7256b69c9934014'}: kombu.exceptions.OperationalError: [Errno 111] Connection 
refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.196 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.197 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets.drop in the context of pollsters
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.197 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/network.outgoing.packets.drop volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '50a8b1a7-3c5c-419c-bda8-d6fc7cd5401b', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets.drop', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': None, 'resource_id': 'instance-00000072-ae6bf863-8cca-48ab-a98f-065f8382fa99-tapf1306fa9-94', 'timestamp': '2025-10-02T12:23:17.197206', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'tapf1306fa9-94', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:41:ec:88', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapf1306fa9-94'}, 'message_id': '932cc68a-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.793931297, 'message_signature': '57e773ac26dfff8cb876365bb6fda49b6ea84f691eb8583607a2e3cc03de3e54'}]}, 'timestamp': '2025-10-02 12:23:17.197538', '_unique_id': 'e3115beca4f849aba4f15c053d265543'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.198 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.199 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.capacity in the context of pollsters
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.209 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/disk.device.capacity volume: 1073741824 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.209 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/disk.device.capacity volume: 509952 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'b9dc633d-dde9-4259-b805-02c45e42b9f5', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 1073741824, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': None, 'resource_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99-vda', 'timestamp': '2025-10-02T12:23:17.199112', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'instance-00000072', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '932ea75c-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.886196772, 'message_signature': '06ab910ca4bc3ee2f48a1ecf15598c6f5791040b8a68315c022477558c9a99af'}, {'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 509952, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': None, 'resource_id': 
'ae6bf863-8cca-48ab-a98f-065f8382fa99-sda', 'timestamp': '2025-10-02T12:23:17.199112', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'instance-00000072', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '932eb03a-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.886196772, 'message_signature': '530990d69e4dbf1cbe3d27ebbb4c3e65e1ea5d62a66166fad8d3facaad218a78'}]}, 'timestamp': '2025-10-02 12:23:17.210032', '_unique_id': 'a25eed235a9d4bd19e447030397cafdd'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.210 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.211 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.requests in the context of pollsters
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.211 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/disk.device.read.requests volume: 1064 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.211 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/disk.device.read.requests volume: 120 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'da4da664-7740-45be-a4cb-dd3e09549e61', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 1064, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': None, 'resource_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99-vda', 'timestamp': '2025-10-02T12:23:17.211281', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'instance-00000072', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '932eea46-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.805022159, 'message_signature': '163614fadeed56d23a7a8c48e888a195988db9606f2022bf7d4ecff4972df97f'}, {'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 120, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': 
None, 'resource_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99-sda', 'timestamp': '2025-10-02T12:23:17.211281', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'instance-00000072', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '932ef1ee-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.805022159, 'message_signature': 'fcdf440adb4124fc3f952d086c959e1c5fb2046870e31ebf5482fac232c6629c'}]}, 'timestamp': '2025-10-02 12:23:17.211687', '_unique_id': '0c13e898db2441f6b9af6cc875fc39a9'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.iops in the context of pollsters
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for PerDeviceDiskIOPSPollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 ERROR ceilometer.polling.manager [-] Prevent pollster disk.device.iops from polling [<NovaLikeServer: tempest-ServerActionsTestOtherB-server-1769053978>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-ServerActionsTestOtherB-server-1769053978>]
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.212 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.allocation in the context of pollsters
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.213 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/disk.device.allocation volume: 30482432 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.213 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/disk.device.allocation volume: 512000 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'e9c3f26b-cc28-49f0-a67e-c602435b3d3b', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 30482432, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': None, 'resource_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99-vda', 'timestamp': '2025-10-02T12:23:17.213051', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'instance-00000072', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '932f2f4c-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.886196772, 'message_signature': 'ccefc99c7f13d8f69d48d706c43f36effd41363f8f8c63ecb1f9bd516a37d489'}, {'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 512000, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': None, 'resource_id': 
'ae6bf863-8cca-48ab-a98f-065f8382fa99-sda', 'timestamp': '2025-10-02T12:23:17.213051', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'instance-00000072', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '932f3730-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.886196772, 'message_signature': '5e7e53b43792a403e21f07afd084103ba0772830044cc3d77c8507e8a374bc99'}]}, 'timestamp': '2025-10-02 12:23:17.213465', '_unique_id': 'b03912c2f0a14e43af7f1e2d1d125917'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.usage in the context of pollsters
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/disk.device.usage volume: 29949952 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.214 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/disk.device.usage volume: 509952 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'd504d1a4-59e1-42be-b952-213cfd6132ca', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 29949952, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': None, 'resource_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99-vda', 'timestamp': '2025-10-02T12:23:17.214651', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'instance-00000072', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '932f6e76-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.886196772, 'message_signature': '00f127dcde798f20c4d425cdc6b0a8d9ac542e203ec9a111b88dcab1ef794a12'}, {'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 509952, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': None, 'resource_id': 
'ae6bf863-8cca-48ab-a98f-065f8382fa99-sda', 'timestamp': '2025-10-02T12:23:17.214651', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'instance-00000072', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '932f770e-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.886196772, 'message_signature': '5714275cbc76d1d9a018f6aa1f9c0bbbce020fbc265d92b0a0f4bec1879f49db'}]}, 'timestamp': '2025-10-02 12:23:17.215099', '_unique_id': '4bc4f9038b8341688ab987fc57701075'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.215 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes.delta in the context of pollsters
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/network.outgoing.bytes.delta volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'e78acb6e-9c2f-40b4-b0e3-7ad9ce5b51e8', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.bytes.delta', 'counter_type': 'delta', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': None, 'resource_id': 'instance-00000072-ae6bf863-8cca-48ab-a98f-065f8382fa99-tapf1306fa9-94', 'timestamp': '2025-10-02T12:23:17.216195', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'tapf1306fa9-94', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:41:ec:88', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapf1306fa9-94'}, 'message_id': '932faa6c-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.793931297, 'message_signature': '41705fc4773595c18072bf2f4a43c2d99cd252820b2ce6b15bb8f6618b6c6cb0'}]}, 'timestamp': '2025-10-02 12:23:17.216427', '_unique_id': 'c1dc5688d8a24225a6224e545c07a7cd'}: kombu.exceptions.OperationalError: [Errno 111] Connection 
refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.216 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.217 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.latency in the context of pollsters
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.217 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for PerDeviceDiskLatencyPollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.217 12 ERROR ceilometer.polling.manager [-] Prevent pollster disk.device.latency from polling [<NovaLikeServer: tempest-ServerActionsTestOtherB-server-1769053978>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-ServerActionsTestOtherB-server-1769053978>]
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.217 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets.error in the context of pollsters
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.217 12 DEBUG ceilometer.compute.pollsters [-] ae6bf863-8cca-48ab-a98f-065f8382fa99/network.outgoing.packets.error volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '42dcdba0-d8ca-47ec-9adf-98f06c7b1a4f', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets.error', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '0ea122e2fff94f2ba7c78bf30b04029c', 'user_name': None, 'project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'project_name': None, 'resource_id': 'instance-00000072-ae6bf863-8cca-48ab-a98f-065f8382fa99-tapf1306fa9-94', 'timestamp': '2025-10-02T12:23:17.217765', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestOtherB-server-1769053978', 'name': 'tapf1306fa9-94', 'instance_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'instance_type': 'm1.nano', 'host': 'e24fd025415e4d6264522828418b60001ee43b20fb42f90e2cdf9117', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:41:ec:88', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapf1306fa9-94'}, 'message_id': '932fe888-9f8a-11f0-af18-fa163efc5e78', 'monotonic_time': 5794.793931297, 'message_signature': 'b8f28809f3352471fbec6502c100779bfb2ebeb98320250ffd5e0492226ebfd9'}]}, 'timestamp': '2025-10-02 12:23:17.218038', '_unique_id': '38684bf473774764ab6ceeb7d05d3659'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:23:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:23:17.218 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:23:18 compute-0 ovn_controller[94336]: 2025-10-02T12:23:18Z|00378|binding|INFO|Releasing lport e533861f-45cb-4843-b071-0b628ca25128 from this chassis (sb_readonly=0)
Oct 02 12:23:18 compute-0 nova_compute[192079]: 2025-10-02 12:23:18.951 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:19 compute-0 nova_compute[192079]: 2025-10-02 12:23:19.964 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:21 compute-0 nova_compute[192079]: 2025-10-02 12:23:21.878 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:23 compute-0 podman[237610]: 2025-10-02 12:23:23.168881932 +0000 UTC m=+0.065215879 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:23:23 compute-0 podman[237608]: 2025-10-02 12:23:23.177420715 +0000 UTC m=+0.079019846 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, container_name=ovn_metadata_agent, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', 
'/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:23:23 compute-0 podman[237609]: 2025-10-02 12:23:23.226374269 +0000 UTC m=+0.124885575 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, container_name=ovn_controller, org.label-schema.license=GPLv2, tcib_managed=true, config_id=ovn_controller, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0)
Oct 02 12:23:23 compute-0 nova_compute[192079]: 2025-10-02 12:23:23.660 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:23:24 compute-0 nova_compute[192079]: 2025-10-02 12:23:24.965 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:26 compute-0 nova_compute[192079]: 2025-10-02 12:23:26.880 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:27 compute-0 nova_compute[192079]: 2025-10-02 12:23:27.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:23:28 compute-0 ovn_controller[94336]: 2025-10-02T12:23:28Z|00379|binding|INFO|Releasing lport e533861f-45cb-4843-b071-0b628ca25128 from this chassis (sb_readonly=0)
Oct 02 12:23:28 compute-0 nova_compute[192079]: 2025-10-02 12:23:28.639 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:28 compute-0 nova_compute[192079]: 2025-10-02 12:23:28.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:23:28 compute-0 nova_compute[192079]: 2025-10-02 12:23:28.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:23:28 compute-0 nova_compute[192079]: 2025-10-02 12:23:28.709 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:23:28 compute-0 nova_compute[192079]: 2025-10-02 12:23:28.709 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:23:28 compute-0 nova_compute[192079]: 2025-10-02 12:23:28.710 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:23:28 compute-0 nova_compute[192079]: 2025-10-02 12:23:28.710 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:23:28 compute-0 nova_compute[192079]: 2025-10-02 12:23:28.793 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ae6bf863-8cca-48ab-a98f-065f8382fa99/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:23:28 compute-0 nova_compute[192079]: 2025-10-02 12:23:28.852 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ae6bf863-8cca-48ab-a98f-065f8382fa99/disk --force-share --output=json" returned: 0 in 0.059s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:23:28 compute-0 nova_compute[192079]: 2025-10-02 12:23:28.853 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ae6bf863-8cca-48ab-a98f-065f8382fa99/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:23:28 compute-0 nova_compute[192079]: 2025-10-02 12:23:28.919 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ae6bf863-8cca-48ab-a98f-065f8382fa99/disk --force-share --output=json" returned: 0 in 0.066s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:23:29 compute-0 nova_compute[192079]: 2025-10-02 12:23:29.088 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:23:29 compute-0 nova_compute[192079]: 2025-10-02 12:23:29.089 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5562MB free_disk=73.31924057006836GB free_vcpus=7 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:23:29 compute-0 nova_compute[192079]: 2025-10-02 12:23:29.089 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:23:29 compute-0 nova_compute[192079]: 2025-10-02 12:23:29.089 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:23:29 compute-0 nova_compute[192079]: 2025-10-02 12:23:29.251 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance ae6bf863-8cca-48ab-a98f-065f8382fa99 actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:23:29 compute-0 nova_compute[192079]: 2025-10-02 12:23:29.252 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 1 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:23:29 compute-0 nova_compute[192079]: 2025-10-02 12:23:29.253 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=640MB phys_disk=79GB used_disk=1GB total_vcpus=8 used_vcpus=1 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:23:29 compute-0 nova_compute[192079]: 2025-10-02 12:23:29.346 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:23:29 compute-0 nova_compute[192079]: 2025-10-02 12:23:29.367 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:23:29 compute-0 nova_compute[192079]: 2025-10-02 12:23:29.391 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:23:29 compute-0 nova_compute[192079]: 2025-10-02 12:23:29.391 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.302s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:23:29 compute-0 nova_compute[192079]: 2025-10-02 12:23:29.967 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:30 compute-0 nova_compute[192079]: 2025-10-02 12:23:30.392 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:23:30 compute-0 nova_compute[192079]: 2025-10-02 12:23:30.663 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:23:31 compute-0 podman[237681]: 2025-10-02 12:23:31.185333181 +0000 UTC m=+0.090587540 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_id=edpm, container_name=ceilometer_agent_compute, managed_by=edpm_ansible, 
org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS)
Oct 02 12:23:31 compute-0 nova_compute[192079]: 2025-10-02 12:23:31.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:23:31 compute-0 nova_compute[192079]: 2025-10-02 12:23:31.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:23:31 compute-0 nova_compute[192079]: 2025-10-02 12:23:31.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:23:31 compute-0 nova_compute[192079]: 2025-10-02 12:23:31.895 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:32 compute-0 nova_compute[192079]: 2025-10-02 12:23:32.013 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "refresh_cache-ae6bf863-8cca-48ab-a98f-065f8382fa99" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:23:32 compute-0 nova_compute[192079]: 2025-10-02 12:23:32.014 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquired lock "refresh_cache-ae6bf863-8cca-48ab-a98f-065f8382fa99" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:23:32 compute-0 nova_compute[192079]: 2025-10-02 12:23:32.014 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Forcefully refreshing network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2004
Oct 02 12:23:32 compute-0 nova_compute[192079]: 2025-10-02 12:23:32.014 2 DEBUG nova.objects.instance [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lazy-loading 'info_cache' on Instance uuid ae6bf863-8cca-48ab-a98f-065f8382fa99 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:23:34 compute-0 nova_compute[192079]: 2025-10-02 12:23:34.571 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Updating instance_info_cache with network_info: [{"id": "f1306fa9-9429-43db-a3f4-48a2399611d7", "address": "fa:16:3e:41:ec:88", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf1306fa9-94", "ovs_interfaceid": "f1306fa9-9429-43db-a3f4-48a2399611d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:23:34 compute-0 nova_compute[192079]: 2025-10-02 12:23:34.592 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Releasing lock "refresh_cache-ae6bf863-8cca-48ab-a98f-065f8382fa99" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:23:34 compute-0 nova_compute[192079]: 2025-10-02 12:23:34.593 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Updated the network info_cache for instance _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9929
Oct 02 12:23:34 compute-0 nova_compute[192079]: 2025-10-02 12:23:34.593 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:23:34 compute-0 nova_compute[192079]: 2025-10-02 12:23:34.593 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:23:34 compute-0 nova_compute[192079]: 2025-10-02 12:23:34.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:23:34 compute-0 nova_compute[192079]: 2025-10-02 12:23:34.969 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:36 compute-0 nova_compute[192079]: 2025-10-02 12:23:36.897 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:38 compute-0 podman[237701]: 2025-10-02 12:23:38.144188462 +0000 UTC m=+0.057036126 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, url=https://catalog.redhat.com/en/search?searchType=containers, vcs-type=git, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, architecture=x86_64, config_id=edpm, distribution-scope=public, io.openshift.expose-services=, io.openshift.tags=minimal rhel9, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, vendor=Red Hat, Inc., container_name=openstack_network_exporter, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, build-date=2025-08-20T13:12:41, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. 
This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.buildah.version=1.33.7, name=ubi9-minimal, release=1755695350, com.redhat.component=ubi9-minimal-container, managed_by=edpm_ansible, version=9.6, maintainer=Red Hat, Inc., summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly.)
Oct 02 12:23:38 compute-0 podman[237702]: 2025-10-02 12:23:38.171886277 +0000 UTC m=+0.079451536 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, config_id=multipathd, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=multipathd, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']})
Oct 02 12:23:40 compute-0 nova_compute[192079]: 2025-10-02 12:23:40.017 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:41 compute-0 nova_compute[192079]: 2025-10-02 12:23:41.901 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:45 compute-0 nova_compute[192079]: 2025-10-02 12:23:45.069 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:45 compute-0 podman[237742]: 2025-10-02 12:23:45.155069427 +0000 UTC m=+0.062374071 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>)
Oct 02 12:23:45 compute-0 podman[237743]: 2025-10-02 12:23:45.176976504 +0000 UTC m=+0.068839497 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, container_name=iscsid, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=iscsid, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 12:23:45 compute-0 nova_compute[192079]: 2025-10-02 12:23:45.890 2 DEBUG oslo_concurrency.lockutils [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Acquiring lock "ae6bf863-8cca-48ab-a98f-065f8382fa99" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:23:45 compute-0 nova_compute[192079]: 2025-10-02 12:23:45.891 2 DEBUG oslo_concurrency.lockutils [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lock "ae6bf863-8cca-48ab-a98f-065f8382fa99" acquired by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:23:45 compute-0 nova_compute[192079]: 2025-10-02 12:23:45.891 2 INFO nova.compute.manager [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Shelving
Oct 02 12:23:45 compute-0 nova_compute[192079]: 2025-10-02 12:23:45.934 2 DEBUG nova.virt.libvirt.driver [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Shutting down instance from state 1 _clean_shutdown /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4071
Oct 02 12:23:46 compute-0 nova_compute[192079]: 2025-10-02 12:23:46.905 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:48 compute-0 kernel: tapf1306fa9-94 (unregistering): left promiscuous mode
Oct 02 12:23:48 compute-0 NetworkManager[51160]: <info>  [1759407828.1501] device (tapf1306fa9-94): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:23:48 compute-0 ovn_controller[94336]: 2025-10-02T12:23:48Z|00380|binding|INFO|Releasing lport f1306fa9-9429-43db-a3f4-48a2399611d7 from this chassis (sb_readonly=0)
Oct 02 12:23:48 compute-0 nova_compute[192079]: 2025-10-02 12:23:48.158 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:48 compute-0 ovn_controller[94336]: 2025-10-02T12:23:48Z|00381|binding|INFO|Setting lport f1306fa9-9429-43db-a3f4-48a2399611d7 down in Southbound
Oct 02 12:23:48 compute-0 ovn_controller[94336]: 2025-10-02T12:23:48Z|00382|binding|INFO|Removing iface tapf1306fa9-94 ovn-installed in OVS
Oct 02 12:23:48 compute-0 nova_compute[192079]: 2025-10-02 12:23:48.162 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:48 compute-0 nova_compute[192079]: 2025-10-02 12:23:48.176 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:23:48.183 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:41:ec:88 10.100.0.8'], port_security=['fa:16:3e:41:ec:88 10.100.0.8'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.8/28', 'neutron:device_id': 'ae6bf863-8cca-48ab-a98f-065f8382fa99', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-20eb29be-ee23-463b-85af-bfc2388e9f77', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'ffce7d629aa24a7f970d93b2a79045f1', 'neutron:revision_number': '4', 'neutron:security_group_ids': '12e9168a-be86-462f-a658-971f38e3430f', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com', 'neutron:port_fip': '192.168.122.185'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=e183e2c6-21dc-48e3-ae47-279bc8b32eeb, chassis=[], tunnel_key=5, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=f1306fa9-9429-43db-a3f4-48a2399611d7) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:23:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:23:48.184 103294 INFO neutron.agent.ovn.metadata.agent [-] Port f1306fa9-9429-43db-a3f4-48a2399611d7 in datapath 20eb29be-ee23-463b-85af-bfc2388e9f77 unbound from our chassis
Oct 02 12:23:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:23:48.186 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 20eb29be-ee23-463b-85af-bfc2388e9f77, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:23:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:23:48.188 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5f325f87-37cb-4f54-bbe7-6e0c9ec3cc6a]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:23:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:23:48.189 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77 namespace which is not needed anymore
Oct 02 12:23:48 compute-0 systemd[1]: machine-qemu\x2d51\x2dinstance\x2d00000072.scope: Deactivated successfully.
Oct 02 12:23:48 compute-0 systemd[1]: machine-qemu\x2d51\x2dinstance\x2d00000072.scope: Consumed 15.530s CPU time.
Oct 02 12:23:48 compute-0 systemd-machined[152150]: Machine qemu-51-instance-00000072 terminated.
Oct 02 12:23:48 compute-0 neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77[237403]: [NOTICE]   (237407) : haproxy version is 2.8.14-c23fe91
Oct 02 12:23:48 compute-0 neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77[237403]: [NOTICE]   (237407) : path to executable is /usr/sbin/haproxy
Oct 02 12:23:48 compute-0 neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77[237403]: [WARNING]  (237407) : Exiting Master process...
Oct 02 12:23:48 compute-0 neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77[237403]: [WARNING]  (237407) : Exiting Master process...
Oct 02 12:23:48 compute-0 neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77[237403]: [ALERT]    (237407) : Current worker (237409) exited with code 143 (Terminated)
Oct 02 12:23:48 compute-0 neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77[237403]: [WARNING]  (237407) : All workers exited. Exiting... (0)
Oct 02 12:23:48 compute-0 systemd[1]: libpod-ca3817d11e0e246b53557f9e569be0d1309be36876f6b30348e1c21110486e12.scope: Deactivated successfully.
Oct 02 12:23:48 compute-0 podman[237818]: 2025-10-02 12:23:48.334735263 +0000 UTC m=+0.051980687 container died ca3817d11e0e246b53557f9e569be0d1309be36876f6b30348e1c21110486e12 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:23:48 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-ca3817d11e0e246b53557f9e569be0d1309be36876f6b30348e1c21110486e12-userdata-shm.mount: Deactivated successfully.
Oct 02 12:23:48 compute-0 systemd[1]: var-lib-containers-storage-overlay-66ee260b173b294694994146d44ca567983b816b29f68e5c0542cb20eafec7d3-merged.mount: Deactivated successfully.
Oct 02 12:23:48 compute-0 podman[237818]: 2025-10-02 12:23:48.37757762 +0000 UTC m=+0.094823014 container cleanup ca3817d11e0e246b53557f9e569be0d1309be36876f6b30348e1c21110486e12 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true)
Oct 02 12:23:48 compute-0 systemd[1]: libpod-conmon-ca3817d11e0e246b53557f9e569be0d1309be36876f6b30348e1c21110486e12.scope: Deactivated successfully.
Oct 02 12:23:48 compute-0 podman[237848]: 2025-10-02 12:23:48.475581751 +0000 UTC m=+0.061304121 container remove ca3817d11e0e246b53557f9e569be0d1309be36876f6b30348e1c21110486e12 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, tcib_managed=true, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:23:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:23:48.482 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3f1564ba-cd47-4b7d-9c5a-a2c46ab96bcf]: (4, ('Thu Oct  2 12:23:48 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77 (ca3817d11e0e246b53557f9e569be0d1309be36876f6b30348e1c21110486e12)\nca3817d11e0e246b53557f9e569be0d1309be36876f6b30348e1c21110486e12\nThu Oct  2 12:23:48 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77 (ca3817d11e0e246b53557f9e569be0d1309be36876f6b30348e1c21110486e12)\nca3817d11e0e246b53557f9e569be0d1309be36876f6b30348e1c21110486e12\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:23:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:23:48.484 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1e3cfcff-915e-4f5c-8740-157fe4e9fd69]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:23:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:23:48.486 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap20eb29be-e0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:23:48 compute-0 kernel: tap20eb29be-e0: left promiscuous mode
Oct 02 12:23:48 compute-0 nova_compute[192079]: 2025-10-02 12:23:48.489 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:48 compute-0 nova_compute[192079]: 2025-10-02 12:23:48.508 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:48 compute-0 nova_compute[192079]: 2025-10-02 12:23:48.509 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:23:48.511 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[75070cdc-b566-4a41-9465-b9c32235dd52]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:23:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:23:48.540 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9f253f2c-540c-4330-a659-4ffae457680e]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:23:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:23:48.542 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[11f4532a-e2eb-4fb5-a0cf-8b37ef78cf93]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:23:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:23:48.555 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3ae77fa4-7d17-46a5-8706-f7f8dfbdf6e1]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 576313, 'reachable_time': 15659, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 237882, 'error': None, 'target': 'ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:23:48 compute-0 systemd[1]: run-netns-ovnmeta\x2d20eb29be\x2dee23\x2d463b\x2d85af\x2dbfc2388e9f77.mount: Deactivated successfully.
Oct 02 12:23:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:23:48.559 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-20eb29be-ee23-463b-85af-bfc2388e9f77 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:23:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:23:48.559 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[fc67c320-10e8-46c5-bb80-d53db95d11ed]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:23:48 compute-0 nova_compute[192079]: 2025-10-02 12:23:48.956 2 INFO nova.virt.libvirt.driver [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Instance shutdown successfully after 3 seconds.
Oct 02 12:23:48 compute-0 nova_compute[192079]: 2025-10-02 12:23:48.965 2 INFO nova.virt.libvirt.driver [-] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Instance destroyed successfully.
Oct 02 12:23:48 compute-0 nova_compute[192079]: 2025-10-02 12:23:48.966 2 DEBUG nova.objects.instance [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lazy-loading 'numa_topology' on Instance uuid ae6bf863-8cca-48ab-a98f-065f8382fa99 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:23:49 compute-0 nova_compute[192079]: 2025-10-02 12:23:49.317 2 INFO nova.virt.libvirt.driver [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Beginning cold snapshot process
Oct 02 12:23:49 compute-0 nova_compute[192079]: 2025-10-02 12:23:49.612 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:49 compute-0 nova_compute[192079]: 2025-10-02 12:23:49.720 2 DEBUG nova.privsep.utils [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Path '/var/lib/nova/instances' supports direct I/O supports_direct_io /usr/lib/python3.9/site-packages/nova/privsep/utils.py:63
Oct 02 12:23:49 compute-0 nova_compute[192079]: 2025-10-02 12:23:49.721 2 DEBUG oslo_concurrency.processutils [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Running cmd (subprocess): qemu-img convert -t none -O qcow2 -f qcow2 /var/lib/nova/instances/ae6bf863-8cca-48ab-a98f-065f8382fa99/disk /var/lib/nova/instances/snapshots/tmppevuyqme/1392d8881c774899a943095a1e29eb18 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:23:49 compute-0 nova_compute[192079]: 2025-10-02 12:23:49.975 2 DEBUG nova.compute.manager [req-dc71ee3b-81ac-4021-94e3-e5e404c1ab9f req-8c8cd1f6-d545-4404-8de3-6acb708585a0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Received event network-vif-unplugged-f1306fa9-9429-43db-a3f4-48a2399611d7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:23:49 compute-0 nova_compute[192079]: 2025-10-02 12:23:49.976 2 DEBUG oslo_concurrency.lockutils [req-dc71ee3b-81ac-4021-94e3-e5e404c1ab9f req-8c8cd1f6-d545-4404-8de3-6acb708585a0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "ae6bf863-8cca-48ab-a98f-065f8382fa99-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:23:49 compute-0 nova_compute[192079]: 2025-10-02 12:23:49.977 2 DEBUG oslo_concurrency.lockutils [req-dc71ee3b-81ac-4021-94e3-e5e404c1ab9f req-8c8cd1f6-d545-4404-8de3-6acb708585a0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ae6bf863-8cca-48ab-a98f-065f8382fa99-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:23:49 compute-0 nova_compute[192079]: 2025-10-02 12:23:49.977 2 DEBUG oslo_concurrency.lockutils [req-dc71ee3b-81ac-4021-94e3-e5e404c1ab9f req-8c8cd1f6-d545-4404-8de3-6acb708585a0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ae6bf863-8cca-48ab-a98f-065f8382fa99-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:23:49 compute-0 nova_compute[192079]: 2025-10-02 12:23:49.978 2 DEBUG nova.compute.manager [req-dc71ee3b-81ac-4021-94e3-e5e404c1ab9f req-8c8cd1f6-d545-4404-8de3-6acb708585a0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] No waiting events found dispatching network-vif-unplugged-f1306fa9-9429-43db-a3f4-48a2399611d7 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:23:49 compute-0 nova_compute[192079]: 2025-10-02 12:23:49.978 2 WARNING nova.compute.manager [req-dc71ee3b-81ac-4021-94e3-e5e404c1ab9f req-8c8cd1f6-d545-4404-8de3-6acb708585a0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Received unexpected event network-vif-unplugged-f1306fa9-9429-43db-a3f4-48a2399611d7 for instance with vm_state active and task_state shelving_image_pending_upload.
Oct 02 12:23:50 compute-0 nova_compute[192079]: 2025-10-02 12:23:50.071 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:50 compute-0 nova_compute[192079]: 2025-10-02 12:23:50.201 2 DEBUG oslo_concurrency.processutils [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] CMD "qemu-img convert -t none -O qcow2 -f qcow2 /var/lib/nova/instances/ae6bf863-8cca-48ab-a98f-065f8382fa99/disk /var/lib/nova/instances/snapshots/tmppevuyqme/1392d8881c774899a943095a1e29eb18" returned: 0 in 0.480s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:23:50 compute-0 nova_compute[192079]: 2025-10-02 12:23:50.202 2 INFO nova.virt.libvirt.driver [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Snapshot extracted, beginning image upload
Oct 02 12:23:51 compute-0 nova_compute[192079]: 2025-10-02 12:23:51.911 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:52 compute-0 nova_compute[192079]: 2025-10-02 12:23:52.092 2 DEBUG nova.compute.manager [req-8d85ba55-8575-422f-88ea-00b4d3c17416 req-1aafad02-c226-45c7-8e00-2dd7b9632f79 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Received event network-vif-plugged-f1306fa9-9429-43db-a3f4-48a2399611d7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:23:52 compute-0 nova_compute[192079]: 2025-10-02 12:23:52.093 2 DEBUG oslo_concurrency.lockutils [req-8d85ba55-8575-422f-88ea-00b4d3c17416 req-1aafad02-c226-45c7-8e00-2dd7b9632f79 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "ae6bf863-8cca-48ab-a98f-065f8382fa99-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:23:52 compute-0 nova_compute[192079]: 2025-10-02 12:23:52.093 2 DEBUG oslo_concurrency.lockutils [req-8d85ba55-8575-422f-88ea-00b4d3c17416 req-1aafad02-c226-45c7-8e00-2dd7b9632f79 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ae6bf863-8cca-48ab-a98f-065f8382fa99-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:23:52 compute-0 nova_compute[192079]: 2025-10-02 12:23:52.093 2 DEBUG oslo_concurrency.lockutils [req-8d85ba55-8575-422f-88ea-00b4d3c17416 req-1aafad02-c226-45c7-8e00-2dd7b9632f79 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ae6bf863-8cca-48ab-a98f-065f8382fa99-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:23:52 compute-0 nova_compute[192079]: 2025-10-02 12:23:52.093 2 DEBUG nova.compute.manager [req-8d85ba55-8575-422f-88ea-00b4d3c17416 req-1aafad02-c226-45c7-8e00-2dd7b9632f79 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] No waiting events found dispatching network-vif-plugged-f1306fa9-9429-43db-a3f4-48a2399611d7 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:23:52 compute-0 nova_compute[192079]: 2025-10-02 12:23:52.094 2 WARNING nova.compute.manager [req-8d85ba55-8575-422f-88ea-00b4d3c17416 req-1aafad02-c226-45c7-8e00-2dd7b9632f79 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Received unexpected event network-vif-plugged-f1306fa9-9429-43db-a3f4-48a2399611d7 for instance with vm_state active and task_state shelving_image_uploading.
Oct 02 12:23:53 compute-0 nova_compute[192079]: 2025-10-02 12:23:53.062 2 INFO nova.virt.libvirt.driver [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Snapshot image upload complete
Oct 02 12:23:53 compute-0 nova_compute[192079]: 2025-10-02 12:23:53.063 2 DEBUG nova.compute.manager [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:23:53 compute-0 nova_compute[192079]: 2025-10-02 12:23:53.159 2 INFO nova.compute.manager [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Shelve offloading
Oct 02 12:23:53 compute-0 nova_compute[192079]: 2025-10-02 12:23:53.185 2 INFO nova.virt.libvirt.driver [-] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Instance destroyed successfully.
Oct 02 12:23:53 compute-0 nova_compute[192079]: 2025-10-02 12:23:53.186 2 DEBUG nova.compute.manager [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:23:53 compute-0 nova_compute[192079]: 2025-10-02 12:23:53.190 2 DEBUG oslo_concurrency.lockutils [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Acquiring lock "refresh_cache-ae6bf863-8cca-48ab-a98f-065f8382fa99" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:23:53 compute-0 nova_compute[192079]: 2025-10-02 12:23:53.190 2 DEBUG oslo_concurrency.lockutils [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Acquired lock "refresh_cache-ae6bf863-8cca-48ab-a98f-065f8382fa99" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:23:53 compute-0 nova_compute[192079]: 2025-10-02 12:23:53.190 2 DEBUG nova.network.neutron [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:23:53 compute-0 nova_compute[192079]: 2025-10-02 12:23:53.755 2 DEBUG nova.compute.manager [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Stashing vm_state: active _prep_resize /usr/lib/python3.9/site-packages/nova/compute/manager.py:5560
Oct 02 12:23:53 compute-0 nova_compute[192079]: 2025-10-02 12:23:53.879 2 DEBUG oslo_concurrency.lockutils [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:23:53 compute-0 nova_compute[192079]: 2025-10-02 12:23:53.880 2 DEBUG oslo_concurrency.lockutils [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:23:53 compute-0 nova_compute[192079]: 2025-10-02 12:23:53.919 2 DEBUG nova.objects.instance [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Lazy-loading 'pci_requests' on Instance uuid 32196dd3-2739-4c43-9532-b0365f8095af obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:23:53 compute-0 nova_compute[192079]: 2025-10-02 12:23:53.937 2 DEBUG nova.virt.hardware [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:23:53 compute-0 nova_compute[192079]: 2025-10-02 12:23:53.938 2 INFO nova.compute.claims [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:23:53 compute-0 nova_compute[192079]: 2025-10-02 12:23:53.939 2 DEBUG nova.objects.instance [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Lazy-loading 'resources' on Instance uuid 32196dd3-2739-4c43-9532-b0365f8095af obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:23:53 compute-0 nova_compute[192079]: 2025-10-02 12:23:53.952 2 DEBUG nova.objects.instance [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Lazy-loading 'numa_topology' on Instance uuid 32196dd3-2739-4c43-9532-b0365f8095af obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:23:53 compute-0 nova_compute[192079]: 2025-10-02 12:23:53.965 2 DEBUG nova.objects.instance [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Lazy-loading 'pci_devices' on Instance uuid 32196dd3-2739-4c43-9532-b0365f8095af obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.024 2 INFO nova.compute.resource_tracker [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Updating resource usage from migration 544fb063-d35f-48cc-b24b-f2f0ae933652
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.025 2 DEBUG nova.compute.resource_tracker [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Starting to track incoming migration 544fb063-d35f-48cc-b24b-f2f0ae933652 with flavor 9ac83da7-f31e-4467-8569-d28002f6aeed _update_usage_from_migration /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1431
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.051 2 DEBUG oslo_concurrency.lockutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.051 2 DEBUG oslo_concurrency.lockutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.076 2 DEBUG nova.compute.manager [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.133 2 DEBUG nova.compute.provider_tree [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.155 2 DEBUG nova.scheduler.client.report [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:23:54 compute-0 podman[237895]: 2025-10-02 12:23:54.177732311 +0000 UTC m=+0.070687507 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>)
Oct 02 12:23:54 compute-0 podman[237893]: 2025-10-02 12:23:54.183776256 +0000 UTC m=+0.082339515 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible, org.label-schema.license=GPLv2, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, container_name=ovn_metadata_agent, maintainer=OpenStack Kubernetes 
Operator team, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_managed=true, config_id=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.build-date=20251001)
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.188 2 DEBUG oslo_concurrency.lockutils [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 0.307s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.189 2 INFO nova.compute.manager [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Migrating
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.196 2 DEBUG oslo_concurrency.lockutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.197 2 DEBUG oslo_concurrency.lockutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.199 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.204 2 DEBUG nova.virt.hardware [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.204 2 INFO nova.compute.claims [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:23:54 compute-0 podman[237894]: 2025-10-02 12:23:54.213835355 +0000 UTC m=+0.111163801 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, container_name=ovn_controller, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS)
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.397 2 DEBUG nova.compute.provider_tree [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.413 2 DEBUG nova.scheduler.client.report [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.442 2 DEBUG oslo_concurrency.lockutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.245s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.443 2 DEBUG nova.compute.manager [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.515 2 DEBUG nova.network.neutron [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Updating instance_info_cache with network_info: [{"id": "f1306fa9-9429-43db-a3f4-48a2399611d7", "address": "fa:16:3e:41:ec:88", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf1306fa9-94", "ovs_interfaceid": "f1306fa9-9429-43db-a3f4-48a2399611d7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.521 2 DEBUG nova.compute.manager [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.522 2 DEBUG nova.network.neutron [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.542 2 INFO nova.virt.libvirt.driver [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.547 2 DEBUG oslo_concurrency.lockutils [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Releasing lock "refresh_cache-ae6bf863-8cca-48ab-a98f-065f8382fa99" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.571 2 DEBUG nova.compute.manager [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.696 2 DEBUG nova.compute.manager [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.699 2 DEBUG nova.virt.libvirt.driver [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.700 2 INFO nova.virt.libvirt.driver [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Creating image(s)
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.701 2 DEBUG oslo_concurrency.lockutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "/var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.701 2 DEBUG oslo_concurrency.lockutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "/var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.703 2 DEBUG oslo_concurrency.lockutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "/var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.730 2 DEBUG oslo_concurrency.processutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.819 2 DEBUG oslo_concurrency.processutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.090s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.820 2 DEBUG oslo_concurrency.lockutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.821 2 DEBUG oslo_concurrency.lockutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.836 2 DEBUG oslo_concurrency.processutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.903 2 DEBUG oslo_concurrency.processutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.067s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.904 2 DEBUG oslo_concurrency.processutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.940 2 DEBUG oslo_concurrency.processutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/disk 1073741824" returned: 0 in 0.035s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.941 2 DEBUG oslo_concurrency.lockutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.119s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.941 2 DEBUG oslo_concurrency.processutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.989 2 DEBUG nova.policy [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:23:54 compute-0 nova_compute[192079]: 2025-10-02 12:23:54.999 2 DEBUG oslo_concurrency.processutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.058s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:23:55 compute-0 nova_compute[192079]: 2025-10-02 12:23:55.000 2 DEBUG nova.virt.disk.api [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Checking if we can resize image /var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:23:55 compute-0 nova_compute[192079]: 2025-10-02 12:23:55.000 2 DEBUG oslo_concurrency.processutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:23:55 compute-0 nova_compute[192079]: 2025-10-02 12:23:55.060 2 DEBUG oslo_concurrency.processutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/disk --force-share --output=json" returned: 0 in 0.060s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:23:55 compute-0 nova_compute[192079]: 2025-10-02 12:23:55.062 2 DEBUG nova.virt.disk.api [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Cannot resize image /var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:23:55 compute-0 nova_compute[192079]: 2025-10-02 12:23:55.063 2 DEBUG nova.objects.instance [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'migration_context' on Instance uuid 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:23:55 compute-0 nova_compute[192079]: 2025-10-02 12:23:55.074 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:55 compute-0 nova_compute[192079]: 2025-10-02 12:23:55.081 2 DEBUG nova.virt.libvirt.driver [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:23:55 compute-0 nova_compute[192079]: 2025-10-02 12:23:55.082 2 DEBUG nova.virt.libvirt.driver [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Ensure instance console log exists: /var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:23:55 compute-0 nova_compute[192079]: 2025-10-02 12:23:55.083 2 DEBUG oslo_concurrency.lockutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:23:55 compute-0 nova_compute[192079]: 2025-10-02 12:23:55.084 2 DEBUG oslo_concurrency.lockutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:23:55 compute-0 nova_compute[192079]: 2025-10-02 12:23:55.084 2 DEBUG oslo_concurrency.lockutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:23:55 compute-0 nova_compute[192079]: 2025-10-02 12:23:55.952 2 DEBUG nova.network.neutron [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Successfully created port: 1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:23:56 compute-0 nova_compute[192079]: 2025-10-02 12:23:56.285 2 INFO nova.virt.libvirt.driver [-] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Instance destroyed successfully.
Oct 02 12:23:56 compute-0 nova_compute[192079]: 2025-10-02 12:23:56.286 2 DEBUG nova.objects.instance [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lazy-loading 'resources' on Instance uuid ae6bf863-8cca-48ab-a98f-065f8382fa99 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:23:56 compute-0 nova_compute[192079]: 2025-10-02 12:23:56.311 2 DEBUG nova.virt.libvirt.vif [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:22:37Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerActionsTestOtherB-server-1769053978',display_name='tempest-ServerActionsTestOtherB-server-1769053978',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serveractionstestotherb-server-1769053978',id=114,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBG+aqSe4de2VLtRAXN5xeLQn4S/3X8QrNMy2M5WdQ5hviVyEOgqK+m+uWmzPaUSUgE38sEdkytfwUHD32CBZajBt4q3OEf9i3yPJUQGuqp42pAUD+A3EoBIyeptNeSxGdA==',key_name='tempest-keypair-1900171990',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:22:46Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=4,progress=0,project_id='ffce7d629aa24a7f970d93b2a79045f1',ramdisk_id='',reservation_id='r-flcxdim8',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-ServerActionsTestOtherB-263921372',owner_user_name='tempest-ServerActionsTestOtherB-263921372-project-member',shelved_at='2025-10-02T12:23:53.063627',shelved_host='compute-0.ctlplane.example.com',shelved_image_id='a84a17ed-71a4-4591-9f9d-d22a239469b6'},tags=<?>,task_state='shelving_offloading',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:23:50Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='0ea122e2fff94f2ba7c78bf30b04029c',uuid=ae6bf863-8cca-48ab-a98f-065f8382fa99,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='shelved') vif={"id": "f1306fa9-9429-43db-a3f4-48a2399611d7", "address": "fa:16:3e:41:ec:88", "network": {"id": 
"20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf1306fa9-94", "ovs_interfaceid": "f1306fa9-9429-43db-a3f4-48a2399611d7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:23:56 compute-0 nova_compute[192079]: 2025-10-02 12:23:56.311 2 DEBUG nova.network.os_vif_util [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Converting VIF {"id": "f1306fa9-9429-43db-a3f4-48a2399611d7", "address": "fa:16:3e:41:ec:88", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf1306fa9-94", "ovs_interfaceid": "f1306fa9-9429-43db-a3f4-48a2399611d7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:23:56 compute-0 nova_compute[192079]: 2025-10-02 12:23:56.312 2 DEBUG nova.network.os_vif_util [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:41:ec:88,bridge_name='br-int',has_traffic_filtering=True,id=f1306fa9-9429-43db-a3f4-48a2399611d7,network=Network(20eb29be-ee23-463b-85af-bfc2388e9f77),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf1306fa9-94') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:23:56 compute-0 nova_compute[192079]: 2025-10-02 12:23:56.313 2 DEBUG os_vif [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:41:ec:88,bridge_name='br-int',has_traffic_filtering=True,id=f1306fa9-9429-43db-a3f4-48a2399611d7,network=Network(20eb29be-ee23-463b-85af-bfc2388e9f77),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf1306fa9-94') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:23:56 compute-0 nova_compute[192079]: 2025-10-02 12:23:56.315 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:56 compute-0 nova_compute[192079]: 2025-10-02 12:23:56.315 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapf1306fa9-94, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:23:56 compute-0 nova_compute[192079]: 2025-10-02 12:23:56.367 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:56 compute-0 nova_compute[192079]: 2025-10-02 12:23:56.369 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:56 compute-0 nova_compute[192079]: 2025-10-02 12:23:56.371 2 INFO os_vif [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:41:ec:88,bridge_name='br-int',has_traffic_filtering=True,id=f1306fa9-9429-43db-a3f4-48a2399611d7,network=Network(20eb29be-ee23-463b-85af-bfc2388e9f77),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf1306fa9-94')
Oct 02 12:23:56 compute-0 nova_compute[192079]: 2025-10-02 12:23:56.372 2 INFO nova.virt.libvirt.driver [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Deleting instance files /var/lib/nova/instances/ae6bf863-8cca-48ab-a98f-065f8382fa99_del
Oct 02 12:23:56 compute-0 nova_compute[192079]: 2025-10-02 12:23:56.379 2 INFO nova.virt.libvirt.driver [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Deletion of /var/lib/nova/instances/ae6bf863-8cca-48ab-a98f-065f8382fa99_del complete
Oct 02 12:23:56 compute-0 nova_compute[192079]: 2025-10-02 12:23:56.419 2 DEBUG nova.compute.manager [req-15206b53-82f8-48f8-a777-a8e99e6d16e6 req-2971e817-acac-43fb-93a3-b176f8f21785 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Received event network-changed-f1306fa9-9429-43db-a3f4-48a2399611d7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:23:56 compute-0 nova_compute[192079]: 2025-10-02 12:23:56.419 2 DEBUG nova.compute.manager [req-15206b53-82f8-48f8-a777-a8e99e6d16e6 req-2971e817-acac-43fb-93a3-b176f8f21785 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Refreshing instance network info cache due to event network-changed-f1306fa9-9429-43db-a3f4-48a2399611d7. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:23:56 compute-0 nova_compute[192079]: 2025-10-02 12:23:56.420 2 DEBUG oslo_concurrency.lockutils [req-15206b53-82f8-48f8-a777-a8e99e6d16e6 req-2971e817-acac-43fb-93a3-b176f8f21785 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-ae6bf863-8cca-48ab-a98f-065f8382fa99" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:23:56 compute-0 nova_compute[192079]: 2025-10-02 12:23:56.420 2 DEBUG oslo_concurrency.lockutils [req-15206b53-82f8-48f8-a777-a8e99e6d16e6 req-2971e817-acac-43fb-93a3-b176f8f21785 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-ae6bf863-8cca-48ab-a98f-065f8382fa99" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:23:56 compute-0 nova_compute[192079]: 2025-10-02 12:23:56.421 2 DEBUG nova.network.neutron [req-15206b53-82f8-48f8-a777-a8e99e6d16e6 req-2971e817-acac-43fb-93a3-b176f8f21785 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Refreshing network info cache for port f1306fa9-9429-43db-a3f4-48a2399611d7 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:23:56 compute-0 nova_compute[192079]: 2025-10-02 12:23:56.581 2 INFO nova.scheduler.client.report [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Deleted allocations for instance ae6bf863-8cca-48ab-a98f-065f8382fa99
Oct 02 12:23:56 compute-0 nova_compute[192079]: 2025-10-02 12:23:56.661 2 DEBUG oslo_concurrency.lockutils [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:23:56 compute-0 nova_compute[192079]: 2025-10-02 12:23:56.662 2 DEBUG oslo_concurrency.lockutils [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:23:56 compute-0 nova_compute[192079]: 2025-10-02 12:23:56.739 2 DEBUG nova.compute.provider_tree [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:23:56 compute-0 nova_compute[192079]: 2025-10-02 12:23:56.759 2 DEBUG nova.scheduler.client.report [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:23:56 compute-0 sshd-session[237973]: Accepted publickey for nova from 192.168.122.102 port 45116 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:23:56 compute-0 nova_compute[192079]: 2025-10-02 12:23:56.792 2 DEBUG oslo_concurrency.lockutils [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.130s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:23:56 compute-0 systemd-logind[827]: New session 64 of user nova.
Oct 02 12:23:56 compute-0 systemd[1]: Created slice User Slice of UID 42436.
Oct 02 12:23:56 compute-0 systemd[1]: Starting User Runtime Directory /run/user/42436...
Oct 02 12:23:56 compute-0 systemd[1]: Finished User Runtime Directory /run/user/42436.
Oct 02 12:23:56 compute-0 systemd[1]: Starting User Manager for UID 42436...
Oct 02 12:23:56 compute-0 systemd[237977]: pam_unix(systemd-user:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:23:56 compute-0 nova_compute[192079]: 2025-10-02 12:23:56.912 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:23:56 compute-0 nova_compute[192079]: 2025-10-02 12:23:56.926 2 DEBUG oslo_concurrency.lockutils [None req-743f8fdf-6d4f-441d-be71-0f31da4e9e8e 0ea122e2fff94f2ba7c78bf30b04029c ffce7d629aa24a7f970d93b2a79045f1 - - default default] Lock "ae6bf863-8cca-48ab-a98f-065f8382fa99" "released" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: held 11.035s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:23:57 compute-0 systemd[237977]: Queued start job for default target Main User Target.
Oct 02 12:23:57 compute-0 systemd[237977]: Created slice User Application Slice.
Oct 02 12:23:57 compute-0 systemd[237977]: Started Mark boot as successful after the user session has run 2 minutes.
Oct 02 12:23:57 compute-0 systemd[237977]: Started Daily Cleanup of User's Temporary Directories.
Oct 02 12:23:57 compute-0 systemd[237977]: Reached target Paths.
Oct 02 12:23:57 compute-0 systemd[237977]: Reached target Timers.
Oct 02 12:23:57 compute-0 systemd[237977]: Starting D-Bus User Message Bus Socket...
Oct 02 12:23:57 compute-0 systemd[237977]: Starting Create User's Volatile Files and Directories...
Oct 02 12:23:57 compute-0 systemd[237977]: Finished Create User's Volatile Files and Directories.
Oct 02 12:23:57 compute-0 systemd[237977]: Listening on D-Bus User Message Bus Socket.
Oct 02 12:23:57 compute-0 systemd[237977]: Reached target Sockets.
Oct 02 12:23:57 compute-0 systemd[237977]: Reached target Basic System.
Oct 02 12:23:57 compute-0 systemd[237977]: Reached target Main User Target.
Oct 02 12:23:57 compute-0 systemd[237977]: Startup finished in 161ms.
Oct 02 12:23:57 compute-0 systemd[1]: Started User Manager for UID 42436.
Oct 02 12:23:57 compute-0 systemd[1]: Started Session 64 of User nova.
Oct 02 12:23:57 compute-0 sshd-session[237973]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:23:57 compute-0 sshd-session[237992]: Received disconnect from 192.168.122.102 port 45116:11: disconnected by user
Oct 02 12:23:57 compute-0 sshd-session[237992]: Disconnected from user nova 192.168.122.102 port 45116
Oct 02 12:23:57 compute-0 sshd-session[237973]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:23:57 compute-0 systemd[1]: session-64.scope: Deactivated successfully.
Oct 02 12:23:57 compute-0 systemd-logind[827]: Session 64 logged out. Waiting for processes to exit.
Oct 02 12:23:57 compute-0 systemd-logind[827]: Removed session 64.
Oct 02 12:23:57 compute-0 sshd-session[237994]: Accepted publickey for nova from 192.168.122.102 port 45130 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:23:57 compute-0 systemd-logind[827]: New session 66 of user nova.
Oct 02 12:23:57 compute-0 systemd[1]: Started Session 66 of User nova.
Oct 02 12:23:57 compute-0 sshd-session[237994]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:23:57 compute-0 sshd-session[237997]: Received disconnect from 192.168.122.102 port 45130:11: disconnected by user
Oct 02 12:23:57 compute-0 sshd-session[237997]: Disconnected from user nova 192.168.122.102 port 45130
Oct 02 12:23:57 compute-0 sshd-session[237994]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:23:57 compute-0 systemd[1]: session-66.scope: Deactivated successfully.
Oct 02 12:23:57 compute-0 systemd-logind[827]: Session 66 logged out. Waiting for processes to exit.
Oct 02 12:23:57 compute-0 systemd-logind[827]: Removed session 66.
Oct 02 12:23:57 compute-0 nova_compute[192079]: 2025-10-02 12:23:57.519 2 DEBUG nova.network.neutron [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Successfully updated port: 1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:23:57 compute-0 nova_compute[192079]: 2025-10-02 12:23:57.562 2 DEBUG oslo_concurrency.lockutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "refresh_cache-02f550a4-c57e-4d6f-b62b-decc0dbb1dbe" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:23:57 compute-0 nova_compute[192079]: 2025-10-02 12:23:57.563 2 DEBUG oslo_concurrency.lockutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquired lock "refresh_cache-02f550a4-c57e-4d6f-b62b-decc0dbb1dbe" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:23:57 compute-0 nova_compute[192079]: 2025-10-02 12:23:57.563 2 DEBUG nova.network.neutron [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:23:58 compute-0 nova_compute[192079]: 2025-10-02 12:23:58.357 2 DEBUG nova.network.neutron [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:23:58 compute-0 nova_compute[192079]: 2025-10-02 12:23:58.644 2 DEBUG nova.network.neutron [req-15206b53-82f8-48f8-a777-a8e99e6d16e6 req-2971e817-acac-43fb-93a3-b176f8f21785 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Updated VIF entry in instance network info cache for port f1306fa9-9429-43db-a3f4-48a2399611d7. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:23:58 compute-0 nova_compute[192079]: 2025-10-02 12:23:58.644 2 DEBUG nova.network.neutron [req-15206b53-82f8-48f8-a777-a8e99e6d16e6 req-2971e817-acac-43fb-93a3-b176f8f21785 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Updating instance_info_cache with network_info: [{"id": "f1306fa9-9429-43db-a3f4-48a2399611d7", "address": "fa:16:3e:41:ec:88", "network": {"id": "20eb29be-ee23-463b-85af-bfc2388e9f77", "bridge": null, "label": "tempest-ServerActionsTestOtherB-370285634-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ffce7d629aa24a7f970d93b2a79045f1", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "unbound", "details": {}, "devname": "tapf1306fa9-94", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:23:58 compute-0 nova_compute[192079]: 2025-10-02 12:23:58.665 2 DEBUG nova.compute.manager [req-8a6aa82e-af2c-4919-b68e-fd12620d1976 req-731b4b6b-bb5e-45f9-b5f0-4a3f752e1554 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Received event network-changed-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:23:58 compute-0 nova_compute[192079]: 2025-10-02 12:23:58.666 2 DEBUG nova.compute.manager [req-8a6aa82e-af2c-4919-b68e-fd12620d1976 req-731b4b6b-bb5e-45f9-b5f0-4a3f752e1554 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Refreshing instance network info cache due to event network-changed-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:23:58 compute-0 nova_compute[192079]: 2025-10-02 12:23:58.666 2 DEBUG oslo_concurrency.lockutils [req-8a6aa82e-af2c-4919-b68e-fd12620d1976 req-731b4b6b-bb5e-45f9-b5f0-4a3f752e1554 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-02f550a4-c57e-4d6f-b62b-decc0dbb1dbe" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:23:58 compute-0 nova_compute[192079]: 2025-10-02 12:23:58.668 2 DEBUG oslo_concurrency.lockutils [req-15206b53-82f8-48f8-a777-a8e99e6d16e6 req-2971e817-acac-43fb-93a3-b176f8f21785 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-ae6bf863-8cca-48ab-a98f-065f8382fa99" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.117 2 DEBUG nova.network.neutron [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Updating instance_info_cache with network_info: [{"id": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "address": "fa:16:3e:e3:d2:69", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap1cf70acd-de", "ovs_interfaceid": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.165 2 DEBUG nova.compute.manager [req-ce3319fc-9bc2-44be-b639-ea2648cf8a7e req-eb566131-458f-460f-9068-fe1f1e8c6334 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Received event network-vif-unplugged-375c20c8-b3bc-484b-820a-f3988fb1bfa1 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.166 2 DEBUG oslo_concurrency.lockutils [req-ce3319fc-9bc2-44be-b639-ea2648cf8a7e req-eb566131-458f-460f-9068-fe1f1e8c6334 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "32196dd3-2739-4c43-9532-b0365f8095af-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.166 2 DEBUG oslo_concurrency.lockutils [req-ce3319fc-9bc2-44be-b639-ea2648cf8a7e req-eb566131-458f-460f-9068-fe1f1e8c6334 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "32196dd3-2739-4c43-9532-b0365f8095af-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.166 2 DEBUG oslo_concurrency.lockutils [req-ce3319fc-9bc2-44be-b639-ea2648cf8a7e req-eb566131-458f-460f-9068-fe1f1e8c6334 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "32196dd3-2739-4c43-9532-b0365f8095af-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.166 2 DEBUG nova.compute.manager [req-ce3319fc-9bc2-44be-b639-ea2648cf8a7e req-eb566131-458f-460f-9068-fe1f1e8c6334 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] No waiting events found dispatching network-vif-unplugged-375c20c8-b3bc-484b-820a-f3988fb1bfa1 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.166 2 WARNING nova.compute.manager [req-ce3319fc-9bc2-44be-b639-ea2648cf8a7e req-eb566131-458f-460f-9068-fe1f1e8c6334 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Received unexpected event network-vif-unplugged-375c20c8-b3bc-484b-820a-f3988fb1bfa1 for instance with vm_state active and task_state resize_migrating.
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.179 2 DEBUG oslo_concurrency.lockutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Releasing lock "refresh_cache-02f550a4-c57e-4d6f-b62b-decc0dbb1dbe" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.180 2 DEBUG nova.compute.manager [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Instance network_info: |[{"id": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "address": "fa:16:3e:e3:d2:69", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap1cf70acd-de", "ovs_interfaceid": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.180 2 DEBUG oslo_concurrency.lockutils [req-8a6aa82e-af2c-4919-b68e-fd12620d1976 req-731b4b6b-bb5e-45f9-b5f0-4a3f752e1554 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-02f550a4-c57e-4d6f-b62b-decc0dbb1dbe" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.180 2 DEBUG nova.network.neutron [req-8a6aa82e-af2c-4919-b68e-fd12620d1976 req-731b4b6b-bb5e-45f9-b5f0-4a3f752e1554 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Refreshing network info cache for port 1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.182 2 DEBUG nova.virt.libvirt.driver [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Start _get_guest_xml network_info=[{"id": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "address": "fa:16:3e:e3:d2:69", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap1cf70acd-de", "ovs_interfaceid": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.186 2 WARNING nova.virt.libvirt.driver [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.200 2 DEBUG nova.virt.libvirt.host [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.201 2 DEBUG nova.virt.libvirt.host [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.210 2 DEBUG nova.virt.libvirt.host [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.212 2 DEBUG nova.virt.libvirt.host [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.214 2 DEBUG nova.virt.libvirt.driver [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.214 2 DEBUG nova.virt.hardware [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.215 2 DEBUG nova.virt.hardware [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.215 2 DEBUG nova.virt.hardware [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.215 2 DEBUG nova.virt.hardware [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.215 2 DEBUG nova.virt.hardware [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.215 2 DEBUG nova.virt.hardware [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.215 2 DEBUG nova.virt.hardware [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.216 2 DEBUG nova.virt.hardware [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.216 2 DEBUG nova.virt.hardware [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.216 2 DEBUG nova.virt.hardware [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.216 2 DEBUG nova.virt.hardware [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.219 2 DEBUG nova.virt.libvirt.vif [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:23:53Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-tempest.common.compute-instance-1540405064',display_name='tempest-tempest.common.compute-instance-1540405064',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-tempest-common-compute-instance-1540405064',id=118,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='e564a4cad5d443dba81ec04d2a05ced9',ramdisk_id='',reservation_id='r-sboi37yp',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServerActionsTestJSON-1646745100',owner_user_name='tempest-ServerActionsTest
JSON-1646745100-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:23:54Z,user_data=None,user_id='d54b1826121b47caba89932a78c06ccd',uuid=02f550a4-c57e-4d6f-b62b-decc0dbb1dbe,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "address": "fa:16:3e:e3:d2:69", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap1cf70acd-de", "ovs_interfaceid": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.220 2 DEBUG nova.network.os_vif_util [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converting VIF {"id": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "address": "fa:16:3e:e3:d2:69", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap1cf70acd-de", "ovs_interfaceid": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.220 2 DEBUG nova.network.os_vif_util [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:e3:d2:69,bridge_name='br-int',has_traffic_filtering=True,id=1cf70acd-de15-44ba-8fd2-ea2846ce3ee6,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap1cf70acd-de') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.221 2 DEBUG nova.objects.instance [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'pci_devices' on Instance uuid 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.237 2 DEBUG nova.virt.libvirt.driver [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:24:00 compute-0 nova_compute[192079]:   <uuid>02f550a4-c57e-4d6f-b62b-decc0dbb1dbe</uuid>
Oct 02 12:24:00 compute-0 nova_compute[192079]:   <name>instance-00000076</name>
Oct 02 12:24:00 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:24:00 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:24:00 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:24:00 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:       <nova:name>tempest-tempest.common.compute-instance-1540405064</nova:name>
Oct 02 12:24:00 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:24:00</nova:creationTime>
Oct 02 12:24:00 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:24:00 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:24:00 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:24:00 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:24:00 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:24:00 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:24:00 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:24:00 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:24:00 compute-0 nova_compute[192079]:         <nova:user uuid="d54b1826121b47caba89932a78c06ccd">tempest-ServerActionsTestJSON-1646745100-project-member</nova:user>
Oct 02 12:24:00 compute-0 nova_compute[192079]:         <nova:project uuid="e564a4cad5d443dba81ec04d2a05ced9">tempest-ServerActionsTestJSON-1646745100</nova:project>
Oct 02 12:24:00 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:24:00 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:24:00 compute-0 nova_compute[192079]:         <nova:port uuid="1cf70acd-de15-44ba-8fd2-ea2846ce3ee6">
Oct 02 12:24:00 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.10" ipVersion="4"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:24:00 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:24:00 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:24:00 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <system>
Oct 02 12:24:00 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:24:00 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:24:00 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:24:00 compute-0 nova_compute[192079]:       <entry name="serial">02f550a4-c57e-4d6f-b62b-decc0dbb1dbe</entry>
Oct 02 12:24:00 compute-0 nova_compute[192079]:       <entry name="uuid">02f550a4-c57e-4d6f-b62b-decc0dbb1dbe</entry>
Oct 02 12:24:00 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     </system>
Oct 02 12:24:00 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:24:00 compute-0 nova_compute[192079]:   <os>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:   </os>
Oct 02 12:24:00 compute-0 nova_compute[192079]:   <features>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:   </features>
Oct 02 12:24:00 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:24:00 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:24:00 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:24:00 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/disk"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:24:00 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/disk.config"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:24:00 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:e3:d2:69"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:       <target dev="tap1cf70acd-de"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:24:00 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/console.log" append="off"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <video>
Oct 02 12:24:00 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     </video>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:24:00 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:24:00 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:24:00 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:24:00 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:24:00 compute-0 nova_compute[192079]: </domain>
Oct 02 12:24:00 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.238 2 DEBUG nova.compute.manager [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Preparing to wait for external event network-vif-plugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.239 2 DEBUG oslo_concurrency.lockutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.239 2 DEBUG oslo_concurrency.lockutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.239 2 DEBUG oslo_concurrency.lockutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.242 2 DEBUG nova.virt.libvirt.vif [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:23:53Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-tempest.common.compute-instance-1540405064',display_name='tempest-tempest.common.compute-instance-1540405064',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-tempest-common-compute-instance-1540405064',id=118,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='e564a4cad5d443dba81ec04d2a05ced9',ramdisk_id='',reservation_id='r-sboi37yp',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServerActionsTestJSON-1646745100',owner_user_name='tempest-ServerA
ctionsTestJSON-1646745100-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:23:54Z,user_data=None,user_id='d54b1826121b47caba89932a78c06ccd',uuid=02f550a4-c57e-4d6f-b62b-decc0dbb1dbe,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "address": "fa:16:3e:e3:d2:69", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap1cf70acd-de", "ovs_interfaceid": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.243 2 DEBUG nova.network.os_vif_util [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converting VIF {"id": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "address": "fa:16:3e:e3:d2:69", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap1cf70acd-de", "ovs_interfaceid": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.244 2 DEBUG nova.network.os_vif_util [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:e3:d2:69,bridge_name='br-int',has_traffic_filtering=True,id=1cf70acd-de15-44ba-8fd2-ea2846ce3ee6,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap1cf70acd-de') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.244 2 DEBUG os_vif [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:e3:d2:69,bridge_name='br-int',has_traffic_filtering=True,id=1cf70acd-de15-44ba-8fd2-ea2846ce3ee6,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap1cf70acd-de') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.245 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.245 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.245 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.248 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.248 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap1cf70acd-de, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.249 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap1cf70acd-de, col_values=(('external_ids', {'iface-id': '1cf70acd-de15-44ba-8fd2-ea2846ce3ee6', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:e3:d2:69', 'vm-uuid': '02f550a4-c57e-4d6f-b62b-decc0dbb1dbe'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.251 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:00 compute-0 NetworkManager[51160]: <info>  [1759407840.2526] manager: (tap1cf70acd-de): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/196)
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.253 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.260 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.261 2 INFO os_vif [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:e3:d2:69,bridge_name='br-int',has_traffic_filtering=True,id=1cf70acd-de15-44ba-8fd2-ea2846ce3ee6,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap1cf70acd-de')
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.539 2 DEBUG nova.virt.libvirt.driver [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.540 2 DEBUG nova.virt.libvirt.driver [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.540 2 DEBUG nova.virt.libvirt.driver [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] No VIF found with MAC fa:16:3e:e3:d2:69, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:24:00 compute-0 nova_compute[192079]: 2025-10-02 12:24:00.541 2 INFO nova.virt.libvirt.driver [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Using config drive
Oct 02 12:24:00 compute-0 sshd-session[238003]: Accepted publickey for nova from 192.168.122.102 port 45138 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:24:00 compute-0 systemd-logind[827]: New session 67 of user nova.
Oct 02 12:24:00 compute-0 systemd[1]: Started Session 67 of User nova.
Oct 02 12:24:00 compute-0 sshd-session[238003]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:24:01 compute-0 sshd-session[238006]: Received disconnect from 192.168.122.102 port 45138:11: disconnected by user
Oct 02 12:24:01 compute-0 sshd-session[238006]: Disconnected from user nova 192.168.122.102 port 45138
Oct 02 12:24:01 compute-0 sshd-session[238003]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:24:01 compute-0 systemd[1]: session-67.scope: Deactivated successfully.
Oct 02 12:24:01 compute-0 systemd-logind[827]: Session 67 logged out. Waiting for processes to exit.
Oct 02 12:24:01 compute-0 systemd-logind[827]: Removed session 67.
Oct 02 12:24:01 compute-0 sshd-session[238008]: Accepted publickey for nova from 192.168.122.102 port 60988 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:24:01 compute-0 systemd-logind[827]: New session 68 of user nova.
Oct 02 12:24:01 compute-0 systemd[1]: Started Session 68 of User nova.
Oct 02 12:24:01 compute-0 sshd-session[238008]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:24:01 compute-0 podman[238010]: 2025-10-02 12:24:01.406303321 +0000 UTC m=+0.057384364 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, container_name=ceilometer_agent_compute, managed_by=edpm_ansible, org.label-schema.build-date=20251001, config_id=edpm, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.name=CentOS 
Stream 9 Base Image, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:24:01 compute-0 sshd-session[238021]: Received disconnect from 192.168.122.102 port 60988:11: disconnected by user
Oct 02 12:24:01 compute-0 sshd-session[238021]: Disconnected from user nova 192.168.122.102 port 60988
Oct 02 12:24:01 compute-0 sshd-session[238008]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:24:01 compute-0 systemd[1]: session-68.scope: Deactivated successfully.
Oct 02 12:24:01 compute-0 systemd-logind[827]: Session 68 logged out. Waiting for processes to exit.
Oct 02 12:24:01 compute-0 systemd-logind[827]: Removed session 68.
Oct 02 12:24:01 compute-0 sshd-session[238030]: Accepted publickey for nova from 192.168.122.102 port 60990 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:24:01 compute-0 systemd-logind[827]: New session 69 of user nova.
Oct 02 12:24:01 compute-0 systemd[1]: Started Session 69 of User nova.
Oct 02 12:24:01 compute-0 sshd-session[238030]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:24:01 compute-0 nova_compute[192079]: 2025-10-02 12:24:01.669 2 INFO nova.virt.libvirt.driver [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Creating config drive at /var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/disk.config
Oct 02 12:24:01 compute-0 sshd-session[238033]: Received disconnect from 192.168.122.102 port 60990:11: disconnected by user
Oct 02 12:24:01 compute-0 sshd-session[238033]: Disconnected from user nova 192.168.122.102 port 60990
Oct 02 12:24:01 compute-0 sshd-session[238030]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:24:01 compute-0 nova_compute[192079]: 2025-10-02 12:24:01.677 2 DEBUG oslo_concurrency.processutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpv16cyvov execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:24:01 compute-0 systemd[1]: session-69.scope: Deactivated successfully.
Oct 02 12:24:01 compute-0 systemd-logind[827]: Session 69 logged out. Waiting for processes to exit.
Oct 02 12:24:01 compute-0 systemd-logind[827]: Removed session 69.
Oct 02 12:24:01 compute-0 nova_compute[192079]: 2025-10-02 12:24:01.805 2 DEBUG oslo_concurrency.processutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpv16cyvov" returned: 0 in 0.129s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:24:01 compute-0 kernel: tap1cf70acd-de: entered promiscuous mode
Oct 02 12:24:01 compute-0 NetworkManager[51160]: <info>  [1759407841.8820] manager: (tap1cf70acd-de): new Tun device (/org/freedesktop/NetworkManager/Devices/197)
Oct 02 12:24:01 compute-0 nova_compute[192079]: 2025-10-02 12:24:01.883 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:01 compute-0 ovn_controller[94336]: 2025-10-02T12:24:01Z|00383|binding|INFO|Claiming lport 1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 for this chassis.
Oct 02 12:24:01 compute-0 ovn_controller[94336]: 2025-10-02T12:24:01Z|00384|binding|INFO|1cf70acd-de15-44ba-8fd2-ea2846ce3ee6: Claiming fa:16:3e:e3:d2:69 10.100.0.10
Oct 02 12:24:01 compute-0 nova_compute[192079]: 2025-10-02 12:24:01.886 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:01 compute-0 ovn_controller[94336]: 2025-10-02T12:24:01Z|00385|binding|INFO|Setting lport 1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 ovn-installed in OVS
Oct 02 12:24:01 compute-0 nova_compute[192079]: 2025-10-02 12:24:01.898 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:01 compute-0 nova_compute[192079]: 2025-10-02 12:24:01.900 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:01 compute-0 ovn_controller[94336]: 2025-10-02T12:24:01Z|00386|binding|INFO|Setting lport 1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 up in Southbound
Oct 02 12:24:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:01.906 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:e3:d2:69 10.100.0.10'], port_security=['fa:16:3e:e3:d2:69 10.100.0.10'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.10/28', 'neutron:device_id': '02f550a4-c57e-4d6f-b62b-decc0dbb1dbe', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-a04f937a-375f-4fb0-90fe-5f514a88668f', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'neutron:revision_number': '2', 'neutron:security_group_ids': 'ac5f8c49-69a1-4f51-9d25-21551ac4bbc1', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=50c0aa38-5fd8-41c7-b4bf-85b59722c5c3, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=1cf70acd-de15-44ba-8fd2-ea2846ce3ee6) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:24:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:01.907 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 in datapath a04f937a-375f-4fb0-90fe-5f514a88668f bound to our chassis
Oct 02 12:24:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:01.908 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network a04f937a-375f-4fb0-90fe-5f514a88668f
Oct 02 12:24:01 compute-0 nova_compute[192079]: 2025-10-02 12:24:01.916 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:01 compute-0 systemd-udevd[238053]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:24:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:01.920 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8b7f252c-ee52-45ae-aa14-fa9ea9e0260b]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:01.921 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tapa04f937a-31 in ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:24:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:01.924 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tapa04f937a-30 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:24:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:01.924 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[eab83150-1535-45cb-a181-95c1f4bd5427]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:01.924 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[48b2fe33-9bc4-4e69-b800-fd94a6f359a5]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:01 compute-0 systemd-machined[152150]: New machine qemu-52-instance-00000076.
Oct 02 12:24:01 compute-0 NetworkManager[51160]: <info>  [1759407841.9316] device (tap1cf70acd-de): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:24:01 compute-0 NetworkManager[51160]: <info>  [1759407841.9325] device (tap1cf70acd-de): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:24:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:01.936 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[5880680d-5d29-4917-b0a0-99e2522d7ae5]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:01 compute-0 systemd[1]: Started Virtual Machine qemu-52-instance-00000076.
Oct 02 12:24:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:01.965 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ba9448f6-9b07-455b-b8d2-4b90c12fbd99]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:02.003 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[5ba88723-ec06-490a-b03b-e7bc6c6e0a42]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:02 compute-0 NetworkManager[51160]: <info>  [1759407842.0112] manager: (tapa04f937a-30): new Veth device (/org/freedesktop/NetworkManager/Devices/198)
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:02.011 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d4ee6d43-7284-4006-9445-f07a6ff01d73]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:02.049 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[f2fe5934-082b-406d-945a-b86c0878280d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:02.053 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[9328cde3-2fe7-4329-af37-e07af85c89de]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:02 compute-0 NetworkManager[51160]: <info>  [1759407842.0760] device (tapa04f937a-30): carrier: link connected
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:02.081 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[f482236d-1ecb-4258-8564-014756dc7a32]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:02.102 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[56a2f2aa-85ec-4287-88ae-c6fb44b44c0c]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapa04f937a-31'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:33:93:68'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 125], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 583970, 'reachable_time': 16933, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 238086, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:02 compute-0 nova_compute[192079]: 2025-10-02 12:24:02.116 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:02.121 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8221dd29-4de3-48c2-b114-0409c8b001af]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe33:9368'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 583970, 'tstamp': 583970}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 238087, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:02.141 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[65efe22b-e5e5-441d-abcd-079d99534d6f]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapa04f937a-31'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:33:93:68'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 125], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 583970, 'reachable_time': 16933, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 238088, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:02.184 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9311b051-1593-4274-a706-1ad2fa7beed3]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:02.223 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:02.224 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:02.224 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:02.263 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0c0e4dcc-e659-404e-be34-adeb5b3c81af]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:02.264 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapa04f937a-30, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:02.264 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:02.265 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapa04f937a-30, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:02 compute-0 kernel: tapa04f937a-30: entered promiscuous mode
Oct 02 12:24:02 compute-0 NetworkManager[51160]: <info>  [1759407842.2684] manager: (tapa04f937a-30): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/199)
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:02.269 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tapa04f937a-30, col_values=(('external_ids', {'iface-id': '38f1ac16-18c6-4b4a-b769-ebc7dd5181d4'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:02 compute-0 ovn_controller[94336]: 2025-10-02T12:24:02Z|00387|binding|INFO|Releasing lport 38f1ac16-18c6-4b4a-b769-ebc7dd5181d4 from this chassis (sb_readonly=0)
Oct 02 12:24:02 compute-0 nova_compute[192079]: 2025-10-02 12:24:02.281 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:02 compute-0 nova_compute[192079]: 2025-10-02 12:24:02.285 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:02.286 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/a04f937a-375f-4fb0-90fe-5f514a88668f.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/a04f937a-375f-4fb0-90fe-5f514a88668f.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:02.287 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8341238c-b0ad-4bcc-9259-b8286593fb97]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:02.288 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-a04f937a-375f-4fb0-90fe-5f514a88668f
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/a04f937a-375f-4fb0-90fe-5f514a88668f.pid.haproxy
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID a04f937a-375f-4fb0-90fe-5f514a88668f
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:02.289 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'env', 'PROCESS_TAG=haproxy-a04f937a-375f-4fb0-90fe-5f514a88668f', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/a04f937a-375f-4fb0-90fe-5f514a88668f.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:24:02 compute-0 nova_compute[192079]: 2025-10-02 12:24:02.423 2 DEBUG nova.compute.manager [req-7d8692c8-6330-4987-b7b1-77c594806f5b req-3afd3b87-50ab-4e60-bbc2-0fd18288f443 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Received event network-vif-plugged-375c20c8-b3bc-484b-820a-f3988fb1bfa1 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:24:02 compute-0 nova_compute[192079]: 2025-10-02 12:24:02.424 2 DEBUG oslo_concurrency.lockutils [req-7d8692c8-6330-4987-b7b1-77c594806f5b req-3afd3b87-50ab-4e60-bbc2-0fd18288f443 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "32196dd3-2739-4c43-9532-b0365f8095af-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:02 compute-0 nova_compute[192079]: 2025-10-02 12:24:02.424 2 DEBUG oslo_concurrency.lockutils [req-7d8692c8-6330-4987-b7b1-77c594806f5b req-3afd3b87-50ab-4e60-bbc2-0fd18288f443 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "32196dd3-2739-4c43-9532-b0365f8095af-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:02 compute-0 nova_compute[192079]: 2025-10-02 12:24:02.425 2 DEBUG oslo_concurrency.lockutils [req-7d8692c8-6330-4987-b7b1-77c594806f5b req-3afd3b87-50ab-4e60-bbc2-0fd18288f443 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "32196dd3-2739-4c43-9532-b0365f8095af-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:02 compute-0 nova_compute[192079]: 2025-10-02 12:24:02.425 2 DEBUG nova.compute.manager [req-7d8692c8-6330-4987-b7b1-77c594806f5b req-3afd3b87-50ab-4e60-bbc2-0fd18288f443 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] No waiting events found dispatching network-vif-plugged-375c20c8-b3bc-484b-820a-f3988fb1bfa1 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:24:02 compute-0 nova_compute[192079]: 2025-10-02 12:24:02.425 2 WARNING nova.compute.manager [req-7d8692c8-6330-4987-b7b1-77c594806f5b req-3afd3b87-50ab-4e60-bbc2-0fd18288f443 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Received unexpected event network-vif-plugged-375c20c8-b3bc-484b-820a-f3988fb1bfa1 for instance with vm_state active and task_state resize_migrated.
Oct 02 12:24:02 compute-0 nova_compute[192079]: 2025-10-02 12:24:02.496 2 DEBUG nova.compute.manager [req-a80c5cc9-ca8e-4605-9a4b-d4e7f1c03022 req-84381345-a322-458b-9575-2c3aaa9d276a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Received event network-vif-plugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:24:02 compute-0 nova_compute[192079]: 2025-10-02 12:24:02.496 2 DEBUG oslo_concurrency.lockutils [req-a80c5cc9-ca8e-4605-9a4b-d4e7f1c03022 req-84381345-a322-458b-9575-2c3aaa9d276a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:02 compute-0 nova_compute[192079]: 2025-10-02 12:24:02.496 2 DEBUG oslo_concurrency.lockutils [req-a80c5cc9-ca8e-4605-9a4b-d4e7f1c03022 req-84381345-a322-458b-9575-2c3aaa9d276a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:02 compute-0 nova_compute[192079]: 2025-10-02 12:24:02.497 2 DEBUG oslo_concurrency.lockutils [req-a80c5cc9-ca8e-4605-9a4b-d4e7f1c03022 req-84381345-a322-458b-9575-2c3aaa9d276a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:02 compute-0 nova_compute[192079]: 2025-10-02 12:24:02.497 2 DEBUG nova.compute.manager [req-a80c5cc9-ca8e-4605-9a4b-d4e7f1c03022 req-84381345-a322-458b-9575-2c3aaa9d276a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Processing event network-vif-plugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:24:02 compute-0 nova_compute[192079]: 2025-10-02 12:24:02.551 2 INFO nova.network.neutron [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Updating port 375c20c8-b3bc-484b-820a-f3988fb1bfa1 with attributes {'binding:host_id': 'compute-0.ctlplane.example.com', 'device_owner': 'compute:nova'}
Oct 02 12:24:02 compute-0 nova_compute[192079]: 2025-10-02 12:24:02.652 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:02.652 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=28, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=27) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:24:02 compute-0 podman[238125]: 2025-10-02 12:24:02.617805249 +0000 UTC m=+0.020193731 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:24:02 compute-0 podman[238125]: 2025-10-02 12:24:02.776648357 +0000 UTC m=+0.179036809 container create 546ced081c8fd2923fb66b42ce0b13e56e4b6604a4428dde87136f0928481235 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:24:02 compute-0 systemd[1]: Started libpod-conmon-546ced081c8fd2923fb66b42ce0b13e56e4b6604a4428dde87136f0928481235.scope.
Oct 02 12:24:02 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:24:02 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/71cac6337e6eac7db7050469188abd1ff0eff08ba991ce1cd691e7bf49ec2698/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:24:02 compute-0 podman[238125]: 2025-10-02 12:24:02.913495277 +0000 UTC m=+0.315883739 container init 546ced081c8fd2923fb66b42ce0b13e56e4b6604a4428dde87136f0928481235 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0)
Oct 02 12:24:02 compute-0 podman[238125]: 2025-10-02 12:24:02.919443149 +0000 UTC m=+0.321831601 container start 546ced081c8fd2923fb66b42ce0b13e56e4b6604a4428dde87136f0928481235 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true)
Oct 02 12:24:02 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[238141]: [NOTICE]   (238145) : New worker (238147) forked
Oct 02 12:24:02 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[238141]: [NOTICE]   (238145) : Loading success.
Oct 02 12:24:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:02.994 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 8 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.040 2 DEBUG nova.compute.manager [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.041 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407843.0401566, 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.042 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] VM Started (Lifecycle Event)
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.044 2 DEBUG nova.virt.libvirt.driver [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.047 2 INFO nova.virt.libvirt.driver [-] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Instance spawned successfully.
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.047 2 DEBUG nova.virt.libvirt.driver [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.064 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.069 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.073 2 DEBUG nova.virt.libvirt.driver [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.073 2 DEBUG nova.virt.libvirt.driver [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.074 2 DEBUG nova.virt.libvirt.driver [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.074 2 DEBUG nova.virt.libvirt.driver [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.075 2 DEBUG nova.virt.libvirt.driver [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.075 2 DEBUG nova.virt.libvirt.driver [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.085 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.086 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407843.0413256, 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.086 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] VM Paused (Lifecycle Event)
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.123 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.126 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407843.0434585, 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.127 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] VM Resumed (Lifecycle Event)
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.152 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.154 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.181 2 INFO nova.compute.manager [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Took 8.48 seconds to spawn the instance on the hypervisor.
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.182 2 DEBUG nova.compute.manager [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.183 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.267 2 INFO nova.compute.manager [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Took 9.11 seconds to build instance.
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.293 2 DEBUG oslo_concurrency.lockutils [None req-1e6c56fa-b371-420c-bbb5-daac8e73b71e d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 9.241s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.457 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759407828.4556131, ae6bf863-8cca-48ab-a98f-065f8382fa99 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.457 2 INFO nova.compute.manager [-] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] VM Stopped (Lifecycle Event)
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.501 2 DEBUG nova.compute.manager [None req-2d6bba28-14c4-4615-a548-c8d8d90174e3 - - - - - -] [instance: ae6bf863-8cca-48ab-a98f-065f8382fa99] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.741 2 DEBUG nova.network.neutron [req-8a6aa82e-af2c-4919-b68e-fd12620d1976 req-731b4b6b-bb5e-45f9-b5f0-4a3f752e1554 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Updated VIF entry in instance network info cache for port 1cf70acd-de15-44ba-8fd2-ea2846ce3ee6. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.741 2 DEBUG nova.network.neutron [req-8a6aa82e-af2c-4919-b68e-fd12620d1976 req-731b4b6b-bb5e-45f9-b5f0-4a3f752e1554 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Updating instance_info_cache with network_info: [{"id": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "address": "fa:16:3e:e3:d2:69", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap1cf70acd-de", "ovs_interfaceid": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:24:03 compute-0 nova_compute[192079]: 2025-10-02 12:24:03.767 2 DEBUG oslo_concurrency.lockutils [req-8a6aa82e-af2c-4919-b68e-fd12620d1976 req-731b4b6b-bb5e-45f9-b5f0-4a3f752e1554 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-02f550a4-c57e-4d6f-b62b-decc0dbb1dbe" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:24:04 compute-0 nova_compute[192079]: 2025-10-02 12:24:04.476 2 DEBUG oslo_concurrency.lockutils [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Acquiring lock "refresh_cache-32196dd3-2739-4c43-9532-b0365f8095af" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:24:04 compute-0 nova_compute[192079]: 2025-10-02 12:24:04.476 2 DEBUG oslo_concurrency.lockutils [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Acquired lock "refresh_cache-32196dd3-2739-4c43-9532-b0365f8095af" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:24:04 compute-0 nova_compute[192079]: 2025-10-02 12:24:04.476 2 DEBUG nova.network.neutron [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:24:04 compute-0 nova_compute[192079]: 2025-10-02 12:24:04.610 2 DEBUG nova.compute.manager [req-036b54a7-02ba-4321-a255-87515847fb61 req-e2bf5cd2-6cdd-4ae0-bcfe-f291e031b61e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Received event network-changed-375c20c8-b3bc-484b-820a-f3988fb1bfa1 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:24:04 compute-0 nova_compute[192079]: 2025-10-02 12:24:04.611 2 DEBUG nova.compute.manager [req-036b54a7-02ba-4321-a255-87515847fb61 req-e2bf5cd2-6cdd-4ae0-bcfe-f291e031b61e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Refreshing instance network info cache due to event network-changed-375c20c8-b3bc-484b-820a-f3988fb1bfa1. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:24:04 compute-0 nova_compute[192079]: 2025-10-02 12:24:04.611 2 DEBUG oslo_concurrency.lockutils [req-036b54a7-02ba-4321-a255-87515847fb61 req-e2bf5cd2-6cdd-4ae0-bcfe-f291e031b61e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-32196dd3-2739-4c43-9532-b0365f8095af" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:24:04 compute-0 nova_compute[192079]: 2025-10-02 12:24:04.633 2 DEBUG nova.compute.manager [req-acbb836f-d625-4105-b0ea-ec0dd7ec9618 req-96658927-b587-4350-a61a-52c70d62eb67 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Received event network-vif-plugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:24:04 compute-0 nova_compute[192079]: 2025-10-02 12:24:04.633 2 DEBUG oslo_concurrency.lockutils [req-acbb836f-d625-4105-b0ea-ec0dd7ec9618 req-96658927-b587-4350-a61a-52c70d62eb67 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:04 compute-0 nova_compute[192079]: 2025-10-02 12:24:04.633 2 DEBUG oslo_concurrency.lockutils [req-acbb836f-d625-4105-b0ea-ec0dd7ec9618 req-96658927-b587-4350-a61a-52c70d62eb67 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:04 compute-0 nova_compute[192079]: 2025-10-02 12:24:04.634 2 DEBUG oslo_concurrency.lockutils [req-acbb836f-d625-4105-b0ea-ec0dd7ec9618 req-96658927-b587-4350-a61a-52c70d62eb67 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:04 compute-0 nova_compute[192079]: 2025-10-02 12:24:04.634 2 DEBUG nova.compute.manager [req-acbb836f-d625-4105-b0ea-ec0dd7ec9618 req-96658927-b587-4350-a61a-52c70d62eb67 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] No waiting events found dispatching network-vif-plugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:24:04 compute-0 nova_compute[192079]: 2025-10-02 12:24:04.634 2 WARNING nova.compute.manager [req-acbb836f-d625-4105-b0ea-ec0dd7ec9618 req-96658927-b587-4350-a61a-52c70d62eb67 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Received unexpected event network-vif-plugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 for instance with vm_state active and task_state None.
Oct 02 12:24:05 compute-0 nova_compute[192079]: 2025-10-02 12:24:05.252 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:06 compute-0 nova_compute[192079]: 2025-10-02 12:24:06.920 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:07 compute-0 nova_compute[192079]: 2025-10-02 12:24:07.655 2 INFO nova.compute.manager [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Rebuilding instance
Oct 02 12:24:07 compute-0 nova_compute[192079]: 2025-10-02 12:24:07.936 2 DEBUG nova.compute.manager [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:24:07 compute-0 nova_compute[192079]: 2025-10-02 12:24:07.996 2 DEBUG nova.objects.instance [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'pci_requests' on Instance uuid 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.008 2 DEBUG nova.objects.instance [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'pci_devices' on Instance uuid 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.022 2 DEBUG nova.objects.instance [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'resources' on Instance uuid 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.036 2 DEBUG nova.objects.instance [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'migration_context' on Instance uuid 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.052 2 DEBUG nova.objects.instance [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Trying to apply a migration context that does not seem to be set for this instance apply_migration_context /usr/lib/python3.9/site-packages/nova/objects/instance.py:1032
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.057 2 DEBUG nova.virt.libvirt.driver [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Shutting down instance from state 1 _clean_shutdown /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4071
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.473 2 DEBUG nova.network.neutron [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Updating instance_info_cache with network_info: [{"id": "375c20c8-b3bc-484b-820a-f3988fb1bfa1", "address": "fa:16:3e:af:53:5f", "network": {"id": "91662be7-398f-4c34-a848-62b46821f0fd", "bridge": "br-int", "label": "tempest-network-smoke--722078817", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.233", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap375c20c8-b3", "ovs_interfaceid": "375c20c8-b3bc-484b-820a-f3988fb1bfa1", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.494 2 DEBUG oslo_concurrency.lockutils [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Releasing lock "refresh_cache-32196dd3-2739-4c43-9532-b0365f8095af" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.502 2 DEBUG oslo_concurrency.lockutils [req-036b54a7-02ba-4321-a255-87515847fb61 req-e2bf5cd2-6cdd-4ae0-bcfe-f291e031b61e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-32196dd3-2739-4c43-9532-b0365f8095af" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.503 2 DEBUG nova.network.neutron [req-036b54a7-02ba-4321-a255-87515847fb61 req-e2bf5cd2-6cdd-4ae0-bcfe-f291e031b61e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Refreshing network info cache for port 375c20c8-b3bc-484b-820a-f3988fb1bfa1 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.659 2 DEBUG nova.virt.libvirt.driver [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Starting finish_migration finish_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:11698
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.661 2 DEBUG nova.virt.libvirt.driver [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Instance directory exists: not creating _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4719
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.661 2 INFO nova.virt.libvirt.driver [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Creating image(s)
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.662 2 DEBUG nova.objects.instance [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Lazy-loading 'trusted_certs' on Instance uuid 32196dd3-2739-4c43-9532-b0365f8095af obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.700 2 DEBUG oslo_concurrency.processutils [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.770 2 DEBUG oslo_concurrency.processutils [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.070s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.771 2 DEBUG nova.virt.disk.api [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Checking if we can resize image /var/lib/nova/instances/32196dd3-2739-4c43-9532-b0365f8095af/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.771 2 DEBUG oslo_concurrency.processutils [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/32196dd3-2739-4c43-9532-b0365f8095af/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.823 2 DEBUG oslo_concurrency.processutils [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/32196dd3-2739-4c43-9532-b0365f8095af/disk --force-share --output=json" returned: 0 in 0.052s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.824 2 DEBUG nova.virt.disk.api [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Cannot resize image /var/lib/nova/instances/32196dd3-2739-4c43-9532-b0365f8095af/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.837 2 DEBUG nova.virt.libvirt.driver [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Did not create local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4859
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.838 2 DEBUG nova.virt.libvirt.driver [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Ensure instance console log exists: /var/lib/nova/instances/32196dd3-2739-4c43-9532-b0365f8095af/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.838 2 DEBUG oslo_concurrency.lockutils [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.839 2 DEBUG oslo_concurrency.lockutils [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.839 2 DEBUG oslo_concurrency.lockutils [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.841 2 DEBUG nova.virt.libvirt.driver [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Start _get_guest_xml network_info=[{"id": "375c20c8-b3bc-484b-820a-f3988fb1bfa1", "address": "fa:16:3e:af:53:5f", "network": {"id": "91662be7-398f-4c34-a848-62b46821f0fd", "bridge": "br-int", "label": "tempest-network-smoke--722078817", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.233", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-network-smoke--722078817", "vif_mac": "fa:16:3e:af:53:5f"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap375c20c8-b3", "ovs_interfaceid": "375c20c8-b3bc-484b-820a-f3988fb1bfa1", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.845 2 WARNING nova.virt.libvirt.driver [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.850 2 DEBUG nova.virt.libvirt.host [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.851 2 DEBUG nova.virt.libvirt.host [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.856 2 DEBUG nova.virt.libvirt.host [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.856 2 DEBUG nova.virt.libvirt.host [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.857 2 DEBUG nova.virt.libvirt.driver [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.857 2 DEBUG nova.virt.hardware [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.857 2 DEBUG nova.virt.hardware [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.858 2 DEBUG nova.virt.hardware [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.858 2 DEBUG nova.virt.hardware [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.858 2 DEBUG nova.virt.hardware [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.858 2 DEBUG nova.virt.hardware [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.858 2 DEBUG nova.virt.hardware [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.859 2 DEBUG nova.virt.hardware [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.859 2 DEBUG nova.virt.hardware [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.859 2 DEBUG nova.virt.hardware [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.859 2 DEBUG nova.virt.hardware [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.860 2 DEBUG nova.objects.instance [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Lazy-loading 'vcpu_model' on Instance uuid 32196dd3-2739-4c43-9532-b0365f8095af obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.876 2 DEBUG oslo_concurrency.processutils [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/32196dd3-2739-4c43-9532-b0365f8095af/disk.config --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.928 2 DEBUG oslo_concurrency.processutils [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/32196dd3-2739-4c43-9532-b0365f8095af/disk.config --force-share --output=json" returned: 0 in 0.052s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.929 2 DEBUG oslo_concurrency.lockutils [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Acquiring lock "/var/lib/nova/instances/32196dd3-2739-4c43-9532-b0365f8095af/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.929 2 DEBUG oslo_concurrency.lockutils [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Lock "/var/lib/nova/instances/32196dd3-2739-4c43-9532-b0365f8095af/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.930 2 DEBUG oslo_concurrency.lockutils [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Lock "/var/lib/nova/instances/32196dd3-2739-4c43-9532-b0365f8095af/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.931 2 DEBUG nova.virt.libvirt.vif [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:23:12Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestNetworkAdvancedServerOps-server-2105436045',display_name='tempest-TestNetworkAdvancedServerOps-server-2105436045',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkadvancedserverops-server-2105436045',id=117,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBIPVZ/1ugRUXJi6kpxyVgRUtYTdMlYSz5NQQRRxSWUHE0SJ8tz8WjHhrHski+4uyv4G//M9upfdriwZTygaxranlXIWK6yJW4zVM7pqGP5AEtkUxwGNjsUk0aVRz2H8oSQ==',key_name='tempest-TestNetworkAdvancedServerOps-1888170662',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:23:27Z,launched_on='compute-2.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=MigrationContext,new_flavor=Flavor(1),node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=Flavor(1),os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='76c7dd40d83e4e3ca71abbebf57921b6',ramdisk_id='',reservation_id='r-0oq6jqhj',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=ServiceList,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',old_vm_state='active',owner_project_name='tempest-TestNetworkAdvancedServerOps-597114071',owner_user_name='tempest-TestNetworkAdvancedServerOps-597114071-project-member'},tags=<?>,task_state='resize_finish',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:24:02Z,user_data=None,user_id='1faa7e121a0e43ad8cb4ae5b2cfcc6a2',uuid=32196dd3-2739-4c43-9532-b0365f8095af,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "375c20c8-b3bc-484b-820a-f3988fb1bfa1", "address": "fa:16:3e:af:53:5f", "network": {"id": "91662be7-398f-4c34-a848-62b46821f0fd", "bridge": "br-int", "label": "tempest-network-smoke--722078817", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": 
"10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.233", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-network-smoke--722078817", "vif_mac": "fa:16:3e:af:53:5f"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap375c20c8-b3", "ovs_interfaceid": "375c20c8-b3bc-484b-820a-f3988fb1bfa1", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.932 2 DEBUG nova.network.os_vif_util [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Converting VIF {"id": "375c20c8-b3bc-484b-820a-f3988fb1bfa1", "address": "fa:16:3e:af:53:5f", "network": {"id": "91662be7-398f-4c34-a848-62b46821f0fd", "bridge": "br-int", "label": "tempest-network-smoke--722078817", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.233", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-network-smoke--722078817", "vif_mac": "fa:16:3e:af:53:5f"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap375c20c8-b3", "ovs_interfaceid": "375c20c8-b3bc-484b-820a-f3988fb1bfa1", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.932 2 DEBUG nova.network.os_vif_util [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:af:53:5f,bridge_name='br-int',has_traffic_filtering=True,id=375c20c8-b3bc-484b-820a-f3988fb1bfa1,network=Network(91662be7-398f-4c34-a848-62b46821f0fd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap375c20c8-b3') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.934 2 DEBUG nova.virt.libvirt.driver [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:24:08 compute-0 nova_compute[192079]:   <uuid>32196dd3-2739-4c43-9532-b0365f8095af</uuid>
Oct 02 12:24:08 compute-0 nova_compute[192079]:   <name>instance-00000075</name>
Oct 02 12:24:08 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:24:08 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:24:08 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:24:08 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:       <nova:name>tempest-TestNetworkAdvancedServerOps-server-2105436045</nova:name>
Oct 02 12:24:08 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:24:08</nova:creationTime>
Oct 02 12:24:08 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:24:08 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:24:08 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:24:08 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:24:08 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:24:08 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:24:08 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:24:08 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:24:08 compute-0 nova_compute[192079]:         <nova:user uuid="1faa7e121a0e43ad8cb4ae5b2cfcc6a2">tempest-TestNetworkAdvancedServerOps-597114071-project-member</nova:user>
Oct 02 12:24:08 compute-0 nova_compute[192079]:         <nova:project uuid="76c7dd40d83e4e3ca71abbebf57921b6">tempest-TestNetworkAdvancedServerOps-597114071</nova:project>
Oct 02 12:24:08 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:24:08 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:24:08 compute-0 nova_compute[192079]:         <nova:port uuid="375c20c8-b3bc-484b-820a-f3988fb1bfa1">
Oct 02 12:24:08 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.11" ipVersion="4"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:24:08 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:24:08 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:24:08 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <system>
Oct 02 12:24:08 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:24:08 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:24:08 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:24:08 compute-0 nova_compute[192079]:       <entry name="serial">32196dd3-2739-4c43-9532-b0365f8095af</entry>
Oct 02 12:24:08 compute-0 nova_compute[192079]:       <entry name="uuid">32196dd3-2739-4c43-9532-b0365f8095af</entry>
Oct 02 12:24:08 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     </system>
Oct 02 12:24:08 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:24:08 compute-0 nova_compute[192079]:   <os>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:   </os>
Oct 02 12:24:08 compute-0 nova_compute[192079]:   <features>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:   </features>
Oct 02 12:24:08 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:24:08 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:24:08 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:24:08 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/32196dd3-2739-4c43-9532-b0365f8095af/disk"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:24:08 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/32196dd3-2739-4c43-9532-b0365f8095af/disk.config"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:24:08 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:af:53:5f"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:       <target dev="tap375c20c8-b3"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:24:08 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/32196dd3-2739-4c43-9532-b0365f8095af/console.log" append="off"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <video>
Oct 02 12:24:08 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     </video>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:24:08 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:24:08 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:24:08 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:24:08 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:24:08 compute-0 nova_compute[192079]: </domain>
Oct 02 12:24:08 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.936 2 DEBUG nova.virt.libvirt.vif [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:23:12Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestNetworkAdvancedServerOps-server-2105436045',display_name='tempest-TestNetworkAdvancedServerOps-server-2105436045',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkadvancedserverops-server-2105436045',id=117,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBIPVZ/1ugRUXJi6kpxyVgRUtYTdMlYSz5NQQRRxSWUHE0SJ8tz8WjHhrHski+4uyv4G//M9upfdriwZTygaxranlXIWK6yJW4zVM7pqGP5AEtkUxwGNjsUk0aVRz2H8oSQ==',key_name='tempest-TestNetworkAdvancedServerOps-1888170662',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:23:27Z,launched_on='compute-2.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=MigrationContext,new_flavor=Flavor(1),node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=Flavor(1),os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='76c7dd40d83e4e3ca71abbebf57921b6',ramdisk_id='',reservation_id='r-0oq6jqhj',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=ServiceList,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',old_vm_state='active',owner_project_name='tempest-TestNetworkAdvancedServerOps-597114071',owner_user_name='tempest-TestNetworkAdvancedServerOps-597114071-project-member'},tags=<?>,task_state='resize_finish',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:24:02Z,user_data=None,user_id='1faa7e121a0e43ad8cb4ae5b2cfcc6a2',uuid=32196dd3-2739-4c43-9532-b0365f8095af,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "375c20c8-b3bc-484b-820a-f3988fb1bfa1", "address": "fa:16:3e:af:53:5f", "network": {"id": "91662be7-398f-4c34-a848-62b46821f0fd", "bridge": "br-int", "label": "tempest-network-smoke--722078817", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": 
"10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.233", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-network-smoke--722078817", "vif_mac": "fa:16:3e:af:53:5f"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap375c20c8-b3", "ovs_interfaceid": "375c20c8-b3bc-484b-820a-f3988fb1bfa1", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.936 2 DEBUG nova.network.os_vif_util [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Converting VIF {"id": "375c20c8-b3bc-484b-820a-f3988fb1bfa1", "address": "fa:16:3e:af:53:5f", "network": {"id": "91662be7-398f-4c34-a848-62b46821f0fd", "bridge": "br-int", "label": "tempest-network-smoke--722078817", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.233", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-network-smoke--722078817", "vif_mac": "fa:16:3e:af:53:5f"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap375c20c8-b3", "ovs_interfaceid": "375c20c8-b3bc-484b-820a-f3988fb1bfa1", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.936 2 DEBUG nova.network.os_vif_util [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:af:53:5f,bridge_name='br-int',has_traffic_filtering=True,id=375c20c8-b3bc-484b-820a-f3988fb1bfa1,network=Network(91662be7-398f-4c34-a848-62b46821f0fd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap375c20c8-b3') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.937 2 DEBUG os_vif [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:af:53:5f,bridge_name='br-int',has_traffic_filtering=True,id=375c20c8-b3bc-484b-820a-f3988fb1bfa1,network=Network(91662be7-398f-4c34-a848-62b46821f0fd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap375c20c8-b3') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.937 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.938 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.938 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.940 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.941 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap375c20c8-b3, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.941 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap375c20c8-b3, col_values=(('external_ids', {'iface-id': '375c20c8-b3bc-484b-820a-f3988fb1bfa1', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:af:53:5f', 'vm-uuid': '32196dd3-2739-4c43-9532-b0365f8095af'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.943 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:08 compute-0 NetworkManager[51160]: <info>  [1759407848.9440] manager: (tap375c20c8-b3): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/200)
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.946 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.949 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:08 compute-0 nova_compute[192079]: 2025-10-02 12:24:08.950 2 INFO os_vif [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:af:53:5f,bridge_name='br-int',has_traffic_filtering=True,id=375c20c8-b3bc-484b-820a-f3988fb1bfa1,network=Network(91662be7-398f-4c34-a848-62b46821f0fd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap375c20c8-b3')
Oct 02 12:24:09 compute-0 nova_compute[192079]: 2025-10-02 12:24:09.010 2 DEBUG nova.virt.libvirt.driver [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:24:09 compute-0 nova_compute[192079]: 2025-10-02 12:24:09.010 2 DEBUG nova.virt.libvirt.driver [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:24:09 compute-0 nova_compute[192079]: 2025-10-02 12:24:09.011 2 DEBUG nova.virt.libvirt.driver [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] No VIF found with MAC fa:16:3e:af:53:5f, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:24:09 compute-0 nova_compute[192079]: 2025-10-02 12:24:09.011 2 INFO nova.virt.libvirt.driver [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Using config drive
Oct 02 12:24:09 compute-0 podman[238168]: 2025-10-02 12:24:09.044153687 +0000 UTC m=+0.066590886 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.33.7, build-date=2025-08-20T13:12:41, version=9.6, architecture=x86_64, name=ubi9-minimal, vendor=Red Hat, Inc., container_name=openstack_network_exporter, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., config_id=edpm, io.openshift.expose-services=, release=1755695350, managed_by=edpm_ansible, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., distribution-scope=public, io.openshift.tags=minimal rhel9, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., maintainer=Red Hat, Inc., com.redhat.component=ubi9-minimal-container, url=https://catalog.redhat.com/en/search?searchType=containers, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, vcs-type=git)
Oct 02 12:24:09 compute-0 podman[238169]: 2025-10-02 12:24:09.061761326 +0000 UTC m=+0.084216256 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, config_id=multipathd, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2)
Oct 02 12:24:09 compute-0 kernel: tap375c20c8-b3: entered promiscuous mode
Oct 02 12:24:09 compute-0 NetworkManager[51160]: <info>  [1759407849.0662] manager: (tap375c20c8-b3): new Tun device (/org/freedesktop/NetworkManager/Devices/201)
Oct 02 12:24:09 compute-0 nova_compute[192079]: 2025-10-02 12:24:09.069 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:09 compute-0 ovn_controller[94336]: 2025-10-02T12:24:09Z|00388|binding|INFO|Claiming lport 375c20c8-b3bc-484b-820a-f3988fb1bfa1 for this chassis.
Oct 02 12:24:09 compute-0 ovn_controller[94336]: 2025-10-02T12:24:09Z|00389|binding|INFO|375c20c8-b3bc-484b-820a-f3988fb1bfa1: Claiming fa:16:3e:af:53:5f 10.100.0.11
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:09.074 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:af:53:5f 10.100.0.11'], port_security=['fa:16:3e:af:53:5f 10.100.0.11'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.11/28', 'neutron:device_id': '32196dd3-2739-4c43-9532-b0365f8095af', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-91662be7-398f-4c34-a848-62b46821f0fd', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '76c7dd40d83e4e3ca71abbebf57921b6', 'neutron:revision_number': '6', 'neutron:security_group_ids': '56d0844b-17cf-4186-b565-d275a3fd7b1f', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:port_fip': '192.168.122.233'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=2bb1944c-7514-4575-bf6c-55d1c733e488, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=375c20c8-b3bc-484b-820a-f3988fb1bfa1) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:09.075 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 375c20c8-b3bc-484b-820a-f3988fb1bfa1 in datapath 91662be7-398f-4c34-a848-62b46821f0fd bound to our chassis
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:09.077 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 91662be7-398f-4c34-a848-62b46821f0fd
Oct 02 12:24:09 compute-0 ovn_controller[94336]: 2025-10-02T12:24:09Z|00390|binding|INFO|Setting lport 375c20c8-b3bc-484b-820a-f3988fb1bfa1 ovn-installed in OVS
Oct 02 12:24:09 compute-0 ovn_controller[94336]: 2025-10-02T12:24:09Z|00391|binding|INFO|Setting lport 375c20c8-b3bc-484b-820a-f3988fb1bfa1 up in Southbound
Oct 02 12:24:09 compute-0 nova_compute[192079]: 2025-10-02 12:24:09.086 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:09.087 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[cd6860a8-85d5-44c6-adf9-97a1e3beee2d]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:09.087 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap91662be7-31 in ovnmeta-91662be7-398f-4c34-a848-62b46821f0fd namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:24:09 compute-0 nova_compute[192079]: 2025-10-02 12:24:09.089 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:09.090 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap91662be7-30 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:09.090 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1d79fcbb-a1e0-4116-842e-4623ba3d9ebf]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:09.092 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d98fd2f2-3ba8-4ca1-bf9b-37c967558132]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:09 compute-0 nova_compute[192079]: 2025-10-02 12:24:09.094 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:09 compute-0 systemd-udevd[238221]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:09.108 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[2dd9218b-ee85-4158-b6bd-4985e3607307]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:09 compute-0 systemd-machined[152150]: New machine qemu-53-instance-00000075.
Oct 02 12:24:09 compute-0 NetworkManager[51160]: <info>  [1759407849.1210] device (tap375c20c8-b3): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:24:09 compute-0 NetworkManager[51160]: <info>  [1759407849.1215] device (tap375c20c8-b3): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:24:09 compute-0 systemd[1]: Started Virtual Machine qemu-53-instance-00000075.
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:09.132 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[77da4af9-597a-428f-b816-f6aac60f3640]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:09.160 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[0a06b25a-904b-4421-b811-822d7427aee2]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:09 compute-0 NetworkManager[51160]: <info>  [1759407849.1678] manager: (tap91662be7-30): new Veth device (/org/freedesktop/NetworkManager/Devices/202)
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:09.167 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e30a1ed7-9070-4fd4-ba09-4ab4b2785b52]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:09.194 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[2d5db01d-c956-48a8-abd1-e8084beb3b13]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:09.197 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[5785d765-9741-4108-afb6-d485fddd7f2b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:09 compute-0 NetworkManager[51160]: <info>  [1759407849.2187] device (tap91662be7-30): carrier: link connected
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:09.225 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[f4a25652-666a-4615-be05-ed3d75a4ed1b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:09.242 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[497014a3-ef85-4c31-90bf-b4f40ec0cf99]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap91662be7-31'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:44:4b:1d'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 127], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 584684, 'reachable_time': 41155, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 238253, 'error': None, 'target': 'ovnmeta-91662be7-398f-4c34-a848-62b46821f0fd', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:09.258 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8a660694-48fb-4c0d-9623-72c0edf14cc7]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe44:4b1d'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 584684, 'tstamp': 584684}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 238254, 'error': None, 'target': 'ovnmeta-91662be7-398f-4c34-a848-62b46821f0fd', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:09.275 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[466a6455-68f3-4356-8c49-ec7c4e0d61b8]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap91662be7-31'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:44:4b:1d'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 127], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 584684, 'reachable_time': 41155, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 238255, 'error': None, 'target': 'ovnmeta-91662be7-398f-4c34-a848-62b46821f0fd', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:09.304 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6cfb85b0-5a59-4af6-95ae-510c9288e6dd]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:09.362 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[96e5c2c1-d6b8-4a15-8c67-4c1c83b7d2e4]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:09.364 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap91662be7-30, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:09.365 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:09.366 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap91662be7-30, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:09 compute-0 nova_compute[192079]: 2025-10-02 12:24:09.368 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:09 compute-0 NetworkManager[51160]: <info>  [1759407849.3694] manager: (tap91662be7-30): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/203)
Oct 02 12:24:09 compute-0 kernel: tap91662be7-30: entered promiscuous mode
Oct 02 12:24:09 compute-0 nova_compute[192079]: 2025-10-02 12:24:09.370 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:09.375 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap91662be7-30, col_values=(('external_ids', {'iface-id': '9bd4e8e6-11ff-43aa-92bf-67aec1a8e528'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:09 compute-0 ovn_controller[94336]: 2025-10-02T12:24:09Z|00392|binding|INFO|Releasing lport 9bd4e8e6-11ff-43aa-92bf-67aec1a8e528 from this chassis (sb_readonly=0)
Oct 02 12:24:09 compute-0 nova_compute[192079]: 2025-10-02 12:24:09.377 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:09 compute-0 nova_compute[192079]: 2025-10-02 12:24:09.377 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:09.380 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/91662be7-398f-4c34-a848-62b46821f0fd.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/91662be7-398f-4c34-a848-62b46821f0fd.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:09.381 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[61f6d460-06da-471d-8c2a-65f3a6d381ff]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:09.383 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-91662be7-398f-4c34-a848-62b46821f0fd
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/91662be7-398f-4c34-a848-62b46821f0fd.pid.haproxy
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 91662be7-398f-4c34-a848-62b46821f0fd
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:24:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:09.384 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-91662be7-398f-4c34-a848-62b46821f0fd', 'env', 'PROCESS_TAG=haproxy-91662be7-398f-4c34-a848-62b46821f0fd', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/91662be7-398f-4c34-a848-62b46821f0fd.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:24:09 compute-0 nova_compute[192079]: 2025-10-02 12:24:09.388 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:09 compute-0 podman[238294]: 2025-10-02 12:24:09.802773552 +0000 UTC m=+0.059514194 container create a9981478098da85e2101443e875dc1f6586c8258a7b3edf7f5ebebf7a90a7c24 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-91662be7-398f-4c34-a848-62b46821f0fd, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.build-date=20251001)
Oct 02 12:24:09 compute-0 systemd[1]: Started libpod-conmon-a9981478098da85e2101443e875dc1f6586c8258a7b3edf7f5ebebf7a90a7c24.scope.
Oct 02 12:24:09 compute-0 podman[238294]: 2025-10-02 12:24:09.775387605 +0000 UTC m=+0.032128277 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:24:09 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:24:09 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/c8018020412bb8d77d4ad5f457fa3cf717c2944995dff6e2942bb78c87cf3704/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:24:09 compute-0 podman[238294]: 2025-10-02 12:24:09.881802835 +0000 UTC m=+0.138543507 container init a9981478098da85e2101443e875dc1f6586c8258a7b3edf7f5ebebf7a90a7c24 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-91662be7-398f-4c34-a848-62b46821f0fd, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS)
Oct 02 12:24:09 compute-0 podman[238294]: 2025-10-02 12:24:09.888622151 +0000 UTC m=+0.145362793 container start a9981478098da85e2101443e875dc1f6586c8258a7b3edf7f5ebebf7a90a7c24 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-91662be7-398f-4c34-a848-62b46821f0fd, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS)
Oct 02 12:24:09 compute-0 neutron-haproxy-ovnmeta-91662be7-398f-4c34-a848-62b46821f0fd[238309]: [NOTICE]   (238313) : New worker (238315) forked
Oct 02 12:24:09 compute-0 neutron-haproxy-ovnmeta-91662be7-398f-4c34-a848-62b46821f0fd[238309]: [NOTICE]   (238313) : Loading success.
Oct 02 12:24:10 compute-0 nova_compute[192079]: 2025-10-02 12:24:10.046 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407850.045791, 32196dd3-2739-4c43-9532-b0365f8095af => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:24:10 compute-0 nova_compute[192079]: 2025-10-02 12:24:10.046 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] VM Resumed (Lifecycle Event)
Oct 02 12:24:10 compute-0 nova_compute[192079]: 2025-10-02 12:24:10.048 2 DEBUG nova.compute.manager [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:24:10 compute-0 nova_compute[192079]: 2025-10-02 12:24:10.052 2 INFO nova.virt.libvirt.driver [-] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Instance running successfully.
Oct 02 12:24:10 compute-0 virtqemud[191807]: argument unsupported: QEMU guest agent is not configured
Oct 02 12:24:10 compute-0 nova_compute[192079]: 2025-10-02 12:24:10.055 2 DEBUG nova.virt.libvirt.guest [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Failed to set time: agent not configured sync_guest_time /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:200
Oct 02 12:24:10 compute-0 nova_compute[192079]: 2025-10-02 12:24:10.055 2 DEBUG nova.virt.libvirt.driver [None req-bbab774b-2ea1-4a57-9049-cf3c8fae42ad cb6cc43e566f47b68009374580e995a6 d31edba5aba7481a916ca3252d1375a4 - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] finish_migration finished successfully. finish_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:11793
Oct 02 12:24:10 compute-0 nova_compute[192079]: 2025-10-02 12:24:10.071 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:24:10 compute-0 nova_compute[192079]: 2025-10-02 12:24:10.074 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: active, current task_state: resize_finish, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:24:10 compute-0 nova_compute[192079]: 2025-10-02 12:24:10.123 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] During sync_power_state the instance has a pending task (resize_finish). Skip.
Oct 02 12:24:10 compute-0 nova_compute[192079]: 2025-10-02 12:24:10.123 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407850.0476813, 32196dd3-2739-4c43-9532-b0365f8095af => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:24:10 compute-0 nova_compute[192079]: 2025-10-02 12:24:10.123 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] VM Started (Lifecycle Event)
Oct 02 12:24:10 compute-0 nova_compute[192079]: 2025-10-02 12:24:10.167 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:24:10 compute-0 nova_compute[192079]: 2025-10-02 12:24:10.172 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Synchronizing instance power state after lifecycle event "Started"; current vm_state: active, current task_state: resize_finish, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:24:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:10.997 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '28'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:11 compute-0 nova_compute[192079]: 2025-10-02 12:24:11.695 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:11 compute-0 systemd[1]: Stopping User Manager for UID 42436...
Oct 02 12:24:11 compute-0 systemd[237977]: Activating special unit Exit the Session...
Oct 02 12:24:11 compute-0 systemd[237977]: Stopped target Main User Target.
Oct 02 12:24:11 compute-0 systemd[237977]: Stopped target Basic System.
Oct 02 12:24:11 compute-0 systemd[237977]: Stopped target Paths.
Oct 02 12:24:11 compute-0 systemd[237977]: Stopped target Sockets.
Oct 02 12:24:11 compute-0 systemd[237977]: Stopped target Timers.
Oct 02 12:24:11 compute-0 systemd[237977]: Stopped Mark boot as successful after the user session has run 2 minutes.
Oct 02 12:24:11 compute-0 systemd[237977]: Stopped Daily Cleanup of User's Temporary Directories.
Oct 02 12:24:11 compute-0 systemd[237977]: Closed D-Bus User Message Bus Socket.
Oct 02 12:24:11 compute-0 systemd[237977]: Stopped Create User's Volatile Files and Directories.
Oct 02 12:24:11 compute-0 systemd[237977]: Removed slice User Application Slice.
Oct 02 12:24:11 compute-0 systemd[237977]: Reached target Shutdown.
Oct 02 12:24:11 compute-0 systemd[237977]: Finished Exit the Session.
Oct 02 12:24:11 compute-0 systemd[237977]: Reached target Exit the Session.
Oct 02 12:24:11 compute-0 systemd[1]: user@42436.service: Deactivated successfully.
Oct 02 12:24:11 compute-0 systemd[1]: Stopped User Manager for UID 42436.
Oct 02 12:24:11 compute-0 systemd[1]: Stopping User Runtime Directory /run/user/42436...
Oct 02 12:24:11 compute-0 systemd[1]: run-user-42436.mount: Deactivated successfully.
Oct 02 12:24:11 compute-0 systemd[1]: user-runtime-dir@42436.service: Deactivated successfully.
Oct 02 12:24:11 compute-0 systemd[1]: Stopped User Runtime Directory /run/user/42436.
Oct 02 12:24:11 compute-0 systemd[1]: Removed slice User Slice of UID 42436.
Oct 02 12:24:11 compute-0 nova_compute[192079]: 2025-10-02 12:24:11.920 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:12 compute-0 nova_compute[192079]: 2025-10-02 12:24:12.188 2 DEBUG nova.network.neutron [req-036b54a7-02ba-4321-a255-87515847fb61 req-e2bf5cd2-6cdd-4ae0-bcfe-f291e031b61e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Updated VIF entry in instance network info cache for port 375c20c8-b3bc-484b-820a-f3988fb1bfa1. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:24:12 compute-0 nova_compute[192079]: 2025-10-02 12:24:12.189 2 DEBUG nova.network.neutron [req-036b54a7-02ba-4321-a255-87515847fb61 req-e2bf5cd2-6cdd-4ae0-bcfe-f291e031b61e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Updating instance_info_cache with network_info: [{"id": "375c20c8-b3bc-484b-820a-f3988fb1bfa1", "address": "fa:16:3e:af:53:5f", "network": {"id": "91662be7-398f-4c34-a848-62b46821f0fd", "bridge": "br-int", "label": "tempest-network-smoke--722078817", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.233", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap375c20c8-b3", "ovs_interfaceid": "375c20c8-b3bc-484b-820a-f3988fb1bfa1", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:24:12 compute-0 nova_compute[192079]: 2025-10-02 12:24:12.234 2 DEBUG oslo_concurrency.lockutils [req-036b54a7-02ba-4321-a255-87515847fb61 req-e2bf5cd2-6cdd-4ae0-bcfe-f291e031b61e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-32196dd3-2739-4c43-9532-b0365f8095af" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:24:12 compute-0 nova_compute[192079]: 2025-10-02 12:24:12.381 2 DEBUG nova.network.neutron [None req-409b0589-14df-43cc-a0b3-439ddfd4f3ac 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Port 375c20c8-b3bc-484b-820a-f3988fb1bfa1 binding to destination host compute-0.ctlplane.example.com is already ACTIVE migrate_instance_start /usr/lib/python3.9/site-packages/nova/network/neutron.py:3171
Oct 02 12:24:12 compute-0 nova_compute[192079]: 2025-10-02 12:24:12.383 2 DEBUG oslo_concurrency.lockutils [None req-409b0589-14df-43cc-a0b3-439ddfd4f3ac 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "refresh_cache-32196dd3-2739-4c43-9532-b0365f8095af" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:24:12 compute-0 nova_compute[192079]: 2025-10-02 12:24:12.384 2 DEBUG oslo_concurrency.lockutils [None req-409b0589-14df-43cc-a0b3-439ddfd4f3ac 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquired lock "refresh_cache-32196dd3-2739-4c43-9532-b0365f8095af" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:24:12 compute-0 nova_compute[192079]: 2025-10-02 12:24:12.384 2 DEBUG nova.network.neutron [None req-409b0589-14df-43cc-a0b3-439ddfd4f3ac 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:24:12 compute-0 nova_compute[192079]: 2025-10-02 12:24:12.488 2 DEBUG nova.compute.manager [req-b4c79e12-c0cc-4681-ae08-729ad76acecd req-f7263593-1b2e-4c52-aff9-70522a4b55c0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Received event network-vif-plugged-375c20c8-b3bc-484b-820a-f3988fb1bfa1 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:24:12 compute-0 nova_compute[192079]: 2025-10-02 12:24:12.489 2 DEBUG oslo_concurrency.lockutils [req-b4c79e12-c0cc-4681-ae08-729ad76acecd req-f7263593-1b2e-4c52-aff9-70522a4b55c0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "32196dd3-2739-4c43-9532-b0365f8095af-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:12 compute-0 nova_compute[192079]: 2025-10-02 12:24:12.489 2 DEBUG oslo_concurrency.lockutils [req-b4c79e12-c0cc-4681-ae08-729ad76acecd req-f7263593-1b2e-4c52-aff9-70522a4b55c0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "32196dd3-2739-4c43-9532-b0365f8095af-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:12 compute-0 nova_compute[192079]: 2025-10-02 12:24:12.489 2 DEBUG oslo_concurrency.lockutils [req-b4c79e12-c0cc-4681-ae08-729ad76acecd req-f7263593-1b2e-4c52-aff9-70522a4b55c0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "32196dd3-2739-4c43-9532-b0365f8095af-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:12 compute-0 nova_compute[192079]: 2025-10-02 12:24:12.490 2 DEBUG nova.compute.manager [req-b4c79e12-c0cc-4681-ae08-729ad76acecd req-f7263593-1b2e-4c52-aff9-70522a4b55c0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] No waiting events found dispatching network-vif-plugged-375c20c8-b3bc-484b-820a-f3988fb1bfa1 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:24:12 compute-0 nova_compute[192079]: 2025-10-02 12:24:12.490 2 WARNING nova.compute.manager [req-b4c79e12-c0cc-4681-ae08-729ad76acecd req-f7263593-1b2e-4c52-aff9-70522a4b55c0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Received unexpected event network-vif-plugged-375c20c8-b3bc-484b-820a-f3988fb1bfa1 for instance with vm_state resized and task_state resize_reverting.
Oct 02 12:24:13 compute-0 nova_compute[192079]: 2025-10-02 12:24:13.944 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:13 compute-0 nova_compute[192079]: 2025-10-02 12:24:13.984 2 DEBUG nova.network.neutron [None req-409b0589-14df-43cc-a0b3-439ddfd4f3ac 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Updating instance_info_cache with network_info: [{"id": "375c20c8-b3bc-484b-820a-f3988fb1bfa1", "address": "fa:16:3e:af:53:5f", "network": {"id": "91662be7-398f-4c34-a848-62b46821f0fd", "bridge": "br-int", "label": "tempest-network-smoke--722078817", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.233", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap375c20c8-b3", "ovs_interfaceid": "375c20c8-b3bc-484b-820a-f3988fb1bfa1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.005 2 DEBUG oslo_concurrency.lockutils [None req-409b0589-14df-43cc-a0b3-439ddfd4f3ac 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Releasing lock "refresh_cache-32196dd3-2739-4c43-9532-b0365f8095af" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.026 2 DEBUG nova.virt.libvirt.driver [None req-409b0589-14df-43cc-a0b3-439ddfd4f3ac 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Creating tmpfile /var/lib/nova/instances/32196dd3-2739-4c43-9532-b0365f8095af/tmpsf5793eb to verify with other compute node that the instance is on the same shared storage. check_instance_shared_storage_local /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:9618
Oct 02 12:24:14 compute-0 kernel: tap375c20c8-b3 (unregistering): left promiscuous mode
Oct 02 12:24:14 compute-0 NetworkManager[51160]: <info>  [1759407854.0655] device (tap375c20c8-b3): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.071 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:14 compute-0 ovn_controller[94336]: 2025-10-02T12:24:14Z|00393|binding|INFO|Releasing lport 375c20c8-b3bc-484b-820a-f3988fb1bfa1 from this chassis (sb_readonly=0)
Oct 02 12:24:14 compute-0 ovn_controller[94336]: 2025-10-02T12:24:14Z|00394|binding|INFO|Setting lport 375c20c8-b3bc-484b-820a-f3988fb1bfa1 down in Southbound
Oct 02 12:24:14 compute-0 ovn_controller[94336]: 2025-10-02T12:24:14Z|00395|binding|INFO|Removing iface tap375c20c8-b3 ovn-installed in OVS
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.074 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:14.080 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:af:53:5f 10.100.0.11'], port_security=['fa:16:3e:af:53:5f 10.100.0.11'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.11/28', 'neutron:device_id': '32196dd3-2739-4c43-9532-b0365f8095af', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-91662be7-398f-4c34-a848-62b46821f0fd', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '76c7dd40d83e4e3ca71abbebf57921b6', 'neutron:revision_number': '8', 'neutron:security_group_ids': '56d0844b-17cf-4186-b565-d275a3fd7b1f', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:port_fip': '192.168.122.233', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=2bb1944c-7514-4575-bf6c-55d1c733e488, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=375c20c8-b3bc-484b-820a-f3988fb1bfa1) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:24:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:14.082 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 375c20c8-b3bc-484b-820a-f3988fb1bfa1 in datapath 91662be7-398f-4c34-a848-62b46821f0fd unbound from our chassis
Oct 02 12:24:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:14.083 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 91662be7-398f-4c34-a848-62b46821f0fd, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.084 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:14.084 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0e1a8d1c-4b87-47c6-8964-e57db172c27d]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:14.085 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-91662be7-398f-4c34-a848-62b46821f0fd namespace which is not needed anymore
Oct 02 12:24:14 compute-0 systemd[1]: machine-qemu\x2d53\x2dinstance\x2d00000075.scope: Deactivated successfully.
Oct 02 12:24:14 compute-0 systemd[1]: machine-qemu\x2d53\x2dinstance\x2d00000075.scope: Consumed 4.861s CPU time.
Oct 02 12:24:14 compute-0 systemd-machined[152150]: Machine qemu-53-instance-00000075 terminated.
Oct 02 12:24:14 compute-0 neutron-haproxy-ovnmeta-91662be7-398f-4c34-a848-62b46821f0fd[238309]: [NOTICE]   (238313) : haproxy version is 2.8.14-c23fe91
Oct 02 12:24:14 compute-0 neutron-haproxy-ovnmeta-91662be7-398f-4c34-a848-62b46821f0fd[238309]: [NOTICE]   (238313) : path to executable is /usr/sbin/haproxy
Oct 02 12:24:14 compute-0 neutron-haproxy-ovnmeta-91662be7-398f-4c34-a848-62b46821f0fd[238309]: [WARNING]  (238313) : Exiting Master process...
Oct 02 12:24:14 compute-0 neutron-haproxy-ovnmeta-91662be7-398f-4c34-a848-62b46821f0fd[238309]: [WARNING]  (238313) : Exiting Master process...
Oct 02 12:24:14 compute-0 neutron-haproxy-ovnmeta-91662be7-398f-4c34-a848-62b46821f0fd[238309]: [ALERT]    (238313) : Current worker (238315) exited with code 143 (Terminated)
Oct 02 12:24:14 compute-0 neutron-haproxy-ovnmeta-91662be7-398f-4c34-a848-62b46821f0fd[238309]: [WARNING]  (238313) : All workers exited. Exiting... (0)
Oct 02 12:24:14 compute-0 systemd[1]: libpod-a9981478098da85e2101443e875dc1f6586c8258a7b3edf7f5ebebf7a90a7c24.scope: Deactivated successfully.
Oct 02 12:24:14 compute-0 podman[238351]: 2025-10-02 12:24:14.217385182 +0000 UTC m=+0.048538023 container died a9981478098da85e2101443e875dc1f6586c8258a7b3edf7f5ebebf7a90a7c24 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-91662be7-398f-4c34-a848-62b46821f0fd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:24:14 compute-0 systemd[1]: var-lib-containers-storage-overlay-c8018020412bb8d77d4ad5f457fa3cf717c2944995dff6e2942bb78c87cf3704-merged.mount: Deactivated successfully.
Oct 02 12:24:14 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-a9981478098da85e2101443e875dc1f6586c8258a7b3edf7f5ebebf7a90a7c24-userdata-shm.mount: Deactivated successfully.
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.272 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.277 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:14 compute-0 podman[238351]: 2025-10-02 12:24:14.286834655 +0000 UTC m=+0.117987476 container cleanup a9981478098da85e2101443e875dc1f6586c8258a7b3edf7f5ebebf7a90a7c24 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-91662be7-398f-4c34-a848-62b46821f0fd, org.label-schema.build-date=20251001, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 12:24:14 compute-0 systemd[1]: libpod-conmon-a9981478098da85e2101443e875dc1f6586c8258a7b3edf7f5ebebf7a90a7c24.scope: Deactivated successfully.
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.312 2 INFO nova.virt.libvirt.driver [-] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Instance destroyed successfully.
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.313 2 DEBUG nova.objects.instance [None req-409b0589-14df-43cc-a0b3-439ddfd4f3ac 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'resources' on Instance uuid 32196dd3-2739-4c43-9532-b0365f8095af obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.328 2 DEBUG nova.virt.libvirt.vif [None req-409b0589-14df-43cc-a0b3-439ddfd4f3ac 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:23:12Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestNetworkAdvancedServerOps-server-2105436045',display_name='tempest-TestNetworkAdvancedServerOps-server-2105436045',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkadvancedserverops-server-2105436045',id=117,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBIPVZ/1ugRUXJi6kpxyVgRUtYTdMlYSz5NQQRRxSWUHE0SJ8tz8WjHhrHski+4uyv4G//M9upfdriwZTygaxranlXIWK6yJW4zVM7pqGP5AEtkUxwGNjsUk0aVRz2H8oSQ==',key_name='tempest-TestNetworkAdvancedServerOps-1888170662',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:24:10Z,launched_on='compute-2.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=Flavor(1),node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=Flavor(1),os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='76c7dd40d83e4e3ca71abbebf57921b6',ramdisk_id='',reservation_id='r-0oq6jqhj',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',old_vm_state='active',owner_project_name='tempest-TestNetworkAdvancedServerOps-597114071',owner_user_name='tempest-TestNetworkAdvancedServerOps-597114071-project-member'},tags=<?>,task_state='resize_reverting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:24:10Z,user_data=None,user_id='1faa7e121a0e43ad8cb4ae5b2cfcc6a2',uuid=32196dd3-2739-4c43-9532-b0365f8095af,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='resized') vif={"id": "375c20c8-b3bc-484b-820a-f3988fb1bfa1", "address": "fa:16:3e:af:53:5f", "network": {"id": "91662be7-398f-4c34-a848-62b46821f0fd", "bridge": "br-int", "label": "tempest-network-smoke--722078817", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, 
"meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.233", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap375c20c8-b3", "ovs_interfaceid": "375c20c8-b3bc-484b-820a-f3988fb1bfa1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.329 2 DEBUG nova.network.os_vif_util [None req-409b0589-14df-43cc-a0b3-439ddfd4f3ac 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converting VIF {"id": "375c20c8-b3bc-484b-820a-f3988fb1bfa1", "address": "fa:16:3e:af:53:5f", "network": {"id": "91662be7-398f-4c34-a848-62b46821f0fd", "bridge": "br-int", "label": "tempest-network-smoke--722078817", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.233", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap375c20c8-b3", "ovs_interfaceid": "375c20c8-b3bc-484b-820a-f3988fb1bfa1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.330 2 DEBUG nova.network.os_vif_util [None req-409b0589-14df-43cc-a0b3-439ddfd4f3ac 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:af:53:5f,bridge_name='br-int',has_traffic_filtering=True,id=375c20c8-b3bc-484b-820a-f3988fb1bfa1,network=Network(91662be7-398f-4c34-a848-62b46821f0fd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap375c20c8-b3') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.330 2 DEBUG os_vif [None req-409b0589-14df-43cc-a0b3-439ddfd4f3ac 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:af:53:5f,bridge_name='br-int',has_traffic_filtering=True,id=375c20c8-b3bc-484b-820a-f3988fb1bfa1,network=Network(91662be7-398f-4c34-a848-62b46821f0fd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap375c20c8-b3') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.333 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.333 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap375c20c8-b3, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.335 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.336 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.339 2 INFO os_vif [None req-409b0589-14df-43cc-a0b3-439ddfd4f3ac 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:af:53:5f,bridge_name='br-int',has_traffic_filtering=True,id=375c20c8-b3bc-484b-820a-f3988fb1bfa1,network=Network(91662be7-398f-4c34-a848-62b46821f0fd),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap375c20c8-b3')
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.339 2 INFO nova.virt.libvirt.driver [None req-409b0589-14df-43cc-a0b3-439ddfd4f3ac 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Deleting instance files /var/lib/nova/instances/32196dd3-2739-4c43-9532-b0365f8095af_del
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.346 2 INFO nova.virt.libvirt.driver [None req-409b0589-14df-43cc-a0b3-439ddfd4f3ac 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Deletion of /var/lib/nova/instances/32196dd3-2739-4c43-9532-b0365f8095af_del complete
Oct 02 12:24:14 compute-0 podman[238405]: 2025-10-02 12:24:14.384442985 +0000 UTC m=+0.067866120 container remove a9981478098da85e2101443e875dc1f6586c8258a7b3edf7f5ebebf7a90a7c24 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-91662be7-398f-4c34-a848-62b46821f0fd, org.label-schema.license=GPLv2, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001)
Oct 02 12:24:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:14.390 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4a33cdfb-49d7-4f80-8ca5-572692ed0caa]: (4, ('Thu Oct  2 12:24:14 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-91662be7-398f-4c34-a848-62b46821f0fd (a9981478098da85e2101443e875dc1f6586c8258a7b3edf7f5ebebf7a90a7c24)\na9981478098da85e2101443e875dc1f6586c8258a7b3edf7f5ebebf7a90a7c24\nThu Oct  2 12:24:14 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-91662be7-398f-4c34-a848-62b46821f0fd (a9981478098da85e2101443e875dc1f6586c8258a7b3edf7f5ebebf7a90a7c24)\na9981478098da85e2101443e875dc1f6586c8258a7b3edf7f5ebebf7a90a7c24\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:14.391 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e672f6cb-850e-43b4-a91c-518c12de3fe0]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:14.392 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap91662be7-30, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.393 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:14 compute-0 kernel: tap91662be7-30: left promiscuous mode
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.405 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:14.407 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b03139c6-d50b-41b0-bbb4-78c475c299d9]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.430 2 DEBUG oslo_concurrency.lockutils [None req-409b0589-14df-43cc-a0b3-439ddfd4f3ac 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.431 2 DEBUG oslo_concurrency.lockutils [None req-409b0589-14df-43cc-a0b3-439ddfd4f3ac 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:14.435 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d71f69ab-4997-47af-bde4-57f8ac592f2d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:14.436 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[14c9cb48-a66b-4861-b544-d8b1d29ca3ae]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:14.450 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e038904a-bcd4-4f57-91ff-ecda4a41ca22]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 584678, 'reachable_time': 26958, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 238424, 'error': None, 'target': 'ovnmeta-91662be7-398f-4c34-a848-62b46821f0fd', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:14 compute-0 systemd[1]: run-netns-ovnmeta\x2d91662be7\x2d398f\x2d4c34\x2da848\x2d62b46821f0fd.mount: Deactivated successfully.
Oct 02 12:24:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:14.453 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-91662be7-398f-4c34-a848-62b46821f0fd deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:24:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:14.453 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[4cbb8b87-63a7-43ec-9979-5681bddd448a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.455 2 DEBUG nova.objects.instance [None req-409b0589-14df-43cc-a0b3-439ddfd4f3ac 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'migration_context' on Instance uuid 32196dd3-2739-4c43-9532-b0365f8095af obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.556 2 DEBUG nova.compute.provider_tree [None req-409b0589-14df-43cc-a0b3-439ddfd4f3ac 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.576 2 DEBUG nova.scheduler.client.report [None req-409b0589-14df-43cc-a0b3-439ddfd4f3ac 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.635 2 DEBUG nova.compute.manager [req-1a9dc7a2-5570-431b-bd9f-5e166ac9da7d req-f6a9cc67-393c-4417-b460-2446927d3267 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Received event network-vif-plugged-375c20c8-b3bc-484b-820a-f3988fb1bfa1 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.636 2 DEBUG oslo_concurrency.lockutils [req-1a9dc7a2-5570-431b-bd9f-5e166ac9da7d req-f6a9cc67-393c-4417-b460-2446927d3267 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "32196dd3-2739-4c43-9532-b0365f8095af-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.636 2 DEBUG oslo_concurrency.lockutils [req-1a9dc7a2-5570-431b-bd9f-5e166ac9da7d req-f6a9cc67-393c-4417-b460-2446927d3267 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "32196dd3-2739-4c43-9532-b0365f8095af-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.636 2 DEBUG oslo_concurrency.lockutils [req-1a9dc7a2-5570-431b-bd9f-5e166ac9da7d req-f6a9cc67-393c-4417-b460-2446927d3267 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "32196dd3-2739-4c43-9532-b0365f8095af-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.636 2 DEBUG nova.compute.manager [req-1a9dc7a2-5570-431b-bd9f-5e166ac9da7d req-f6a9cc67-393c-4417-b460-2446927d3267 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] No waiting events found dispatching network-vif-plugged-375c20c8-b3bc-484b-820a-f3988fb1bfa1 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.637 2 WARNING nova.compute.manager [req-1a9dc7a2-5570-431b-bd9f-5e166ac9da7d req-f6a9cc67-393c-4417-b460-2446927d3267 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Received unexpected event network-vif-plugged-375c20c8-b3bc-484b-820a-f3988fb1bfa1 for instance with vm_state resized and task_state resize_reverting.
Oct 02 12:24:14 compute-0 nova_compute[192079]: 2025-10-02 12:24:14.654 2 DEBUG oslo_concurrency.lockutils [None req-409b0589-14df-43cc-a0b3-439ddfd4f3ac 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 0.224s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:15 compute-0 ovn_controller[94336]: 2025-10-02T12:24:15Z|00039|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:e3:d2:69 10.100.0.10
Oct 02 12:24:15 compute-0 ovn_controller[94336]: 2025-10-02T12:24:15Z|00396|binding|INFO|Releasing lport 38f1ac16-18c6-4b4a-b769-ebc7dd5181d4 from this chassis (sb_readonly=0)
Oct 02 12:24:15 compute-0 ovn_controller[94336]: 2025-10-02T12:24:15Z|00040|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:e3:d2:69 10.100.0.10
Oct 02 12:24:15 compute-0 nova_compute[192079]: 2025-10-02 12:24:15.506 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:16 compute-0 podman[238425]: 2025-10-02 12:24:16.159395369 +0000 UTC m=+0.053447918 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']})
Oct 02 12:24:16 compute-0 podman[238426]: 2025-10-02 12:24:16.168861877 +0000 UTC m=+0.062835353 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, config_id=iscsid, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=iscsid, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_managed=true)
Oct 02 12:24:16 compute-0 nova_compute[192079]: 2025-10-02 12:24:16.810 2 DEBUG nova.compute.manager [req-fa28a159-c799-4d23-a7b3-1b109e13166a req-13cd90e2-b93c-4ff4-a0b5-204e2025cc01 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Received event network-vif-unplugged-375c20c8-b3bc-484b-820a-f3988fb1bfa1 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:24:16 compute-0 nova_compute[192079]: 2025-10-02 12:24:16.810 2 DEBUG oslo_concurrency.lockutils [req-fa28a159-c799-4d23-a7b3-1b109e13166a req-13cd90e2-b93c-4ff4-a0b5-204e2025cc01 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "32196dd3-2739-4c43-9532-b0365f8095af-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:16 compute-0 nova_compute[192079]: 2025-10-02 12:24:16.810 2 DEBUG oslo_concurrency.lockutils [req-fa28a159-c799-4d23-a7b3-1b109e13166a req-13cd90e2-b93c-4ff4-a0b5-204e2025cc01 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "32196dd3-2739-4c43-9532-b0365f8095af-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:16 compute-0 nova_compute[192079]: 2025-10-02 12:24:16.811 2 DEBUG oslo_concurrency.lockutils [req-fa28a159-c799-4d23-a7b3-1b109e13166a req-13cd90e2-b93c-4ff4-a0b5-204e2025cc01 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "32196dd3-2739-4c43-9532-b0365f8095af-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:16 compute-0 nova_compute[192079]: 2025-10-02 12:24:16.811 2 DEBUG nova.compute.manager [req-fa28a159-c799-4d23-a7b3-1b109e13166a req-13cd90e2-b93c-4ff4-a0b5-204e2025cc01 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] No waiting events found dispatching network-vif-unplugged-375c20c8-b3bc-484b-820a-f3988fb1bfa1 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:24:16 compute-0 nova_compute[192079]: 2025-10-02 12:24:16.811 2 WARNING nova.compute.manager [req-fa28a159-c799-4d23-a7b3-1b109e13166a req-13cd90e2-b93c-4ff4-a0b5-204e2025cc01 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Received unexpected event network-vif-unplugged-375c20c8-b3bc-484b-820a-f3988fb1bfa1 for instance with vm_state resized and task_state resize_reverting.
Oct 02 12:24:16 compute-0 nova_compute[192079]: 2025-10-02 12:24:16.811 2 DEBUG nova.compute.manager [req-fa28a159-c799-4d23-a7b3-1b109e13166a req-13cd90e2-b93c-4ff4-a0b5-204e2025cc01 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Received event network-vif-plugged-375c20c8-b3bc-484b-820a-f3988fb1bfa1 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:24:16 compute-0 nova_compute[192079]: 2025-10-02 12:24:16.811 2 DEBUG oslo_concurrency.lockutils [req-fa28a159-c799-4d23-a7b3-1b109e13166a req-13cd90e2-b93c-4ff4-a0b5-204e2025cc01 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "32196dd3-2739-4c43-9532-b0365f8095af-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:16 compute-0 nova_compute[192079]: 2025-10-02 12:24:16.811 2 DEBUG oslo_concurrency.lockutils [req-fa28a159-c799-4d23-a7b3-1b109e13166a req-13cd90e2-b93c-4ff4-a0b5-204e2025cc01 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "32196dd3-2739-4c43-9532-b0365f8095af-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:16 compute-0 nova_compute[192079]: 2025-10-02 12:24:16.812 2 DEBUG oslo_concurrency.lockutils [req-fa28a159-c799-4d23-a7b3-1b109e13166a req-13cd90e2-b93c-4ff4-a0b5-204e2025cc01 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "32196dd3-2739-4c43-9532-b0365f8095af-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:16 compute-0 nova_compute[192079]: 2025-10-02 12:24:16.812 2 DEBUG nova.compute.manager [req-fa28a159-c799-4d23-a7b3-1b109e13166a req-13cd90e2-b93c-4ff4-a0b5-204e2025cc01 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] No waiting events found dispatching network-vif-plugged-375c20c8-b3bc-484b-820a-f3988fb1bfa1 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:24:16 compute-0 nova_compute[192079]: 2025-10-02 12:24:16.812 2 WARNING nova.compute.manager [req-fa28a159-c799-4d23-a7b3-1b109e13166a req-13cd90e2-b93c-4ff4-a0b5-204e2025cc01 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Received unexpected event network-vif-plugged-375c20c8-b3bc-484b-820a-f3988fb1bfa1 for instance with vm_state resized and task_state resize_reverting.
Oct 02 12:24:16 compute-0 nova_compute[192079]: 2025-10-02 12:24:16.923 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:17 compute-0 nova_compute[192079]: 2025-10-02 12:24:17.477 2 DEBUG nova.compute.manager [req-e44ee9f6-7754-4566-90e8-04b66674eee0 req-4a78725e-0658-41f9-be71-ec0e78339ed1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Received event network-changed-375c20c8-b3bc-484b-820a-f3988fb1bfa1 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:24:17 compute-0 nova_compute[192079]: 2025-10-02 12:24:17.478 2 DEBUG nova.compute.manager [req-e44ee9f6-7754-4566-90e8-04b66674eee0 req-4a78725e-0658-41f9-be71-ec0e78339ed1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Refreshing instance network info cache due to event network-changed-375c20c8-b3bc-484b-820a-f3988fb1bfa1. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:24:17 compute-0 nova_compute[192079]: 2025-10-02 12:24:17.478 2 DEBUG oslo_concurrency.lockutils [req-e44ee9f6-7754-4566-90e8-04b66674eee0 req-4a78725e-0658-41f9-be71-ec0e78339ed1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-32196dd3-2739-4c43-9532-b0365f8095af" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:24:17 compute-0 nova_compute[192079]: 2025-10-02 12:24:17.479 2 DEBUG oslo_concurrency.lockutils [req-e44ee9f6-7754-4566-90e8-04b66674eee0 req-4a78725e-0658-41f9-be71-ec0e78339ed1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-32196dd3-2739-4c43-9532-b0365f8095af" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:24:17 compute-0 nova_compute[192079]: 2025-10-02 12:24:17.479 2 DEBUG nova.network.neutron [req-e44ee9f6-7754-4566-90e8-04b66674eee0 req-4a78725e-0658-41f9-be71-ec0e78339ed1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Refreshing network info cache for port 375c20c8-b3bc-484b-820a-f3988fb1bfa1 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:24:18 compute-0 nova_compute[192079]: 2025-10-02 12:24:18.121 2 DEBUG nova.virt.libvirt.driver [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Instance in state 1 after 10 seconds - resending shutdown _clean_shutdown /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4101
Oct 02 12:24:19 compute-0 nova_compute[192079]: 2025-10-02 12:24:19.129 2 DEBUG nova.network.neutron [req-e44ee9f6-7754-4566-90e8-04b66674eee0 req-4a78725e-0658-41f9-be71-ec0e78339ed1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Updated VIF entry in instance network info cache for port 375c20c8-b3bc-484b-820a-f3988fb1bfa1. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:24:19 compute-0 nova_compute[192079]: 2025-10-02 12:24:19.129 2 DEBUG nova.network.neutron [req-e44ee9f6-7754-4566-90e8-04b66674eee0 req-4a78725e-0658-41f9-be71-ec0e78339ed1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Updating instance_info_cache with network_info: [{"id": "375c20c8-b3bc-484b-820a-f3988fb1bfa1", "address": "fa:16:3e:af:53:5f", "network": {"id": "91662be7-398f-4c34-a848-62b46821f0fd", "bridge": "br-int", "label": "tempest-network-smoke--722078817", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.233", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap375c20c8-b3", "ovs_interfaceid": "375c20c8-b3bc-484b-820a-f3988fb1bfa1", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:24:19 compute-0 nova_compute[192079]: 2025-10-02 12:24:19.138 2 DEBUG nova.compute.manager [req-3f8bcaeb-4ca2-4897-9a73-755363ad4ff6 req-cf8fc0e1-c9ec-4a57-9206-c6dc3f0d9f84 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Received event network-vif-plugged-375c20c8-b3bc-484b-820a-f3988fb1bfa1 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:24:19 compute-0 nova_compute[192079]: 2025-10-02 12:24:19.139 2 DEBUG oslo_concurrency.lockutils [req-3f8bcaeb-4ca2-4897-9a73-755363ad4ff6 req-cf8fc0e1-c9ec-4a57-9206-c6dc3f0d9f84 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "32196dd3-2739-4c43-9532-b0365f8095af-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:19 compute-0 nova_compute[192079]: 2025-10-02 12:24:19.139 2 DEBUG oslo_concurrency.lockutils [req-3f8bcaeb-4ca2-4897-9a73-755363ad4ff6 req-cf8fc0e1-c9ec-4a57-9206-c6dc3f0d9f84 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "32196dd3-2739-4c43-9532-b0365f8095af-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:19 compute-0 nova_compute[192079]: 2025-10-02 12:24:19.139 2 DEBUG oslo_concurrency.lockutils [req-3f8bcaeb-4ca2-4897-9a73-755363ad4ff6 req-cf8fc0e1-c9ec-4a57-9206-c6dc3f0d9f84 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "32196dd3-2739-4c43-9532-b0365f8095af-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:19 compute-0 nova_compute[192079]: 2025-10-02 12:24:19.139 2 DEBUG nova.compute.manager [req-3f8bcaeb-4ca2-4897-9a73-755363ad4ff6 req-cf8fc0e1-c9ec-4a57-9206-c6dc3f0d9f84 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] No waiting events found dispatching network-vif-plugged-375c20c8-b3bc-484b-820a-f3988fb1bfa1 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:24:19 compute-0 nova_compute[192079]: 2025-10-02 12:24:19.140 2 WARNING nova.compute.manager [req-3f8bcaeb-4ca2-4897-9a73-755363ad4ff6 req-cf8fc0e1-c9ec-4a57-9206-c6dc3f0d9f84 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Received unexpected event network-vif-plugged-375c20c8-b3bc-484b-820a-f3988fb1bfa1 for instance with vm_state resized and task_state resize_reverting.
Oct 02 12:24:19 compute-0 nova_compute[192079]: 2025-10-02 12:24:19.171 2 DEBUG oslo_concurrency.lockutils [req-e44ee9f6-7754-4566-90e8-04b66674eee0 req-4a78725e-0658-41f9-be71-ec0e78339ed1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-32196dd3-2739-4c43-9532-b0365f8095af" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:24:19 compute-0 nova_compute[192079]: 2025-10-02 12:24:19.338 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:20 compute-0 ovn_controller[94336]: 2025-10-02T12:24:20Z|00397|binding|INFO|Releasing lport 38f1ac16-18c6-4b4a-b769-ebc7dd5181d4 from this chassis (sb_readonly=0)
Oct 02 12:24:20 compute-0 nova_compute[192079]: 2025-10-02 12:24:20.228 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:20 compute-0 kernel: tap1cf70acd-de (unregistering): left promiscuous mode
Oct 02 12:24:20 compute-0 NetworkManager[51160]: <info>  [1759407860.2859] device (tap1cf70acd-de): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:24:20 compute-0 ovn_controller[94336]: 2025-10-02T12:24:20Z|00398|binding|INFO|Releasing lport 1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 from this chassis (sb_readonly=0)
Oct 02 12:24:20 compute-0 ovn_controller[94336]: 2025-10-02T12:24:20Z|00399|binding|INFO|Setting lport 1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 down in Southbound
Oct 02 12:24:20 compute-0 ovn_controller[94336]: 2025-10-02T12:24:20Z|00400|binding|INFO|Removing iface tap1cf70acd-de ovn-installed in OVS
Oct 02 12:24:20 compute-0 nova_compute[192079]: 2025-10-02 12:24:20.292 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:20 compute-0 nova_compute[192079]: 2025-10-02 12:24:20.294 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:20 compute-0 nova_compute[192079]: 2025-10-02 12:24:20.310 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:20.317 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:e3:d2:69 10.100.0.10'], port_security=['fa:16:3e:e3:d2:69 10.100.0.10'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.10/28', 'neutron:device_id': '02f550a4-c57e-4d6f-b62b-decc0dbb1dbe', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-a04f937a-375f-4fb0-90fe-5f514a88668f', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'neutron:revision_number': '4', 'neutron:security_group_ids': 'ac5f8c49-69a1-4f51-9d25-21551ac4bbc1', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=50c0aa38-5fd8-41c7-b4bf-85b59722c5c3, chassis=[], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=1cf70acd-de15-44ba-8fd2-ea2846ce3ee6) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:24:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:20.318 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 in datapath a04f937a-375f-4fb0-90fe-5f514a88668f unbound from our chassis
Oct 02 12:24:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:20.319 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network a04f937a-375f-4fb0-90fe-5f514a88668f, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:24:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:20.320 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[461166a9-c260-4208-8ecc-a524a591b711]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:20.320 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f namespace which is not needed anymore
Oct 02 12:24:20 compute-0 systemd[1]: machine-qemu\x2d52\x2dinstance\x2d00000076.scope: Deactivated successfully.
Oct 02 12:24:20 compute-0 systemd[1]: machine-qemu\x2d52\x2dinstance\x2d00000076.scope: Consumed 13.955s CPU time.
Oct 02 12:24:20 compute-0 systemd-machined[152150]: Machine qemu-52-instance-00000076 terminated.
Oct 02 12:24:20 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[238141]: [NOTICE]   (238145) : haproxy version is 2.8.14-c23fe91
Oct 02 12:24:20 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[238141]: [NOTICE]   (238145) : path to executable is /usr/sbin/haproxy
Oct 02 12:24:20 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[238141]: [WARNING]  (238145) : Exiting Master process...
Oct 02 12:24:20 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[238141]: [ALERT]    (238145) : Current worker (238147) exited with code 143 (Terminated)
Oct 02 12:24:20 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[238141]: [WARNING]  (238145) : All workers exited. Exiting... (0)
Oct 02 12:24:20 compute-0 systemd[1]: libpod-546ced081c8fd2923fb66b42ce0b13e56e4b6604a4428dde87136f0928481235.scope: Deactivated successfully.
Oct 02 12:24:20 compute-0 podman[238490]: 2025-10-02 12:24:20.440952722 +0000 UTC m=+0.040729391 container died 546ced081c8fd2923fb66b42ce0b13e56e4b6604a4428dde87136f0928481235 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2)
Oct 02 12:24:20 compute-0 systemd[1]: var-lib-containers-storage-overlay-71cac6337e6eac7db7050469188abd1ff0eff08ba991ce1cd691e7bf49ec2698-merged.mount: Deactivated successfully.
Oct 02 12:24:20 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-546ced081c8fd2923fb66b42ce0b13e56e4b6604a4428dde87136f0928481235-userdata-shm.mount: Deactivated successfully.
Oct 02 12:24:20 compute-0 podman[238490]: 2025-10-02 12:24:20.470187799 +0000 UTC m=+0.069964468 container cleanup 546ced081c8fd2923fb66b42ce0b13e56e4b6604a4428dde87136f0928481235 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:24:20 compute-0 systemd[1]: libpod-conmon-546ced081c8fd2923fb66b42ce0b13e56e4b6604a4428dde87136f0928481235.scope: Deactivated successfully.
Oct 02 12:24:20 compute-0 podman[238516]: 2025-10-02 12:24:20.525206808 +0000 UTC m=+0.036791533 container remove 546ced081c8fd2923fb66b42ce0b13e56e4b6604a4428dde87136f0928481235 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:24:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:20.531 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e25c8b48-e5a0-461f-8ae5-2c5b8fc9a2c3]: (4, ('Thu Oct  2 12:24:20 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f (546ced081c8fd2923fb66b42ce0b13e56e4b6604a4428dde87136f0928481235)\n546ced081c8fd2923fb66b42ce0b13e56e4b6604a4428dde87136f0928481235\nThu Oct  2 12:24:20 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f (546ced081c8fd2923fb66b42ce0b13e56e4b6604a4428dde87136f0928481235)\n546ced081c8fd2923fb66b42ce0b13e56e4b6604a4428dde87136f0928481235\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:20.533 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b435deae-a0b3-440e-9ba6-93702c41d4de]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:20.534 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapa04f937a-30, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:20 compute-0 nova_compute[192079]: 2025-10-02 12:24:20.537 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:20 compute-0 kernel: tapa04f937a-30: left promiscuous mode
Oct 02 12:24:20 compute-0 nova_compute[192079]: 2025-10-02 12:24:20.551 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:20 compute-0 nova_compute[192079]: 2025-10-02 12:24:20.555 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:20.558 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[21e589e3-f7c4-4d01-8208-cc70346e9ce7]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:20.585 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[38e4a524-95a1-4ab2-b836-5dc0669f8412]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:20.586 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e5fb8887-9ad8-4405-bf3e-680f450c65c2]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:20.601 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[350a5e1d-991e-446c-880d-780eb5259717]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 583962, 'reachable_time': 43178, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 238552, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:20.603 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:24:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:20.603 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[a9e989e1-3351-41e9-b7b0-c3dfe1bb7b5c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:20 compute-0 systemd[1]: run-netns-ovnmeta\x2da04f937a\x2d375f\x2d4fb0\x2d90fe\x2d5f514a88668f.mount: Deactivated successfully.
Oct 02 12:24:20 compute-0 nova_compute[192079]: 2025-10-02 12:24:20.677 2 DEBUG nova.compute.manager [req-a94fae7e-a12f-4475-ae35-717953d29e3c req-f547aa41-c1c8-41b6-8447-f34095ad07a8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Received event network-vif-unplugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:24:20 compute-0 nova_compute[192079]: 2025-10-02 12:24:20.678 2 DEBUG oslo_concurrency.lockutils [req-a94fae7e-a12f-4475-ae35-717953d29e3c req-f547aa41-c1c8-41b6-8447-f34095ad07a8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:20 compute-0 nova_compute[192079]: 2025-10-02 12:24:20.678 2 DEBUG oslo_concurrency.lockutils [req-a94fae7e-a12f-4475-ae35-717953d29e3c req-f547aa41-c1c8-41b6-8447-f34095ad07a8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:20 compute-0 nova_compute[192079]: 2025-10-02 12:24:20.678 2 DEBUG oslo_concurrency.lockutils [req-a94fae7e-a12f-4475-ae35-717953d29e3c req-f547aa41-c1c8-41b6-8447-f34095ad07a8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:20 compute-0 nova_compute[192079]: 2025-10-02 12:24:20.679 2 DEBUG nova.compute.manager [req-a94fae7e-a12f-4475-ae35-717953d29e3c req-f547aa41-c1c8-41b6-8447-f34095ad07a8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] No waiting events found dispatching network-vif-unplugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:24:20 compute-0 nova_compute[192079]: 2025-10-02 12:24:20.679 2 WARNING nova.compute.manager [req-a94fae7e-a12f-4475-ae35-717953d29e3c req-f547aa41-c1c8-41b6-8447-f34095ad07a8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Received unexpected event network-vif-unplugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 for instance with vm_state active and task_state rebuilding.
Oct 02 12:24:20 compute-0 nova_compute[192079]: 2025-10-02 12:24:20.889 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_running_deleted_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.135 2 INFO nova.virt.libvirt.driver [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Instance shutdown successfully after 13 seconds.
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.141 2 INFO nova.virt.libvirt.driver [-] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Instance destroyed successfully.
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.146 2 INFO nova.virt.libvirt.driver [-] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Instance destroyed successfully.
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.146 2 DEBUG nova.virt.libvirt.vif [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:23:53Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-tempest.common.compute-instance-1540405064',display_name='tempest-ServerActionsTestJSON-server-1719994168',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-tempest-common-compute-instance-1540405064',id=118,image_ref='062d9f80-76b6-42ce-bee7-0fb82a008353',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:24:03Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={rebuild='server'},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='e564a4cad5d443dba81ec04d2a05ced9',ramdisk_id='',reservation_id='r-sboi37yp',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='062d9f80-76b6-42ce-bee7-0fb82a008353',image_container_format='bare',image_disk_format='qcow2',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-ServerActionsTestJSON-1646745100',owner_user_name='tempest-ServerActionsTestJSON-1646745100-projec
t-member'},tags=<?>,task_state='rebuilding',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:24:06Z,user_data=None,user_id='d54b1826121b47caba89932a78c06ccd',uuid=02f550a4-c57e-4d6f-b62b-decc0dbb1dbe,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "address": "fa:16:3e:e3:d2:69", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap1cf70acd-de", "ovs_interfaceid": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.146 2 DEBUG nova.network.os_vif_util [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converting VIF {"id": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "address": "fa:16:3e:e3:d2:69", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap1cf70acd-de", "ovs_interfaceid": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.147 2 DEBUG nova.network.os_vif_util [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:e3:d2:69,bridge_name='br-int',has_traffic_filtering=True,id=1cf70acd-de15-44ba-8fd2-ea2846ce3ee6,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap1cf70acd-de') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.147 2 DEBUG os_vif [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:e3:d2:69,bridge_name='br-int',has_traffic_filtering=True,id=1cf70acd-de15-44ba-8fd2-ea2846ce3ee6,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap1cf70acd-de') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.148 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.149 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap1cf70acd-de, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.154 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.158 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.161 2 INFO os_vif [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:e3:d2:69,bridge_name='br-int',has_traffic_filtering=True,id=1cf70acd-de15-44ba-8fd2-ea2846ce3ee6,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap1cf70acd-de')
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.162 2 INFO nova.virt.libvirt.driver [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Deleting instance files /var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe_del
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.163 2 INFO nova.virt.libvirt.driver [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Deletion of /var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe_del complete
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.386 2 DEBUG nova.virt.libvirt.driver [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.386 2 INFO nova.virt.libvirt.driver [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Creating image(s)
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.387 2 DEBUG oslo_concurrency.lockutils [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "/var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.387 2 DEBUG oslo_concurrency.lockutils [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "/var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.388 2 DEBUG oslo_concurrency.lockutils [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "/var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.407 2 DEBUG oslo_concurrency.processutils [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.472 2 DEBUG oslo_concurrency.processutils [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2 --force-share --output=json" returned: 0 in 0.065s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.474 2 DEBUG oslo_concurrency.lockutils [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "d7f074efa852dc950deac120296f6eecf48a40d2" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.475 2 DEBUG oslo_concurrency.lockutils [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "d7f074efa852dc950deac120296f6eecf48a40d2" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.492 2 DEBUG oslo_concurrency.processutils [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.549 2 DEBUG oslo_concurrency.processutils [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2 --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.550 2 DEBUG oslo_concurrency.processutils [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2,backing_fmt=raw /var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.919 2 DEBUG oslo_concurrency.processutils [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2,backing_fmt=raw /var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/disk 1073741824" returned: 0 in 0.369s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.920 2 DEBUG oslo_concurrency.lockutils [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "d7f074efa852dc950deac120296f6eecf48a40d2" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.446s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.923 2 DEBUG oslo_concurrency.processutils [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.946 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.984 2 DEBUG oslo_concurrency.processutils [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2 --force-share --output=json" returned: 0 in 0.062s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.985 2 DEBUG nova.virt.disk.api [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Checking if we can resize image /var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:24:21 compute-0 nova_compute[192079]: 2025-10-02 12:24:21.986 2 DEBUG oslo_concurrency.processutils [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.040 2 DEBUG oslo_concurrency.processutils [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/disk --force-share --output=json" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.041 2 DEBUG nova.virt.disk.api [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Cannot resize image /var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.042 2 DEBUG nova.virt.libvirt.driver [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.042 2 DEBUG nova.virt.libvirt.driver [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Ensure instance console log exists: /var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.042 2 DEBUG oslo_concurrency.lockutils [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.043 2 DEBUG oslo_concurrency.lockutils [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.043 2 DEBUG oslo_concurrency.lockutils [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.045 2 DEBUG nova.virt.libvirt.driver [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Start _get_guest_xml network_info=[{"id": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "address": "fa:16:3e:e3:d2:69", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap1cf70acd-de", "ovs_interfaceid": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:28Z,direct_url=<?>,disk_format='qcow2',id=062d9f80-76b6-42ce-bee7-0fb82a008353,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img_alt',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:29Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.048 2 WARNING nova.virt.libvirt.driver [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.: NotImplementedError
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.053 2 DEBUG nova.virt.libvirt.host [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.053 2 DEBUG nova.virt.libvirt.host [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.056 2 DEBUG nova.virt.libvirt.host [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.056 2 DEBUG nova.virt.libvirt.host [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.058 2 DEBUG nova.virt.libvirt.driver [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.058 2 DEBUG nova.virt.hardware [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:28Z,direct_url=<?>,disk_format='qcow2',id=062d9f80-76b6-42ce-bee7-0fb82a008353,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img_alt',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:29Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.058 2 DEBUG nova.virt.hardware [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.058 2 DEBUG nova.virt.hardware [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.059 2 DEBUG nova.virt.hardware [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.059 2 DEBUG nova.virt.hardware [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.059 2 DEBUG nova.virt.hardware [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.059 2 DEBUG nova.virt.hardware [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.060 2 DEBUG nova.virt.hardware [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.060 2 DEBUG nova.virt.hardware [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.060 2 DEBUG nova.virt.hardware [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.060 2 DEBUG nova.virt.hardware [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.061 2 DEBUG nova.objects.instance [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'vcpu_model' on Instance uuid 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.079 2 DEBUG nova.virt.libvirt.vif [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=True,config_drive='True',created_at=2025-10-02T12:23:53Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-tempest.common.compute-instance-1540405064',display_name='tempest-ServerActionsTestJSON-server-1719994168',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-tempest-common-compute-instance-1540405064',id=118,image_ref='062d9f80-76b6-42ce-bee7-0fb82a008353',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:24:03Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={rebuild='server'},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='e564a4cad5d443dba81ec04d2a05ced9',ramdisk_id='',reservation_id='r-sboi37yp',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',clean_attempts='1',image_base_image_ref='062d9f80-76b6-42ce-bee7-0fb82a008353',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-ServerActionsTestJSON-1646745100',owner_user_name='t
empest-ServerActionsTestJSON-1646745100-project-member'},tags=<?>,task_state='rebuild_spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:24:21Z,user_data=None,user_id='d54b1826121b47caba89932a78c06ccd',uuid=02f550a4-c57e-4d6f-b62b-decc0dbb1dbe,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "address": "fa:16:3e:e3:d2:69", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap1cf70acd-de", "ovs_interfaceid": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.079 2 DEBUG nova.network.os_vif_util [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converting VIF {"id": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "address": "fa:16:3e:e3:d2:69", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap1cf70acd-de", "ovs_interfaceid": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.080 2 DEBUG nova.network.os_vif_util [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:e3:d2:69,bridge_name='br-int',has_traffic_filtering=True,id=1cf70acd-de15-44ba-8fd2-ea2846ce3ee6,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap1cf70acd-de') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.082 2 DEBUG nova.virt.libvirt.driver [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:24:22 compute-0 nova_compute[192079]:   <uuid>02f550a4-c57e-4d6f-b62b-decc0dbb1dbe</uuid>
Oct 02 12:24:22 compute-0 nova_compute[192079]:   <name>instance-00000076</name>
Oct 02 12:24:22 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:24:22 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:24:22 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:24:22 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:       <nova:name>tempest-ServerActionsTestJSON-server-1719994168</nova:name>
Oct 02 12:24:22 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:24:22</nova:creationTime>
Oct 02 12:24:22 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:24:22 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:24:22 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:24:22 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:24:22 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:24:22 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:24:22 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:24:22 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:24:22 compute-0 nova_compute[192079]:         <nova:user uuid="d54b1826121b47caba89932a78c06ccd">tempest-ServerActionsTestJSON-1646745100-project-member</nova:user>
Oct 02 12:24:22 compute-0 nova_compute[192079]:         <nova:project uuid="e564a4cad5d443dba81ec04d2a05ced9">tempest-ServerActionsTestJSON-1646745100</nova:project>
Oct 02 12:24:22 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:24:22 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="062d9f80-76b6-42ce-bee7-0fb82a008353"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:24:22 compute-0 nova_compute[192079]:         <nova:port uuid="1cf70acd-de15-44ba-8fd2-ea2846ce3ee6">
Oct 02 12:24:22 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.10" ipVersion="4"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:24:22 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:24:22 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:24:22 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <system>
Oct 02 12:24:22 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:24:22 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:24:22 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:24:22 compute-0 nova_compute[192079]:       <entry name="serial">02f550a4-c57e-4d6f-b62b-decc0dbb1dbe</entry>
Oct 02 12:24:22 compute-0 nova_compute[192079]:       <entry name="uuid">02f550a4-c57e-4d6f-b62b-decc0dbb1dbe</entry>
Oct 02 12:24:22 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     </system>
Oct 02 12:24:22 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:24:22 compute-0 nova_compute[192079]:   <os>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:   </os>
Oct 02 12:24:22 compute-0 nova_compute[192079]:   <features>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:   </features>
Oct 02 12:24:22 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:24:22 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:24:22 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:24:22 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/disk"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:24:22 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/disk.config"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:24:22 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:e3:d2:69"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:       <target dev="tap1cf70acd-de"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:24:22 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/console.log" append="off"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <video>
Oct 02 12:24:22 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     </video>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:24:22 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:24:22 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:24:22 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:24:22 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:24:22 compute-0 nova_compute[192079]: </domain>
Oct 02 12:24:22 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.083 2 DEBUG nova.compute.manager [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Preparing to wait for external event network-vif-plugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.083 2 DEBUG oslo_concurrency.lockutils [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.084 2 DEBUG oslo_concurrency.lockutils [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.084 2 DEBUG oslo_concurrency.lockutils [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.085 2 DEBUG nova.virt.libvirt.vif [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=True,config_drive='True',created_at=2025-10-02T12:23:53Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-tempest.common.compute-instance-1540405064',display_name='tempest-ServerActionsTestJSON-server-1719994168',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-tempest-common-compute-instance-1540405064',id=118,image_ref='062d9f80-76b6-42ce-bee7-0fb82a008353',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:24:03Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={rebuild='server'},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='e564a4cad5d443dba81ec04d2a05ced9',ramdisk_id='',reservation_id='r-sboi37yp',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',clean_attempts='1',image_base_image_ref='062d9f80-76b6-42ce-bee7-0fb82a008353',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-ServerActionsTestJSON-1646745100',owner_user_name='t
empest-ServerActionsTestJSON-1646745100-project-member'},tags=<?>,task_state='rebuild_spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:24:21Z,user_data=None,user_id='d54b1826121b47caba89932a78c06ccd',uuid=02f550a4-c57e-4d6f-b62b-decc0dbb1dbe,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "address": "fa:16:3e:e3:d2:69", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap1cf70acd-de", "ovs_interfaceid": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.085 2 DEBUG nova.network.os_vif_util [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converting VIF {"id": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "address": "fa:16:3e:e3:d2:69", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap1cf70acd-de", "ovs_interfaceid": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.086 2 DEBUG nova.network.os_vif_util [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:e3:d2:69,bridge_name='br-int',has_traffic_filtering=True,id=1cf70acd-de15-44ba-8fd2-ea2846ce3ee6,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap1cf70acd-de') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.086 2 DEBUG os_vif [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:e3:d2:69,bridge_name='br-int',has_traffic_filtering=True,id=1cf70acd-de15-44ba-8fd2-ea2846ce3ee6,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap1cf70acd-de') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.087 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.087 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.088 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.091 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.091 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap1cf70acd-de, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.091 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap1cf70acd-de, col_values=(('external_ids', {'iface-id': '1cf70acd-de15-44ba-8fd2-ea2846ce3ee6', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:e3:d2:69', 'vm-uuid': '02f550a4-c57e-4d6f-b62b-decc0dbb1dbe'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.094 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:22 compute-0 NetworkManager[51160]: <info>  [1759407862.0951] manager: (tap1cf70acd-de): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/204)
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.096 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.100 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.100 2 INFO os_vif [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:e3:d2:69,bridge_name='br-int',has_traffic_filtering=True,id=1cf70acd-de15-44ba-8fd2-ea2846ce3ee6,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap1cf70acd-de')
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.230 2 DEBUG nova.virt.libvirt.driver [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.233 2 DEBUG nova.virt.libvirt.driver [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.233 2 DEBUG nova.virt.libvirt.driver [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] No VIF found with MAC fa:16:3e:e3:d2:69, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.234 2 INFO nova.virt.libvirt.driver [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Using config drive
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.256 2 DEBUG nova.objects.instance [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'ec2_ids' on Instance uuid 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.295 2 DEBUG nova.objects.instance [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'keypairs' on Instance uuid 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.876 2 DEBUG nova.compute.manager [req-9bd249e0-ad5b-4d8a-9a02-9d18f1248d51 req-51de7228-3ed4-4649-8f65-32a8e134de89 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Received event network-vif-plugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.876 2 DEBUG oslo_concurrency.lockutils [req-9bd249e0-ad5b-4d8a-9a02-9d18f1248d51 req-51de7228-3ed4-4649-8f65-32a8e134de89 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.877 2 DEBUG oslo_concurrency.lockutils [req-9bd249e0-ad5b-4d8a-9a02-9d18f1248d51 req-51de7228-3ed4-4649-8f65-32a8e134de89 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.877 2 DEBUG oslo_concurrency.lockutils [req-9bd249e0-ad5b-4d8a-9a02-9d18f1248d51 req-51de7228-3ed4-4649-8f65-32a8e134de89 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:22 compute-0 nova_compute[192079]: 2025-10-02 12:24:22.877 2 DEBUG nova.compute.manager [req-9bd249e0-ad5b-4d8a-9a02-9d18f1248d51 req-51de7228-3ed4-4649-8f65-32a8e134de89 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Processing event network-vif-plugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:24:23 compute-0 nova_compute[192079]: 2025-10-02 12:24:23.168 2 INFO nova.virt.libvirt.driver [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Creating config drive at /var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/disk.config
Oct 02 12:24:23 compute-0 nova_compute[192079]: 2025-10-02 12:24:23.174 2 DEBUG oslo_concurrency.processutils [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp1ope1_xj execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:24:23 compute-0 nova_compute[192079]: 2025-10-02 12:24:23.299 2 DEBUG oslo_concurrency.processutils [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp1ope1_xj" returned: 0 in 0.125s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:24:23 compute-0 kernel: tap1cf70acd-de: entered promiscuous mode
Oct 02 12:24:23 compute-0 NetworkManager[51160]: <info>  [1759407863.3812] manager: (tap1cf70acd-de): new Tun device (/org/freedesktop/NetworkManager/Devices/205)
Oct 02 12:24:23 compute-0 ovn_controller[94336]: 2025-10-02T12:24:23Z|00401|binding|INFO|Claiming lport 1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 for this chassis.
Oct 02 12:24:23 compute-0 ovn_controller[94336]: 2025-10-02T12:24:23Z|00402|binding|INFO|1cf70acd-de15-44ba-8fd2-ea2846ce3ee6: Claiming fa:16:3e:e3:d2:69 10.100.0.10
Oct 02 12:24:23 compute-0 nova_compute[192079]: 2025-10-02 12:24:23.383 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:23.393 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:e3:d2:69 10.100.0.10'], port_security=['fa:16:3e:e3:d2:69 10.100.0.10'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.10/28', 'neutron:device_id': '02f550a4-c57e-4d6f-b62b-decc0dbb1dbe', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-a04f937a-375f-4fb0-90fe-5f514a88668f', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'neutron:revision_number': '5', 'neutron:security_group_ids': 'ac5f8c49-69a1-4f51-9d25-21551ac4bbc1', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=50c0aa38-5fd8-41c7-b4bf-85b59722c5c3, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=1cf70acd-de15-44ba-8fd2-ea2846ce3ee6) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:23.394 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 in datapath a04f937a-375f-4fb0-90fe-5f514a88668f bound to our chassis
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:23.396 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network a04f937a-375f-4fb0-90fe-5f514a88668f
Oct 02 12:24:23 compute-0 ovn_controller[94336]: 2025-10-02T12:24:23Z|00403|binding|INFO|Setting lport 1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 ovn-installed in OVS
Oct 02 12:24:23 compute-0 ovn_controller[94336]: 2025-10-02T12:24:23Z|00404|binding|INFO|Setting lport 1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 up in Southbound
Oct 02 12:24:23 compute-0 nova_compute[192079]: 2025-10-02 12:24:23.399 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:23 compute-0 nova_compute[192079]: 2025-10-02 12:24:23.403 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:23.416 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[20ee4650-045a-45f5-ac2c-482837809a80]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:23.417 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tapa04f937a-31 in ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:23.420 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tapa04f937a-30 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:23.420 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9320601e-8d20-450d-a331-341597ad4131]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:23.421 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[40d9eacb-e2b5-476b-aee2-9f756fb21041]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:23 compute-0 systemd-udevd[238590]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:23.437 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[cfbc1918-88ba-4dbd-9c4b-85f9f4459aee]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:23 compute-0 systemd-machined[152150]: New machine qemu-54-instance-00000076.
Oct 02 12:24:23 compute-0 NetworkManager[51160]: <info>  [1759407863.4497] device (tap1cf70acd-de): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:24:23 compute-0 NetworkManager[51160]: <info>  [1759407863.4509] device (tap1cf70acd-de): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:24:23 compute-0 systemd[1]: Started Virtual Machine qemu-54-instance-00000076.
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:23.470 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9ceb02cd-b754-4382-8a92-36c81a0069bf]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:23.516 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[1d3852e7-66d3-41a1-9dec-3f572d1fd7c2]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:23 compute-0 NetworkManager[51160]: <info>  [1759407863.5298] manager: (tapa04f937a-30): new Veth device (/org/freedesktop/NetworkManager/Devices/206)
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:23.528 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b0c84358-e468-4b50-a70e-593f8e3bf8c5]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:23.573 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[dce30f35-64c4-4b1a-a5b3-3472e0646118]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:23.578 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[26095e7f-66d2-4208-af8e-c08df305a6b6]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:23 compute-0 NetworkManager[51160]: <info>  [1759407863.5991] device (tapa04f937a-30): carrier: link connected
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:23.607 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[0e5fbac6-e291-4f82-aa66-56f07ff1853b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:23.625 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[71082a7a-279c-44c0-9c1b-bbd401de83c1]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapa04f937a-31'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:33:93:68'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 131], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 586122, 'reachable_time': 27277, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 238622, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:23.643 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[47416c84-c2b3-427a-9ceb-75547f2cc334]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe33:9368'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 586122, 'tstamp': 586122}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 238624, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:23.663 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[65aa8b8e-1991-45fd-9cfb-269bc3bb11f2]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapa04f937a-31'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:33:93:68'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 131], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 586122, 'reachable_time': 27277, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 238629, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:23.689 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ba44423b-c14a-4297-be42-74180c8b2e2f]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:23.744 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[18ba7afa-acaf-4f60-bf73-a620700d9427]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:23.745 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapa04f937a-30, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:23.746 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:23.746 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapa04f937a-30, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:23 compute-0 nova_compute[192079]: 2025-10-02 12:24:23.748 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:23 compute-0 NetworkManager[51160]: <info>  [1759407863.7494] manager: (tapa04f937a-30): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/207)
Oct 02 12:24:23 compute-0 kernel: tapa04f937a-30: entered promiscuous mode
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:23.754 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tapa04f937a-30, col_values=(('external_ids', {'iface-id': '38f1ac16-18c6-4b4a-b769-ebc7dd5181d4'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:23 compute-0 nova_compute[192079]: 2025-10-02 12:24:23.755 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:23 compute-0 ovn_controller[94336]: 2025-10-02T12:24:23Z|00405|binding|INFO|Releasing lport 38f1ac16-18c6-4b4a-b769-ebc7dd5181d4 from this chassis (sb_readonly=0)
Oct 02 12:24:23 compute-0 nova_compute[192079]: 2025-10-02 12:24:23.771 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:23.771 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/a04f937a-375f-4fb0-90fe-5f514a88668f.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/a04f937a-375f-4fb0-90fe-5f514a88668f.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:23.772 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4762d4d6-f0e5-482f-ab81-ed55c29bed80]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:23.775 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-a04f937a-375f-4fb0-90fe-5f514a88668f
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/a04f937a-375f-4fb0-90fe-5f514a88668f.pid.haproxy
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID a04f937a-375f-4fb0-90fe-5f514a88668f
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:24:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:23.779 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'env', 'PROCESS_TAG=haproxy-a04f937a-375f-4fb0-90fe-5f514a88668f', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/a04f937a-375f-4fb0-90fe-5f514a88668f.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:24:24 compute-0 podman[238663]: 2025-10-02 12:24:24.14611595 +0000 UTC m=+0.052004569 container create bf8748ea9c31ac3ca5fd82deb65843577a43721e3a49a74057e179ce128821ce (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:24:24 compute-0 systemd[1]: Started libpod-conmon-bf8748ea9c31ac3ca5fd82deb65843577a43721e3a49a74057e179ce128821ce.scope.
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.190 2 DEBUG nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Removed pending event for 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe due to event _event_emit_delayed /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:438
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.191 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407864.189696, 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.192 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] VM Started (Lifecycle Event)
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.194 2 DEBUG nova.compute.manager [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.197 2 DEBUG nova.virt.libvirt.driver [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.201 2 INFO nova.virt.libvirt.driver [-] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Instance spawned successfully.
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.201 2 DEBUG nova.virt.libvirt.driver [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:24:24 compute-0 podman[238663]: 2025-10-02 12:24:24.121620452 +0000 UTC m=+0.027509091 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.216 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:24:24 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.221 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Synchronizing instance power state after lifecycle event "Started"; current vm_state: active, current task_state: rebuild_spawning, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:24:24 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/90975a7db421eb6ab5d2c14b06988f0edd18722e223b24c41ceb79838b209a24/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.230 2 DEBUG nova.virt.libvirt.driver [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.230 2 DEBUG nova.virt.libvirt.driver [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.231 2 DEBUG nova.virt.libvirt.driver [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.231 2 DEBUG nova.virt.libvirt.driver [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.232 2 DEBUG nova.virt.libvirt.driver [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.232 2 DEBUG nova.virt.libvirt.driver [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:24:24 compute-0 podman[238663]: 2025-10-02 12:24:24.237211842 +0000 UTC m=+0.143100491 container init bf8748ea9c31ac3ca5fd82deb65843577a43721e3a49a74057e179ce128821ce (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.240 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] During sync_power_state the instance has a pending task (rebuild_spawning). Skip.
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.240 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407864.1918871, 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.240 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] VM Paused (Lifecycle Event)
Oct 02 12:24:24 compute-0 podman[238663]: 2025-10-02 12:24:24.245071836 +0000 UTC m=+0.150960465 container start bf8748ea9c31ac3ca5fd82deb65843577a43721e3a49a74057e179ce128821ce (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:24:24 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[238678]: [NOTICE]   (238712) : New worker (238728) forked
Oct 02 12:24:24 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[238678]: [NOTICE]   (238712) : Loading success.
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.265 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.271 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407864.1974053, 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.272 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] VM Resumed (Lifecycle Event)
Oct 02 12:24:24 compute-0 podman[238679]: 2025-10-02 12:24:24.285734054 +0000 UTC m=+0.086455647 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.300 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.304 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: active, current task_state: rebuild_spawning, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:24:24 compute-0 podman[238691]: 2025-10-02 12:24:24.316867893 +0000 UTC m=+0.084299239 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, 
tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:24:24 compute-0 podman[238692]: 2025-10-02 12:24:24.335692885 +0000 UTC m=+0.090076345 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_id=ovn_controller, container_name=ovn_controller)
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.337 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] During sync_power_state the instance has a pending task (rebuild_spawning). Skip.
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.347 2 DEBUG nova.compute.manager [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.440 2 DEBUG oslo_concurrency.lockutils [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.444 2 DEBUG oslo_concurrency.lockutils [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.004s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.444 2 DEBUG nova.objects.instance [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Trying to apply a migration context that does not seem to be set for this instance apply_migration_context /usr/lib/python3.9/site-packages/nova/objects/instance.py:1032
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.601 2 DEBUG oslo_concurrency.lockutils [None req-5f3fbd3b-647d-4c64-813c-b1d6ddcffd1d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 0.158s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:24 compute-0 nova_compute[192079]: 2025-10-02 12:24:24.873 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:24:25 compute-0 nova_compute[192079]: 2025-10-02 12:24:25.502 2 DEBUG nova.compute.manager [req-183d8452-60e5-400e-8838-e2d020bb88a2 req-eaceff95-c5a0-4584-94ab-45caaea532f7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Received event network-vif-plugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:24:25 compute-0 nova_compute[192079]: 2025-10-02 12:24:25.502 2 DEBUG oslo_concurrency.lockutils [req-183d8452-60e5-400e-8838-e2d020bb88a2 req-eaceff95-c5a0-4584-94ab-45caaea532f7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:25 compute-0 nova_compute[192079]: 2025-10-02 12:24:25.503 2 DEBUG oslo_concurrency.lockutils [req-183d8452-60e5-400e-8838-e2d020bb88a2 req-eaceff95-c5a0-4584-94ab-45caaea532f7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:25 compute-0 nova_compute[192079]: 2025-10-02 12:24:25.503 2 DEBUG oslo_concurrency.lockutils [req-183d8452-60e5-400e-8838-e2d020bb88a2 req-eaceff95-c5a0-4584-94ab-45caaea532f7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:25 compute-0 nova_compute[192079]: 2025-10-02 12:24:25.503 2 DEBUG nova.compute.manager [req-183d8452-60e5-400e-8838-e2d020bb88a2 req-eaceff95-c5a0-4584-94ab-45caaea532f7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] No waiting events found dispatching network-vif-plugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:24:25 compute-0 nova_compute[192079]: 2025-10-02 12:24:25.503 2 WARNING nova.compute.manager [req-183d8452-60e5-400e-8838-e2d020bb88a2 req-eaceff95-c5a0-4584-94ab-45caaea532f7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Received unexpected event network-vif-plugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 for instance with vm_state active and task_state None.
Oct 02 12:24:26 compute-0 nova_compute[192079]: 2025-10-02 12:24:26.927 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:27 compute-0 nova_compute[192079]: 2025-10-02 12:24:27.094 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:27 compute-0 nova_compute[192079]: 2025-10-02 12:24:27.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:24:27 compute-0 nova_compute[192079]: 2025-10-02 12:24:27.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_incomplete_migrations run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:24:27 compute-0 nova_compute[192079]: 2025-10-02 12:24:27.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Cleaning up deleted instances with incomplete migration  _cleanup_incomplete_migrations /usr/lib/python3.9/site-packages/nova/compute/manager.py:11183
Oct 02 12:24:27 compute-0 nova_compute[192079]: 2025-10-02 12:24:27.668 2 DEBUG nova.compute.manager [req-1dacdeb3-e609-4e3a-b40b-615972b0fbc0 req-308f0fd8-0d90-4dfb-b896-b691327ee163 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Received event network-vif-plugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:24:27 compute-0 nova_compute[192079]: 2025-10-02 12:24:27.668 2 DEBUG oslo_concurrency.lockutils [req-1dacdeb3-e609-4e3a-b40b-615972b0fbc0 req-308f0fd8-0d90-4dfb-b896-b691327ee163 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:27 compute-0 nova_compute[192079]: 2025-10-02 12:24:27.668 2 DEBUG oslo_concurrency.lockutils [req-1dacdeb3-e609-4e3a-b40b-615972b0fbc0 req-308f0fd8-0d90-4dfb-b896-b691327ee163 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:27 compute-0 nova_compute[192079]: 2025-10-02 12:24:27.669 2 DEBUG oslo_concurrency.lockutils [req-1dacdeb3-e609-4e3a-b40b-615972b0fbc0 req-308f0fd8-0d90-4dfb-b896-b691327ee163 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:27 compute-0 nova_compute[192079]: 2025-10-02 12:24:27.669 2 DEBUG nova.compute.manager [req-1dacdeb3-e609-4e3a-b40b-615972b0fbc0 req-308f0fd8-0d90-4dfb-b896-b691327ee163 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] No waiting events found dispatching network-vif-plugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:24:27 compute-0 nova_compute[192079]: 2025-10-02 12:24:27.669 2 WARNING nova.compute.manager [req-1dacdeb3-e609-4e3a-b40b-615972b0fbc0 req-308f0fd8-0d90-4dfb-b896-b691327ee163 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Received unexpected event network-vif-plugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 for instance with vm_state active and task_state None.
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.230 2 DEBUG oslo_concurrency.lockutils [None req-c4f81151-0099-46bf-870e-f293799e7951 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.232 2 DEBUG oslo_concurrency.lockutils [None req-c4f81151-0099-46bf-870e-f293799e7951 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.002s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.232 2 DEBUG oslo_concurrency.lockutils [None req-c4f81151-0099-46bf-870e-f293799e7951 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.232 2 DEBUG oslo_concurrency.lockutils [None req-c4f81151-0099-46bf-870e-f293799e7951 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.232 2 DEBUG oslo_concurrency.lockutils [None req-c4f81151-0099-46bf-870e-f293799e7951 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.243 2 INFO nova.compute.manager [None req-c4f81151-0099-46bf-870e-f293799e7951 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Terminating instance
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.254 2 DEBUG nova.compute.manager [None req-c4f81151-0099-46bf-870e-f293799e7951 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:24:28 compute-0 kernel: tap1cf70acd-de (unregistering): left promiscuous mode
Oct 02 12:24:28 compute-0 NetworkManager[51160]: <info>  [1759407868.2748] device (tap1cf70acd-de): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:24:28 compute-0 ovn_controller[94336]: 2025-10-02T12:24:28Z|00406|binding|INFO|Releasing lport 1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 from this chassis (sb_readonly=0)
Oct 02 12:24:28 compute-0 ovn_controller[94336]: 2025-10-02T12:24:28Z|00407|binding|INFO|Setting lport 1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 down in Southbound
Oct 02 12:24:28 compute-0 ovn_controller[94336]: 2025-10-02T12:24:28Z|00408|binding|INFO|Removing iface tap1cf70acd-de ovn-installed in OVS
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.284 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:28.302 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:e3:d2:69 10.100.0.10'], port_security=['fa:16:3e:e3:d2:69 10.100.0.10'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.10/28', 'neutron:device_id': '02f550a4-c57e-4d6f-b62b-decc0dbb1dbe', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-a04f937a-375f-4fb0-90fe-5f514a88668f', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'neutron:revision_number': '6', 'neutron:security_group_ids': 'ac5f8c49-69a1-4f51-9d25-21551ac4bbc1', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=50c0aa38-5fd8-41c7-b4bf-85b59722c5c3, chassis=[], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=1cf70acd-de15-44ba-8fd2-ea2846ce3ee6) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:24:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:28.304 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 in datapath a04f937a-375f-4fb0-90fe-5f514a88668f unbound from our chassis
Oct 02 12:24:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:28.306 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network a04f937a-375f-4fb0-90fe-5f514a88668f, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.306 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:28.307 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4062118c-4a38-4f3a-9026-1ec1ac7353bf]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:28.308 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f namespace which is not needed anymore
Oct 02 12:24:28 compute-0 systemd[1]: machine-qemu\x2d54\x2dinstance\x2d00000076.scope: Deactivated successfully.
Oct 02 12:24:28 compute-0 systemd[1]: machine-qemu\x2d54\x2dinstance\x2d00000076.scope: Consumed 4.804s CPU time.
Oct 02 12:24:28 compute-0 systemd-machined[152150]: Machine qemu-54-instance-00000076 terminated.
Oct 02 12:24:28 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[238678]: [NOTICE]   (238712) : haproxy version is 2.8.14-c23fe91
Oct 02 12:24:28 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[238678]: [NOTICE]   (238712) : path to executable is /usr/sbin/haproxy
Oct 02 12:24:28 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[238678]: [WARNING]  (238712) : Exiting Master process...
Oct 02 12:24:28 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[238678]: [ALERT]    (238712) : Current worker (238728) exited with code 143 (Terminated)
Oct 02 12:24:28 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[238678]: [WARNING]  (238712) : All workers exited. Exiting... (0)
Oct 02 12:24:28 compute-0 systemd[1]: libpod-bf8748ea9c31ac3ca5fd82deb65843577a43721e3a49a74057e179ce128821ce.scope: Deactivated successfully.
Oct 02 12:24:28 compute-0 podman[238783]: 2025-10-02 12:24:28.437863043 +0000 UTC m=+0.043803866 container died bf8748ea9c31ac3ca5fd82deb65843577a43721e3a49a74057e179ce128821ce (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.build-date=20251001)
Oct 02 12:24:28 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-bf8748ea9c31ac3ca5fd82deb65843577a43721e3a49a74057e179ce128821ce-userdata-shm.mount: Deactivated successfully.
Oct 02 12:24:28 compute-0 systemd[1]: var-lib-containers-storage-overlay-90975a7db421eb6ab5d2c14b06988f0edd18722e223b24c41ceb79838b209a24-merged.mount: Deactivated successfully.
Oct 02 12:24:28 compute-0 podman[238783]: 2025-10-02 12:24:28.471321234 +0000 UTC m=+0.077262057 container cleanup bf8748ea9c31ac3ca5fd82deb65843577a43721e3a49a74057e179ce128821ce (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, io.buildah.version=1.41.3, tcib_managed=true, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0)
Oct 02 12:24:28 compute-0 kernel: tap1cf70acd-de: entered promiscuous mode
Oct 02 12:24:28 compute-0 NetworkManager[51160]: <info>  [1759407868.4742] manager: (tap1cf70acd-de): new Tun device (/org/freedesktop/NetworkManager/Devices/208)
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.475 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:28 compute-0 kernel: tap1cf70acd-de (unregistering): left promiscuous mode
Oct 02 12:24:28 compute-0 ovn_controller[94336]: 2025-10-02T12:24:28Z|00409|binding|INFO|Claiming lport 1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 for this chassis.
Oct 02 12:24:28 compute-0 ovn_controller[94336]: 2025-10-02T12:24:28Z|00410|binding|INFO|1cf70acd-de15-44ba-8fd2-ea2846ce3ee6: Claiming fa:16:3e:e3:d2:69 10.100.0.10
Oct 02 12:24:28 compute-0 systemd[1]: libpod-conmon-bf8748ea9c31ac3ca5fd82deb65843577a43721e3a49a74057e179ce128821ce.scope: Deactivated successfully.
Oct 02 12:24:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:28.484 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:e3:d2:69 10.100.0.10'], port_security=['fa:16:3e:e3:d2:69 10.100.0.10'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.10/28', 'neutron:device_id': '02f550a4-c57e-4d6f-b62b-decc0dbb1dbe', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-a04f937a-375f-4fb0-90fe-5f514a88668f', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'neutron:revision_number': '6', 'neutron:security_group_ids': 'ac5f8c49-69a1-4f51-9d25-21551ac4bbc1', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=50c0aa38-5fd8-41c7-b4bf-85b59722c5c3, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=1cf70acd-de15-44ba-8fd2-ea2846ce3ee6) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:24:28 compute-0 ovn_controller[94336]: 2025-10-02T12:24:28Z|00411|binding|INFO|Setting lport 1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 ovn-installed in OVS
Oct 02 12:24:28 compute-0 ovn_controller[94336]: 2025-10-02T12:24:28Z|00412|binding|INFO|Setting lport 1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 up in Southbound
Oct 02 12:24:28 compute-0 ovn_controller[94336]: 2025-10-02T12:24:28Z|00413|binding|INFO|Releasing lport 1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 from this chassis (sb_readonly=1)
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.501 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:28 compute-0 ovn_controller[94336]: 2025-10-02T12:24:28Z|00414|if_status|INFO|Dropped 4 log messages in last 553 seconds (most recently, 553 seconds ago) due to excessive rate
Oct 02 12:24:28 compute-0 ovn_controller[94336]: 2025-10-02T12:24:28Z|00415|if_status|INFO|Not setting lport 1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 down as sb is readonly
Oct 02 12:24:28 compute-0 ovn_controller[94336]: 2025-10-02T12:24:28Z|00416|binding|INFO|Removing iface tap1cf70acd-de ovn-installed in OVS
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.503 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:28 compute-0 ovn_controller[94336]: 2025-10-02T12:24:28Z|00417|binding|INFO|Releasing lport 1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 from this chassis (sb_readonly=0)
Oct 02 12:24:28 compute-0 ovn_controller[94336]: 2025-10-02T12:24:28Z|00418|binding|INFO|Setting lport 1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 down in Southbound
Oct 02 12:24:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:28.515 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:e3:d2:69 10.100.0.10'], port_security=['fa:16:3e:e3:d2:69 10.100.0.10'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.10/28', 'neutron:device_id': '02f550a4-c57e-4d6f-b62b-decc0dbb1dbe', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-a04f937a-375f-4fb0-90fe-5f514a88668f', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'neutron:revision_number': '6', 'neutron:security_group_ids': 'ac5f8c49-69a1-4f51-9d25-21551ac4bbc1', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=50c0aa38-5fd8-41c7-b4bf-85b59722c5c3, chassis=[], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=1cf70acd-de15-44ba-8fd2-ea2846ce3ee6) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.520 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.530 2 INFO nova.virt.libvirt.driver [-] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Instance destroyed successfully.
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.531 2 DEBUG nova.objects.instance [None req-c4f81151-0099-46bf-870e-f293799e7951 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'resources' on Instance uuid 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:24:28 compute-0 podman[238816]: 2025-10-02 12:24:28.54639905 +0000 UTC m=+0.047241028 container remove bf8748ea9c31ac3ca5fd82deb65843577a43721e3a49a74057e179ce128821ce (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001)
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.551 2 DEBUG nova.virt.libvirt.vif [None req-c4f81151-0099-46bf-870e-f293799e7951 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=True,config_drive='True',created_at=2025-10-02T12:23:53Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-tempest.common.compute-instance-1540405064',display_name='tempest-ServerActionsTestJSON-server-1719994168',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-tempest-common-compute-instance-1540405064',id=118,image_ref='062d9f80-76b6-42ce-bee7-0fb82a008353',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:24:24Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={rebuild='server'},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='e564a4cad5d443dba81ec04d2a05ced9',ramdisk_id='',reservation_id='r-sboi37yp',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',clean_attempts='1',image_base_image_ref='062d9f80-76b6-42ce-bee7-0fb82a008353',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',i
mage_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-ServerActionsTestJSON-1646745100',owner_user_name='tempest-ServerActionsTestJSON-1646745100-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:24:24Z,user_data=None,user_id='d54b1826121b47caba89932a78c06ccd',uuid=02f550a4-c57e-4d6f-b62b-decc0dbb1dbe,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "address": "fa:16:3e:e3:d2:69", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap1cf70acd-de", "ovs_interfaceid": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.551 2 DEBUG nova.network.os_vif_util [None req-c4f81151-0099-46bf-870e-f293799e7951 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converting VIF {"id": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "address": "fa:16:3e:e3:d2:69", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap1cf70acd-de", "ovs_interfaceid": "1cf70acd-de15-44ba-8fd2-ea2846ce3ee6", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.552 2 DEBUG nova.network.os_vif_util [None req-c4f81151-0099-46bf-870e-f293799e7951 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:e3:d2:69,bridge_name='br-int',has_traffic_filtering=True,id=1cf70acd-de15-44ba-8fd2-ea2846ce3ee6,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap1cf70acd-de') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:24:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:28.551 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e416a669-768b-4dae-b28d-28fc5523a827]: (4, ('Thu Oct  2 12:24:28 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f (bf8748ea9c31ac3ca5fd82deb65843577a43721e3a49a74057e179ce128821ce)\nbf8748ea9c31ac3ca5fd82deb65843577a43721e3a49a74057e179ce128821ce\nThu Oct  2 12:24:28 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f (bf8748ea9c31ac3ca5fd82deb65843577a43721e3a49a74057e179ce128821ce)\nbf8748ea9c31ac3ca5fd82deb65843577a43721e3a49a74057e179ce128821ce\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.552 2 DEBUG os_vif [None req-c4f81151-0099-46bf-870e-f293799e7951 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:e3:d2:69,bridge_name='br-int',has_traffic_filtering=True,id=1cf70acd-de15-44ba-8fd2-ea2846ce3ee6,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap1cf70acd-de') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:24:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:28.553 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e18c6e54-ea23-4b25-8e97-d8333e819a73]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.554 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:28.554 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapa04f937a-30, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.554 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap1cf70acd-de, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.556 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.557 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.560 2 INFO os_vif [None req-c4f81151-0099-46bf-870e-f293799e7951 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:e3:d2:69,bridge_name='br-int',has_traffic_filtering=True,id=1cf70acd-de15-44ba-8fd2-ea2846ce3ee6,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap1cf70acd-de')
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.560 2 INFO nova.virt.libvirt.driver [None req-c4f81151-0099-46bf-870e-f293799e7951 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Deleting instance files /var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe_del
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.561 2 INFO nova.virt.libvirt.driver [None req-c4f81151-0099-46bf-870e-f293799e7951 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Deletion of /var/lib/nova/instances/02f550a4-c57e-4d6f-b62b-decc0dbb1dbe_del complete
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.572 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:28 compute-0 kernel: tapa04f937a-30: left promiscuous mode
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.573 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:28.575 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[864a0b7c-7bee-4eab-b625-77000e36e0cf]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:28.609 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[99fb2790-824b-44a3-92e1-9f5b0620f70e]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:28.611 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[326099ba-fd77-449c-9972-c6f8d4b7dc50]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.625 2 INFO nova.compute.manager [None req-c4f81151-0099-46bf-870e-f293799e7951 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Took 0.37 seconds to destroy the instance on the hypervisor.
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.626 2 DEBUG oslo.service.loopingcall [None req-c4f81151-0099-46bf-870e-f293799e7951 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.626 2 DEBUG nova.compute.manager [-] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:24:28 compute-0 nova_compute[192079]: 2025-10-02 12:24:28.626 2 DEBUG nova.network.neutron [-] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:24:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:28.632 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ffdd6c9c-5af5-469e-99d9-03558ff9d5d1]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 586113, 'reachable_time': 41347, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 238837, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:28.636 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:24:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:28.636 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[682d3a99-336b-4459-88cc-98761612566b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:28.637 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 in datapath a04f937a-375f-4fb0-90fe-5f514a88668f unbound from our chassis
Oct 02 12:24:28 compute-0 systemd[1]: run-netns-ovnmeta\x2da04f937a\x2d375f\x2d4fb0\x2d90fe\x2d5f514a88668f.mount: Deactivated successfully.
Oct 02 12:24:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:28.639 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network a04f937a-375f-4fb0-90fe-5f514a88668f, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:24:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:28.639 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f4ced9b3-25fa-4953-83a2-84f1929c44fe]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:28.640 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 in datapath a04f937a-375f-4fb0-90fe-5f514a88668f unbound from our chassis
Oct 02 12:24:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:28.641 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network a04f937a-375f-4fb0-90fe-5f514a88668f, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:24:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:28.641 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0414c103-5d29-4d84-8b23-36682d9469dd]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:29 compute-0 nova_compute[192079]: 2025-10-02 12:24:29.310 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759407854.3086991, 32196dd3-2739-4c43-9532-b0365f8095af => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:24:29 compute-0 nova_compute[192079]: 2025-10-02 12:24:29.310 2 INFO nova.compute.manager [-] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] VM Stopped (Lifecycle Event)
Oct 02 12:24:29 compute-0 nova_compute[192079]: 2025-10-02 12:24:29.352 2 DEBUG nova.compute.manager [None req-7beec16b-99d1-492b-a279-0956d9e627ce - - - - - -] [instance: 32196dd3-2739-4c43-9532-b0365f8095af] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:24:29 compute-0 nova_compute[192079]: 2025-10-02 12:24:29.678 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:24:29 compute-0 nova_compute[192079]: 2025-10-02 12:24:29.679 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:24:29 compute-0 nova_compute[192079]: 2025-10-02 12:24:29.811 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:29 compute-0 nova_compute[192079]: 2025-10-02 12:24:29.811 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:29 compute-0 nova_compute[192079]: 2025-10-02 12:24:29.812 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:29 compute-0 nova_compute[192079]: 2025-10-02 12:24:29.812 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:24:29 compute-0 nova_compute[192079]: 2025-10-02 12:24:29.987 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:24:29 compute-0 nova_compute[192079]: 2025-10-02 12:24:29.989 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5681MB free_disk=73.34830093383789GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:24:29 compute-0 nova_compute[192079]: 2025-10-02 12:24:29.989 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:29 compute-0 nova_compute[192079]: 2025-10-02 12:24:29.989 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.006 2 DEBUG nova.compute.manager [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Received event network-vif-unplugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.006 2 DEBUG oslo_concurrency.lockutils [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.007 2 DEBUG oslo_concurrency.lockutils [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.007 2 DEBUG oslo_concurrency.lockutils [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.007 2 DEBUG nova.compute.manager [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] No waiting events found dispatching network-vif-unplugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.007 2 DEBUG nova.compute.manager [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Received event network-vif-unplugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.007 2 DEBUG nova.compute.manager [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Received event network-vif-plugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.007 2 DEBUG oslo_concurrency.lockutils [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.007 2 DEBUG oslo_concurrency.lockutils [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.008 2 DEBUG oslo_concurrency.lockutils [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.008 2 DEBUG nova.compute.manager [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] No waiting events found dispatching network-vif-plugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.008 2 WARNING nova.compute.manager [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Received unexpected event network-vif-plugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 for instance with vm_state active and task_state deleting.
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.008 2 DEBUG nova.compute.manager [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Received event network-vif-plugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.008 2 DEBUG oslo_concurrency.lockutils [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.008 2 DEBUG oslo_concurrency.lockutils [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.008 2 DEBUG oslo_concurrency.lockutils [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.009 2 DEBUG nova.compute.manager [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] No waiting events found dispatching network-vif-plugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.009 2 WARNING nova.compute.manager [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Received unexpected event network-vif-plugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 for instance with vm_state active and task_state deleting.
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.009 2 DEBUG nova.compute.manager [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Received event network-vif-plugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.009 2 DEBUG oslo_concurrency.lockutils [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.009 2 DEBUG oslo_concurrency.lockutils [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.009 2 DEBUG oslo_concurrency.lockutils [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.009 2 DEBUG nova.compute.manager [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] No waiting events found dispatching network-vif-plugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.009 2 WARNING nova.compute.manager [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Received unexpected event network-vif-plugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 for instance with vm_state active and task_state deleting.
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.010 2 DEBUG nova.compute.manager [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Received event network-vif-unplugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.010 2 DEBUG oslo_concurrency.lockutils [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.010 2 DEBUG oslo_concurrency.lockutils [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.010 2 DEBUG oslo_concurrency.lockutils [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.010 2 DEBUG nova.compute.manager [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] No waiting events found dispatching network-vif-unplugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.010 2 DEBUG nova.compute.manager [req-0a54d272-7e79-4fb4-9623-4d6ec3d95ef7 req-b88c8173-af89-48db-b1d9-e776e07359ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Received event network-vif-unplugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.142 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.143 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 1 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.143 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=640MB phys_disk=79GB used_disk=1GB total_vcpus=8 used_vcpus=1 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.208 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.238 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.320 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.320 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.331s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.460 2 DEBUG nova.network.neutron [-] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.582 2 DEBUG nova.compute.manager [req-ef82f8ca-7046-4a9f-892b-258e5b3d29c0 req-b23b2b85-4481-4967-9616-ecf3b5938585 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Received event network-vif-deleted-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.582 2 INFO nova.compute.manager [req-ef82f8ca-7046-4a9f-892b-258e5b3d29c0 req-b23b2b85-4481-4967-9616-ecf3b5938585 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Neutron deleted interface 1cf70acd-de15-44ba-8fd2-ea2846ce3ee6; detaching it from the instance and deleting it from the info cache
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.582 2 DEBUG nova.network.neutron [req-ef82f8ca-7046-4a9f-892b-258e5b3d29c0 req-b23b2b85-4481-4967-9616-ecf3b5938585 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.585 2 INFO nova.compute.manager [-] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Took 1.96 seconds to deallocate network for instance.
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.658 2 DEBUG nova.compute.manager [req-ef82f8ca-7046-4a9f-892b-258e5b3d29c0 req-b23b2b85-4481-4967-9616-ecf3b5938585 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Detach interface failed, port_id=1cf70acd-de15-44ba-8fd2-ea2846ce3ee6, reason: Instance 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe could not be found. _process_instance_vif_deleted_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10882
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.827 2 DEBUG oslo_concurrency.lockutils [None req-c4f81151-0099-46bf-870e-f293799e7951 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.828 2 DEBUG oslo_concurrency.lockutils [None req-c4f81151-0099-46bf-870e-f293799e7951 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.875 2 DEBUG nova.compute.provider_tree [None req-c4f81151-0099-46bf-870e-f293799e7951 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:24:30 compute-0 nova_compute[192079]: 2025-10-02 12:24:30.896 2 DEBUG nova.scheduler.client.report [None req-c4f81151-0099-46bf-870e-f293799e7951 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:24:31 compute-0 nova_compute[192079]: 2025-10-02 12:24:31.094 2 DEBUG oslo_concurrency.lockutils [None req-c4f81151-0099-46bf-870e-f293799e7951 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.267s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:31 compute-0 nova_compute[192079]: 2025-10-02 12:24:31.172 2 INFO nova.scheduler.client.report [None req-c4f81151-0099-46bf-870e-f293799e7951 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Deleted allocations for instance 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe
Oct 02 12:24:31 compute-0 nova_compute[192079]: 2025-10-02 12:24:31.305 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:24:31 compute-0 nova_compute[192079]: 2025-10-02 12:24:31.305 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:24:31 compute-0 nova_compute[192079]: 2025-10-02 12:24:31.440 2 DEBUG oslo_concurrency.lockutils [None req-c4f81151-0099-46bf-870e-f293799e7951 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 3.208s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:31 compute-0 nova_compute[192079]: 2025-10-02 12:24:31.929 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:32 compute-0 podman[238839]: 2025-10-02 12:24:32.182546736 +0000 UTC m=+0.082269892 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, container_name=ceilometer_agent_compute, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm, io.buildah.version=1.41.3, 
managed_by=edpm_ansible, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 12:24:32 compute-0 nova_compute[192079]: 2025-10-02 12:24:32.217 2 DEBUG nova.compute.manager [req-af787ed9-fbba-4bec-b4cf-2f31a3a6a898 req-3dc23b1b-8813-4489-b53a-7aa05aaf143b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Received event network-vif-plugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:24:32 compute-0 nova_compute[192079]: 2025-10-02 12:24:32.218 2 DEBUG oslo_concurrency.lockutils [req-af787ed9-fbba-4bec-b4cf-2f31a3a6a898 req-3dc23b1b-8813-4489-b53a-7aa05aaf143b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:32 compute-0 nova_compute[192079]: 2025-10-02 12:24:32.218 2 DEBUG oslo_concurrency.lockutils [req-af787ed9-fbba-4bec-b4cf-2f31a3a6a898 req-3dc23b1b-8813-4489-b53a-7aa05aaf143b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:32 compute-0 nova_compute[192079]: 2025-10-02 12:24:32.218 2 DEBUG oslo_concurrency.lockutils [req-af787ed9-fbba-4bec-b4cf-2f31a3a6a898 req-3dc23b1b-8813-4489-b53a-7aa05aaf143b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "02f550a4-c57e-4d6f-b62b-decc0dbb1dbe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:32 compute-0 nova_compute[192079]: 2025-10-02 12:24:32.218 2 DEBUG nova.compute.manager [req-af787ed9-fbba-4bec-b4cf-2f31a3a6a898 req-3dc23b1b-8813-4489-b53a-7aa05aaf143b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] No waiting events found dispatching network-vif-plugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:24:32 compute-0 nova_compute[192079]: 2025-10-02 12:24:32.218 2 WARNING nova.compute.manager [req-af787ed9-fbba-4bec-b4cf-2f31a3a6a898 req-3dc23b1b-8813-4489-b53a-7aa05aaf143b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Received unexpected event network-vif-plugged-1cf70acd-de15-44ba-8fd2-ea2846ce3ee6 for instance with vm_state deleted and task_state None.
Oct 02 12:24:33 compute-0 nova_compute[192079]: 2025-10-02 12:24:33.557 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:33 compute-0 nova_compute[192079]: 2025-10-02 12:24:33.611 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:33 compute-0 nova_compute[192079]: 2025-10-02 12:24:33.663 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:24:33 compute-0 nova_compute[192079]: 2025-10-02 12:24:33.664 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:24:33 compute-0 nova_compute[192079]: 2025-10-02 12:24:33.664 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:24:33 compute-0 nova_compute[192079]: 2025-10-02 12:24:33.683 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:24:33 compute-0 nova_compute[192079]: 2025-10-02 12:24:33.684 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:24:33 compute-0 nova_compute[192079]: 2025-10-02 12:24:33.684 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:24:33 compute-0 nova_compute[192079]: 2025-10-02 12:24:33.757 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:34 compute-0 nova_compute[192079]: 2025-10-02 12:24:34.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:24:35 compute-0 nova_compute[192079]: 2025-10-02 12:24:35.659 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:24:36 compute-0 nova_compute[192079]: 2025-10-02 12:24:36.711 2 DEBUG nova.compute.manager [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Stashing vm_state: active _prep_resize /usr/lib/python3.9/site-packages/nova/compute/manager.py:5560
Oct 02 12:24:36 compute-0 nova_compute[192079]: 2025-10-02 12:24:36.811 2 DEBUG oslo_concurrency.lockutils [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:36 compute-0 nova_compute[192079]: 2025-10-02 12:24:36.811 2 DEBUG oslo_concurrency.lockutils [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:36 compute-0 nova_compute[192079]: 2025-10-02 12:24:36.830 2 DEBUG nova.objects.instance [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'pci_requests' on Instance uuid ae56113d-001e-4f10-9236-c07fe5146d9c obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:24:36 compute-0 nova_compute[192079]: 2025-10-02 12:24:36.844 2 DEBUG nova.virt.hardware [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:24:36 compute-0 nova_compute[192079]: 2025-10-02 12:24:36.845 2 INFO nova.compute.claims [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:24:36 compute-0 nova_compute[192079]: 2025-10-02 12:24:36.845 2 DEBUG nova.objects.instance [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'resources' on Instance uuid ae56113d-001e-4f10-9236-c07fe5146d9c obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:24:36 compute-0 nova_compute[192079]: 2025-10-02 12:24:36.856 2 DEBUG nova.objects.instance [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'pci_devices' on Instance uuid ae56113d-001e-4f10-9236-c07fe5146d9c obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:24:36 compute-0 nova_compute[192079]: 2025-10-02 12:24:36.899 2 INFO nova.compute.resource_tracker [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Updating resource usage from migration ab9c53f2-4424-4021-b1fd-891b3ab4902d
Oct 02 12:24:36 compute-0 nova_compute[192079]: 2025-10-02 12:24:36.900 2 DEBUG nova.compute.resource_tracker [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Starting to track incoming migration ab9c53f2-4424-4021-b1fd-891b3ab4902d with flavor 9949d9da-6314-4ede-8797-6f2f0a6a64fc _update_usage_from_migration /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1431
Oct 02 12:24:36 compute-0 nova_compute[192079]: 2025-10-02 12:24:36.931 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:37 compute-0 nova_compute[192079]: 2025-10-02 12:24:37.273 2 DEBUG nova.compute.provider_tree [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:24:37 compute-0 nova_compute[192079]: 2025-10-02 12:24:37.306 2 DEBUG nova.scheduler.client.report [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:24:37 compute-0 nova_compute[192079]: 2025-10-02 12:24:37.374 2 DEBUG oslo_concurrency.lockutils [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 0.562s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:37 compute-0 nova_compute[192079]: 2025-10-02 12:24:37.375 2 INFO nova.compute.manager [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Migrating
Oct 02 12:24:38 compute-0 nova_compute[192079]: 2025-10-02 12:24:38.560 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:40 compute-0 podman[238861]: 2025-10-02 12:24:40.179841757 +0000 UTC m=+0.071992353 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, managed_by=edpm_ansible, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_managed=true, config_id=multipathd, container_name=multipathd, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:24:40 compute-0 podman[238860]: 2025-10-02 12:24:40.187874947 +0000 UTC m=+0.093402707 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., version=9.6, url=https://catalog.redhat.com/en/search?searchType=containers, vcs-type=git, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, config_id=edpm, distribution-scope=public, io.buildah.version=1.33.7, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, io.openshift.expose-services=, com.redhat.component=ubi9-minimal-container, io.openshift.tags=minimal rhel9, name=ubi9-minimal, vendor=Red Hat, Inc., summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., container_name=openstack_network_exporter, maintainer=Red Hat, Inc., release=1755695350, build-date=2025-08-20T13:12:41, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., architecture=x86_64, managed_by=edpm_ansible, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']})
Oct 02 12:24:40 compute-0 sshd-session[238901]: Accepted publickey for nova from 192.168.122.101 port 38568 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:24:40 compute-0 systemd[1]: Created slice User Slice of UID 42436.
Oct 02 12:24:40 compute-0 systemd[1]: Starting User Runtime Directory /run/user/42436...
Oct 02 12:24:40 compute-0 systemd-logind[827]: New session 70 of user nova.
Oct 02 12:24:40 compute-0 systemd[1]: Finished User Runtime Directory /run/user/42436.
Oct 02 12:24:40 compute-0 systemd[1]: Starting User Manager for UID 42436...
Oct 02 12:24:40 compute-0 systemd[238905]: pam_unix(systemd-user:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:24:40 compute-0 systemd[238905]: Queued start job for default target Main User Target.
Oct 02 12:24:40 compute-0 systemd[238905]: Created slice User Application Slice.
Oct 02 12:24:40 compute-0 systemd[238905]: Started Mark boot as successful after the user session has run 2 minutes.
Oct 02 12:24:40 compute-0 systemd[238905]: Started Daily Cleanup of User's Temporary Directories.
Oct 02 12:24:40 compute-0 systemd[238905]: Reached target Paths.
Oct 02 12:24:40 compute-0 systemd[238905]: Reached target Timers.
Oct 02 12:24:40 compute-0 systemd[238905]: Starting D-Bus User Message Bus Socket...
Oct 02 12:24:40 compute-0 systemd[238905]: Starting Create User's Volatile Files and Directories...
Oct 02 12:24:40 compute-0 systemd[238905]: Finished Create User's Volatile Files and Directories.
Oct 02 12:24:40 compute-0 systemd[238905]: Listening on D-Bus User Message Bus Socket.
Oct 02 12:24:40 compute-0 systemd[238905]: Reached target Sockets.
Oct 02 12:24:40 compute-0 systemd[238905]: Reached target Basic System.
Oct 02 12:24:40 compute-0 systemd[238905]: Reached target Main User Target.
Oct 02 12:24:40 compute-0 systemd[238905]: Startup finished in 139ms.
Oct 02 12:24:40 compute-0 systemd[1]: Started User Manager for UID 42436.
Oct 02 12:24:40 compute-0 systemd[1]: Started Session 70 of User nova.
Oct 02 12:24:40 compute-0 sshd-session[238901]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:24:41 compute-0 sshd-session[238920]: Received disconnect from 192.168.122.101 port 38568:11: disconnected by user
Oct 02 12:24:41 compute-0 sshd-session[238920]: Disconnected from user nova 192.168.122.101 port 38568
Oct 02 12:24:41 compute-0 sshd-session[238901]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:24:41 compute-0 systemd[1]: session-70.scope: Deactivated successfully.
Oct 02 12:24:41 compute-0 systemd-logind[827]: Session 70 logged out. Waiting for processes to exit.
Oct 02 12:24:41 compute-0 systemd-logind[827]: Removed session 70.
Oct 02 12:24:41 compute-0 sshd-session[238922]: Accepted publickey for nova from 192.168.122.101 port 38584 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:24:41 compute-0 systemd-logind[827]: New session 72 of user nova.
Oct 02 12:24:41 compute-0 systemd[1]: Started Session 72 of User nova.
Oct 02 12:24:41 compute-0 sshd-session[238922]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:24:41 compute-0 sshd-session[238925]: Received disconnect from 192.168.122.101 port 38584:11: disconnected by user
Oct 02 12:24:41 compute-0 sshd-session[238925]: Disconnected from user nova 192.168.122.101 port 38584
Oct 02 12:24:41 compute-0 sshd-session[238922]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:24:41 compute-0 systemd-logind[827]: Session 72 logged out. Waiting for processes to exit.
Oct 02 12:24:41 compute-0 systemd[1]: session-72.scope: Deactivated successfully.
Oct 02 12:24:41 compute-0 systemd-logind[827]: Removed session 72.
Oct 02 12:24:41 compute-0 nova_compute[192079]: 2025-10-02 12:24:41.933 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:43 compute-0 nova_compute[192079]: 2025-10-02 12:24:43.530 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759407868.5284727, 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:24:43 compute-0 nova_compute[192079]: 2025-10-02 12:24:43.530 2 INFO nova.compute.manager [-] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] VM Stopped (Lifecycle Event)
Oct 02 12:24:43 compute-0 nova_compute[192079]: 2025-10-02 12:24:43.563 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:43 compute-0 nova_compute[192079]: 2025-10-02 12:24:43.714 2 DEBUG nova.compute.manager [None req-a6d4ce86-12f7-4228-b2c4-0953de2cd73b - - - - - -] [instance: 02f550a4-c57e-4d6f-b62b-decc0dbb1dbe] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:24:44 compute-0 sshd-session[238927]: Accepted publickey for nova from 192.168.122.101 port 38588 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:24:44 compute-0 systemd-logind[827]: New session 73 of user nova.
Oct 02 12:24:44 compute-0 systemd[1]: Started Session 73 of User nova.
Oct 02 12:24:44 compute-0 sshd-session[238927]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:24:45 compute-0 sshd-session[238930]: Received disconnect from 192.168.122.101 port 38588:11: disconnected by user
Oct 02 12:24:45 compute-0 sshd-session[238930]: Disconnected from user nova 192.168.122.101 port 38588
Oct 02 12:24:45 compute-0 sshd-session[238927]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:24:45 compute-0 systemd[1]: session-73.scope: Deactivated successfully.
Oct 02 12:24:45 compute-0 systemd-logind[827]: Session 73 logged out. Waiting for processes to exit.
Oct 02 12:24:45 compute-0 systemd-logind[827]: Removed session 73.
Oct 02 12:24:45 compute-0 sshd-session[238932]: Accepted publickey for nova from 192.168.122.101 port 38600 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:24:45 compute-0 systemd-logind[827]: New session 74 of user nova.
Oct 02 12:24:45 compute-0 nova_compute[192079]: 2025-10-02 12:24:45.152 2 DEBUG nova.compute.manager [req-642253c1-2f32-49b9-b62a-3602531c3ded req-387c8624-66fc-4f5d-87f7-ea0daeffafda 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Received event network-vif-unplugged-d1031883-2135-4183-8a9d-0609c32ad14b external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:24:45 compute-0 nova_compute[192079]: 2025-10-02 12:24:45.153 2 DEBUG oslo_concurrency.lockutils [req-642253c1-2f32-49b9-b62a-3602531c3ded req-387c8624-66fc-4f5d-87f7-ea0daeffafda 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "ae56113d-001e-4f10-9236-c07fe5146d9c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:45 compute-0 nova_compute[192079]: 2025-10-02 12:24:45.153 2 DEBUG oslo_concurrency.lockutils [req-642253c1-2f32-49b9-b62a-3602531c3ded req-387c8624-66fc-4f5d-87f7-ea0daeffafda 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ae56113d-001e-4f10-9236-c07fe5146d9c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:45 compute-0 nova_compute[192079]: 2025-10-02 12:24:45.153 2 DEBUG oslo_concurrency.lockutils [req-642253c1-2f32-49b9-b62a-3602531c3ded req-387c8624-66fc-4f5d-87f7-ea0daeffafda 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ae56113d-001e-4f10-9236-c07fe5146d9c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:45 compute-0 nova_compute[192079]: 2025-10-02 12:24:45.154 2 DEBUG nova.compute.manager [req-642253c1-2f32-49b9-b62a-3602531c3ded req-387c8624-66fc-4f5d-87f7-ea0daeffafda 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] No waiting events found dispatching network-vif-unplugged-d1031883-2135-4183-8a9d-0609c32ad14b pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:24:45 compute-0 nova_compute[192079]: 2025-10-02 12:24:45.154 2 WARNING nova.compute.manager [req-642253c1-2f32-49b9-b62a-3602531c3ded req-387c8624-66fc-4f5d-87f7-ea0daeffafda 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Received unexpected event network-vif-unplugged-d1031883-2135-4183-8a9d-0609c32ad14b for instance with vm_state active and task_state resize_migrating.
Oct 02 12:24:45 compute-0 nova_compute[192079]: 2025-10-02 12:24:45.154 2 DEBUG nova.compute.manager [req-642253c1-2f32-49b9-b62a-3602531c3ded req-387c8624-66fc-4f5d-87f7-ea0daeffafda 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Received event network-vif-plugged-d1031883-2135-4183-8a9d-0609c32ad14b external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:24:45 compute-0 nova_compute[192079]: 2025-10-02 12:24:45.154 2 DEBUG oslo_concurrency.lockutils [req-642253c1-2f32-49b9-b62a-3602531c3ded req-387c8624-66fc-4f5d-87f7-ea0daeffafda 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "ae56113d-001e-4f10-9236-c07fe5146d9c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:45 compute-0 nova_compute[192079]: 2025-10-02 12:24:45.154 2 DEBUG oslo_concurrency.lockutils [req-642253c1-2f32-49b9-b62a-3602531c3ded req-387c8624-66fc-4f5d-87f7-ea0daeffafda 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ae56113d-001e-4f10-9236-c07fe5146d9c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:45 compute-0 nova_compute[192079]: 2025-10-02 12:24:45.155 2 DEBUG oslo_concurrency.lockutils [req-642253c1-2f32-49b9-b62a-3602531c3ded req-387c8624-66fc-4f5d-87f7-ea0daeffafda 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ae56113d-001e-4f10-9236-c07fe5146d9c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:45 compute-0 nova_compute[192079]: 2025-10-02 12:24:45.155 2 DEBUG nova.compute.manager [req-642253c1-2f32-49b9-b62a-3602531c3ded req-387c8624-66fc-4f5d-87f7-ea0daeffafda 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] No waiting events found dispatching network-vif-plugged-d1031883-2135-4183-8a9d-0609c32ad14b pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:24:45 compute-0 nova_compute[192079]: 2025-10-02 12:24:45.155 2 WARNING nova.compute.manager [req-642253c1-2f32-49b9-b62a-3602531c3ded req-387c8624-66fc-4f5d-87f7-ea0daeffafda 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Received unexpected event network-vif-plugged-d1031883-2135-4183-8a9d-0609c32ad14b for instance with vm_state active and task_state resize_migrating.
Oct 02 12:24:45 compute-0 systemd[1]: Started Session 74 of User nova.
Oct 02 12:24:45 compute-0 sshd-session[238932]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:24:45 compute-0 sshd-session[238935]: Received disconnect from 192.168.122.101 port 38600:11: disconnected by user
Oct 02 12:24:45 compute-0 sshd-session[238935]: Disconnected from user nova 192.168.122.101 port 38600
Oct 02 12:24:45 compute-0 sshd-session[238932]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:24:45 compute-0 systemd[1]: session-74.scope: Deactivated successfully.
Oct 02 12:24:45 compute-0 systemd-logind[827]: Session 74 logged out. Waiting for processes to exit.
Oct 02 12:24:45 compute-0 systemd-logind[827]: Removed session 74.
Oct 02 12:24:45 compute-0 sshd-session[238937]: Accepted publickey for nova from 192.168.122.101 port 38610 ssh2: ECDSA SHA256:NfSiS0snQPd89nQFd0X4RvU37LpZ/RqmjHTIDnTC+yU
Oct 02 12:24:45 compute-0 systemd-logind[827]: New session 75 of user nova.
Oct 02 12:24:45 compute-0 systemd[1]: Started Session 75 of User nova.
Oct 02 12:24:45 compute-0 sshd-session[238937]: pam_unix(sshd:session): session opened for user nova(uid=42436) by nova(uid=0)
Oct 02 12:24:45 compute-0 sshd-session[238940]: Received disconnect from 192.168.122.101 port 38610:11: disconnected by user
Oct 02 12:24:45 compute-0 sshd-session[238940]: Disconnected from user nova 192.168.122.101 port 38610
Oct 02 12:24:45 compute-0 sshd-session[238937]: pam_unix(sshd:session): session closed for user nova
Oct 02 12:24:45 compute-0 systemd[1]: session-75.scope: Deactivated successfully.
Oct 02 12:24:45 compute-0 systemd-logind[827]: Session 75 logged out. Waiting for processes to exit.
Oct 02 12:24:45 compute-0 systemd-logind[827]: Removed session 75.
Oct 02 12:24:46 compute-0 nova_compute[192079]: 2025-10-02 12:24:46.855 2 INFO nova.network.neutron [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Updating port d1031883-2135-4183-8a9d-0609c32ad14b with attributes {'binding:host_id': 'compute-0.ctlplane.example.com', 'device_owner': 'compute:nova'}
Oct 02 12:24:46 compute-0 nova_compute[192079]: 2025-10-02 12:24:46.935 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:47 compute-0 podman[238943]: 2025-10-02 12:24:47.177923507 +0000 UTC m=+0.079944009 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, tcib_managed=true, config_id=iscsid, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=iscsid, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:24:47 compute-0 podman[238942]: 2025-10-02 12:24:47.178185774 +0000 UTC m=+0.082275353 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter)
Oct 02 12:24:47 compute-0 nova_compute[192079]: 2025-10-02 12:24:47.722 2 DEBUG oslo_concurrency.lockutils [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "refresh_cache-ae56113d-001e-4f10-9236-c07fe5146d9c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:24:47 compute-0 nova_compute[192079]: 2025-10-02 12:24:47.722 2 DEBUG oslo_concurrency.lockutils [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquired lock "refresh_cache-ae56113d-001e-4f10-9236-c07fe5146d9c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:24:47 compute-0 nova_compute[192079]: 2025-10-02 12:24:47.722 2 DEBUG nova.network.neutron [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:24:48 compute-0 nova_compute[192079]: 2025-10-02 12:24:48.014 2 DEBUG nova.compute.manager [req-e995343e-6992-4e13-ae68-af1b7d3ef6c7 req-e4706ac4-3eb0-41f3-bad7-8001f407cb19 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Received event network-changed-d1031883-2135-4183-8a9d-0609c32ad14b external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:24:48 compute-0 nova_compute[192079]: 2025-10-02 12:24:48.015 2 DEBUG nova.compute.manager [req-e995343e-6992-4e13-ae68-af1b7d3ef6c7 req-e4706ac4-3eb0-41f3-bad7-8001f407cb19 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Refreshing instance network info cache due to event network-changed-d1031883-2135-4183-8a9d-0609c32ad14b. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:24:48 compute-0 nova_compute[192079]: 2025-10-02 12:24:48.015 2 DEBUG oslo_concurrency.lockutils [req-e995343e-6992-4e13-ae68-af1b7d3ef6c7 req-e4706ac4-3eb0-41f3-bad7-8001f407cb19 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-ae56113d-001e-4f10-9236-c07fe5146d9c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:24:48 compute-0 nova_compute[192079]: 2025-10-02 12:24:48.564 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:51 compute-0 nova_compute[192079]: 2025-10-02 12:24:51.935 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:52 compute-0 nova_compute[192079]: 2025-10-02 12:24:52.031 2 DEBUG nova.network.neutron [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Updating instance_info_cache with network_info: [{"id": "d1031883-2135-4183-8a9d-0609c32ad14b", "address": "fa:16:3e:0a:b9:ae", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd1031883-21", "ovs_interfaceid": "d1031883-2135-4183-8a9d-0609c32ad14b", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:24:52 compute-0 nova_compute[192079]: 2025-10-02 12:24:52.132 2 DEBUG oslo_concurrency.lockutils [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Releasing lock "refresh_cache-ae56113d-001e-4f10-9236-c07fe5146d9c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:24:52 compute-0 nova_compute[192079]: 2025-10-02 12:24:52.138 2 DEBUG oslo_concurrency.lockutils [req-e995343e-6992-4e13-ae68-af1b7d3ef6c7 req-e4706ac4-3eb0-41f3-bad7-8001f407cb19 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-ae56113d-001e-4f10-9236-c07fe5146d9c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:24:52 compute-0 nova_compute[192079]: 2025-10-02 12:24:52.138 2 DEBUG nova.network.neutron [req-e995343e-6992-4e13-ae68-af1b7d3ef6c7 req-e4706ac4-3eb0-41f3-bad7-8001f407cb19 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Refreshing network info cache for port d1031883-2135-4183-8a9d-0609c32ad14b _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:24:52 compute-0 nova_compute[192079]: 2025-10-02 12:24:52.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:24:52 compute-0 nova_compute[192079]: 2025-10-02 12:24:52.948 2 DEBUG nova.virt.libvirt.driver [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Starting finish_migration finish_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:11698
Oct 02 12:24:52 compute-0 nova_compute[192079]: 2025-10-02 12:24:52.950 2 DEBUG nova.virt.libvirt.driver [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Instance directory exists: not creating _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4719
Oct 02 12:24:52 compute-0 nova_compute[192079]: 2025-10-02 12:24:52.950 2 INFO nova.virt.libvirt.driver [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Creating image(s)
Oct 02 12:24:52 compute-0 nova_compute[192079]: 2025-10-02 12:24:52.951 2 DEBUG nova.objects.instance [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'trusted_certs' on Instance uuid ae56113d-001e-4f10-9236-c07fe5146d9c obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.050 2 DEBUG oslo_concurrency.processutils [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.103 2 DEBUG oslo_concurrency.processutils [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.105 2 DEBUG nova.virt.disk.api [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Checking if we can resize image /var/lib/nova/instances/ae56113d-001e-4f10-9236-c07fe5146d9c/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.106 2 DEBUG oslo_concurrency.processutils [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ae56113d-001e-4f10-9236-c07fe5146d9c/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.170 2 DEBUG oslo_concurrency.processutils [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ae56113d-001e-4f10-9236-c07fe5146d9c/disk --force-share --output=json" returned: 0 in 0.064s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.171 2 DEBUG nova.virt.disk.api [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Cannot resize image /var/lib/nova/instances/ae56113d-001e-4f10-9236-c07fe5146d9c/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.247 2 DEBUG nova.virt.libvirt.driver [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Did not create local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4859
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.248 2 DEBUG nova.virt.libvirt.driver [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Ensure instance console log exists: /var/lib/nova/instances/ae56113d-001e-4f10-9236-c07fe5146d9c/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.249 2 DEBUG oslo_concurrency.lockutils [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.249 2 DEBUG oslo_concurrency.lockutils [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.250 2 DEBUG oslo_concurrency.lockutils [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.256 2 DEBUG nova.virt.libvirt.driver [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Start _get_guest_xml network_info=[{"id": "d1031883-2135-4183-8a9d-0609c32ad14b", "address": "fa:16:3e:0a:b9:ae", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-ServerActionsTestJSON-1926715354-network", "vif_mac": "fa:16:3e:0a:b9:ae"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd1031883-21", "ovs_interfaceid": "d1031883-2135-4183-8a9d-0609c32ad14b", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.264 2 WARNING nova.virt.libvirt.driver [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.271 2 DEBUG nova.virt.libvirt.host [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.272 2 DEBUG nova.virt.libvirt.host [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.276 2 DEBUG nova.virt.libvirt.host [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.277 2 DEBUG nova.virt.libvirt.host [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.278 2 DEBUG nova.virt.libvirt.driver [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.279 2 DEBUG nova.virt.hardware [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:25Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9949d9da-6314-4ede-8797-6f2f0a6a64fc',id=2,is_public=True,memory_mb=192,name='m1.micro',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.280 2 DEBUG nova.virt.hardware [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.280 2 DEBUG nova.virt.hardware [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.281 2 DEBUG nova.virt.hardware [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.281 2 DEBUG nova.virt.hardware [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.281 2 DEBUG nova.virt.hardware [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.282 2 DEBUG nova.virt.hardware [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.282 2 DEBUG nova.virt.hardware [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.283 2 DEBUG nova.virt.hardware [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.283 2 DEBUG nova.virt.hardware [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.284 2 DEBUG nova.virt.hardware [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.284 2 DEBUG nova.objects.instance [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'vcpu_model' on Instance uuid ae56113d-001e-4f10-9236-c07fe5146d9c obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.354 2 DEBUG nova.virt.libvirt.vif [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:21:20Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerActionsTestJSON-server-161503604',display_name='tempest-ServerActionsTestJSON-server-161503604',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(2),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serveractionstestjson-server-161503604',id=111,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=2,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBJJLom+UJzZg9dduKQv+725QaYDZoMXvP/xlpKnb/K05SGc4dkyLwCDweJ3QifTmxLWqK9Sz5A12yMJbzpa36v5C4bUqj8uiWk/vbR1BAjBdKM9d/Ug8M2nT8LwDBGP/9A==',key_name='tempest-keypair-1006285918',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:21:29Z,launched_on='compute-1.ctlplane.example.com',locked=False,locked_by=None,memory_mb=192,metadata={},migration_context=MigrationContext,new_flavor=Flavor(2),node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=Flavor(1),os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='e564a4cad5d443dba81ec04d2a05ced9',ramdisk_id='',reservation_id='r-ntvf7r4i',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=ServiceList,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',old_vm_state='active',owner_project_name='tempest-ServerActionsTestJSON-1646745100',owner_user_name='tempest-ServerActionsTestJSON-1646745100-project-member'},tags=<?>,task_state='resize_finish',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:24:45Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='d54b1826121b47caba89932a78c06ccd',uuid=ae56113d-001e-4f10-9236-c07fe5146d9c,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "d1031883-2135-4183-8a9d-0609c32ad14b", "address": "fa:16:3e:0a:b9:ae", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": 
"tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-ServerActionsTestJSON-1926715354-network", "vif_mac": "fa:16:3e:0a:b9:ae"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd1031883-21", "ovs_interfaceid": "d1031883-2135-4183-8a9d-0609c32ad14b", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.355 2 DEBUG nova.network.os_vif_util [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converting VIF {"id": "d1031883-2135-4183-8a9d-0609c32ad14b", "address": "fa:16:3e:0a:b9:ae", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-ServerActionsTestJSON-1926715354-network", "vif_mac": "fa:16:3e:0a:b9:ae"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd1031883-21", "ovs_interfaceid": "d1031883-2135-4183-8a9d-0609c32ad14b", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.356 2 DEBUG nova.network.os_vif_util [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:0a:b9:ae,bridge_name='br-int',has_traffic_filtering=True,id=d1031883-2135-4183-8a9d-0609c32ad14b,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd1031883-21') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.362 2 DEBUG nova.virt.libvirt.driver [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:24:53 compute-0 nova_compute[192079]:   <uuid>ae56113d-001e-4f10-9236-c07fe5146d9c</uuid>
Oct 02 12:24:53 compute-0 nova_compute[192079]:   <name>instance-0000006f</name>
Oct 02 12:24:53 compute-0 nova_compute[192079]:   <memory>196608</memory>
Oct 02 12:24:53 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:24:53 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:24:53 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:       <nova:name>tempest-ServerActionsTestJSON-server-161503604</nova:name>
Oct 02 12:24:53 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:24:53</nova:creationTime>
Oct 02 12:24:53 compute-0 nova_compute[192079]:       <nova:flavor name="m1.micro">
Oct 02 12:24:53 compute-0 nova_compute[192079]:         <nova:memory>192</nova:memory>
Oct 02 12:24:53 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:24:53 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:24:53 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:24:53 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:24:53 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:24:53 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:24:53 compute-0 nova_compute[192079]:         <nova:user uuid="d54b1826121b47caba89932a78c06ccd">tempest-ServerActionsTestJSON-1646745100-project-member</nova:user>
Oct 02 12:24:53 compute-0 nova_compute[192079]:         <nova:project uuid="e564a4cad5d443dba81ec04d2a05ced9">tempest-ServerActionsTestJSON-1646745100</nova:project>
Oct 02 12:24:53 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:24:53 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:24:53 compute-0 nova_compute[192079]:         <nova:port uuid="d1031883-2135-4183-8a9d-0609c32ad14b">
Oct 02 12:24:53 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.12" ipVersion="4"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:24:53 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:24:53 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:24:53 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <system>
Oct 02 12:24:53 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:24:53 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:24:53 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:24:53 compute-0 nova_compute[192079]:       <entry name="serial">ae56113d-001e-4f10-9236-c07fe5146d9c</entry>
Oct 02 12:24:53 compute-0 nova_compute[192079]:       <entry name="uuid">ae56113d-001e-4f10-9236-c07fe5146d9c</entry>
Oct 02 12:24:53 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     </system>
Oct 02 12:24:53 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:24:53 compute-0 nova_compute[192079]:   <os>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:   </os>
Oct 02 12:24:53 compute-0 nova_compute[192079]:   <features>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:   </features>
Oct 02 12:24:53 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:24:53 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:24:53 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:24:53 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/ae56113d-001e-4f10-9236-c07fe5146d9c/disk"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:24:53 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/ae56113d-001e-4f10-9236-c07fe5146d9c/disk.config"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:24:53 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:0a:b9:ae"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:       <target dev="tapd1031883-21"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:24:53 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/ae56113d-001e-4f10-9236-c07fe5146d9c/console.log" append="off"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <video>
Oct 02 12:24:53 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     </video>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:24:53 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:24:53 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:24:53 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:24:53 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:24:53 compute-0 nova_compute[192079]: </domain>
Oct 02 12:24:53 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.366 2 DEBUG nova.virt.libvirt.vif [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:21:20Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerActionsTestJSON-server-161503604',display_name='tempest-ServerActionsTestJSON-server-161503604',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(2),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serveractionstestjson-server-161503604',id=111,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=2,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBJJLom+UJzZg9dduKQv+725QaYDZoMXvP/xlpKnb/K05SGc4dkyLwCDweJ3QifTmxLWqK9Sz5A12yMJbzpa36v5C4bUqj8uiWk/vbR1BAjBdKM9d/Ug8M2nT8LwDBGP/9A==',key_name='tempest-keypair-1006285918',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:21:29Z,launched_on='compute-1.ctlplane.example.com',locked=False,locked_by=None,memory_mb=192,metadata={},migration_context=MigrationContext,new_flavor=Flavor(2),node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=Flavor(1),os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='e564a4cad5d443dba81ec04d2a05ced9',ramdisk_id='',reservation_id='r-ntvf7r4i',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=ServiceList,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',old_vm_state='active',owner_project_name='tempest-ServerActionsTestJSON-1646745100',owner_user_name='tempest-ServerActionsTestJSON-1646745100-project-member'},tags=<?>,task_state='resize_finish',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:24:45Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='d54b1826121b47caba89932a78c06ccd',uuid=ae56113d-001e-4f10-9236-c07fe5146d9c,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "d1031883-2135-4183-8a9d-0609c32ad14b", "address": "fa:16:3e:0a:b9:ae", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": 
"tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-ServerActionsTestJSON-1926715354-network", "vif_mac": "fa:16:3e:0a:b9:ae"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd1031883-21", "ovs_interfaceid": "d1031883-2135-4183-8a9d-0609c32ad14b", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.366 2 DEBUG nova.network.os_vif_util [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converting VIF {"id": "d1031883-2135-4183-8a9d-0609c32ad14b", "address": "fa:16:3e:0a:b9:ae", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-ServerActionsTestJSON-1926715354-network", "vif_mac": "fa:16:3e:0a:b9:ae"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd1031883-21", "ovs_interfaceid": "d1031883-2135-4183-8a9d-0609c32ad14b", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.367 2 DEBUG nova.network.os_vif_util [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:0a:b9:ae,bridge_name='br-int',has_traffic_filtering=True,id=d1031883-2135-4183-8a9d-0609c32ad14b,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd1031883-21') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.368 2 DEBUG os_vif [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:0a:b9:ae,bridge_name='br-int',has_traffic_filtering=True,id=d1031883-2135-4183-8a9d-0609c32ad14b,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd1031883-21') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.369 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.370 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.370 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.375 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.376 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapd1031883-21, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.376 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapd1031883-21, col_values=(('external_ids', {'iface-id': 'd1031883-2135-4183-8a9d-0609c32ad14b', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:0a:b9:ae', 'vm-uuid': 'ae56113d-001e-4f10-9236-c07fe5146d9c'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.379 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:53 compute-0 NetworkManager[51160]: <info>  [1759407893.3805] manager: (tapd1031883-21): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/209)
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.381 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.391 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.392 2 INFO os_vif [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:0a:b9:ae,bridge_name='br-int',has_traffic_filtering=True,id=d1031883-2135-4183-8a9d-0609c32ad14b,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd1031883-21')
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.615 2 DEBUG nova.virt.libvirt.driver [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.616 2 DEBUG nova.virt.libvirt.driver [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.616 2 DEBUG nova.virt.libvirt.driver [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] No VIF found with MAC fa:16:3e:0a:b9:ae, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.616 2 INFO nova.virt.libvirt.driver [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Using config drive
Oct 02 12:24:53 compute-0 kernel: tapd1031883-21: entered promiscuous mode
Oct 02 12:24:53 compute-0 NetworkManager[51160]: <info>  [1759407893.7072] manager: (tapd1031883-21): new Tun device (/org/freedesktop/NetworkManager/Devices/210)
Oct 02 12:24:53 compute-0 ovn_controller[94336]: 2025-10-02T12:24:53Z|00419|binding|INFO|Claiming lport d1031883-2135-4183-8a9d-0609c32ad14b for this chassis.
Oct 02 12:24:53 compute-0 ovn_controller[94336]: 2025-10-02T12:24:53Z|00420|binding|INFO|d1031883-2135-4183-8a9d-0609c32ad14b: Claiming fa:16:3e:0a:b9:ae 10.100.0.12
Oct 02 12:24:53 compute-0 systemd-udevd[239005]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.751 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.760 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.765 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:53 compute-0 NetworkManager[51160]: <info>  [1759407893.7679] manager: (patch-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d-to-br-int): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/211)
Oct 02 12:24:53 compute-0 NetworkManager[51160]: <info>  [1759407893.7686] manager: (patch-br-int-to-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/212)
Oct 02 12:24:53 compute-0 NetworkManager[51160]: <info>  [1759407893.7712] device (tapd1031883-21): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:24:53 compute-0 NetworkManager[51160]: <info>  [1759407893.7719] device (tapd1031883-21): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:24:53 compute-0 systemd-machined[152150]: New machine qemu-55-instance-0000006f.
Oct 02 12:24:53 compute-0 systemd[1]: Started Virtual Machine qemu-55-instance-0000006f.
Oct 02 12:24:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:53.854 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:0a:b9:ae 10.100.0.12'], port_security=['fa:16:3e:0a:b9:ae 10.100.0.12'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.12/28', 'neutron:device_id': 'ae56113d-001e-4f10-9236-c07fe5146d9c', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-a04f937a-375f-4fb0-90fe-5f514a88668f', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'neutron:revision_number': '14', 'neutron:security_group_ids': 'c0383701-0ec7-4f3b-8585-5effc4f5ca5a', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:port_fip': '192.168.122.248'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=50c0aa38-5fd8-41c7-b4bf-85b59722c5c3, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=d1031883-2135-4183-8a9d-0609c32ad14b) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:24:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:53.856 103294 INFO neutron.agent.ovn.metadata.agent [-] Port d1031883-2135-4183-8a9d-0609c32ad14b in datapath a04f937a-375f-4fb0-90fe-5f514a88668f bound to our chassis
Oct 02 12:24:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:53.857 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network a04f937a-375f-4fb0-90fe-5f514a88668f
Oct 02 12:24:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:53.876 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[88265978-08e2-4c91-bc95-0d3db4b810bf]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:53.878 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tapa04f937a-31 in ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:24:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:53.881 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tapa04f937a-30 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:24:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:53.881 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6f6e5d36-d494-4efd-8694-09fce332427b]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:53.882 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[74351483-ac39-478c-a771-f37b5cc62443]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:53.903 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[28a9346d-a6f5-4e91-9bd9-c1b9d7708327]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.914 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.927 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:53 compute-0 ovn_controller[94336]: 2025-10-02T12:24:53Z|00421|binding|INFO|Setting lport d1031883-2135-4183-8a9d-0609c32ad14b ovn-installed in OVS
Oct 02 12:24:53 compute-0 ovn_controller[94336]: 2025-10-02T12:24:53Z|00422|binding|INFO|Setting lport d1031883-2135-4183-8a9d-0609c32ad14b up in Southbound
Oct 02 12:24:53 compute-0 nova_compute[192079]: 2025-10-02 12:24:53.937 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:53.937 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a11ec5d0-0965-4715-b1ef-a887b4b66e0f]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:53.977 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[6f63efaa-2c05-4ce8-8c1b-c2dc4b541374]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:53.984 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c62e82a0-514c-48e1-9dba-fa3e83b698bd]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:53 compute-0 NetworkManager[51160]: <info>  [1759407893.9857] manager: (tapa04f937a-30): new Veth device (/org/freedesktop/NetworkManager/Devices/213)
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:54.024 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[34d96168-7e71-452d-91db-38462b7d4a18]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:54.028 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[dec64d21-861d-4aab-b5e7-f9aca7d49c15]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:54 compute-0 NetworkManager[51160]: <info>  [1759407894.0566] device (tapa04f937a-30): carrier: link connected
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:54.061 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[6719ebb0-6482-4e17-9b37-90d81b4388a6]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:54.080 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a5b70f1a-63a1-4835-bc55-3109de56efd6]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapa04f937a-31'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:33:93:68'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 134], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 589168, 'reachable_time': 35144, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 239047, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:54.098 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3ed59687-3373-4ed8-b5fd-0378b8fed974]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe33:9368'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 589168, 'tstamp': 589168}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 239049, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:54 compute-0 nova_compute[192079]: 2025-10-02 12:24:54.111 2 DEBUG nova.network.neutron [req-e995343e-6992-4e13-ae68-af1b7d3ef6c7 req-e4706ac4-3eb0-41f3-bad7-8001f407cb19 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Updated VIF entry in instance network info cache for port d1031883-2135-4183-8a9d-0609c32ad14b. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:24:54 compute-0 nova_compute[192079]: 2025-10-02 12:24:54.112 2 DEBUG nova.network.neutron [req-e995343e-6992-4e13-ae68-af1b7d3ef6c7 req-e4706ac4-3eb0-41f3-bad7-8001f407cb19 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Updating instance_info_cache with network_info: [{"id": "d1031883-2135-4183-8a9d-0609c32ad14b", "address": "fa:16:3e:0a:b9:ae", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd1031883-21", "ovs_interfaceid": "d1031883-2135-4183-8a9d-0609c32ad14b", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:54.120 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[24054c85-0090-4c02-88f7-955637de5e76]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapa04f937a-31'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:33:93:68'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 2, 'tx_packets': 1, 'rx_bytes': 220, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 2, 'tx_packets': 1, 'rx_bytes': 220, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 134], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 589168, 'reachable_time': 35144, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 2, 'inoctets': 192, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 2, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 192, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 2, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 239050, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:54 compute-0 nova_compute[192079]: 2025-10-02 12:24:54.130 2 DEBUG oslo_concurrency.lockutils [req-e995343e-6992-4e13-ae68-af1b7d3ef6c7 req-e4706ac4-3eb0-41f3-bad7-8001f407cb19 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-ae56113d-001e-4f10-9236-c07fe5146d9c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:54.159 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1478c69f-6bab-4b09-a01d-8f9b65ca5e8b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:54.221 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[126b5527-6b9e-4766-990b-54269aa182ff]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:54.222 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapa04f937a-30, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:54.222 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:54.222 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapa04f937a-30, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:54 compute-0 nova_compute[192079]: 2025-10-02 12:24:54.224 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:54 compute-0 NetworkManager[51160]: <info>  [1759407894.2249] manager: (tapa04f937a-30): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/214)
Oct 02 12:24:54 compute-0 kernel: tapa04f937a-30: entered promiscuous mode
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:54.227 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tapa04f937a-30, col_values=(('external_ids', {'iface-id': '38f1ac16-18c6-4b4a-b769-ebc7dd5181d4'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:24:54 compute-0 ovn_controller[94336]: 2025-10-02T12:24:54Z|00423|binding|INFO|Releasing lport 38f1ac16-18c6-4b4a-b769-ebc7dd5181d4 from this chassis (sb_readonly=0)
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:54.230 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/a04f937a-375f-4fb0-90fe-5f514a88668f.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/a04f937a-375f-4fb0-90fe-5f514a88668f.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:54.230 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3a652489-7526-4875-a8c8-f1c7e40ad750]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:54.231 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-a04f937a-375f-4fb0-90fe-5f514a88668f
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/a04f937a-375f-4fb0-90fe-5f514a88668f.pid.haproxy
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID a04f937a-375f-4fb0-90fe-5f514a88668f
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:54.232 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'env', 'PROCESS_TAG=haproxy-a04f937a-375f-4fb0-90fe-5f514a88668f', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/a04f937a-375f-4fb0-90fe-5f514a88668f.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:24:54 compute-0 nova_compute[192079]: 2025-10-02 12:24:54.245 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:54.474 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:ea:74:6f 10.100.0.2 2001:db8::f816:3eff:feea:746f'], port_security=[], type=localport, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': ''}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.2/28 2001:db8::f816:3eff:feea:746f/64', 'neutron:device_id': 'ovnmeta-26df2dcf-f57c-4dae-8522-0277df741ed3', 'neutron:device_owner': 'network:distributed', 'neutron:mtu': '', 'neutron:network_name': 'neutron-26df2dcf-f57c-4dae-8522-0277df741ed3', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'neutron:revision_number': '3', 'neutron:security_group_ids': '', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=e2784fb0-50ac-4c91-ba90-3b5c38b8adf4, chassis=[], tunnel_key=1, gateway_chassis=[], requested_chassis=[], logical_port=adc60e93-14bb-4eb4-8a79-15dda196dc01) old=Port_Binding(mac=['fa:16:3e:ea:74:6f 10.100.0.2'], external_ids={'neutron:cidrs': '10.100.0.2/28', 'neutron:device_id': 'ovnmeta-26df2dcf-f57c-4dae-8522-0277df741ed3', 'neutron:device_owner': 'network:distributed', 'neutron:mtu': '', 'neutron:network_name': 'neutron-26df2dcf-f57c-4dae-8522-0277df741ed3', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'neutron:revision_number': '2', 'neutron:security_group_ids': '', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}) matches 
/usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:24:54 compute-0 nova_compute[192079]: 2025-10-02 12:24:54.537 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407894.5366669, ae56113d-001e-4f10-9236-c07fe5146d9c => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:24:54 compute-0 nova_compute[192079]: 2025-10-02 12:24:54.538 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] VM Resumed (Lifecycle Event)
Oct 02 12:24:54 compute-0 nova_compute[192079]: 2025-10-02 12:24:54.540 2 DEBUG nova.compute.manager [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:24:54 compute-0 nova_compute[192079]: 2025-10-02 12:24:54.543 2 INFO nova.virt.libvirt.driver [-] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Instance running successfully.
Oct 02 12:24:54 compute-0 virtqemud[191807]: argument unsupported: QEMU guest agent is not configured
Oct 02 12:24:54 compute-0 nova_compute[192079]: 2025-10-02 12:24:54.546 2 DEBUG nova.virt.libvirt.guest [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Failed to set time: agent not configured sync_guest_time /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:200
Oct 02 12:24:54 compute-0 nova_compute[192079]: 2025-10-02 12:24:54.547 2 DEBUG nova.virt.libvirt.driver [None req-819407d4-1366-4493-9427-0896e8ec66b3 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] finish_migration finished successfully. finish_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:11793
Oct 02 12:24:54 compute-0 nova_compute[192079]: 2025-10-02 12:24:54.608 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:24:54 compute-0 nova_compute[192079]: 2025-10-02 12:24:54.611 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: active, current task_state: resize_finish, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:24:54 compute-0 podman[239081]: 2025-10-02 12:24:54.637409181 +0000 UTC m=+0.071074097 container create 9ca932e0bede617b7516411e95b8f9128a10a22ef4d0bfe48cbd8c9838a543b3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001)
Oct 02 12:24:54 compute-0 systemd[1]: Started libpod-conmon-9ca932e0bede617b7516411e95b8f9128a10a22ef4d0bfe48cbd8c9838a543b3.scope.
Oct 02 12:24:54 compute-0 podman[239081]: 2025-10-02 12:24:54.599309942 +0000 UTC m=+0.032974908 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:24:54 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:24:54 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/8d5835a926c4ba489b3ce0cb5381de817af417fe744751c576856d10ec9fffee/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:24:54 compute-0 podman[239081]: 2025-10-02 12:24:54.734843087 +0000 UTC m=+0.168508013 container init 9ca932e0bede617b7516411e95b8f9128a10a22ef4d0bfe48cbd8c9838a543b3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0)
Oct 02 12:24:54 compute-0 podman[239081]: 2025-10-02 12:24:54.74376141 +0000 UTC m=+0.177426286 container start 9ca932e0bede617b7516411e95b8f9128a10a22ef4d0bfe48cbd8c9838a543b3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3)
Oct 02 12:24:54 compute-0 podman[239094]: 2025-10-02 12:24:54.764436143 +0000 UTC m=+0.078507291 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, container_name=ovn_metadata_agent, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent)
Oct 02 12:24:54 compute-0 podman[239098]: 2025-10-02 12:24:54.765693387 +0000 UTC m=+0.073131673 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:24:54 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[239110]: [NOTICE]   (239148) : New worker (239163) forked
Oct 02 12:24:54 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[239110]: [NOTICE]   (239148) : Loading success.
Oct 02 12:24:54 compute-0 podman[239097]: 2025-10-02 12:24:54.830959716 +0000 UTC m=+0.140921711 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_controller, io.buildah.version=1.41.3, managed_by=edpm_ansible, config_id=ovn_controller, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_managed=true)
Oct 02 12:24:54 compute-0 nova_compute[192079]: 2025-10-02 12:24:54.847 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] During sync_power_state the instance has a pending task (resize_finish). Skip.
Oct 02 12:24:54 compute-0 nova_compute[192079]: 2025-10-02 12:24:54.847 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407894.5383847, ae56113d-001e-4f10-9236-c07fe5146d9c => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:24:54 compute-0 nova_compute[192079]: 2025-10-02 12:24:54.848 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] VM Started (Lifecycle Event)
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:54.857 103294 INFO neutron.agent.ovn.metadata.agent [-] Metadata Port adc60e93-14bb-4eb4-8a79-15dda196dc01 in datapath 26df2dcf-f57c-4dae-8522-0277df741ed3 updated
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:54.859 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 26df2dcf-f57c-4dae-8522-0277df741ed3, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:24:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:24:54.860 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3e29214b-49aa-4421-b691-3f10ad9e814e]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:24:54 compute-0 nova_compute[192079]: 2025-10-02 12:24:54.956 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:24:54 compute-0 nova_compute[192079]: 2025-10-02 12:24:54.961 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Synchronizing instance power state after lifecycle event "Started"; current vm_state: active, current task_state: resize_finish, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:24:55 compute-0 systemd[1]: Stopping User Manager for UID 42436...
Oct 02 12:24:55 compute-0 systemd[238905]: Activating special unit Exit the Session...
Oct 02 12:24:55 compute-0 systemd[238905]: Stopped target Main User Target.
Oct 02 12:24:55 compute-0 systemd[238905]: Stopped target Basic System.
Oct 02 12:24:55 compute-0 systemd[238905]: Stopped target Paths.
Oct 02 12:24:55 compute-0 systemd[238905]: Stopped target Sockets.
Oct 02 12:24:55 compute-0 systemd[238905]: Stopped target Timers.
Oct 02 12:24:55 compute-0 systemd[238905]: Stopped Mark boot as successful after the user session has run 2 minutes.
Oct 02 12:24:55 compute-0 systemd[238905]: Stopped Daily Cleanup of User's Temporary Directories.
Oct 02 12:24:55 compute-0 systemd[238905]: Closed D-Bus User Message Bus Socket.
Oct 02 12:24:55 compute-0 systemd[238905]: Stopped Create User's Volatile Files and Directories.
Oct 02 12:24:55 compute-0 systemd[238905]: Removed slice User Application Slice.
Oct 02 12:24:55 compute-0 systemd[238905]: Reached target Shutdown.
Oct 02 12:24:55 compute-0 systemd[238905]: Finished Exit the Session.
Oct 02 12:24:55 compute-0 systemd[238905]: Reached target Exit the Session.
Oct 02 12:24:55 compute-0 systemd[1]: user@42436.service: Deactivated successfully.
Oct 02 12:24:55 compute-0 systemd[1]: Stopped User Manager for UID 42436.
Oct 02 12:24:55 compute-0 systemd[1]: Stopping User Runtime Directory /run/user/42436...
Oct 02 12:24:55 compute-0 systemd[1]: run-user-42436.mount: Deactivated successfully.
Oct 02 12:24:55 compute-0 systemd[1]: user-runtime-dir@42436.service: Deactivated successfully.
Oct 02 12:24:55 compute-0 systemd[1]: Stopped User Runtime Directory /run/user/42436.
Oct 02 12:24:55 compute-0 systemd[1]: Removed slice User Slice of UID 42436.
Oct 02 12:24:56 compute-0 nova_compute[192079]: 2025-10-02 12:24:56.174 2 DEBUG nova.compute.manager [req-629f90f1-58cf-4486-b8de-badbd4846e54 req-7d60d8ae-f3e5-48ed-a45f-252d3208c3d5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Received event network-vif-plugged-d1031883-2135-4183-8a9d-0609c32ad14b external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:24:56 compute-0 nova_compute[192079]: 2025-10-02 12:24:56.174 2 DEBUG oslo_concurrency.lockutils [req-629f90f1-58cf-4486-b8de-badbd4846e54 req-7d60d8ae-f3e5-48ed-a45f-252d3208c3d5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "ae56113d-001e-4f10-9236-c07fe5146d9c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:56 compute-0 nova_compute[192079]: 2025-10-02 12:24:56.174 2 DEBUG oslo_concurrency.lockutils [req-629f90f1-58cf-4486-b8de-badbd4846e54 req-7d60d8ae-f3e5-48ed-a45f-252d3208c3d5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ae56113d-001e-4f10-9236-c07fe5146d9c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:56 compute-0 nova_compute[192079]: 2025-10-02 12:24:56.175 2 DEBUG oslo_concurrency.lockutils [req-629f90f1-58cf-4486-b8de-badbd4846e54 req-7d60d8ae-f3e5-48ed-a45f-252d3208c3d5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ae56113d-001e-4f10-9236-c07fe5146d9c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:56 compute-0 nova_compute[192079]: 2025-10-02 12:24:56.175 2 DEBUG nova.compute.manager [req-629f90f1-58cf-4486-b8de-badbd4846e54 req-7d60d8ae-f3e5-48ed-a45f-252d3208c3d5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] No waiting events found dispatching network-vif-plugged-d1031883-2135-4183-8a9d-0609c32ad14b pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:24:56 compute-0 nova_compute[192079]: 2025-10-02 12:24:56.175 2 WARNING nova.compute.manager [req-629f90f1-58cf-4486-b8de-badbd4846e54 req-7d60d8ae-f3e5-48ed-a45f-252d3208c3d5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Received unexpected event network-vif-plugged-d1031883-2135-4183-8a9d-0609c32ad14b for instance with vm_state resized and task_state None.
Oct 02 12:24:56 compute-0 nova_compute[192079]: 2025-10-02 12:24:56.429 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:56 compute-0 nova_compute[192079]: 2025-10-02 12:24:56.938 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:57 compute-0 nova_compute[192079]: 2025-10-02 12:24:57.711 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._run_pending_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:24:57 compute-0 nova_compute[192079]: 2025-10-02 12:24:57.712 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Cleaning up deleted instances _run_pending_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:11145
Oct 02 12:24:57 compute-0 nova_compute[192079]: 2025-10-02 12:24:57.735 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] There are 0 instances to clean _run_pending_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:11154
Oct 02 12:24:58 compute-0 nova_compute[192079]: 2025-10-02 12:24:58.433 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:24:58 compute-0 nova_compute[192079]: 2025-10-02 12:24:58.515 2 DEBUG nova.compute.manager [req-1971ad18-0f58-4fed-9e86-9060aaa46f99 req-4da7bdda-201a-48b0-90de-c8a8b96ec1c7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Received event network-vif-plugged-d1031883-2135-4183-8a9d-0609c32ad14b external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:24:58 compute-0 nova_compute[192079]: 2025-10-02 12:24:58.516 2 DEBUG oslo_concurrency.lockutils [req-1971ad18-0f58-4fed-9e86-9060aaa46f99 req-4da7bdda-201a-48b0-90de-c8a8b96ec1c7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "ae56113d-001e-4f10-9236-c07fe5146d9c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:24:58 compute-0 nova_compute[192079]: 2025-10-02 12:24:58.517 2 DEBUG oslo_concurrency.lockutils [req-1971ad18-0f58-4fed-9e86-9060aaa46f99 req-4da7bdda-201a-48b0-90de-c8a8b96ec1c7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ae56113d-001e-4f10-9236-c07fe5146d9c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:24:58 compute-0 nova_compute[192079]: 2025-10-02 12:24:58.517 2 DEBUG oslo_concurrency.lockutils [req-1971ad18-0f58-4fed-9e86-9060aaa46f99 req-4da7bdda-201a-48b0-90de-c8a8b96ec1c7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ae56113d-001e-4f10-9236-c07fe5146d9c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:24:58 compute-0 nova_compute[192079]: 2025-10-02 12:24:58.517 2 DEBUG nova.compute.manager [req-1971ad18-0f58-4fed-9e86-9060aaa46f99 req-4da7bdda-201a-48b0-90de-c8a8b96ec1c7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] No waiting events found dispatching network-vif-plugged-d1031883-2135-4183-8a9d-0609c32ad14b pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:24:58 compute-0 nova_compute[192079]: 2025-10-02 12:24:58.518 2 WARNING nova.compute.manager [req-1971ad18-0f58-4fed-9e86-9060aaa46f99 req-4da7bdda-201a-48b0-90de-c8a8b96ec1c7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Received unexpected event network-vif-plugged-d1031883-2135-4183-8a9d-0609c32ad14b for instance with vm_state resized and task_state None.
Oct 02 12:25:00 compute-0 nova_compute[192079]: 2025-10-02 12:25:00.498 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_power_states run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:25:00 compute-0 nova_compute[192079]: 2025-10-02 12:25:00.635 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Triggering sync for uuid ae56113d-001e-4f10-9236-c07fe5146d9c _sync_power_states /usr/lib/python3.9/site-packages/nova/compute/manager.py:10268
Oct 02 12:25:00 compute-0 nova_compute[192079]: 2025-10-02 12:25:00.636 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "ae56113d-001e-4f10-9236-c07fe5146d9c" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:25:00 compute-0 nova_compute[192079]: 2025-10-02 12:25:00.636 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "ae56113d-001e-4f10-9236-c07fe5146d9c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:25:00 compute-0 nova_compute[192079]: 2025-10-02 12:25:00.737 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "ae56113d-001e-4f10-9236-c07fe5146d9c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.101s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:25:01 compute-0 nova_compute[192079]: 2025-10-02 12:25:01.939 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:02.224 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:25:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:02.225 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:25:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:02.226 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:25:03 compute-0 podman[239181]: 2025-10-02 12:25:03.161604891 +0000 UTC m=+0.080512025 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, config_id=edpm, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, 
org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:25:03 compute-0 nova_compute[192079]: 2025-10-02 12:25:03.437 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.194 2 DEBUG oslo_concurrency.lockutils [None req-6ee4cdb1-4080-4b7d-8453-5f05d30a21b8 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "ae56113d-001e-4f10-9236-c07fe5146d9c" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.195 2 DEBUG oslo_concurrency.lockutils [None req-6ee4cdb1-4080-4b7d-8453-5f05d30a21b8 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "ae56113d-001e-4f10-9236-c07fe5146d9c" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.196 2 DEBUG oslo_concurrency.lockutils [None req-6ee4cdb1-4080-4b7d-8453-5f05d30a21b8 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "ae56113d-001e-4f10-9236-c07fe5146d9c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.196 2 DEBUG oslo_concurrency.lockutils [None req-6ee4cdb1-4080-4b7d-8453-5f05d30a21b8 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "ae56113d-001e-4f10-9236-c07fe5146d9c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.196 2 DEBUG oslo_concurrency.lockutils [None req-6ee4cdb1-4080-4b7d-8453-5f05d30a21b8 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "ae56113d-001e-4f10-9236-c07fe5146d9c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.329 2 INFO nova.compute.manager [None req-6ee4cdb1-4080-4b7d-8453-5f05d30a21b8 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Terminating instance
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.461 2 DEBUG nova.compute.manager [None req-6ee4cdb1-4080-4b7d-8453-5f05d30a21b8 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:25:06 compute-0 kernel: tapd1031883-21 (unregistering): left promiscuous mode
Oct 02 12:25:06 compute-0 NetworkManager[51160]: <info>  [1759407906.4825] device (tapd1031883-21): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.490 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:06 compute-0 ovn_controller[94336]: 2025-10-02T12:25:06Z|00424|binding|INFO|Releasing lport d1031883-2135-4183-8a9d-0609c32ad14b from this chassis (sb_readonly=0)
Oct 02 12:25:06 compute-0 ovn_controller[94336]: 2025-10-02T12:25:06Z|00425|binding|INFO|Setting lport d1031883-2135-4183-8a9d-0609c32ad14b down in Southbound
Oct 02 12:25:06 compute-0 ovn_controller[94336]: 2025-10-02T12:25:06Z|00426|binding|INFO|Removing iface tapd1031883-21 ovn-installed in OVS
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.493 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.508 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:06.529 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:0a:b9:ae 10.100.0.12'], port_security=['fa:16:3e:0a:b9:ae 10.100.0.12'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.12/28', 'neutron:device_id': 'ae56113d-001e-4f10-9236-c07fe5146d9c', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-a04f937a-375f-4fb0-90fe-5f514a88668f', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'neutron:revision_number': '16', 'neutron:security_group_ids': 'c0383701-0ec7-4f3b-8585-5effc4f5ca5a', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:port_fip': '192.168.122.248', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=50c0aa38-5fd8-41c7-b4bf-85b59722c5c3, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=d1031883-2135-4183-8a9d-0609c32ad14b) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:25:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:06.530 103294 INFO neutron.agent.ovn.metadata.agent [-] Port d1031883-2135-4183-8a9d-0609c32ad14b in datapath a04f937a-375f-4fb0-90fe-5f514a88668f unbound from our chassis
Oct 02 12:25:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:06.531 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network a04f937a-375f-4fb0-90fe-5f514a88668f, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:25:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:06.532 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f07531d2-adf9-4eb9-bbb8-a061e2c505bb]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:06.533 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f namespace which is not needed anymore
Oct 02 12:25:06 compute-0 systemd[1]: machine-qemu\x2d55\x2dinstance\x2d0000006f.scope: Deactivated successfully.
Oct 02 12:25:06 compute-0 systemd[1]: machine-qemu\x2d55\x2dinstance\x2d0000006f.scope: Consumed 12.316s CPU time.
Oct 02 12:25:06 compute-0 systemd-machined[152150]: Machine qemu-55-instance-0000006f terminated.
Oct 02 12:25:06 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[239110]: [NOTICE]   (239148) : haproxy version is 2.8.14-c23fe91
Oct 02 12:25:06 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[239110]: [NOTICE]   (239148) : path to executable is /usr/sbin/haproxy
Oct 02 12:25:06 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[239110]: [WARNING]  (239148) : Exiting Master process...
Oct 02 12:25:06 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[239110]: [WARNING]  (239148) : Exiting Master process...
Oct 02 12:25:06 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[239110]: [ALERT]    (239148) : Current worker (239163) exited with code 143 (Terminated)
Oct 02 12:25:06 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[239110]: [WARNING]  (239148) : All workers exited. Exiting... (0)
Oct 02 12:25:06 compute-0 systemd[1]: libpod-9ca932e0bede617b7516411e95b8f9128a10a22ef4d0bfe48cbd8c9838a543b3.scope: Deactivated successfully.
Oct 02 12:25:06 compute-0 podman[239235]: 2025-10-02 12:25:06.667059276 +0000 UTC m=+0.044801192 container died 9ca932e0bede617b7516411e95b8f9128a10a22ef4d0bfe48cbd8c9838a543b3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.build-date=20251001, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0)
Oct 02 12:25:06 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-9ca932e0bede617b7516411e95b8f9128a10a22ef4d0bfe48cbd8c9838a543b3-userdata-shm.mount: Deactivated successfully.
Oct 02 12:25:06 compute-0 systemd[1]: var-lib-containers-storage-overlay-8d5835a926c4ba489b3ce0cb5381de817af417fe744751c576856d10ec9fffee-merged.mount: Deactivated successfully.
Oct 02 12:25:06 compute-0 podman[239235]: 2025-10-02 12:25:06.70979907 +0000 UTC m=+0.087540976 container cleanup 9ca932e0bede617b7516411e95b8f9128a10a22ef4d0bfe48cbd8c9838a543b3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001)
Oct 02 12:25:06 compute-0 systemd[1]: libpod-conmon-9ca932e0bede617b7516411e95b8f9128a10a22ef4d0bfe48cbd8c9838a543b3.scope: Deactivated successfully.
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.721 2 INFO nova.virt.libvirt.driver [-] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Instance destroyed successfully.
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.722 2 DEBUG nova.objects.instance [None req-6ee4cdb1-4080-4b7d-8453-5f05d30a21b8 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'resources' on Instance uuid ae56113d-001e-4f10-9236-c07fe5146d9c obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.755 2 DEBUG nova.virt.libvirt.vif [None req-6ee4cdb1-4080-4b7d-8453-5f05d30a21b8 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:21:20Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerActionsTestJSON-server-161503604',display_name='tempest-ServerActionsTestJSON-server-161503604',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(2),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serveractionstestjson-server-161503604',id=111,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=2,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBJJLom+UJzZg9dduKQv+725QaYDZoMXvP/xlpKnb/K05SGc4dkyLwCDweJ3QifTmxLWqK9Sz5A12yMJbzpa36v5C4bUqj8uiWk/vbR1BAjBdKM9d/Ug8M2nT8LwDBGP/9A==',key_name='tempest-keypair-1006285918',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:24:54Z,launched_on='compute-1.ctlplane.example.com',locked=False,locked_by=None,memory_mb=192,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='e564a4cad5d443dba81ec04d2a05ced9',ramdisk_id='',reservation_id='r-ntvf7r4i',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-ServerActionsTestJSON-1646745100',owner_user_name='tempest-ServerActionsTestJSON-1646745100-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:25:03Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='d54b1826121b47caba89932a78c06ccd',uuid=ae56113d-001e-4f10-9236-c07fe5146d9c,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "d1031883-2135-4183-8a9d-0609c32ad14b", "address": "fa:16:3e:0a:b9:ae", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], 
"gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd1031883-21", "ovs_interfaceid": "d1031883-2135-4183-8a9d-0609c32ad14b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.756 2 DEBUG nova.network.os_vif_util [None req-6ee4cdb1-4080-4b7d-8453-5f05d30a21b8 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converting VIF {"id": "d1031883-2135-4183-8a9d-0609c32ad14b", "address": "fa:16:3e:0a:b9:ae", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd1031883-21", "ovs_interfaceid": "d1031883-2135-4183-8a9d-0609c32ad14b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.756 2 DEBUG nova.network.os_vif_util [None req-6ee4cdb1-4080-4b7d-8453-5f05d30a21b8 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:0a:b9:ae,bridge_name='br-int',has_traffic_filtering=True,id=d1031883-2135-4183-8a9d-0609c32ad14b,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd1031883-21') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.757 2 DEBUG os_vif [None req-6ee4cdb1-4080-4b7d-8453-5f05d30a21b8 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:0a:b9:ae,bridge_name='br-int',has_traffic_filtering=True,id=d1031883-2135-4183-8a9d-0609c32ad14b,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd1031883-21') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.759 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.760 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapd1031883-21, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.761 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.763 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.765 2 INFO os_vif [None req-6ee4cdb1-4080-4b7d-8453-5f05d30a21b8 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:0a:b9:ae,bridge_name='br-int',has_traffic_filtering=True,id=d1031883-2135-4183-8a9d-0609c32ad14b,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd1031883-21')
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.766 2 INFO nova.virt.libvirt.driver [None req-6ee4cdb1-4080-4b7d-8453-5f05d30a21b8 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Deleting instance files /var/lib/nova/instances/ae56113d-001e-4f10-9236-c07fe5146d9c_del
Oct 02 12:25:06 compute-0 podman[239280]: 2025-10-02 12:25:06.767231346 +0000 UTC m=+0.036737962 container remove 9ca932e0bede617b7516411e95b8f9128a10a22ef4d0bfe48cbd8c9838a543b3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_managed=true, io.buildah.version=1.41.3)
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.771 2 INFO nova.virt.libvirt.driver [None req-6ee4cdb1-4080-4b7d-8453-5f05d30a21b8 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Deletion of /var/lib/nova/instances/ae56113d-001e-4f10-9236-c07fe5146d9c_del complete
Oct 02 12:25:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:06.772 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a45a2720-f6cd-4f4e-8c82-df449a6fee43]: (4, ('Thu Oct  2 12:25:06 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f (9ca932e0bede617b7516411e95b8f9128a10a22ef4d0bfe48cbd8c9838a543b3)\n9ca932e0bede617b7516411e95b8f9128a10a22ef4d0bfe48cbd8c9838a543b3\nThu Oct  2 12:25:06 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f (9ca932e0bede617b7516411e95b8f9128a10a22ef4d0bfe48cbd8c9838a543b3)\n9ca932e0bede617b7516411e95b8f9128a10a22ef4d0bfe48cbd8c9838a543b3\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:06.773 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[63293816-d3ee-4962-95eb-ee36c688fef4]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:06.774 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapa04f937a-30, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.775 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:06 compute-0 kernel: tapa04f937a-30: left promiscuous mode
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.786 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.787 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:06.789 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[09dca046-ad80-4d61-922b-7661e6c3de24]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:06.829 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[dfea2f6d-e91f-46b6-9db4-8c15e2af5579]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:06.830 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ebcd9ea7-241a-46a1-bc8a-8835cb2b6145]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:06.848 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[438994a3-14c7-4b00-9ca6-7d5091ac193b]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 589159, 'reachable_time': 27769, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 239295, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:06 compute-0 systemd[1]: run-netns-ovnmeta\x2da04f937a\x2d375f\x2d4fb0\x2d90fe\x2d5f514a88668f.mount: Deactivated successfully.
Oct 02 12:25:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:06.851 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:25:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:06.851 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[b1eb6f5e-7010-4bdc-9948-782eb7c1daf9]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.941 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.964 2 INFO nova.compute.manager [None req-6ee4cdb1-4080-4b7d-8453-5f05d30a21b8 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Took 0.50 seconds to destroy the instance on the hypervisor.
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.965 2 DEBUG oslo.service.loopingcall [None req-6ee4cdb1-4080-4b7d-8453-5f05d30a21b8 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.965 2 DEBUG nova.compute.manager [-] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:25:06 compute-0 nova_compute[192079]: 2025-10-02 12:25:06.966 2 DEBUG nova.network.neutron [-] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:25:07 compute-0 nova_compute[192079]: 2025-10-02 12:25:07.046 2 DEBUG nova.compute.manager [req-e2ffb8b0-284f-488d-8a85-6526503c953a req-15a1e196-55fe-4799-b7c0-f8db1332be7f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Received event network-vif-unplugged-d1031883-2135-4183-8a9d-0609c32ad14b external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:25:07 compute-0 nova_compute[192079]: 2025-10-02 12:25:07.047 2 DEBUG oslo_concurrency.lockutils [req-e2ffb8b0-284f-488d-8a85-6526503c953a req-15a1e196-55fe-4799-b7c0-f8db1332be7f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "ae56113d-001e-4f10-9236-c07fe5146d9c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:25:07 compute-0 nova_compute[192079]: 2025-10-02 12:25:07.047 2 DEBUG oslo_concurrency.lockutils [req-e2ffb8b0-284f-488d-8a85-6526503c953a req-15a1e196-55fe-4799-b7c0-f8db1332be7f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ae56113d-001e-4f10-9236-c07fe5146d9c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:25:07 compute-0 nova_compute[192079]: 2025-10-02 12:25:07.047 2 DEBUG oslo_concurrency.lockutils [req-e2ffb8b0-284f-488d-8a85-6526503c953a req-15a1e196-55fe-4799-b7c0-f8db1332be7f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ae56113d-001e-4f10-9236-c07fe5146d9c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:25:07 compute-0 nova_compute[192079]: 2025-10-02 12:25:07.048 2 DEBUG nova.compute.manager [req-e2ffb8b0-284f-488d-8a85-6526503c953a req-15a1e196-55fe-4799-b7c0-f8db1332be7f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] No waiting events found dispatching network-vif-unplugged-d1031883-2135-4183-8a9d-0609c32ad14b pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:25:07 compute-0 nova_compute[192079]: 2025-10-02 12:25:07.048 2 DEBUG nova.compute.manager [req-e2ffb8b0-284f-488d-8a85-6526503c953a req-15a1e196-55fe-4799-b7c0-f8db1332be7f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Received event network-vif-unplugged-d1031883-2135-4183-8a9d-0609c32ad14b for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:25:07 compute-0 nova_compute[192079]: 2025-10-02 12:25:07.895 2 DEBUG nova.network.neutron [-] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:25:07 compute-0 nova_compute[192079]: 2025-10-02 12:25:07.923 2 INFO nova.compute.manager [-] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Took 0.96 seconds to deallocate network for instance.
Oct 02 12:25:08 compute-0 nova_compute[192079]: 2025-10-02 12:25:08.016 2 DEBUG oslo_concurrency.lockutils [None req-6ee4cdb1-4080-4b7d-8453-5f05d30a21b8 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:25:08 compute-0 nova_compute[192079]: 2025-10-02 12:25:08.017 2 DEBUG oslo_concurrency.lockutils [None req-6ee4cdb1-4080-4b7d-8453-5f05d30a21b8 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:25:08 compute-0 nova_compute[192079]: 2025-10-02 12:25:08.023 2 DEBUG oslo_concurrency.lockutils [None req-6ee4cdb1-4080-4b7d-8453-5f05d30a21b8 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.005s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:25:08 compute-0 nova_compute[192079]: 2025-10-02 12:25:08.068 2 INFO nova.scheduler.client.report [None req-6ee4cdb1-4080-4b7d-8453-5f05d30a21b8 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Deleted allocations for instance ae56113d-001e-4f10-9236-c07fe5146d9c
Oct 02 12:25:08 compute-0 nova_compute[192079]: 2025-10-02 12:25:08.178 2 DEBUG oslo_concurrency.lockutils [None req-6ee4cdb1-4080-4b7d-8453-5f05d30a21b8 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "ae56113d-001e-4f10-9236-c07fe5146d9c" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.983s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:25:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:08.183 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=29, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=28) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:25:08 compute-0 nova_compute[192079]: 2025-10-02 12:25:08.183 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:08.184 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 10 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:25:09 compute-0 nova_compute[192079]: 2025-10-02 12:25:09.227 2 DEBUG nova.compute.manager [req-6b04fe4d-7c40-47ee-adad-1d32ad3db317 req-7bc39432-c918-46fb-9e6d-7e555af382e6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Received event network-vif-plugged-d1031883-2135-4183-8a9d-0609c32ad14b external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:25:09 compute-0 nova_compute[192079]: 2025-10-02 12:25:09.228 2 DEBUG oslo_concurrency.lockutils [req-6b04fe4d-7c40-47ee-adad-1d32ad3db317 req-7bc39432-c918-46fb-9e6d-7e555af382e6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "ae56113d-001e-4f10-9236-c07fe5146d9c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:25:09 compute-0 nova_compute[192079]: 2025-10-02 12:25:09.228 2 DEBUG oslo_concurrency.lockutils [req-6b04fe4d-7c40-47ee-adad-1d32ad3db317 req-7bc39432-c918-46fb-9e6d-7e555af382e6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ae56113d-001e-4f10-9236-c07fe5146d9c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:25:09 compute-0 nova_compute[192079]: 2025-10-02 12:25:09.228 2 DEBUG oslo_concurrency.lockutils [req-6b04fe4d-7c40-47ee-adad-1d32ad3db317 req-7bc39432-c918-46fb-9e6d-7e555af382e6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ae56113d-001e-4f10-9236-c07fe5146d9c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:25:09 compute-0 nova_compute[192079]: 2025-10-02 12:25:09.229 2 DEBUG nova.compute.manager [req-6b04fe4d-7c40-47ee-adad-1d32ad3db317 req-7bc39432-c918-46fb-9e6d-7e555af382e6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] No waiting events found dispatching network-vif-plugged-d1031883-2135-4183-8a9d-0609c32ad14b pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:25:09 compute-0 nova_compute[192079]: 2025-10-02 12:25:09.229 2 WARNING nova.compute.manager [req-6b04fe4d-7c40-47ee-adad-1d32ad3db317 req-7bc39432-c918-46fb-9e6d-7e555af382e6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Received unexpected event network-vif-plugged-d1031883-2135-4183-8a9d-0609c32ad14b for instance with vm_state deleted and task_state None.
Oct 02 12:25:09 compute-0 nova_compute[192079]: 2025-10-02 12:25:09.229 2 DEBUG nova.compute.manager [req-6b04fe4d-7c40-47ee-adad-1d32ad3db317 req-7bc39432-c918-46fb-9e6d-7e555af382e6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Received event network-vif-deleted-d1031883-2135-4183-8a9d-0609c32ad14b external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:25:09 compute-0 nova_compute[192079]: 2025-10-02 12:25:09.962 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:11 compute-0 podman[239297]: 2025-10-02 12:25:11.175506796 +0000 UTC m=+0.080616829 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=multipathd, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.license=GPLv2, config_id=multipathd, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS)
Oct 02 12:25:11 compute-0 podman[239296]: 2025-10-02 12:25:11.185496107 +0000 UTC m=+0.088175293 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., vendor=Red Hat, Inc., build-date=2025-08-20T13:12:41, managed_by=edpm_ansible, name=ubi9-minimal, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, url=https://catalog.redhat.com/en/search?searchType=containers, vcs-type=git, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., distribution-scope=public, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, description=The Universal Base Image Minimal is a 
stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., maintainer=Red Hat, Inc., container_name=openstack_network_exporter, io.openshift.tags=minimal rhel9, release=1755695350, com.redhat.component=ubi9-minimal-container, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.buildah.version=1.33.7, version=9.6, io.openshift.expose-services=, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, architecture=x86_64, config_id=edpm)
Oct 02 12:25:11 compute-0 nova_compute[192079]: 2025-10-02 12:25:11.798 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:11 compute-0 nova_compute[192079]: 2025-10-02 12:25:11.943 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:13 compute-0 nova_compute[192079]: 2025-10-02 12:25:13.049 2 DEBUG oslo_concurrency.lockutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:25:13 compute-0 nova_compute[192079]: 2025-10-02 12:25:13.050 2 DEBUG oslo_concurrency.lockutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:25:13 compute-0 nova_compute[192079]: 2025-10-02 12:25:13.098 2 DEBUG nova.compute.manager [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:25:13 compute-0 nova_compute[192079]: 2025-10-02 12:25:13.291 2 DEBUG oslo_concurrency.lockutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:25:13 compute-0 nova_compute[192079]: 2025-10-02 12:25:13.292 2 DEBUG oslo_concurrency.lockutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:25:13 compute-0 nova_compute[192079]: 2025-10-02 12:25:13.299 2 DEBUG nova.virt.hardware [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:25:13 compute-0 nova_compute[192079]: 2025-10-02 12:25:13.299 2 INFO nova.compute.claims [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:25:13 compute-0 nova_compute[192079]: 2025-10-02 12:25:13.580 2 DEBUG nova.compute.provider_tree [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:25:13 compute-0 nova_compute[192079]: 2025-10-02 12:25:13.599 2 DEBUG nova.scheduler.client.report [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:25:13 compute-0 nova_compute[192079]: 2025-10-02 12:25:13.638 2 DEBUG oslo_concurrency.lockutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.346s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:25:13 compute-0 nova_compute[192079]: 2025-10-02 12:25:13.638 2 DEBUG nova.compute.manager [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:25:13 compute-0 nova_compute[192079]: 2025-10-02 12:25:13.707 2 DEBUG nova.compute.manager [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:25:13 compute-0 nova_compute[192079]: 2025-10-02 12:25:13.708 2 DEBUG nova.network.neutron [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:25:13 compute-0 nova_compute[192079]: 2025-10-02 12:25:13.733 2 INFO nova.virt.libvirt.driver [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:25:13 compute-0 nova_compute[192079]: 2025-10-02 12:25:13.759 2 DEBUG nova.compute.manager [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:25:13 compute-0 nova_compute[192079]: 2025-10-02 12:25:13.960 2 DEBUG nova.compute.manager [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:25:13 compute-0 nova_compute[192079]: 2025-10-02 12:25:13.962 2 DEBUG nova.virt.libvirt.driver [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:25:13 compute-0 nova_compute[192079]: 2025-10-02 12:25:13.963 2 INFO nova.virt.libvirt.driver [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Creating image(s)
Oct 02 12:25:13 compute-0 nova_compute[192079]: 2025-10-02 12:25:13.964 2 DEBUG oslo_concurrency.lockutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "/var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:25:13 compute-0 nova_compute[192079]: 2025-10-02 12:25:13.964 2 DEBUG oslo_concurrency.lockutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "/var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:25:13 compute-0 nova_compute[192079]: 2025-10-02 12:25:13.966 2 DEBUG oslo_concurrency.lockutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "/var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:25:13 compute-0 nova_compute[192079]: 2025-10-02 12:25:13.986 2 DEBUG nova.policy [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:25:13 compute-0 nova_compute[192079]: 2025-10-02 12:25:13.989 2 DEBUG oslo_concurrency.processutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:25:14 compute-0 nova_compute[192079]: 2025-10-02 12:25:14.063 2 DEBUG oslo_concurrency.processutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.074s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:25:14 compute-0 nova_compute[192079]: 2025-10-02 12:25:14.064 2 DEBUG oslo_concurrency.lockutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:25:14 compute-0 nova_compute[192079]: 2025-10-02 12:25:14.064 2 DEBUG oslo_concurrency.lockutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:25:14 compute-0 nova_compute[192079]: 2025-10-02 12:25:14.076 2 DEBUG oslo_concurrency.processutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:25:14 compute-0 nova_compute[192079]: 2025-10-02 12:25:14.150 2 DEBUG oslo_concurrency.processutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.074s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:25:14 compute-0 nova_compute[192079]: 2025-10-02 12:25:14.151 2 DEBUG oslo_concurrency.processutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:25:14 compute-0 nova_compute[192079]: 2025-10-02 12:25:14.193 2 DEBUG oslo_concurrency.processutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk 1073741824" returned: 0 in 0.042s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:25:14 compute-0 nova_compute[192079]: 2025-10-02 12:25:14.194 2 DEBUG oslo_concurrency.lockutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.130s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:25:14 compute-0 nova_compute[192079]: 2025-10-02 12:25:14.194 2 DEBUG oslo_concurrency.processutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:25:14 compute-0 nova_compute[192079]: 2025-10-02 12:25:14.247 2 DEBUG oslo_concurrency.processutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.053s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:25:14 compute-0 nova_compute[192079]: 2025-10-02 12:25:14.248 2 DEBUG nova.virt.disk.api [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Checking if we can resize image /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:25:14 compute-0 nova_compute[192079]: 2025-10-02 12:25:14.249 2 DEBUG oslo_concurrency.processutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:25:14 compute-0 nova_compute[192079]: 2025-10-02 12:25:14.304 2 DEBUG oslo_concurrency.processutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:25:14 compute-0 nova_compute[192079]: 2025-10-02 12:25:14.304 2 DEBUG nova.virt.disk.api [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Cannot resize image /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:25:14 compute-0 nova_compute[192079]: 2025-10-02 12:25:14.305 2 DEBUG nova.objects.instance [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'migration_context' on Instance uuid 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:25:14 compute-0 nova_compute[192079]: 2025-10-02 12:25:14.318 2 DEBUG nova.virt.libvirt.driver [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:25:14 compute-0 nova_compute[192079]: 2025-10-02 12:25:14.318 2 DEBUG nova.virt.libvirt.driver [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Ensure instance console log exists: /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:25:14 compute-0 nova_compute[192079]: 2025-10-02 12:25:14.319 2 DEBUG oslo_concurrency.lockutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:25:14 compute-0 nova_compute[192079]: 2025-10-02 12:25:14.319 2 DEBUG oslo_concurrency.lockutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:25:14 compute-0 nova_compute[192079]: 2025-10-02 12:25:14.319 2 DEBUG oslo_concurrency.lockutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:25:14 compute-0 nova_compute[192079]: 2025-10-02 12:25:14.959 2 DEBUG nova.network.neutron [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Successfully created port: ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:25:15 compute-0 nova_compute[192079]: 2025-10-02 12:25:15.675 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:16 compute-0 nova_compute[192079]: 2025-10-02 12:25:16.416 2 DEBUG nova.network.neutron [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Successfully updated port: ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:25:16 compute-0 nova_compute[192079]: 2025-10-02 12:25:16.435 2 DEBUG oslo_concurrency.lockutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "refresh_cache-40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:25:16 compute-0 nova_compute[192079]: 2025-10-02 12:25:16.435 2 DEBUG oslo_concurrency.lockutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquired lock "refresh_cache-40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:25:16 compute-0 nova_compute[192079]: 2025-10-02 12:25:16.435 2 DEBUG nova.network.neutron [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:25:16 compute-0 nova_compute[192079]: 2025-10-02 12:25:16.635 2 DEBUG nova.network.neutron [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:25:16 compute-0 nova_compute[192079]: 2025-10-02 12:25:16.802 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:16 compute-0 nova_compute[192079]: 2025-10-02 12:25:16.945 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.032 2 DEBUG nova.compute.manager [req-a84f4445-d002-45a3-b9d7-e0b50605948d req-40ea9657-95a6-4845-b2ec-6f8ba147d3cd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Received event network-changed-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.033 2 DEBUG nova.compute.manager [req-a84f4445-d002-45a3-b9d7-e0b50605948d req-40ea9657-95a6-4845-b2ec-6f8ba147d3cd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Refreshing instance network info cache due to event network-changed-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.033 2 DEBUG oslo_concurrency.lockutils [req-a84f4445-d002-45a3-b9d7-e0b50605948d req-40ea9657-95a6-4845-b2ec-6f8ba147d3cd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:25:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:25:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:25:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:25:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:25:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:25:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:25:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:25:17.104 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:25:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:25:17.105 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:25:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:25:17.105 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:25:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:25:17.105 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:25:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:25:17.105 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:25:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:25:17.105 12 DEBUG ceilometer.polling.manager [-] Skip pollster memory.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:25:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:25:17.105 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:25:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:25:17.105 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:25:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:25:17.105 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.iops, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:25:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:25:17.105 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:25:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:25:17.105 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:25:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:25:17.105 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:25:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:25:17.105 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.allocation, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:25:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:25:17.105 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:25:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:25:17.106 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.capacity, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:25:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:25:17.106 12 DEBUG ceilometer.polling.manager [-] Skip pollster cpu, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:25:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:25:17.106 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:25:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:25:17.106 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:25:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:25:17.106 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:25:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:25:17.106 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:25:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:25:17.106 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:25:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:25:17.106 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.660 2 DEBUG nova.network.neutron [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Updating instance_info_cache with network_info: [{"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.681 2 DEBUG oslo_concurrency.lockutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Releasing lock "refresh_cache-40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.681 2 DEBUG nova.compute.manager [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Instance network_info: |[{"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.681 2 DEBUG oslo_concurrency.lockutils [req-a84f4445-d002-45a3-b9d7-e0b50605948d req-40ea9657-95a6-4845-b2ec-6f8ba147d3cd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.682 2 DEBUG nova.network.neutron [req-a84f4445-d002-45a3-b9d7-e0b50605948d req-40ea9657-95a6-4845-b2ec-6f8ba147d3cd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Refreshing network info cache for port ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.684 2 DEBUG nova.virt.libvirt.driver [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Start _get_guest_xml network_info=[{"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.688 2 WARNING nova.virt.libvirt.driver [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.694 2 DEBUG nova.virt.libvirt.host [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.695 2 DEBUG nova.virt.libvirt.host [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.699 2 DEBUG nova.virt.libvirt.host [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.700 2 DEBUG nova.virt.libvirt.host [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.701 2 DEBUG nova.virt.libvirt.driver [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.701 2 DEBUG nova.virt.hardware [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.701 2 DEBUG nova.virt.hardware [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.702 2 DEBUG nova.virt.hardware [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.702 2 DEBUG nova.virt.hardware [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.702 2 DEBUG nova.virt.hardware [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.702 2 DEBUG nova.virt.hardware [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.702 2 DEBUG nova.virt.hardware [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.703 2 DEBUG nova.virt.hardware [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.703 2 DEBUG nova.virt.hardware [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.703 2 DEBUG nova.virt.hardware [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.703 2 DEBUG nova.virt.hardware [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.706 2 DEBUG nova.virt.libvirt.vif [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:25:11Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ServerActionsTestJSON-server-300185996',display_name='tempest-ServerActionsTestJSON-server-300185996',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serveractionstestjson-server-300185996',id=121,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBJJLom+UJzZg9dduKQv+725QaYDZoMXvP/xlpKnb/K05SGc4dkyLwCDweJ3QifTmxLWqK9Sz5A12yMJbzpa36v5C4bUqj8uiWk/vbR1BAjBdKM9d/Ug8M2nT8LwDBGP/9A==',key_name='tempest-keypair-1006285918',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='e564a4cad5d443dba81ec04d2a05ced9',ramdisk_id='',reservation_id='r-als9bbed',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServerActionsTestJSON-1646745100',owner_user_name='tempest-ServerActionsTestJSON-1646745100-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:25:13Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='d54b1826121b47caba89932a78c06ccd',uuid=40c8eb3a-547f-435e-8e59-ce9dcddb5f8e,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": 
"fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.706 2 DEBUG nova.network.os_vif_util [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converting VIF {"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.707 2 DEBUG nova.network.os_vif_util [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:d5:ef:4c,bridge_name='br-int',has_traffic_filtering=True,id=ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapae0f2dc4-de') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.708 2 DEBUG nova.objects.instance [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'pci_devices' on Instance uuid 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.727 2 DEBUG nova.virt.libvirt.driver [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:25:17 compute-0 nova_compute[192079]:   <uuid>40c8eb3a-547f-435e-8e59-ce9dcddb5f8e</uuid>
Oct 02 12:25:17 compute-0 nova_compute[192079]:   <name>instance-00000079</name>
Oct 02 12:25:17 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:25:17 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:25:17 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:25:17 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:       <nova:name>tempest-ServerActionsTestJSON-server-300185996</nova:name>
Oct 02 12:25:17 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:25:17</nova:creationTime>
Oct 02 12:25:17 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:25:17 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:25:17 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:25:17 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:25:17 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:25:17 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:25:17 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:25:17 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:25:17 compute-0 nova_compute[192079]:         <nova:user uuid="d54b1826121b47caba89932a78c06ccd">tempest-ServerActionsTestJSON-1646745100-project-member</nova:user>
Oct 02 12:25:17 compute-0 nova_compute[192079]:         <nova:project uuid="e564a4cad5d443dba81ec04d2a05ced9">tempest-ServerActionsTestJSON-1646745100</nova:project>
Oct 02 12:25:17 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:25:17 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:25:17 compute-0 nova_compute[192079]:         <nova:port uuid="ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0">
Oct 02 12:25:17 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.9" ipVersion="4"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:25:17 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:25:17 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:25:17 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <system>
Oct 02 12:25:17 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:25:17 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:25:17 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:25:17 compute-0 nova_compute[192079]:       <entry name="serial">40c8eb3a-547f-435e-8e59-ce9dcddb5f8e</entry>
Oct 02 12:25:17 compute-0 nova_compute[192079]:       <entry name="uuid">40c8eb3a-547f-435e-8e59-ce9dcddb5f8e</entry>
Oct 02 12:25:17 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     </system>
Oct 02 12:25:17 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:25:17 compute-0 nova_compute[192079]:   <os>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:   </os>
Oct 02 12:25:17 compute-0 nova_compute[192079]:   <features>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:   </features>
Oct 02 12:25:17 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:25:17 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:25:17 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:25:17 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:25:17 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk.config"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:25:17 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:d5:ef:4c"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:       <target dev="tapae0f2dc4-de"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:25:17 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/console.log" append="off"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <video>
Oct 02 12:25:17 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     </video>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:25:17 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:25:17 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:25:17 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:25:17 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:25:17 compute-0 nova_compute[192079]: </domain>
Oct 02 12:25:17 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.728 2 DEBUG nova.compute.manager [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Preparing to wait for external event network-vif-plugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.729 2 DEBUG oslo_concurrency.lockutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.729 2 DEBUG oslo_concurrency.lockutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.729 2 DEBUG oslo_concurrency.lockutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.730 2 DEBUG nova.virt.libvirt.vif [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:25:11Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ServerActionsTestJSON-server-300185996',display_name='tempest-ServerActionsTestJSON-server-300185996',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serveractionstestjson-server-300185996',id=121,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBJJLom+UJzZg9dduKQv+725QaYDZoMXvP/xlpKnb/K05SGc4dkyLwCDweJ3QifTmxLWqK9Sz5A12yMJbzpa36v5C4bUqj8uiWk/vbR1BAjBdKM9d/Ug8M2nT8LwDBGP/9A==',key_name='tempest-keypair-1006285918',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='e564a4cad5d443dba81ec04d2a05ced9',ramdisk_id='',reservation_id='r-als9bbed',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServerActionsTestJSON-1646745100',owner_user_name='tempest-ServerActionsTestJSON-1646745100-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:25:13Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='d54b1826121b47caba89932a78c06ccd',uuid=40c8eb3a-547f-435e-8e59-ce9dcddb5f8e,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", 
"type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.730 2 DEBUG nova.network.os_vif_util [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converting VIF {"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.731 2 DEBUG nova.network.os_vif_util [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:d5:ef:4c,bridge_name='br-int',has_traffic_filtering=True,id=ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapae0f2dc4-de') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.731 2 DEBUG os_vif [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:d5:ef:4c,bridge_name='br-int',has_traffic_filtering=True,id=ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapae0f2dc4-de') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.731 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.732 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.732 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.735 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.735 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapae0f2dc4-de, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.735 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapae0f2dc4-de, col_values=(('external_ids', {'iface-id': 'ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:d5:ef:4c', 'vm-uuid': '40c8eb3a-547f-435e-8e59-ce9dcddb5f8e'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.740 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.741 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:25:17 compute-0 NetworkManager[51160]: <info>  [1759407917.7430] manager: (tapae0f2dc4-de): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/215)
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.749 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.749 2 INFO os_vif [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:d5:ef:4c,bridge_name='br-int',has_traffic_filtering=True,id=ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapae0f2dc4-de')
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.848 2 DEBUG nova.virt.libvirt.driver [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.849 2 DEBUG nova.virt.libvirt.driver [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.849 2 DEBUG nova.virt.libvirt.driver [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] No VIF found with MAC fa:16:3e:d5:ef:4c, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:25:17 compute-0 nova_compute[192079]: 2025-10-02 12:25:17.850 2 INFO nova.virt.libvirt.driver [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Using config drive
Oct 02 12:25:17 compute-0 podman[239356]: 2025-10-02 12:25:17.888059214 +0000 UTC m=+0.082139610 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter)
Oct 02 12:25:17 compute-0 podman[239357]: 2025-10-02 12:25:17.909913969 +0000 UTC m=+0.104134099 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, org.label-schema.vendor=CentOS, tcib_managed=true, container_name=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, managed_by=edpm_ansible, config_id=iscsid, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:25:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:18.186 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '29'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:25:18 compute-0 nova_compute[192079]: 2025-10-02 12:25:18.438 2 INFO nova.virt.libvirt.driver [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Creating config drive at /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk.config
Oct 02 12:25:18 compute-0 nova_compute[192079]: 2025-10-02 12:25:18.449 2 DEBUG oslo_concurrency.processutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpsfkg1g5b execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:25:18 compute-0 nova_compute[192079]: 2025-10-02 12:25:18.598 2 DEBUG oslo_concurrency.processutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpsfkg1g5b" returned: 0 in 0.149s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:25:18 compute-0 kernel: tapae0f2dc4-de: entered promiscuous mode
Oct 02 12:25:18 compute-0 NetworkManager[51160]: <info>  [1759407918.6741] manager: (tapae0f2dc4-de): new Tun device (/org/freedesktop/NetworkManager/Devices/216)
Oct 02 12:25:18 compute-0 ovn_controller[94336]: 2025-10-02T12:25:18Z|00427|binding|INFO|Claiming lport ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 for this chassis.
Oct 02 12:25:18 compute-0 ovn_controller[94336]: 2025-10-02T12:25:18Z|00428|binding|INFO|ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0: Claiming fa:16:3e:d5:ef:4c 10.100.0.9
Oct 02 12:25:18 compute-0 nova_compute[192079]: 2025-10-02 12:25:18.674 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:18 compute-0 ovn_controller[94336]: 2025-10-02T12:25:18Z|00429|binding|INFO|Setting lport ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 ovn-installed in OVS
Oct 02 12:25:18 compute-0 nova_compute[192079]: 2025-10-02 12:25:18.691 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:18 compute-0 nova_compute[192079]: 2025-10-02 12:25:18.694 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:18.701 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:d5:ef:4c 10.100.0.9'], port_security=['fa:16:3e:d5:ef:4c 10.100.0.9'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.9/28', 'neutron:device_id': '40c8eb3a-547f-435e-8e59-ce9dcddb5f8e', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-a04f937a-375f-4fb0-90fe-5f514a88668f', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'neutron:revision_number': '2', 'neutron:security_group_ids': 'c0383701-0ec7-4f3b-8585-5effc4f5ca5a', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=50c0aa38-5fd8-41c7-b4bf-85b59722c5c3, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:25:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:18.702 103294 INFO neutron.agent.ovn.metadata.agent [-] Port ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 in datapath a04f937a-375f-4fb0-90fe-5f514a88668f bound to our chassis
Oct 02 12:25:18 compute-0 ovn_controller[94336]: 2025-10-02T12:25:18Z|00430|binding|INFO|Setting lport ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 up in Southbound
Oct 02 12:25:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:18.704 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network a04f937a-375f-4fb0-90fe-5f514a88668f
Oct 02 12:25:18 compute-0 systemd-udevd[239414]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:25:18 compute-0 systemd-machined[152150]: New machine qemu-56-instance-00000079.
Oct 02 12:25:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:18.714 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[16745703-b9fb-4da2-9857-0e3c78ea8ee9]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:18.715 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tapa04f937a-31 in ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:25:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:18.717 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tapa04f937a-30 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:25:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:18.717 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[78c4cb64-d0d7-4efd-bb8c-33e3e3051874]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:18.719 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[636dfa93-ab2d-47a1-874e-014ab7532239]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:18 compute-0 NetworkManager[51160]: <info>  [1759407918.7213] device (tapae0f2dc4-de): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:25:18 compute-0 NetworkManager[51160]: <info>  [1759407918.7238] device (tapae0f2dc4-de): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:25:18 compute-0 systemd[1]: Started Virtual Machine qemu-56-instance-00000079.
Oct 02 12:25:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:18.733 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[9a620891-b680-4c91-b2b8-55ee12f66efc]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:18.752 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4abdb098-90cf-4fb9-b411-75fb5716381e]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:18.787 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[27df0daf-8695-452b-9d64-b5cbaaa0da86]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:18 compute-0 NetworkManager[51160]: <info>  [1759407918.7943] manager: (tapa04f937a-30): new Veth device (/org/freedesktop/NetworkManager/Devices/217)
Oct 02 12:25:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:18.793 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[fc8e294e-e405-40ba-85e1-1175bd972a2e]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:18 compute-0 systemd-udevd[239418]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:25:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:18.829 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[7c452e88-2e64-4a80-828e-85905d55b59a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:18.834 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[4e43be96-53e3-4076-990f-80aaabc80f17]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:18 compute-0 NetworkManager[51160]: <info>  [1759407918.8638] device (tapa04f937a-30): carrier: link connected
Oct 02 12:25:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:18.872 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[8c6a4439-8733-4995-ae17-de5995c73987]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:18.893 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[28036247-6dc6-4f44-aefe-d963e7861e32]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapa04f937a-31'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:33:93:68'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 137], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 591649, 'reachable_time': 29110, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 239448, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:18.911 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[60686680-c3cc-4ec6-a2db-b3150cf1d4a8]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe33:9368'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 591649, 'tstamp': 591649}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 239449, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:18.934 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4901bd33-a106-4d6a-bc48-f6e411b158cd]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapa04f937a-31'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:33:93:68'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 137], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 591649, 'reachable_time': 29110, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 239450, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:18.974 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8cbfbc9b-5a0f-4c10-be21-064184bcba9a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:19.043 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8cbffb52-f43d-4056-93bd-8c3a20e1f3a4]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:19.044 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapa04f937a-30, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:19.045 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:19.045 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapa04f937a-30, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.047 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:19 compute-0 NetworkManager[51160]: <info>  [1759407919.0490] manager: (tapa04f937a-30): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/218)
Oct 02 12:25:19 compute-0 kernel: tapa04f937a-30: entered promiscuous mode
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.051 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:19.052 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tapa04f937a-30, col_values=(('external_ids', {'iface-id': '38f1ac16-18c6-4b4a-b769-ebc7dd5181d4'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.053 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:19 compute-0 ovn_controller[94336]: 2025-10-02T12:25:19Z|00431|binding|INFO|Releasing lport 38f1ac16-18c6-4b4a-b769-ebc7dd5181d4 from this chassis (sb_readonly=0)
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.072 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:19.074 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/a04f937a-375f-4fb0-90fe-5f514a88668f.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/a04f937a-375f-4fb0-90fe-5f514a88668f.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:19.076 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d58ddaf9-64ad-4b22-9d57-af15d7217471]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:19.077 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-a04f937a-375f-4fb0-90fe-5f514a88668f
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/a04f937a-375f-4fb0-90fe-5f514a88668f.pid.haproxy
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID a04f937a-375f-4fb0-90fe-5f514a88668f
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:25:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:19.077 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'env', 'PROCESS_TAG=haproxy-a04f937a-375f-4fb0-90fe-5f514a88668f', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/a04f937a-375f-4fb0-90fe-5f514a88668f.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.094 2 DEBUG nova.compute.manager [req-9c651dbd-39b7-473d-824c-6ff8eec9f68b req-16643485-1b58-406c-9dc2-eb2f0c9b81a5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Received event network-vif-plugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.095 2 DEBUG oslo_concurrency.lockutils [req-9c651dbd-39b7-473d-824c-6ff8eec9f68b req-16643485-1b58-406c-9dc2-eb2f0c9b81a5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.095 2 DEBUG oslo_concurrency.lockutils [req-9c651dbd-39b7-473d-824c-6ff8eec9f68b req-16643485-1b58-406c-9dc2-eb2f0c9b81a5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.096 2 DEBUG oslo_concurrency.lockutils [req-9c651dbd-39b7-473d-824c-6ff8eec9f68b req-16643485-1b58-406c-9dc2-eb2f0c9b81a5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.096 2 DEBUG nova.compute.manager [req-9c651dbd-39b7-473d-824c-6ff8eec9f68b req-16643485-1b58-406c-9dc2-eb2f0c9b81a5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Processing event network-vif-plugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.434 2 DEBUG nova.network.neutron [req-a84f4445-d002-45a3-b9d7-e0b50605948d req-40ea9657-95a6-4845-b2ec-6f8ba147d3cd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Updated VIF entry in instance network info cache for port ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.435 2 DEBUG nova.network.neutron [req-a84f4445-d002-45a3-b9d7-e0b50605948d req-40ea9657-95a6-4845-b2ec-6f8ba147d3cd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Updating instance_info_cache with network_info: [{"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.457 2 DEBUG oslo_concurrency.lockutils [req-a84f4445-d002-45a3-b9d7-e0b50605948d req-40ea9657-95a6-4845-b2ec-6f8ba147d3cd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:25:19 compute-0 podman[239489]: 2025-10-02 12:25:19.418098062 +0000 UTC m=+0.021018295 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:25:19 compute-0 podman[239489]: 2025-10-02 12:25:19.539540671 +0000 UTC m=+0.142460884 container create efeaadbacd2cfebc798478efe8b521ec1f0bec9ea511e2de8017568a1e74c73f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:25:19 compute-0 systemd[1]: Started libpod-conmon-efeaadbacd2cfebc798478efe8b521ec1f0bec9ea511e2de8017568a1e74c73f.scope.
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.592 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407919.5918477, 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.592 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] VM Started (Lifecycle Event)
Oct 02 12:25:19 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.595 2 DEBUG nova.compute.manager [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:25:19 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/4726a4cd226093453f99b40308054307bbd0325142e7ebf688e82d400650b91a/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.602 2 DEBUG nova.virt.libvirt.driver [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.606 2 INFO nova.virt.libvirt.driver [-] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Instance spawned successfully.
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.606 2 DEBUG nova.virt.libvirt.driver [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:25:19 compute-0 podman[239489]: 2025-10-02 12:25:19.608801289 +0000 UTC m=+0.211721502 container init efeaadbacd2cfebc798478efe8b521ec1f0bec9ea511e2de8017568a1e74c73f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3)
Oct 02 12:25:19 compute-0 podman[239489]: 2025-10-02 12:25:19.614027441 +0000 UTC m=+0.216947644 container start efeaadbacd2cfebc798478efe8b521ec1f0bec9ea511e2de8017568a1e74c73f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0)
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.628 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.632 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:25:19 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[239504]: [NOTICE]   (239508) : New worker (239510) forked
Oct 02 12:25:19 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[239504]: [NOTICE]   (239508) : Loading success.
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.643 2 DEBUG nova.virt.libvirt.driver [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.643 2 DEBUG nova.virt.libvirt.driver [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.643 2 DEBUG nova.virt.libvirt.driver [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.644 2 DEBUG nova.virt.libvirt.driver [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.644 2 DEBUG nova.virt.libvirt.driver [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.644 2 DEBUG nova.virt.libvirt.driver [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.654 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.655 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407919.5920026, 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.655 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] VM Paused (Lifecycle Event)
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.705 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.711 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407919.6015646, 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.712 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] VM Resumed (Lifecycle Event)
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.731 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.734 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.755 2 INFO nova.compute.manager [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Took 5.79 seconds to spawn the instance on the hypervisor.
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.755 2 DEBUG nova.compute.manager [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.758 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.864 2 INFO nova.compute.manager [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Took 6.66 seconds to build instance.
Oct 02 12:25:19 compute-0 nova_compute[192079]: 2025-10-02 12:25:19.900 2 DEBUG oslo_concurrency.lockutils [None req-c57ac389-708e-4578-a4c3-df57601a3894 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 6.850s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:25:21 compute-0 nova_compute[192079]: 2025-10-02 12:25:21.184 2 DEBUG nova.compute.manager [req-37e75aa3-f450-45d5-ad64-e30081a8c14f req-d70d2ec2-4c37-487a-afe6-77156d555659 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Received event network-vif-plugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:25:21 compute-0 nova_compute[192079]: 2025-10-02 12:25:21.185 2 DEBUG oslo_concurrency.lockutils [req-37e75aa3-f450-45d5-ad64-e30081a8c14f req-d70d2ec2-4c37-487a-afe6-77156d555659 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:25:21 compute-0 nova_compute[192079]: 2025-10-02 12:25:21.185 2 DEBUG oslo_concurrency.lockutils [req-37e75aa3-f450-45d5-ad64-e30081a8c14f req-d70d2ec2-4c37-487a-afe6-77156d555659 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:25:21 compute-0 nova_compute[192079]: 2025-10-02 12:25:21.185 2 DEBUG oslo_concurrency.lockutils [req-37e75aa3-f450-45d5-ad64-e30081a8c14f req-d70d2ec2-4c37-487a-afe6-77156d555659 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:25:21 compute-0 nova_compute[192079]: 2025-10-02 12:25:21.186 2 DEBUG nova.compute.manager [req-37e75aa3-f450-45d5-ad64-e30081a8c14f req-d70d2ec2-4c37-487a-afe6-77156d555659 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] No waiting events found dispatching network-vif-plugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:25:21 compute-0 nova_compute[192079]: 2025-10-02 12:25:21.186 2 WARNING nova.compute.manager [req-37e75aa3-f450-45d5-ad64-e30081a8c14f req-d70d2ec2-4c37-487a-afe6-77156d555659 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Received unexpected event network-vif-plugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 for instance with vm_state active and task_state None.
Oct 02 12:25:21 compute-0 nova_compute[192079]: 2025-10-02 12:25:21.720 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759407906.7186546, ae56113d-001e-4f10-9236-c07fe5146d9c => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:25:21 compute-0 nova_compute[192079]: 2025-10-02 12:25:21.720 2 INFO nova.compute.manager [-] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] VM Stopped (Lifecycle Event)
Oct 02 12:25:21 compute-0 nova_compute[192079]: 2025-10-02 12:25:21.743 2 DEBUG nova.compute.manager [None req-08bb2085-b216-4c5b-9566-bdb2897e0485 - - - - - -] [instance: ae56113d-001e-4f10-9236-c07fe5146d9c] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:25:21 compute-0 nova_compute[192079]: 2025-10-02 12:25:21.840 2 DEBUG nova.compute.manager [req-19ad423d-71cd-44b7-9474-03fd54b6169e req-0f78c556-a441-4c6a-a266-502a25e93dab 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Received event network-changed-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:25:21 compute-0 nova_compute[192079]: 2025-10-02 12:25:21.841 2 DEBUG nova.compute.manager [req-19ad423d-71cd-44b7-9474-03fd54b6169e req-0f78c556-a441-4c6a-a266-502a25e93dab 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Refreshing instance network info cache due to event network-changed-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:25:21 compute-0 nova_compute[192079]: 2025-10-02 12:25:21.841 2 DEBUG oslo_concurrency.lockutils [req-19ad423d-71cd-44b7-9474-03fd54b6169e req-0f78c556-a441-4c6a-a266-502a25e93dab 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:25:21 compute-0 nova_compute[192079]: 2025-10-02 12:25:21.841 2 DEBUG oslo_concurrency.lockutils [req-19ad423d-71cd-44b7-9474-03fd54b6169e req-0f78c556-a441-4c6a-a266-502a25e93dab 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:25:21 compute-0 nova_compute[192079]: 2025-10-02 12:25:21.841 2 DEBUG nova.network.neutron [req-19ad423d-71cd-44b7-9474-03fd54b6169e req-0f78c556-a441-4c6a-a266-502a25e93dab 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Refreshing network info cache for port ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:25:21 compute-0 nova_compute[192079]: 2025-10-02 12:25:21.948 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:22 compute-0 nova_compute[192079]: 2025-10-02 12:25:22.739 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:23 compute-0 nova_compute[192079]: 2025-10-02 12:25:23.775 2 DEBUG nova.network.neutron [req-19ad423d-71cd-44b7-9474-03fd54b6169e req-0f78c556-a441-4c6a-a266-502a25e93dab 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Updated VIF entry in instance network info cache for port ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:25:23 compute-0 nova_compute[192079]: 2025-10-02 12:25:23.776 2 DEBUG nova.network.neutron [req-19ad423d-71cd-44b7-9474-03fd54b6169e req-0f78c556-a441-4c6a-a266-502a25e93dab 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Updating instance_info_cache with network_info: [{"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:25:23 compute-0 nova_compute[192079]: 2025-10-02 12:25:23.869 2 DEBUG oslo_concurrency.lockutils [req-19ad423d-71cd-44b7-9474-03fd54b6169e req-0f78c556-a441-4c6a-a266-502a25e93dab 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:25:25 compute-0 podman[239519]: 2025-10-02 12:25:25.166636746 +0000 UTC m=+0.071965452 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, 
container_name=ovn_metadata_agent, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:25:25 compute-0 podman[239521]: 2025-10-02 12:25:25.187147945 +0000 UTC m=+0.082951091 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>)
Oct 02 12:25:25 compute-0 podman[239520]: 2025-10-02 12:25:25.206682617 +0000 UTC m=+0.100707865 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, tcib_managed=true, container_name=ovn_controller, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3)
Oct 02 12:25:26 compute-0 nova_compute[192079]: 2025-10-02 12:25:26.799 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:25:26 compute-0 nova_compute[192079]: 2025-10-02 12:25:26.951 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:27 compute-0 nova_compute[192079]: 2025-10-02 12:25:27.741 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:29 compute-0 nova_compute[192079]: 2025-10-02 12:25:29.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:25:29 compute-0 nova_compute[192079]: 2025-10-02 12:25:29.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:25:29 compute-0 nova_compute[192079]: 2025-10-02 12:25:29.708 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:25:29 compute-0 nova_compute[192079]: 2025-10-02 12:25:29.709 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:25:29 compute-0 nova_compute[192079]: 2025-10-02 12:25:29.709 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:25:29 compute-0 nova_compute[192079]: 2025-10-02 12:25:29.709 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:25:29 compute-0 nova_compute[192079]: 2025-10-02 12:25:29.792 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:25:29 compute-0 nova_compute[192079]: 2025-10-02 12:25:29.893 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk --force-share --output=json" returned: 0 in 0.101s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:25:29 compute-0 nova_compute[192079]: 2025-10-02 12:25:29.894 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:25:29 compute-0 nova_compute[192079]: 2025-10-02 12:25:29.954 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk --force-share --output=json" returned: 0 in 0.060s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:25:30 compute-0 nova_compute[192079]: 2025-10-02 12:25:30.107 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:25:30 compute-0 nova_compute[192079]: 2025-10-02 12:25:30.108 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5563MB free_disk=73.3474349975586GB free_vcpus=7 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:25:30 compute-0 nova_compute[192079]: 2025-10-02 12:25:30.109 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:25:30 compute-0 nova_compute[192079]: 2025-10-02 12:25:30.109 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:25:30 compute-0 nova_compute[192079]: 2025-10-02 12:25:30.393 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:25:30 compute-0 nova_compute[192079]: 2025-10-02 12:25:30.394 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 1 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:25:30 compute-0 nova_compute[192079]: 2025-10-02 12:25:30.395 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=640MB phys_disk=79GB used_disk=1GB total_vcpus=8 used_vcpus=1 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:25:30 compute-0 nova_compute[192079]: 2025-10-02 12:25:30.521 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:25:30 compute-0 nova_compute[192079]: 2025-10-02 12:25:30.587 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:25:30 compute-0 nova_compute[192079]: 2025-10-02 12:25:30.625 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:25:30 compute-0 nova_compute[192079]: 2025-10-02 12:25:30.625 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.516s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:25:31 compute-0 nova_compute[192079]: 2025-10-02 12:25:31.625 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:25:31 compute-0 nova_compute[192079]: 2025-10-02 12:25:31.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:25:31 compute-0 nova_compute[192079]: 2025-10-02 12:25:31.976 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:32 compute-0 ovn_controller[94336]: 2025-10-02T12:25:32Z|00041|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:d5:ef:4c 10.100.0.9
Oct 02 12:25:32 compute-0 ovn_controller[94336]: 2025-10-02T12:25:32Z|00042|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:d5:ef:4c 10.100.0.9
Oct 02 12:25:32 compute-0 nova_compute[192079]: 2025-10-02 12:25:32.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:25:32 compute-0 nova_compute[192079]: 2025-10-02 12:25:32.743 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:33 compute-0 nova_compute[192079]: 2025-10-02 12:25:33.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:25:33 compute-0 nova_compute[192079]: 2025-10-02 12:25:33.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:25:34 compute-0 podman[239605]: 2025-10-02 12:25:34.167416685 +0000 UTC m=+0.067664115 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ceilometer_agent_compute, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, 
org.label-schema.vendor=CentOS, config_id=edpm, io.buildah.version=1.41.3, org.label-schema.build-date=20251001)
Oct 02 12:25:35 compute-0 nova_compute[192079]: 2025-10-02 12:25:35.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:25:35 compute-0 nova_compute[192079]: 2025-10-02 12:25:35.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:25:35 compute-0 nova_compute[192079]: 2025-10-02 12:25:35.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:25:35 compute-0 nova_compute[192079]: 2025-10-02 12:25:35.820 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "refresh_cache-40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:25:35 compute-0 nova_compute[192079]: 2025-10-02 12:25:35.820 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquired lock "refresh_cache-40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:25:35 compute-0 nova_compute[192079]: 2025-10-02 12:25:35.821 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Forcefully refreshing network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2004
Oct 02 12:25:35 compute-0 nova_compute[192079]: 2025-10-02 12:25:35.821 2 DEBUG nova.objects.instance [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lazy-loading 'info_cache' on Instance uuid 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:25:36 compute-0 nova_compute[192079]: 2025-10-02 12:25:36.977 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:37 compute-0 nova_compute[192079]: 2025-10-02 12:25:37.232 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Updating instance_info_cache with network_info: [{"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:25:37 compute-0 nova_compute[192079]: 2025-10-02 12:25:37.249 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Releasing lock "refresh_cache-40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:25:37 compute-0 nova_compute[192079]: 2025-10-02 12:25:37.250 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Updated the network info_cache for instance _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9929
Oct 02 12:25:37 compute-0 nova_compute[192079]: 2025-10-02 12:25:37.250 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:25:37 compute-0 nova_compute[192079]: 2025-10-02 12:25:37.639 2 DEBUG oslo_concurrency.lockutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "ab7610b5-3462-4dc2-a802-0998246e8cdb" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:25:37 compute-0 nova_compute[192079]: 2025-10-02 12:25:37.639 2 DEBUG oslo_concurrency.lockutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "ab7610b5-3462-4dc2-a802-0998246e8cdb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:25:37 compute-0 nova_compute[192079]: 2025-10-02 12:25:37.656 2 DEBUG nova.compute.manager [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:25:37 compute-0 nova_compute[192079]: 2025-10-02 12:25:37.739 2 DEBUG oslo_concurrency.lockutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:25:37 compute-0 nova_compute[192079]: 2025-10-02 12:25:37.740 2 DEBUG oslo_concurrency.lockutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:25:37 compute-0 nova_compute[192079]: 2025-10-02 12:25:37.745 2 DEBUG nova.virt.hardware [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:25:37 compute-0 nova_compute[192079]: 2025-10-02 12:25:37.745 2 INFO nova.compute.claims [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:25:37 compute-0 nova_compute[192079]: 2025-10-02 12:25:37.781 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:37 compute-0 nova_compute[192079]: 2025-10-02 12:25:37.880 2 DEBUG nova.compute.provider_tree [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:25:37 compute-0 nova_compute[192079]: 2025-10-02 12:25:37.902 2 DEBUG nova.scheduler.client.report [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:25:37 compute-0 nova_compute[192079]: 2025-10-02 12:25:37.929 2 DEBUG oslo_concurrency.lockutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.190s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:25:37 compute-0 nova_compute[192079]: 2025-10-02 12:25:37.931 2 DEBUG nova.compute.manager [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:37.999 2 DEBUG nova.compute.manager [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.000 2 DEBUG nova.network.neutron [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.024 2 INFO nova.virt.libvirt.driver [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.048 2 DEBUG nova.compute.manager [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.198 2 DEBUG nova.compute.manager [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.201 2 DEBUG nova.virt.libvirt.driver [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.202 2 INFO nova.virt.libvirt.driver [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Creating image(s)
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.203 2 DEBUG oslo_concurrency.lockutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "/var/lib/nova/instances/ab7610b5-3462-4dc2-a802-0998246e8cdb/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.203 2 DEBUG oslo_concurrency.lockutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "/var/lib/nova/instances/ab7610b5-3462-4dc2-a802-0998246e8cdb/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.204 2 DEBUG oslo_concurrency.lockutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "/var/lib/nova/instances/ab7610b5-3462-4dc2-a802-0998246e8cdb/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.221 2 DEBUG nova.policy [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.224 2 DEBUG oslo_concurrency.processutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.286 2 DEBUG oslo_concurrency.processutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.062s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.287 2 DEBUG oslo_concurrency.lockutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.288 2 DEBUG oslo_concurrency.lockutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.299 2 DEBUG oslo_concurrency.processutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.352 2 DEBUG oslo_concurrency.processutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.053s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.353 2 DEBUG oslo_concurrency.processutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/ab7610b5-3462-4dc2-a802-0998246e8cdb/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.387 2 DEBUG oslo_concurrency.processutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/ab7610b5-3462-4dc2-a802-0998246e8cdb/disk 1073741824" returned: 0 in 0.034s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.388 2 DEBUG oslo_concurrency.lockutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.100s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.388 2 DEBUG oslo_concurrency.processutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.475 2 DEBUG oslo_concurrency.processutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.086s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.476 2 DEBUG nova.virt.disk.api [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Checking if we can resize image /var/lib/nova/instances/ab7610b5-3462-4dc2-a802-0998246e8cdb/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.476 2 DEBUG oslo_concurrency.processutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ab7610b5-3462-4dc2-a802-0998246e8cdb/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.543 2 DEBUG oslo_concurrency.processutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/ab7610b5-3462-4dc2-a802-0998246e8cdb/disk --force-share --output=json" returned: 0 in 0.067s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.544 2 DEBUG nova.virt.disk.api [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Cannot resize image /var/lib/nova/instances/ab7610b5-3462-4dc2-a802-0998246e8cdb/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.544 2 DEBUG nova.objects.instance [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lazy-loading 'migration_context' on Instance uuid ab7610b5-3462-4dc2-a802-0998246e8cdb obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.559 2 DEBUG nova.virt.libvirt.driver [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.559 2 DEBUG nova.virt.libvirt.driver [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Ensure instance console log exists: /var/lib/nova/instances/ab7610b5-3462-4dc2-a802-0998246e8cdb/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.560 2 DEBUG oslo_concurrency.lockutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.560 2 DEBUG oslo_concurrency.lockutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:25:38 compute-0 nova_compute[192079]: 2025-10-02 12:25:38.561 2 DEBUG oslo_concurrency.lockutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:25:39 compute-0 nova_compute[192079]: 2025-10-02 12:25:39.837 2 DEBUG nova.network.neutron [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Successfully created port: 84dc02cc-883a-4f1b-a938-49e678b5f445 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:25:40 compute-0 nova_compute[192079]: 2025-10-02 12:25:40.806 2 DEBUG nova.network.neutron [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Successfully updated port: 84dc02cc-883a-4f1b-a938-49e678b5f445 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:25:40 compute-0 nova_compute[192079]: 2025-10-02 12:25:40.860 2 DEBUG oslo_concurrency.lockutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "refresh_cache-ab7610b5-3462-4dc2-a802-0998246e8cdb" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:25:40 compute-0 nova_compute[192079]: 2025-10-02 12:25:40.861 2 DEBUG oslo_concurrency.lockutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquired lock "refresh_cache-ab7610b5-3462-4dc2-a802-0998246e8cdb" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:25:40 compute-0 nova_compute[192079]: 2025-10-02 12:25:40.862 2 DEBUG nova.network.neutron [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:25:40 compute-0 nova_compute[192079]: 2025-10-02 12:25:40.970 2 DEBUG nova.compute.manager [req-75f38b57-af2d-4636-929a-c0bff16162bc req-fa6b9771-dc4b-4a85-83f0-6f05a39b078d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Received event network-changed-84dc02cc-883a-4f1b-a938-49e678b5f445 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:25:40 compute-0 nova_compute[192079]: 2025-10-02 12:25:40.970 2 DEBUG nova.compute.manager [req-75f38b57-af2d-4636-929a-c0bff16162bc req-fa6b9771-dc4b-4a85-83f0-6f05a39b078d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Refreshing instance network info cache due to event network-changed-84dc02cc-883a-4f1b-a938-49e678b5f445. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:25:40 compute-0 nova_compute[192079]: 2025-10-02 12:25:40.971 2 DEBUG oslo_concurrency.lockutils [req-75f38b57-af2d-4636-929a-c0bff16162bc req-fa6b9771-dc4b-4a85-83f0-6f05a39b078d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-ab7610b5-3462-4dc2-a802-0998246e8cdb" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:25:41 compute-0 nova_compute[192079]: 2025-10-02 12:25:41.028 2 DEBUG nova.network.neutron [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:25:41 compute-0 nova_compute[192079]: 2025-10-02 12:25:41.980 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:42 compute-0 podman[239640]: 2025-10-02 12:25:42.151358111 +0000 UTC m=+0.061034944 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, container_name=multipathd, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=multipathd, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, tcib_managed=true)
Oct 02 12:25:42 compute-0 podman[239639]: 2025-10-02 12:25:42.163955264 +0000 UTC m=+0.066786690 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, vcs-type=git, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., container_name=openstack_network_exporter, io.openshift.expose-services=, release=1755695350, com.redhat.component=ubi9-minimal-container, config_id=edpm, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, managed_by=edpm_ansible, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., name=ubi9-minimal, version=9.6, url=https://catalog.redhat.com/en/search?searchType=containers, architecture=x86_64, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', 
'/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, distribution-scope=public, io.buildah.version=1.33.7, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., maintainer=Red Hat, Inc., io.openshift.tags=minimal rhel9, vendor=Red Hat, Inc., build-date=2025-08-20T13:12:41)
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.444 2 DEBUG nova.network.neutron [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Updating instance_info_cache with network_info: [{"id": "84dc02cc-883a-4f1b-a938-49e678b5f445", "address": "fa:16:3e:80:c4:31", "network": {"id": "26df2dcf-f57c-4dae-8522-0277df741ed3", "bridge": "br-int", "label": "tempest-network-smoke--1584637508", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}, {"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe80:c431", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap84dc02cc-88", "ovs_interfaceid": "84dc02cc-883a-4f1b-a938-49e678b5f445", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.466 2 DEBUG oslo_concurrency.lockutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Releasing lock "refresh_cache-ab7610b5-3462-4dc2-a802-0998246e8cdb" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.466 2 DEBUG nova.compute.manager [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Instance network_info: |[{"id": "84dc02cc-883a-4f1b-a938-49e678b5f445", "address": "fa:16:3e:80:c4:31", "network": {"id": "26df2dcf-f57c-4dae-8522-0277df741ed3", "bridge": "br-int", "label": "tempest-network-smoke--1584637508", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}, {"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe80:c431", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap84dc02cc-88", "ovs_interfaceid": "84dc02cc-883a-4f1b-a938-49e678b5f445", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.467 2 DEBUG oslo_concurrency.lockutils [req-75f38b57-af2d-4636-929a-c0bff16162bc req-fa6b9771-dc4b-4a85-83f0-6f05a39b078d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-ab7610b5-3462-4dc2-a802-0998246e8cdb" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.467 2 DEBUG nova.network.neutron [req-75f38b57-af2d-4636-929a-c0bff16162bc req-fa6b9771-dc4b-4a85-83f0-6f05a39b078d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Refreshing network info cache for port 84dc02cc-883a-4f1b-a938-49e678b5f445 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.469 2 DEBUG nova.virt.libvirt.driver [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Start _get_guest_xml network_info=[{"id": "84dc02cc-883a-4f1b-a938-49e678b5f445", "address": "fa:16:3e:80:c4:31", "network": {"id": "26df2dcf-f57c-4dae-8522-0277df741ed3", "bridge": "br-int", "label": "tempest-network-smoke--1584637508", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}, {"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe80:c431", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap84dc02cc-88", "ovs_interfaceid": "84dc02cc-883a-4f1b-a938-49e678b5f445", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.474 2 WARNING nova.virt.libvirt.driver [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.480 2 DEBUG nova.virt.libvirt.host [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.480 2 DEBUG nova.virt.libvirt.host [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.488 2 DEBUG nova.virt.libvirt.host [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.489 2 DEBUG nova.virt.libvirt.host [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.490 2 DEBUG nova.virt.libvirt.driver [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.490 2 DEBUG nova.virt.hardware [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.490 2 DEBUG nova.virt.hardware [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.490 2 DEBUG nova.virt.hardware [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.491 2 DEBUG nova.virt.hardware [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.491 2 DEBUG nova.virt.hardware [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.491 2 DEBUG nova.virt.hardware [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.491 2 DEBUG nova.virt.hardware [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.492 2 DEBUG nova.virt.hardware [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.492 2 DEBUG nova.virt.hardware [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.492 2 DEBUG nova.virt.hardware [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.492 2 DEBUG nova.virt.hardware [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.495 2 DEBUG nova.virt.libvirt.vif [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:25:36Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestGettingAddress-server-1815158103',display_name='tempest-TestGettingAddress-server-1815158103',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testgettingaddress-server-1815158103',id=123,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBN27qqZO7DS6SotTIkgadWOrlyFzalcMBya6l3P3FHA92Trdk8QzNk/bIfeVZHQyyH9bzXdJACR3sdrkH4czxiQm1W3dnbgCG/vLQtAxveP29c1TkzsAJfjG23nfB+bI6Q==',key_name='tempest-TestGettingAddress-794970227',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='fd801958556f4c8aab047ecdef6b5ee8',ramdisk_id='',reservation_id='r-bhcbl67j',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestGettingAddress-1355720650',owner_user_name='tempest-TestGettingAddress-1355720650-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:25:38Z,user_data=None,user_id='97ce9f1898484e0e9a1f7c84a9f0dfe3',uuid=ab7610b5-3462-4dc2-a802-0998246e8cdb,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "84dc02cc-883a-4f1b-a938-49e678b5f445", "address": "fa:16:3e:80:c4:31", "network": {"id": "26df2dcf-f57c-4dae-8522-0277df741ed3", "bridge": "br-int", "label": "tempest-network-smoke--1584637508", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}, {"cidr": 
"2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe80:c431", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap84dc02cc-88", "ovs_interfaceid": "84dc02cc-883a-4f1b-a938-49e678b5f445", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.495 2 DEBUG nova.network.os_vif_util [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converting VIF {"id": "84dc02cc-883a-4f1b-a938-49e678b5f445", "address": "fa:16:3e:80:c4:31", "network": {"id": "26df2dcf-f57c-4dae-8522-0277df741ed3", "bridge": "br-int", "label": "tempest-network-smoke--1584637508", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}, {"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe80:c431", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap84dc02cc-88", "ovs_interfaceid": "84dc02cc-883a-4f1b-a938-49e678b5f445", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.496 2 DEBUG nova.network.os_vif_util [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:80:c4:31,bridge_name='br-int',has_traffic_filtering=True,id=84dc02cc-883a-4f1b-a938-49e678b5f445,network=Network(26df2dcf-f57c-4dae-8522-0277df741ed3),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap84dc02cc-88') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.497 2 DEBUG nova.objects.instance [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lazy-loading 'pci_devices' on Instance uuid ab7610b5-3462-4dc2-a802-0998246e8cdb obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.514 2 DEBUG nova.virt.libvirt.driver [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:25:42 compute-0 nova_compute[192079]:   <uuid>ab7610b5-3462-4dc2-a802-0998246e8cdb</uuid>
Oct 02 12:25:42 compute-0 nova_compute[192079]:   <name>instance-0000007b</name>
Oct 02 12:25:42 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:25:42 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:25:42 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:25:42 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:       <nova:name>tempest-TestGettingAddress-server-1815158103</nova:name>
Oct 02 12:25:42 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:25:42</nova:creationTime>
Oct 02 12:25:42 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:25:42 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:25:42 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:25:42 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:25:42 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:25:42 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:25:42 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:25:42 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:25:42 compute-0 nova_compute[192079]:         <nova:user uuid="97ce9f1898484e0e9a1f7c84a9f0dfe3">tempest-TestGettingAddress-1355720650-project-member</nova:user>
Oct 02 12:25:42 compute-0 nova_compute[192079]:         <nova:project uuid="fd801958556f4c8aab047ecdef6b5ee8">tempest-TestGettingAddress-1355720650</nova:project>
Oct 02 12:25:42 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:25:42 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:25:42 compute-0 nova_compute[192079]:         <nova:port uuid="84dc02cc-883a-4f1b-a938-49e678b5f445">
Oct 02 12:25:42 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.10" ipVersion="4"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="2001:db8::f816:3eff:fe80:c431" ipVersion="6"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:25:42 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:25:42 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:25:42 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <system>
Oct 02 12:25:42 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:25:42 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:25:42 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:25:42 compute-0 nova_compute[192079]:       <entry name="serial">ab7610b5-3462-4dc2-a802-0998246e8cdb</entry>
Oct 02 12:25:42 compute-0 nova_compute[192079]:       <entry name="uuid">ab7610b5-3462-4dc2-a802-0998246e8cdb</entry>
Oct 02 12:25:42 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     </system>
Oct 02 12:25:42 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:25:42 compute-0 nova_compute[192079]:   <os>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:   </os>
Oct 02 12:25:42 compute-0 nova_compute[192079]:   <features>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:   </features>
Oct 02 12:25:42 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:25:42 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:25:42 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:25:42 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/ab7610b5-3462-4dc2-a802-0998246e8cdb/disk"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:25:42 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/ab7610b5-3462-4dc2-a802-0998246e8cdb/disk.config"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:25:42 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:80:c4:31"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:       <target dev="tap84dc02cc-88"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:25:42 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/ab7610b5-3462-4dc2-a802-0998246e8cdb/console.log" append="off"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <video>
Oct 02 12:25:42 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     </video>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:25:42 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:25:42 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:25:42 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:25:42 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:25:42 compute-0 nova_compute[192079]: </domain>
Oct 02 12:25:42 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.515 2 DEBUG nova.compute.manager [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Preparing to wait for external event network-vif-plugged-84dc02cc-883a-4f1b-a938-49e678b5f445 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.515 2 DEBUG oslo_concurrency.lockutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "ab7610b5-3462-4dc2-a802-0998246e8cdb-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.516 2 DEBUG oslo_concurrency.lockutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "ab7610b5-3462-4dc2-a802-0998246e8cdb-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.516 2 DEBUG oslo_concurrency.lockutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "ab7610b5-3462-4dc2-a802-0998246e8cdb-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.516 2 DEBUG nova.virt.libvirt.vif [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:25:36Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestGettingAddress-server-1815158103',display_name='tempest-TestGettingAddress-server-1815158103',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testgettingaddress-server-1815158103',id=123,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBN27qqZO7DS6SotTIkgadWOrlyFzalcMBya6l3P3FHA92Trdk8QzNk/bIfeVZHQyyH9bzXdJACR3sdrkH4czxiQm1W3dnbgCG/vLQtAxveP29c1TkzsAJfjG23nfB+bI6Q==',key_name='tempest-TestGettingAddress-794970227',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='fd801958556f4c8aab047ecdef6b5ee8',ramdisk_id='',reservation_id='r-bhcbl67j',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestGettingAddress-1355720650',owner_user_name='tempest-TestGettingAddress-1355720650-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:25:38Z,user_data=None,user_id='97ce9f1898484e0e9a1f7c84a9f0dfe3',uuid=ab7610b5-3462-4dc2-a802-0998246e8cdb,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "84dc02cc-883a-4f1b-a938-49e678b5f445", "address": "fa:16:3e:80:c4:31", "network": {"id": "26df2dcf-f57c-4dae-8522-0277df741ed3", "bridge": "br-int", "label": "tempest-network-smoke--1584637508", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}, {"cidr": 
"2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe80:c431", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap84dc02cc-88", "ovs_interfaceid": "84dc02cc-883a-4f1b-a938-49e678b5f445", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.517 2 DEBUG nova.network.os_vif_util [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converting VIF {"id": "84dc02cc-883a-4f1b-a938-49e678b5f445", "address": "fa:16:3e:80:c4:31", "network": {"id": "26df2dcf-f57c-4dae-8522-0277df741ed3", "bridge": "br-int", "label": "tempest-network-smoke--1584637508", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}, {"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe80:c431", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap84dc02cc-88", "ovs_interfaceid": "84dc02cc-883a-4f1b-a938-49e678b5f445", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.517 2 DEBUG nova.network.os_vif_util [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:80:c4:31,bridge_name='br-int',has_traffic_filtering=True,id=84dc02cc-883a-4f1b-a938-49e678b5f445,network=Network(26df2dcf-f57c-4dae-8522-0277df741ed3),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap84dc02cc-88') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.518 2 DEBUG os_vif [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:80:c4:31,bridge_name='br-int',has_traffic_filtering=True,id=84dc02cc-883a-4f1b-a938-49e678b5f445,network=Network(26df2dcf-f57c-4dae-8522-0277df741ed3),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap84dc02cc-88') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.518 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.518 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.519 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.521 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.522 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap84dc02cc-88, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.522 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap84dc02cc-88, col_values=(('external_ids', {'iface-id': '84dc02cc-883a-4f1b-a938-49e678b5f445', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:80:c4:31', 'vm-uuid': 'ab7610b5-3462-4dc2-a802-0998246e8cdb'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.524 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:42 compute-0 NetworkManager[51160]: <info>  [1759407942.5255] manager: (tap84dc02cc-88): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/219)
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.526 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.529 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.530 2 INFO os_vif [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:80:c4:31,bridge_name='br-int',has_traffic_filtering=True,id=84dc02cc-883a-4f1b-a938-49e678b5f445,network=Network(26df2dcf-f57c-4dae-8522-0277df741ed3),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap84dc02cc-88')
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.573 2 DEBUG nova.virt.libvirt.driver [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.574 2 DEBUG nova.virt.libvirt.driver [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.574 2 DEBUG nova.virt.libvirt.driver [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] No VIF found with MAC fa:16:3e:80:c4:31, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:25:42 compute-0 nova_compute[192079]: 2025-10-02 12:25:42.575 2 INFO nova.virt.libvirt.driver [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Using config drive
Oct 02 12:25:44 compute-0 nova_compute[192079]: 2025-10-02 12:25:44.393 2 INFO nova.virt.libvirt.driver [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Creating config drive at /var/lib/nova/instances/ab7610b5-3462-4dc2-a802-0998246e8cdb/disk.config
Oct 02 12:25:44 compute-0 nova_compute[192079]: 2025-10-02 12:25:44.398 2 DEBUG oslo_concurrency.processutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/ab7610b5-3462-4dc2-a802-0998246e8cdb/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpfnhch_sz execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:25:44 compute-0 nova_compute[192079]: 2025-10-02 12:25:44.526 2 DEBUG oslo_concurrency.processutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/ab7610b5-3462-4dc2-a802-0998246e8cdb/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpfnhch_sz" returned: 0 in 0.127s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:25:44 compute-0 kernel: tap84dc02cc-88: entered promiscuous mode
Oct 02 12:25:44 compute-0 NetworkManager[51160]: <info>  [1759407944.5868] manager: (tap84dc02cc-88): new Tun device (/org/freedesktop/NetworkManager/Devices/220)
Oct 02 12:25:44 compute-0 ovn_controller[94336]: 2025-10-02T12:25:44Z|00432|binding|INFO|Claiming lport 84dc02cc-883a-4f1b-a938-49e678b5f445 for this chassis.
Oct 02 12:25:44 compute-0 ovn_controller[94336]: 2025-10-02T12:25:44Z|00433|binding|INFO|84dc02cc-883a-4f1b-a938-49e678b5f445: Claiming fa:16:3e:80:c4:31 10.100.0.10 2001:db8::f816:3eff:fe80:c431
Oct 02 12:25:44 compute-0 nova_compute[192079]: 2025-10-02 12:25:44.588 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:44.601 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:80:c4:31 10.100.0.10 2001:db8::f816:3eff:fe80:c431'], port_security=['fa:16:3e:80:c4:31 10.100.0.10 2001:db8::f816:3eff:fe80:c431'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.10/28 2001:db8::f816:3eff:fe80:c431/64', 'neutron:device_id': 'ab7610b5-3462-4dc2-a802-0998246e8cdb', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-26df2dcf-f57c-4dae-8522-0277df741ed3', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'neutron:revision_number': '2', 'neutron:security_group_ids': '2c57d713-64e3-4621-a624-32092d283319', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=e2784fb0-50ac-4c91-ba90-3b5c38b8adf4, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=5, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=84dc02cc-883a-4f1b-a938-49e678b5f445) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:44.602 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 84dc02cc-883a-4f1b-a938-49e678b5f445 in datapath 26df2dcf-f57c-4dae-8522-0277df741ed3 bound to our chassis
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:44.603 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 26df2dcf-f57c-4dae-8522-0277df741ed3
Oct 02 12:25:44 compute-0 ovn_controller[94336]: 2025-10-02T12:25:44Z|00434|binding|INFO|Setting lport 84dc02cc-883a-4f1b-a938-49e678b5f445 ovn-installed in OVS
Oct 02 12:25:44 compute-0 ovn_controller[94336]: 2025-10-02T12:25:44Z|00435|binding|INFO|Setting lport 84dc02cc-883a-4f1b-a938-49e678b5f445 up in Southbound
Oct 02 12:25:44 compute-0 nova_compute[192079]: 2025-10-02 12:25:44.610 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:44 compute-0 nova_compute[192079]: 2025-10-02 12:25:44.613 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:44 compute-0 systemd-udevd[239701]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:44.615 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[28db4c8e-069e-4a9e-94b8-9fe20f3ee7ed]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:44.616 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap26df2dcf-f1 in ovnmeta-26df2dcf-f57c-4dae-8522-0277df741ed3 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:44.618 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap26df2dcf-f0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:44.618 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[065cb49b-3595-4aac-9357-c1a7dddd6eb7]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:44.619 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e9a205f1-1188-4e00-abec-73cc7aff5a35]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:44 compute-0 systemd-machined[152150]: New machine qemu-57-instance-0000007b.
Oct 02 12:25:44 compute-0 NetworkManager[51160]: <info>  [1759407944.6273] device (tap84dc02cc-88): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:25:44 compute-0 NetworkManager[51160]: <info>  [1759407944.6282] device (tap84dc02cc-88): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:44.629 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[684d020f-294e-40ce-9f6a-f98ba992cdb1]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:44 compute-0 systemd[1]: Started Virtual Machine qemu-57-instance-0000007b.
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:44.656 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c82130c7-c8cb-487f-95fc-7877e8974819]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:44.685 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[a48864ba-6c58-4934-87d2-4ba894ce3c83]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:44.689 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[504260ff-b91e-4e23-a6b4-81d47a42fd07]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:44 compute-0 NetworkManager[51160]: <info>  [1759407944.6905] manager: (tap26df2dcf-f0): new Veth device (/org/freedesktop/NetworkManager/Devices/221)
Oct 02 12:25:44 compute-0 systemd-udevd[239704]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:44.717 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[828ff0ba-8cd3-4eb4-8660-75c6529f95e3]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:44.720 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[f7dac8fd-dadf-4795-b34e-e6dee0ec8bdd]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:44 compute-0 NetworkManager[51160]: <info>  [1759407944.7393] device (tap26df2dcf-f0): carrier: link connected
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:44.746 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[8eea4213-c082-4943-9991-3172e2d7bf62]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:44.762 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[34806771-a1f5-499c-b623-4f4256372e5a]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap26df2dcf-f1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:ea:74:6f'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 139], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 594236, 'reachable_time': 27676, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 239733, 'error': None, 'target': 'ovnmeta-26df2dcf-f57c-4dae-8522-0277df741ed3', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:44.778 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f3186429-8d17-4925-b3a1-c722a0e804bf]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:feea:746f'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 594236, 'tstamp': 594236}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 239734, 'error': None, 'target': 'ovnmeta-26df2dcf-f57c-4dae-8522-0277df741ed3', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:44.796 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[eca5edc3-a013-4bea-89bf-dfe54ec32f0c]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap26df2dcf-f1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:ea:74:6f'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 139], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 594236, 'reachable_time': 27676, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 239735, 'error': None, 'target': 'ovnmeta-26df2dcf-f57c-4dae-8522-0277df741ed3', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:44.828 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5082bb9c-71fd-40db-86c3-eef03fecb177]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:44.880 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[da3d5c89-b255-4bec-96f1-f6ffecd9ed7f]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:44.881 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap26df2dcf-f0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:44.881 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:44.882 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap26df2dcf-f0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:25:44 compute-0 nova_compute[192079]: 2025-10-02 12:25:44.884 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:44 compute-0 NetworkManager[51160]: <info>  [1759407944.8844] manager: (tap26df2dcf-f0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/222)
Oct 02 12:25:44 compute-0 kernel: tap26df2dcf-f0: entered promiscuous mode
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:44.886 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap26df2dcf-f0, col_values=(('external_ids', {'iface-id': 'adc60e93-14bb-4eb4-8a79-15dda196dc01'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:25:44 compute-0 nova_compute[192079]: 2025-10-02 12:25:44.888 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:44 compute-0 nova_compute[192079]: 2025-10-02 12:25:44.889 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:44 compute-0 ovn_controller[94336]: 2025-10-02T12:25:44Z|00436|binding|INFO|Releasing lport adc60e93-14bb-4eb4-8a79-15dda196dc01 from this chassis (sb_readonly=0)
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:44.890 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/26df2dcf-f57c-4dae-8522-0277df741ed3.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/26df2dcf-f57c-4dae-8522-0277df741ed3.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:44.891 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[57a71449-7bdc-41ee-a8db-2931cc024172]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:44.892 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-26df2dcf-f57c-4dae-8522-0277df741ed3
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/26df2dcf-f57c-4dae-8522-0277df741ed3.pid.haproxy
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 26df2dcf-f57c-4dae-8522-0277df741ed3
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:25:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:25:44.892 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-26df2dcf-f57c-4dae-8522-0277df741ed3', 'env', 'PROCESS_TAG=haproxy-26df2dcf-f57c-4dae-8522-0277df741ed3', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/26df2dcf-f57c-4dae-8522-0277df741ed3.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:25:44 compute-0 nova_compute[192079]: 2025-10-02 12:25:44.901 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:45 compute-0 podman[239768]: 2025-10-02 12:25:45.266481927 +0000 UTC m=+0.023669656 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:25:45 compute-0 podman[239768]: 2025-10-02 12:25:45.381115641 +0000 UTC m=+0.138303360 container create 3b8d51259ad8cd53160f2f60e5dc2a0fca47d6bd733bfa5d5b454cc726afa5b2 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-26df2dcf-f57c-4dae-8522-0277df741ed3, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS)
Oct 02 12:25:45 compute-0 systemd[1]: Started libpod-conmon-3b8d51259ad8cd53160f2f60e5dc2a0fca47d6bd733bfa5d5b454cc726afa5b2.scope.
Oct 02 12:25:45 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:25:45 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/24affa7f1a261e8d3f6c8eadd147a9c8e38c7c7206b4caf8e61c5c2658b341ac/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:25:45 compute-0 podman[239768]: 2025-10-02 12:25:45.476322046 +0000 UTC m=+0.233509795 container init 3b8d51259ad8cd53160f2f60e5dc2a0fca47d6bd733bfa5d5b454cc726afa5b2 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-26df2dcf-f57c-4dae-8522-0277df741ed3, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:25:45 compute-0 podman[239768]: 2025-10-02 12:25:45.482649438 +0000 UTC m=+0.239837147 container start 3b8d51259ad8cd53160f2f60e5dc2a0fca47d6bd733bfa5d5b454cc726afa5b2 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-26df2dcf-f57c-4dae-8522-0277df741ed3, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001)
Oct 02 12:25:45 compute-0 neutron-haproxy-ovnmeta-26df2dcf-f57c-4dae-8522-0277df741ed3[239789]: [NOTICE]   (239793) : New worker (239795) forked
Oct 02 12:25:45 compute-0 neutron-haproxy-ovnmeta-26df2dcf-f57c-4dae-8522-0277df741ed3[239789]: [NOTICE]   (239793) : Loading success.
Oct 02 12:25:45 compute-0 nova_compute[192079]: 2025-10-02 12:25:45.738 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407945.7379405, ab7610b5-3462-4dc2-a802-0998246e8cdb => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:25:45 compute-0 nova_compute[192079]: 2025-10-02 12:25:45.739 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] VM Started (Lifecycle Event)
Oct 02 12:25:45 compute-0 nova_compute[192079]: 2025-10-02 12:25:45.757 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:25:45 compute-0 nova_compute[192079]: 2025-10-02 12:25:45.762 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407945.738161, ab7610b5-3462-4dc2-a802-0998246e8cdb => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:25:45 compute-0 nova_compute[192079]: 2025-10-02 12:25:45.763 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] VM Paused (Lifecycle Event)
Oct 02 12:25:45 compute-0 nova_compute[192079]: 2025-10-02 12:25:45.778 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:25:45 compute-0 nova_compute[192079]: 2025-10-02 12:25:45.782 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:25:45 compute-0 nova_compute[192079]: 2025-10-02 12:25:45.799 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:25:46 compute-0 nova_compute[192079]: 2025-10-02 12:25:46.606 2 DEBUG nova.compute.manager [req-90db3cc2-3341-4505-b3b7-9d76e4d1a5f2 req-8f452e73-a114-4dea-9212-efd7d9354074 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Received event network-vif-plugged-84dc02cc-883a-4f1b-a938-49e678b5f445 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:25:46 compute-0 nova_compute[192079]: 2025-10-02 12:25:46.606 2 DEBUG oslo_concurrency.lockutils [req-90db3cc2-3341-4505-b3b7-9d76e4d1a5f2 req-8f452e73-a114-4dea-9212-efd7d9354074 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "ab7610b5-3462-4dc2-a802-0998246e8cdb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:25:46 compute-0 nova_compute[192079]: 2025-10-02 12:25:46.606 2 DEBUG oslo_concurrency.lockutils [req-90db3cc2-3341-4505-b3b7-9d76e4d1a5f2 req-8f452e73-a114-4dea-9212-efd7d9354074 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ab7610b5-3462-4dc2-a802-0998246e8cdb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:25:46 compute-0 nova_compute[192079]: 2025-10-02 12:25:46.607 2 DEBUG oslo_concurrency.lockutils [req-90db3cc2-3341-4505-b3b7-9d76e4d1a5f2 req-8f452e73-a114-4dea-9212-efd7d9354074 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ab7610b5-3462-4dc2-a802-0998246e8cdb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:25:46 compute-0 nova_compute[192079]: 2025-10-02 12:25:46.607 2 DEBUG nova.compute.manager [req-90db3cc2-3341-4505-b3b7-9d76e4d1a5f2 req-8f452e73-a114-4dea-9212-efd7d9354074 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Processing event network-vif-plugged-84dc02cc-883a-4f1b-a938-49e678b5f445 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:25:46 compute-0 nova_compute[192079]: 2025-10-02 12:25:46.608 2 DEBUG nova.compute.manager [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:25:46 compute-0 nova_compute[192079]: 2025-10-02 12:25:46.611 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407946.61157, ab7610b5-3462-4dc2-a802-0998246e8cdb => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:25:46 compute-0 nova_compute[192079]: 2025-10-02 12:25:46.612 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] VM Resumed (Lifecycle Event)
Oct 02 12:25:46 compute-0 nova_compute[192079]: 2025-10-02 12:25:46.615 2 DEBUG nova.virt.libvirt.driver [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:25:46 compute-0 nova_compute[192079]: 2025-10-02 12:25:46.621 2 INFO nova.virt.libvirt.driver [-] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Instance spawned successfully.
Oct 02 12:25:46 compute-0 nova_compute[192079]: 2025-10-02 12:25:46.621 2 DEBUG nova.virt.libvirt.driver [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:25:46 compute-0 nova_compute[192079]: 2025-10-02 12:25:46.634 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:25:46 compute-0 nova_compute[192079]: 2025-10-02 12:25:46.641 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:25:46 compute-0 nova_compute[192079]: 2025-10-02 12:25:46.645 2 DEBUG nova.virt.libvirt.driver [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:25:46 compute-0 nova_compute[192079]: 2025-10-02 12:25:46.645 2 DEBUG nova.virt.libvirt.driver [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:25:46 compute-0 nova_compute[192079]: 2025-10-02 12:25:46.646 2 DEBUG nova.virt.libvirt.driver [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:25:46 compute-0 nova_compute[192079]: 2025-10-02 12:25:46.646 2 DEBUG nova.virt.libvirt.driver [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:25:46 compute-0 nova_compute[192079]: 2025-10-02 12:25:46.647 2 DEBUG nova.virt.libvirt.driver [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:25:46 compute-0 nova_compute[192079]: 2025-10-02 12:25:46.647 2 DEBUG nova.virt.libvirt.driver [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:25:46 compute-0 nova_compute[192079]: 2025-10-02 12:25:46.671 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:25:46 compute-0 nova_compute[192079]: 2025-10-02 12:25:46.719 2 INFO nova.compute.manager [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Took 8.52 seconds to spawn the instance on the hypervisor.
Oct 02 12:25:46 compute-0 nova_compute[192079]: 2025-10-02 12:25:46.720 2 DEBUG nova.compute.manager [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:25:46 compute-0 nova_compute[192079]: 2025-10-02 12:25:46.835 2 INFO nova.compute.manager [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Took 9.12 seconds to build instance.
Oct 02 12:25:46 compute-0 nova_compute[192079]: 2025-10-02 12:25:46.867 2 DEBUG oslo_concurrency.lockutils [None req-195fa90d-6c75-404d-97db-0b027002e83e 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "ab7610b5-3462-4dc2-a802-0998246e8cdb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 9.228s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:25:47 compute-0 nova_compute[192079]: 2025-10-02 12:25:47.031 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:47 compute-0 nova_compute[192079]: 2025-10-02 12:25:47.053 2 DEBUG nova.network.neutron [req-75f38b57-af2d-4636-929a-c0bff16162bc req-fa6b9771-dc4b-4a85-83f0-6f05a39b078d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Updated VIF entry in instance network info cache for port 84dc02cc-883a-4f1b-a938-49e678b5f445. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:25:47 compute-0 nova_compute[192079]: 2025-10-02 12:25:47.054 2 DEBUG nova.network.neutron [req-75f38b57-af2d-4636-929a-c0bff16162bc req-fa6b9771-dc4b-4a85-83f0-6f05a39b078d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Updating instance_info_cache with network_info: [{"id": "84dc02cc-883a-4f1b-a938-49e678b5f445", "address": "fa:16:3e:80:c4:31", "network": {"id": "26df2dcf-f57c-4dae-8522-0277df741ed3", "bridge": "br-int", "label": "tempest-network-smoke--1584637508", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}, {"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe80:c431", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap84dc02cc-88", "ovs_interfaceid": "84dc02cc-883a-4f1b-a938-49e678b5f445", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:25:47 compute-0 nova_compute[192079]: 2025-10-02 12:25:47.159 2 DEBUG oslo_concurrency.lockutils [req-75f38b57-af2d-4636-929a-c0bff16162bc req-fa6b9771-dc4b-4a85-83f0-6f05a39b078d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-ab7610b5-3462-4dc2-a802-0998246e8cdb" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:25:47 compute-0 nova_compute[192079]: 2025-10-02 12:25:47.524 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:48 compute-0 podman[239804]: 2025-10-02 12:25:48.14040666 +0000 UTC m=+0.053329224 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:25:48 compute-0 podman[239805]: 2025-10-02 12:25:48.140653197 +0000 UTC m=+0.053328275 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, tcib_managed=true, config_id=iscsid, container_name=iscsid, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS)
Oct 02 12:25:48 compute-0 nova_compute[192079]: 2025-10-02 12:25:48.766 2 DEBUG nova.compute.manager [req-e0e9b609-c786-4508-a4b9-a5549970e1ef req-a0e4956b-e11c-43c6-b01f-8b77629a17a9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Received event network-vif-plugged-84dc02cc-883a-4f1b-a938-49e678b5f445 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:25:48 compute-0 nova_compute[192079]: 2025-10-02 12:25:48.767 2 DEBUG oslo_concurrency.lockutils [req-e0e9b609-c786-4508-a4b9-a5549970e1ef req-a0e4956b-e11c-43c6-b01f-8b77629a17a9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "ab7610b5-3462-4dc2-a802-0998246e8cdb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:25:48 compute-0 nova_compute[192079]: 2025-10-02 12:25:48.767 2 DEBUG oslo_concurrency.lockutils [req-e0e9b609-c786-4508-a4b9-a5549970e1ef req-a0e4956b-e11c-43c6-b01f-8b77629a17a9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ab7610b5-3462-4dc2-a802-0998246e8cdb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:25:48 compute-0 nova_compute[192079]: 2025-10-02 12:25:48.768 2 DEBUG oslo_concurrency.lockutils [req-e0e9b609-c786-4508-a4b9-a5549970e1ef req-a0e4956b-e11c-43c6-b01f-8b77629a17a9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ab7610b5-3462-4dc2-a802-0998246e8cdb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:25:48 compute-0 nova_compute[192079]: 2025-10-02 12:25:48.768 2 DEBUG nova.compute.manager [req-e0e9b609-c786-4508-a4b9-a5549970e1ef req-a0e4956b-e11c-43c6-b01f-8b77629a17a9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] No waiting events found dispatching network-vif-plugged-84dc02cc-883a-4f1b-a938-49e678b5f445 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:25:48 compute-0 nova_compute[192079]: 2025-10-02 12:25:48.768 2 WARNING nova.compute.manager [req-e0e9b609-c786-4508-a4b9-a5549970e1ef req-a0e4956b-e11c-43c6-b01f-8b77629a17a9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Received unexpected event network-vif-plugged-84dc02cc-883a-4f1b-a938-49e678b5f445 for instance with vm_state active and task_state None.
Oct 02 12:25:51 compute-0 nova_compute[192079]: 2025-10-02 12:25:51.415 2 DEBUG nova.compute.manager [req-16b4af30-d920-40e1-b4b1-bdbd732ebed2 req-e34175a4-4ac6-4347-b919-8ba5c58fc6c3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Received event network-changed-84dc02cc-883a-4f1b-a938-49e678b5f445 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:25:51 compute-0 nova_compute[192079]: 2025-10-02 12:25:51.416 2 DEBUG nova.compute.manager [req-16b4af30-d920-40e1-b4b1-bdbd732ebed2 req-e34175a4-4ac6-4347-b919-8ba5c58fc6c3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Refreshing instance network info cache due to event network-changed-84dc02cc-883a-4f1b-a938-49e678b5f445. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:25:51 compute-0 nova_compute[192079]: 2025-10-02 12:25:51.416 2 DEBUG oslo_concurrency.lockutils [req-16b4af30-d920-40e1-b4b1-bdbd732ebed2 req-e34175a4-4ac6-4347-b919-8ba5c58fc6c3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-ab7610b5-3462-4dc2-a802-0998246e8cdb" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:25:51 compute-0 nova_compute[192079]: 2025-10-02 12:25:51.417 2 DEBUG oslo_concurrency.lockutils [req-16b4af30-d920-40e1-b4b1-bdbd732ebed2 req-e34175a4-4ac6-4347-b919-8ba5c58fc6c3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-ab7610b5-3462-4dc2-a802-0998246e8cdb" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:25:51 compute-0 nova_compute[192079]: 2025-10-02 12:25:51.417 2 DEBUG nova.network.neutron [req-16b4af30-d920-40e1-b4b1-bdbd732ebed2 req-e34175a4-4ac6-4347-b919-8ba5c58fc6c3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Refreshing network info cache for port 84dc02cc-883a-4f1b-a938-49e678b5f445 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:25:52 compute-0 nova_compute[192079]: 2025-10-02 12:25:52.032 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:52 compute-0 nova_compute[192079]: 2025-10-02 12:25:52.527 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:53 compute-0 nova_compute[192079]: 2025-10-02 12:25:53.489 2 DEBUG nova.network.neutron [req-16b4af30-d920-40e1-b4b1-bdbd732ebed2 req-e34175a4-4ac6-4347-b919-8ba5c58fc6c3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Updated VIF entry in instance network info cache for port 84dc02cc-883a-4f1b-a938-49e678b5f445. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:25:53 compute-0 nova_compute[192079]: 2025-10-02 12:25:53.489 2 DEBUG nova.network.neutron [req-16b4af30-d920-40e1-b4b1-bdbd732ebed2 req-e34175a4-4ac6-4347-b919-8ba5c58fc6c3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Updating instance_info_cache with network_info: [{"id": "84dc02cc-883a-4f1b-a938-49e678b5f445", "address": "fa:16:3e:80:c4:31", "network": {"id": "26df2dcf-f57c-4dae-8522-0277df741ed3", "bridge": "br-int", "label": "tempest-network-smoke--1584637508", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}, {"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe80:c431", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap84dc02cc-88", "ovs_interfaceid": "84dc02cc-883a-4f1b-a938-49e678b5f445", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:25:53 compute-0 nova_compute[192079]: 2025-10-02 12:25:53.556 2 DEBUG oslo_concurrency.lockutils [req-16b4af30-d920-40e1-b4b1-bdbd732ebed2 req-e34175a4-4ac6-4347-b919-8ba5c58fc6c3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-ab7610b5-3462-4dc2-a802-0998246e8cdb" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:25:56 compute-0 podman[239846]: 2025-10-02 12:25:56.144951768 +0000 UTC m=+0.055490614 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent)
Oct 02 12:25:56 compute-0 podman[239852]: 2025-10-02 12:25:56.148257218 +0000 UTC m=+0.047867256 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:25:56 compute-0 podman[239847]: 2025-10-02 12:25:56.196954976 +0000 UTC m=+0.102475255 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.schema-version=1.0, container_name=ovn_controller, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, config_id=ovn_controller, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:25:56 compute-0 nova_compute[192079]: 2025-10-02 12:25:56.404 2 DEBUG nova.compute.manager [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Stashing vm_state: active _prep_resize /usr/lib/python3.9/site-packages/nova/compute/manager.py:5560
Oct 02 12:25:56 compute-0 nova_compute[192079]: 2025-10-02 12:25:56.618 2 DEBUG oslo_concurrency.lockutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:25:56 compute-0 nova_compute[192079]: 2025-10-02 12:25:56.619 2 DEBUG oslo_concurrency.lockutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:25:56 compute-0 nova_compute[192079]: 2025-10-02 12:25:56.656 2 DEBUG nova.objects.instance [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'pci_requests' on Instance uuid 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:25:56 compute-0 nova_compute[192079]: 2025-10-02 12:25:56.697 2 DEBUG nova.virt.hardware [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:25:56 compute-0 nova_compute[192079]: 2025-10-02 12:25:56.698 2 INFO nova.compute.claims [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:25:56 compute-0 nova_compute[192079]: 2025-10-02 12:25:56.698 2 DEBUG nova.objects.instance [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'resources' on Instance uuid 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:25:56 compute-0 nova_compute[192079]: 2025-10-02 12:25:56.742 2 DEBUG nova.objects.instance [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'pci_devices' on Instance uuid 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:25:56 compute-0 nova_compute[192079]: 2025-10-02 12:25:56.858 2 INFO nova.compute.resource_tracker [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Updating resource usage from migration e7859187-b33d-44dd-b078-3c66a9a847e5
Oct 02 12:25:57 compute-0 nova_compute[192079]: 2025-10-02 12:25:57.035 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:57 compute-0 nova_compute[192079]: 2025-10-02 12:25:57.233 2 DEBUG nova.compute.provider_tree [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:25:57 compute-0 nova_compute[192079]: 2025-10-02 12:25:57.269 2 DEBUG nova.scheduler.client.report [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:25:57 compute-0 nova_compute[192079]: 2025-10-02 12:25:57.326 2 DEBUG oslo_concurrency.lockutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 0.708s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:25:57 compute-0 nova_compute[192079]: 2025-10-02 12:25:57.327 2 INFO nova.compute.manager [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Migrating
Oct 02 12:25:57 compute-0 nova_compute[192079]: 2025-10-02 12:25:57.453 2 DEBUG oslo_concurrency.lockutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "refresh_cache-40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:25:57 compute-0 nova_compute[192079]: 2025-10-02 12:25:57.455 2 DEBUG oslo_concurrency.lockutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquired lock "refresh_cache-40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:25:57 compute-0 nova_compute[192079]: 2025-10-02 12:25:57.455 2 DEBUG nova.network.neutron [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:25:57 compute-0 nova_compute[192079]: 2025-10-02 12:25:57.529 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:25:58 compute-0 ovn_controller[94336]: 2025-10-02T12:25:58Z|00043|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:80:c4:31 10.100.0.10
Oct 02 12:25:58 compute-0 ovn_controller[94336]: 2025-10-02T12:25:58Z|00044|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:80:c4:31 10.100.0.10
Oct 02 12:25:58 compute-0 nova_compute[192079]: 2025-10-02 12:25:58.971 2 DEBUG nova.network.neutron [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Updating instance_info_cache with network_info: [{"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:25:59 compute-0 nova_compute[192079]: 2025-10-02 12:25:59.042 2 DEBUG oslo_concurrency.lockutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Releasing lock "refresh_cache-40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:25:59 compute-0 nova_compute[192079]: 2025-10-02 12:25:59.914 2 DEBUG nova.virt.libvirt.driver [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Starting migrate_disk_and_power_off migrate_disk_and_power_off /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:11511
Oct 02 12:25:59 compute-0 nova_compute[192079]: 2025-10-02 12:25:59.918 2 DEBUG nova.virt.libvirt.driver [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Shutting down instance from state 1 _clean_shutdown /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4071
Oct 02 12:26:02 compute-0 nova_compute[192079]: 2025-10-02 12:26:02.036 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:02 compute-0 kernel: tapae0f2dc4-de (unregistering): left promiscuous mode
Oct 02 12:26:02 compute-0 NetworkManager[51160]: <info>  [1759407962.0706] device (tapae0f2dc4-de): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:26:02 compute-0 nova_compute[192079]: 2025-10-02 12:26:02.078 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:02 compute-0 ovn_controller[94336]: 2025-10-02T12:26:02Z|00437|binding|INFO|Releasing lport ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 from this chassis (sb_readonly=0)
Oct 02 12:26:02 compute-0 ovn_controller[94336]: 2025-10-02T12:26:02Z|00438|binding|INFO|Setting lport ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 down in Southbound
Oct 02 12:26:02 compute-0 ovn_controller[94336]: 2025-10-02T12:26:02Z|00439|binding|INFO|Removing iface tapae0f2dc4-de ovn-installed in OVS
Oct 02 12:26:02 compute-0 nova_compute[192079]: 2025-10-02 12:26:02.097 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:02 compute-0 systemd[1]: machine-qemu\x2d56\x2dinstance\x2d00000079.scope: Deactivated successfully.
Oct 02 12:26:02 compute-0 systemd[1]: machine-qemu\x2d56\x2dinstance\x2d00000079.scope: Consumed 14.317s CPU time.
Oct 02 12:26:02 compute-0 systemd-machined[152150]: Machine qemu-56-instance-00000079 terminated.
Oct 02 12:26:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:02.147 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:d5:ef:4c 10.100.0.9'], port_security=['fa:16:3e:d5:ef:4c 10.100.0.9'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.9/28', 'neutron:device_id': '40c8eb3a-547f-435e-8e59-ce9dcddb5f8e', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-a04f937a-375f-4fb0-90fe-5f514a88668f', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'neutron:revision_number': '4', 'neutron:security_group_ids': 'c0383701-0ec7-4f3b-8585-5effc4f5ca5a', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com', 'neutron:port_fip': '192.168.122.248'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=50c0aa38-5fd8-41c7-b4bf-85b59722c5c3, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:26:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:02.148 103294 INFO neutron.agent.ovn.metadata.agent [-] Port ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 in datapath a04f937a-375f-4fb0-90fe-5f514a88668f unbound from our chassis
Oct 02 12:26:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:02.151 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network a04f937a-375f-4fb0-90fe-5f514a88668f, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:26:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:02.152 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[12040bb2-0742-4083-a691-1279a699593a]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:02.152 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f namespace which is not needed anymore
Oct 02 12:26:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:02.225 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:26:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:02.225 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:26:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:02.226 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:26:02 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[239504]: [NOTICE]   (239508) : haproxy version is 2.8.14-c23fe91
Oct 02 12:26:02 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[239504]: [NOTICE]   (239508) : path to executable is /usr/sbin/haproxy
Oct 02 12:26:02 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[239504]: [WARNING]  (239508) : Exiting Master process...
Oct 02 12:26:02 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[239504]: [ALERT]    (239508) : Current worker (239510) exited with code 143 (Terminated)
Oct 02 12:26:02 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[239504]: [WARNING]  (239508) : All workers exited. Exiting... (0)
Oct 02 12:26:02 compute-0 systemd[1]: libpod-efeaadbacd2cfebc798478efe8b521ec1f0bec9ea511e2de8017568a1e74c73f.scope: Deactivated successfully.
Oct 02 12:26:02 compute-0 podman[239966]: 2025-10-02 12:26:02.282236036 +0000 UTC m=+0.044150404 container died efeaadbacd2cfebc798478efe8b521ec1f0bec9ea511e2de8017568a1e74c73f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS)
Oct 02 12:26:02 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-efeaadbacd2cfebc798478efe8b521ec1f0bec9ea511e2de8017568a1e74c73f-userdata-shm.mount: Deactivated successfully.
Oct 02 12:26:02 compute-0 systemd[1]: var-lib-containers-storage-overlay-4726a4cd226093453f99b40308054307bbd0325142e7ebf688e82d400650b91a-merged.mount: Deactivated successfully.
Oct 02 12:26:02 compute-0 podman[239966]: 2025-10-02 12:26:02.328845997 +0000 UTC m=+0.090760355 container cleanup efeaadbacd2cfebc798478efe8b521ec1f0bec9ea511e2de8017568a1e74c73f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2)
Oct 02 12:26:02 compute-0 systemd[1]: libpod-conmon-efeaadbacd2cfebc798478efe8b521ec1f0bec9ea511e2de8017568a1e74c73f.scope: Deactivated successfully.
Oct 02 12:26:02 compute-0 podman[240010]: 2025-10-02 12:26:02.388816671 +0000 UTC m=+0.039515588 container remove efeaadbacd2cfebc798478efe8b521ec1f0bec9ea511e2de8017568a1e74c73f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS)
Oct 02 12:26:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:02.393 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c45773e0-af1f-4864-800f-9cc9367b4a68]: (4, ('Thu Oct  2 12:26:02 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f (efeaadbacd2cfebc798478efe8b521ec1f0bec9ea511e2de8017568a1e74c73f)\nefeaadbacd2cfebc798478efe8b521ec1f0bec9ea511e2de8017568a1e74c73f\nThu Oct  2 12:26:02 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f (efeaadbacd2cfebc798478efe8b521ec1f0bec9ea511e2de8017568a1e74c73f)\nefeaadbacd2cfebc798478efe8b521ec1f0bec9ea511e2de8017568a1e74c73f\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:02.395 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0f54737a-013a-49ab-bbaf-284bfaa7adc9]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:02.396 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapa04f937a-30, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:26:02 compute-0 kernel: tapa04f937a-30: left promiscuous mode
Oct 02 12:26:02 compute-0 nova_compute[192079]: 2025-10-02 12:26:02.398 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:02 compute-0 nova_compute[192079]: 2025-10-02 12:26:02.413 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:02.417 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e3700136-4998-46e7-9782-01e2d0ede4cd]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:02.455 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[06b616e3-d066-4a96-b55e-188d94327ce9]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:02.456 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[65598792-95dd-4541-8f29-6f3bf8c8c490]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:02.470 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9646b4be-80f6-4e27-8e14-720db8ad1c3d]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 591640, 'reachable_time': 38535, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 240029, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:02.472 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:26:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:02.472 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[eaf45b1d-6c91-49a4-ab40-b412c9a49a97]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:02 compute-0 systemd[1]: run-netns-ovnmeta\x2da04f937a\x2d375f\x2d4fb0\x2d90fe\x2d5f514a88668f.mount: Deactivated successfully.
Oct 02 12:26:02 compute-0 nova_compute[192079]: 2025-10-02 12:26:02.530 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:02 compute-0 nova_compute[192079]: 2025-10-02 12:26:02.933 2 INFO nova.virt.libvirt.driver [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Instance shutdown successfully after 3 seconds.
Oct 02 12:26:02 compute-0 nova_compute[192079]: 2025-10-02 12:26:02.938 2 INFO nova.virt.libvirt.driver [-] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Instance destroyed successfully.
Oct 02 12:26:02 compute-0 nova_compute[192079]: 2025-10-02 12:26:02.939 2 DEBUG nova.virt.libvirt.vif [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:25:11Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerActionsTestJSON-server-300185996',display_name='tempest-ServerActionsTestJSON-server-300185996',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serveractionstestjson-server-300185996',id=121,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBJJLom+UJzZg9dduKQv+725QaYDZoMXvP/xlpKnb/K05SGc4dkyLwCDweJ3QifTmxLWqK9Sz5A12yMJbzpa36v5C4bUqj8uiWk/vbR1BAjBdKM9d/Ug8M2nT8LwDBGP/9A==',key_name='tempest-keypair-1006285918',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:25:19Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=MigrationContext,new_flavor=Flavor(2),node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='e564a4cad5d443dba81ec04d2a05ced9',ramdisk_id='',reservation_id='r-als9bbed',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=ServiceList,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',old_vm_state='active',owner_project_name='tempest-ServerActionsTestJSON-1646745100',owner_user_name='tempest-ServerActionsTestJSON-1646745100-project-member'},tags=<?>,task_state='resize_migrating',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:25:56Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='d54b1826121b47caba89932a78c06ccd',uuid=40c8eb3a-547f-435e-8e59-ce9dcddb5f8e,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": 
"tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-ServerActionsTestJSON-1926715354-network", "vif_mac": "fa:16:3e:d5:ef:4c"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:26:02 compute-0 nova_compute[192079]: 2025-10-02 12:26:02.940 2 DEBUG nova.network.os_vif_util [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converting VIF {"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-ServerActionsTestJSON-1926715354-network", "vif_mac": "fa:16:3e:d5:ef:4c"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:26:02 compute-0 nova_compute[192079]: 2025-10-02 12:26:02.941 2 DEBUG nova.network.os_vif_util [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:d5:ef:4c,bridge_name='br-int',has_traffic_filtering=True,id=ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapae0f2dc4-de') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:26:02 compute-0 nova_compute[192079]: 2025-10-02 12:26:02.941 2 DEBUG os_vif [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:d5:ef:4c,bridge_name='br-int',has_traffic_filtering=True,id=ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapae0f2dc4-de') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:26:02 compute-0 nova_compute[192079]: 2025-10-02 12:26:02.943 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:02 compute-0 nova_compute[192079]: 2025-10-02 12:26:02.943 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapae0f2dc4-de, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:26:02 compute-0 nova_compute[192079]: 2025-10-02 12:26:02.944 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:02 compute-0 nova_compute[192079]: 2025-10-02 12:26:02.947 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:02 compute-0 nova_compute[192079]: 2025-10-02 12:26:02.949 2 INFO os_vif [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:d5:ef:4c,bridge_name='br-int',has_traffic_filtering=True,id=ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapae0f2dc4-de')
Oct 02 12:26:02 compute-0 nova_compute[192079]: 2025-10-02 12:26:02.952 2 DEBUG oslo_concurrency.processutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:26:03 compute-0 nova_compute[192079]: 2025-10-02 12:26:03.031 2 DEBUG oslo_concurrency.processutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk --force-share --output=json" returned: 0 in 0.079s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:26:03 compute-0 nova_compute[192079]: 2025-10-02 12:26:03.033 2 DEBUG oslo_concurrency.processutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:26:03 compute-0 nova_compute[192079]: 2025-10-02 12:26:03.089 2 DEBUG oslo_concurrency.processutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:26:03 compute-0 nova_compute[192079]: 2025-10-02 12:26:03.091 2 DEBUG oslo_concurrency.processutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): cp -r /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e_resize/disk /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:26:03 compute-0 nova_compute[192079]: 2025-10-02 12:26:03.112 2 DEBUG oslo_concurrency.processutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "cp -r /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e_resize/disk /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk" returned: 0 in 0.021s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:26:03 compute-0 nova_compute[192079]: 2025-10-02 12:26:03.114 2 DEBUG oslo_concurrency.processutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): cp -r /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e_resize/disk.config /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk.config execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:26:03 compute-0 nova_compute[192079]: 2025-10-02 12:26:03.144 2 DEBUG oslo_concurrency.processutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "cp -r /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e_resize/disk.config /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk.config" returned: 0 in 0.031s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:26:03 compute-0 nova_compute[192079]: 2025-10-02 12:26:03.146 2 DEBUG oslo_concurrency.processutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): cp -r /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e_resize/disk.info /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk.info execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:26:03 compute-0 nova_compute[192079]: 2025-10-02 12:26:03.176 2 DEBUG oslo_concurrency.processutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "cp -r /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e_resize/disk.info /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk.info" returned: 0 in 0.029s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:26:03 compute-0 nova_compute[192079]: 2025-10-02 12:26:03.478 2 DEBUG nova.network.neutron [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Port ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 binding to destination host compute-0.ctlplane.example.com is already ACTIVE migrate_instance_start /usr/lib/python3.9/site-packages/nova/network/neutron.py:3171
Oct 02 12:26:04 compute-0 nova_compute[192079]: 2025-10-02 12:26:04.611 2 DEBUG oslo_concurrency.lockutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:26:04 compute-0 nova_compute[192079]: 2025-10-02 12:26:04.612 2 DEBUG oslo_concurrency.lockutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:26:04 compute-0 nova_compute[192079]: 2025-10-02 12:26:04.612 2 DEBUG oslo_concurrency.lockutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:26:04 compute-0 nova_compute[192079]: 2025-10-02 12:26:04.895 2 DEBUG oslo_concurrency.lockutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "refresh_cache-40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:26:04 compute-0 nova_compute[192079]: 2025-10-02 12:26:04.896 2 DEBUG oslo_concurrency.lockutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquired lock "refresh_cache-40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:26:04 compute-0 nova_compute[192079]: 2025-10-02 12:26:04.896 2 DEBUG nova.network.neutron [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:26:05 compute-0 podman[240041]: 2025-10-02 12:26:05.144253835 +0000 UTC m=+0.054753224 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=ceilometer_agent_compute, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', 
'/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.schema-version=1.0)
Oct 02 12:26:06 compute-0 nova_compute[192079]: 2025-10-02 12:26:06.158 2 DEBUG nova.compute.manager [req-3487149b-aeca-4c48-b870-975acd05ab27 req-c9873785-e65f-425d-8e68-90ec9f8cf102 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Received event network-vif-unplugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:26:06 compute-0 nova_compute[192079]: 2025-10-02 12:26:06.159 2 DEBUG oslo_concurrency.lockutils [req-3487149b-aeca-4c48-b870-975acd05ab27 req-c9873785-e65f-425d-8e68-90ec9f8cf102 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:26:06 compute-0 nova_compute[192079]: 2025-10-02 12:26:06.159 2 DEBUG oslo_concurrency.lockutils [req-3487149b-aeca-4c48-b870-975acd05ab27 req-c9873785-e65f-425d-8e68-90ec9f8cf102 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:26:06 compute-0 nova_compute[192079]: 2025-10-02 12:26:06.159 2 DEBUG oslo_concurrency.lockutils [req-3487149b-aeca-4c48-b870-975acd05ab27 req-c9873785-e65f-425d-8e68-90ec9f8cf102 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:26:06 compute-0 nova_compute[192079]: 2025-10-02 12:26:06.160 2 DEBUG nova.compute.manager [req-3487149b-aeca-4c48-b870-975acd05ab27 req-c9873785-e65f-425d-8e68-90ec9f8cf102 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] No waiting events found dispatching network-vif-unplugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:26:06 compute-0 nova_compute[192079]: 2025-10-02 12:26:06.160 2 WARNING nova.compute.manager [req-3487149b-aeca-4c48-b870-975acd05ab27 req-c9873785-e65f-425d-8e68-90ec9f8cf102 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Received unexpected event network-vif-unplugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 for instance with vm_state active and task_state resize_migrated.
Oct 02 12:26:06 compute-0 nova_compute[192079]: 2025-10-02 12:26:06.160 2 DEBUG nova.compute.manager [req-3487149b-aeca-4c48-b870-975acd05ab27 req-c9873785-e65f-425d-8e68-90ec9f8cf102 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Received event network-vif-plugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:26:06 compute-0 nova_compute[192079]: 2025-10-02 12:26:06.160 2 DEBUG oslo_concurrency.lockutils [req-3487149b-aeca-4c48-b870-975acd05ab27 req-c9873785-e65f-425d-8e68-90ec9f8cf102 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:26:06 compute-0 nova_compute[192079]: 2025-10-02 12:26:06.161 2 DEBUG oslo_concurrency.lockutils [req-3487149b-aeca-4c48-b870-975acd05ab27 req-c9873785-e65f-425d-8e68-90ec9f8cf102 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:26:06 compute-0 nova_compute[192079]: 2025-10-02 12:26:06.161 2 DEBUG oslo_concurrency.lockutils [req-3487149b-aeca-4c48-b870-975acd05ab27 req-c9873785-e65f-425d-8e68-90ec9f8cf102 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:26:06 compute-0 nova_compute[192079]: 2025-10-02 12:26:06.161 2 DEBUG nova.compute.manager [req-3487149b-aeca-4c48-b870-975acd05ab27 req-c9873785-e65f-425d-8e68-90ec9f8cf102 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] No waiting events found dispatching network-vif-plugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:26:06 compute-0 nova_compute[192079]: 2025-10-02 12:26:06.161 2 WARNING nova.compute.manager [req-3487149b-aeca-4c48-b870-975acd05ab27 req-c9873785-e65f-425d-8e68-90ec9f8cf102 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Received unexpected event network-vif-plugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 for instance with vm_state active and task_state resize_migrated.
Oct 02 12:26:07 compute-0 nova_compute[192079]: 2025-10-02 12:26:07.039 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:07 compute-0 nova_compute[192079]: 2025-10-02 12:26:07.986 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:11 compute-0 nova_compute[192079]: 2025-10-02 12:26:11.225 2 DEBUG nova.network.neutron [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Updating instance_info_cache with network_info: [{"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:26:11 compute-0 nova_compute[192079]: 2025-10-02 12:26:11.529 2 DEBUG oslo_concurrency.lockutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Releasing lock "refresh_cache-40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:26:11 compute-0 nova_compute[192079]: 2025-10-02 12:26:11.866 2 DEBUG nova.compute.manager [req-2b8b4a50-3c5d-4a07-9590-6f2ac02c68b1 req-2d5e7dd6-e1bb-4720-a88e-35b9708e4c30 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Received event network-changed-84dc02cc-883a-4f1b-a938-49e678b5f445 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:26:11 compute-0 nova_compute[192079]: 2025-10-02 12:26:11.867 2 DEBUG nova.compute.manager [req-2b8b4a50-3c5d-4a07-9590-6f2ac02c68b1 req-2d5e7dd6-e1bb-4720-a88e-35b9708e4c30 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Refreshing instance network info cache due to event network-changed-84dc02cc-883a-4f1b-a938-49e678b5f445. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:26:11 compute-0 nova_compute[192079]: 2025-10-02 12:26:11.867 2 DEBUG oslo_concurrency.lockutils [req-2b8b4a50-3c5d-4a07-9590-6f2ac02c68b1 req-2d5e7dd6-e1bb-4720-a88e-35b9708e4c30 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-ab7610b5-3462-4dc2-a802-0998246e8cdb" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:26:11 compute-0 nova_compute[192079]: 2025-10-02 12:26:11.867 2 DEBUG oslo_concurrency.lockutils [req-2b8b4a50-3c5d-4a07-9590-6f2ac02c68b1 req-2d5e7dd6-e1bb-4720-a88e-35b9708e4c30 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-ab7610b5-3462-4dc2-a802-0998246e8cdb" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:26:11 compute-0 nova_compute[192079]: 2025-10-02 12:26:11.868 2 DEBUG nova.network.neutron [req-2b8b4a50-3c5d-4a07-9590-6f2ac02c68b1 req-2d5e7dd6-e1bb-4720-a88e-35b9708e4c30 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Refreshing network info cache for port 84dc02cc-883a-4f1b-a938-49e678b5f445 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:26:12 compute-0 nova_compute[192079]: 2025-10-02 12:26:12.041 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:12 compute-0 nova_compute[192079]: 2025-10-02 12:26:12.988 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.135 2 DEBUG nova.virt.libvirt.driver [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Starting finish_migration finish_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:11698
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.136 2 DEBUG nova.virt.libvirt.driver [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Instance directory exists: not creating _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4719
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.137 2 INFO nova.virt.libvirt.driver [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Creating image(s)
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.138 2 DEBUG nova.objects.instance [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'trusted_certs' on Instance uuid 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:26:13 compute-0 podman[240063]: 2025-10-02 12:26:13.14870193 +0000 UTC m=+0.060349477 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, config_id=multipathd, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, container_name=multipathd, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']})
Oct 02 12:26:13 compute-0 podman[240062]: 2025-10-02 12:26:13.164832999 +0000 UTC m=+0.083525827 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, release=1755695350, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, distribution-scope=public, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., vcs-type=git, io.openshift.tags=minimal rhel9, io.openshift.expose-services=, version=9.6, managed_by=edpm_ansible, name=ubi9-minimal, url=https://catalog.redhat.com/en/search?searchType=containers, com.redhat.component=ubi9-minimal-container, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., maintainer=Red Hat, Inc., build-date=2025-08-20T13:12:41, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, vendor=Red Hat, Inc., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, architecture=x86_64, config_id=edpm, io.buildah.version=1.33.7, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, container_name=openstack_network_exporter, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9.)
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.530 2 DEBUG oslo_concurrency.lockutils [None req-8a8e6a8f-0e3d-4595-ae3a-40ce7fca6b9f 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "ab7610b5-3462-4dc2-a802-0998246e8cdb" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.530 2 DEBUG oslo_concurrency.lockutils [None req-8a8e6a8f-0e3d-4595-ae3a-40ce7fca6b9f 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "ab7610b5-3462-4dc2-a802-0998246e8cdb" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.530 2 DEBUG oslo_concurrency.lockutils [None req-8a8e6a8f-0e3d-4595-ae3a-40ce7fca6b9f 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "ab7610b5-3462-4dc2-a802-0998246e8cdb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.531 2 DEBUG oslo_concurrency.lockutils [None req-8a8e6a8f-0e3d-4595-ae3a-40ce7fca6b9f 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "ab7610b5-3462-4dc2-a802-0998246e8cdb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.531 2 DEBUG oslo_concurrency.lockutils [None req-8a8e6a8f-0e3d-4595-ae3a-40ce7fca6b9f 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "ab7610b5-3462-4dc2-a802-0998246e8cdb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.557 2 DEBUG oslo_concurrency.processutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.617 2 DEBUG oslo_concurrency.processutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.060s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.618 2 DEBUG nova.virt.disk.api [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Checking if we can resize image /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.618 2 DEBUG oslo_concurrency.processutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.675 2 DEBUG oslo_concurrency.processutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk --force-share --output=json" returned: 0 in 0.057s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.676 2 DEBUG nova.virt.disk.api [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Cannot resize image /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.853 2 INFO nova.compute.manager [None req-8a8e6a8f-0e3d-4595-ae3a-40ce7fca6b9f 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Terminating instance
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.902 2 DEBUG nova.virt.libvirt.driver [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Did not create local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4859
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.903 2 DEBUG nova.virt.libvirt.driver [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Ensure instance console log exists: /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.904 2 DEBUG oslo_concurrency.lockutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.904 2 DEBUG oslo_concurrency.lockutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.904 2 DEBUG oslo_concurrency.lockutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.907 2 DEBUG nova.virt.libvirt.driver [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Start _get_guest_xml network_info=[{"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-ServerActionsTestJSON-1926715354-network", "vif_mac": "fa:16:3e:d5:ef:4c"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.911 2 WARNING nova.virt.libvirt.driver [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.916 2 DEBUG nova.virt.libvirt.host [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.917 2 DEBUG nova.virt.libvirt.host [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.922 2 DEBUG nova.virt.libvirt.host [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.922 2 DEBUG nova.virt.libvirt.host [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.923 2 DEBUG nova.virt.libvirt.driver [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.924 2 DEBUG nova.virt.hardware [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:25Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9949d9da-6314-4ede-8797-6f2f0a6a64fc',id=2,is_public=True,memory_mb=192,name='m1.micro',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.924 2 DEBUG nova.virt.hardware [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.925 2 DEBUG nova.virt.hardware [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.925 2 DEBUG nova.virt.hardware [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.925 2 DEBUG nova.virt.hardware [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.925 2 DEBUG nova.virt.hardware [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.926 2 DEBUG nova.virt.hardware [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.926 2 DEBUG nova.virt.hardware [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.926 2 DEBUG nova.virt.hardware [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.926 2 DEBUG nova.virt.hardware [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.926 2 DEBUG nova.virt.hardware [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:26:13 compute-0 nova_compute[192079]: 2025-10-02 12:26:13.927 2 DEBUG nova.objects.instance [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'vcpu_model' on Instance uuid 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:26:14 compute-0 nova_compute[192079]: 2025-10-02 12:26:14.455 2 DEBUG oslo_concurrency.processutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk.config --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:26:14 compute-0 nova_compute[192079]: 2025-10-02 12:26:14.544 2 DEBUG oslo_concurrency.processutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk.config --force-share --output=json" returned: 0 in 0.088s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:26:14 compute-0 nova_compute[192079]: 2025-10-02 12:26:14.545 2 DEBUG oslo_concurrency.lockutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "/var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:26:14 compute-0 nova_compute[192079]: 2025-10-02 12:26:14.545 2 DEBUG oslo_concurrency.lockutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "/var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:26:14 compute-0 nova_compute[192079]: 2025-10-02 12:26:14.546 2 DEBUG oslo_concurrency.lockutils [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "/var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:26:14 compute-0 nova_compute[192079]: 2025-10-02 12:26:14.547 2 DEBUG nova.virt.libvirt.vif [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:25:11Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerActionsTestJSON-server-300185996',display_name='tempest-ServerActionsTestJSON-server-300185996',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(2),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serveractionstestjson-server-300185996',id=121,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=2,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBJJLom+UJzZg9dduKQv+725QaYDZoMXvP/xlpKnb/K05SGc4dkyLwCDweJ3QifTmxLWqK9Sz5A12yMJbzpa36v5C4bUqj8uiWk/vbR1BAjBdKM9d/Ug8M2nT8LwDBGP/9A==',key_name='tempest-keypair-1006285918',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:25:19Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=192,metadata={},migration_context=MigrationContext,new_flavor=Flavor(2),node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=Flavor(1),os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='e564a4cad5d443dba81ec04d2a05ced9',ramdisk_id='',reservation_id='r-als9bbed',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=ServiceList,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',old_vm_state='active',owner_project_name='tempest-ServerActionsTestJSON-1646745100',owner_user_name='tempest-ServerActionsTestJSON-1646745100-project-member'},tags=<?>,task_state='resize_finish',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:26:03Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='d54b1826121b47caba89932a78c06ccd',uuid=40c8eb3a-547f-435e-8e59-ce9dcddb5f8e,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": 
"tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-ServerActionsTestJSON-1926715354-network", "vif_mac": "fa:16:3e:d5:ef:4c"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:26:14 compute-0 nova_compute[192079]: 2025-10-02 12:26:14.548 2 DEBUG nova.network.os_vif_util [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converting VIF {"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-ServerActionsTestJSON-1926715354-network", "vif_mac": "fa:16:3e:d5:ef:4c"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:26:14 compute-0 nova_compute[192079]: 2025-10-02 12:26:14.549 2 DEBUG nova.network.os_vif_util [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:d5:ef:4c,bridge_name='br-int',has_traffic_filtering=True,id=ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapae0f2dc4-de') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:26:14 compute-0 nova_compute[192079]: 2025-10-02 12:26:14.551 2 DEBUG nova.virt.libvirt.driver [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:26:14 compute-0 nova_compute[192079]:   <uuid>40c8eb3a-547f-435e-8e59-ce9dcddb5f8e</uuid>
Oct 02 12:26:14 compute-0 nova_compute[192079]:   <name>instance-00000079</name>
Oct 02 12:26:14 compute-0 nova_compute[192079]:   <memory>196608</memory>
Oct 02 12:26:14 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:26:14 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:26:14 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:       <nova:name>tempest-ServerActionsTestJSON-server-300185996</nova:name>
Oct 02 12:26:14 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:26:13</nova:creationTime>
Oct 02 12:26:14 compute-0 nova_compute[192079]:       <nova:flavor name="m1.micro">
Oct 02 12:26:14 compute-0 nova_compute[192079]:         <nova:memory>192</nova:memory>
Oct 02 12:26:14 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:26:14 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:26:14 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:26:14 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:26:14 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:26:14 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:26:14 compute-0 nova_compute[192079]:         <nova:user uuid="d54b1826121b47caba89932a78c06ccd">tempest-ServerActionsTestJSON-1646745100-project-member</nova:user>
Oct 02 12:26:14 compute-0 nova_compute[192079]:         <nova:project uuid="e564a4cad5d443dba81ec04d2a05ced9">tempest-ServerActionsTestJSON-1646745100</nova:project>
Oct 02 12:26:14 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:26:14 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:26:14 compute-0 nova_compute[192079]:         <nova:port uuid="ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0">
Oct 02 12:26:14 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.9" ipVersion="4"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:26:14 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:26:14 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:26:14 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <system>
Oct 02 12:26:14 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:26:14 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:26:14 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:26:14 compute-0 nova_compute[192079]:       <entry name="serial">40c8eb3a-547f-435e-8e59-ce9dcddb5f8e</entry>
Oct 02 12:26:14 compute-0 nova_compute[192079]:       <entry name="uuid">40c8eb3a-547f-435e-8e59-ce9dcddb5f8e</entry>
Oct 02 12:26:14 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     </system>
Oct 02 12:26:14 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:26:14 compute-0 nova_compute[192079]:   <os>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:   </os>
Oct 02 12:26:14 compute-0 nova_compute[192079]:   <features>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:   </features>
Oct 02 12:26:14 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:26:14 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:26:14 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:26:14 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:26:14 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk.config"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:26:14 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:d5:ef:4c"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:       <target dev="tapae0f2dc4-de"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:26:14 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/console.log" append="off"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <video>
Oct 02 12:26:14 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     </video>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:26:14 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:26:14 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:26:14 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:26:14 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:26:14 compute-0 nova_compute[192079]: </domain>
Oct 02 12:26:14 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:26:14 compute-0 nova_compute[192079]: 2025-10-02 12:26:14.554 2 DEBUG nova.virt.libvirt.vif [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:25:11Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerActionsTestJSON-server-300185996',display_name='tempest-ServerActionsTestJSON-server-300185996',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(2),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serveractionstestjson-server-300185996',id=121,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=2,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBJJLom+UJzZg9dduKQv+725QaYDZoMXvP/xlpKnb/K05SGc4dkyLwCDweJ3QifTmxLWqK9Sz5A12yMJbzpa36v5C4bUqj8uiWk/vbR1BAjBdKM9d/Ug8M2nT8LwDBGP/9A==',key_name='tempest-keypair-1006285918',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:25:19Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=192,metadata={},migration_context=MigrationContext,new_flavor=Flavor(2),node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=Flavor(1),os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='e564a4cad5d443dba81ec04d2a05ced9',ramdisk_id='',reservation_id='r-als9bbed',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=ServiceList,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',old_vm_state='active',owner_project_name='tempest-ServerActionsTestJSON-1646745100',owner_user_name='tempest-ServerActionsTestJSON-1646745100-project-member'},tags=<?>,task_state='resize_finish',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:26:03Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='d54b1826121b47caba89932a78c06ccd',uuid=40c8eb3a-547f-435e-8e59-ce9dcddb5f8e,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": 
"tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-ServerActionsTestJSON-1926715354-network", "vif_mac": "fa:16:3e:d5:ef:4c"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:26:14 compute-0 nova_compute[192079]: 2025-10-02 12:26:14.554 2 DEBUG nova.network.os_vif_util [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converting VIF {"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-ServerActionsTestJSON-1926715354-network", "vif_mac": "fa:16:3e:d5:ef:4c"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:26:14 compute-0 nova_compute[192079]: 2025-10-02 12:26:14.555 2 DEBUG nova.network.os_vif_util [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:d5:ef:4c,bridge_name='br-int',has_traffic_filtering=True,id=ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapae0f2dc4-de') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:26:14 compute-0 nova_compute[192079]: 2025-10-02 12:26:14.555 2 DEBUG os_vif [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:d5:ef:4c,bridge_name='br-int',has_traffic_filtering=True,id=ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapae0f2dc4-de') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:26:14 compute-0 nova_compute[192079]: 2025-10-02 12:26:14.556 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:14 compute-0 nova_compute[192079]: 2025-10-02 12:26:14.557 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:26:14 compute-0 nova_compute[192079]: 2025-10-02 12:26:14.557 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:26:14 compute-0 nova_compute[192079]: 2025-10-02 12:26:14.560 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:14 compute-0 nova_compute[192079]: 2025-10-02 12:26:14.560 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapae0f2dc4-de, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:26:14 compute-0 nova_compute[192079]: 2025-10-02 12:26:14.561 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapae0f2dc4-de, col_values=(('external_ids', {'iface-id': 'ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:d5:ef:4c', 'vm-uuid': '40c8eb3a-547f-435e-8e59-ce9dcddb5f8e'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:26:14 compute-0 nova_compute[192079]: 2025-10-02 12:26:14.562 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:14 compute-0 NetworkManager[51160]: <info>  [1759407974.5636] manager: (tapae0f2dc4-de): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/223)
Oct 02 12:26:14 compute-0 nova_compute[192079]: 2025-10-02 12:26:14.565 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:26:14 compute-0 nova_compute[192079]: 2025-10-02 12:26:14.567 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:14 compute-0 nova_compute[192079]: 2025-10-02 12:26:14.568 2 INFO os_vif [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:d5:ef:4c,bridge_name='br-int',has_traffic_filtering=True,id=ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapae0f2dc4-de')
Oct 02 12:26:14 compute-0 nova_compute[192079]: 2025-10-02 12:26:14.753 2 DEBUG nova.compute.manager [None req-8a8e6a8f-0e3d-4595-ae3a-40ce7fca6b9f 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:26:14 compute-0 kernel: tap84dc02cc-88 (unregistering): left promiscuous mode
Oct 02 12:26:14 compute-0 NetworkManager[51160]: <info>  [1759407974.7835] device (tap84dc02cc-88): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:26:14 compute-0 nova_compute[192079]: 2025-10-02 12:26:14.785 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:14 compute-0 nova_compute[192079]: 2025-10-02 12:26:14.801 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:14 compute-0 ovn_controller[94336]: 2025-10-02T12:26:14Z|00440|binding|INFO|Releasing lport 84dc02cc-883a-4f1b-a938-49e678b5f445 from this chassis (sb_readonly=0)
Oct 02 12:26:14 compute-0 ovn_controller[94336]: 2025-10-02T12:26:14Z|00441|binding|INFO|Setting lport 84dc02cc-883a-4f1b-a938-49e678b5f445 down in Southbound
Oct 02 12:26:14 compute-0 ovn_controller[94336]: 2025-10-02T12:26:14Z|00442|binding|INFO|Removing iface tap84dc02cc-88 ovn-installed in OVS
Oct 02 12:26:14 compute-0 nova_compute[192079]: 2025-10-02 12:26:14.803 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:14 compute-0 nova_compute[192079]: 2025-10-02 12:26:14.829 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:14 compute-0 systemd[1]: machine-qemu\x2d57\x2dinstance\x2d0000007b.scope: Deactivated successfully.
Oct 02 12:26:14 compute-0 systemd[1]: machine-qemu\x2d57\x2dinstance\x2d0000007b.scope: Consumed 14.084s CPU time.
Oct 02 12:26:14 compute-0 systemd-machined[152150]: Machine qemu-57-instance-0000007b terminated.
Oct 02 12:26:15 compute-0 nova_compute[192079]: 2025-10-02 12:26:15.014 2 INFO nova.virt.libvirt.driver [-] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Instance destroyed successfully.
Oct 02 12:26:15 compute-0 nova_compute[192079]: 2025-10-02 12:26:15.014 2 DEBUG nova.objects.instance [None req-8a8e6a8f-0e3d-4595-ae3a-40ce7fca6b9f 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lazy-loading 'resources' on Instance uuid ab7610b5-3462-4dc2-a802-0998246e8cdb obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:26:16 compute-0 nova_compute[192079]: 2025-10-02 12:26:16.492 2 DEBUG nova.network.neutron [req-2b8b4a50-3c5d-4a07-9590-6f2ac02c68b1 req-2d5e7dd6-e1bb-4720-a88e-35b9708e4c30 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Updated VIF entry in instance network info cache for port 84dc02cc-883a-4f1b-a938-49e678b5f445. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:26:16 compute-0 nova_compute[192079]: 2025-10-02 12:26:16.492 2 DEBUG nova.network.neutron [req-2b8b4a50-3c5d-4a07-9590-6f2ac02c68b1 req-2d5e7dd6-e1bb-4720-a88e-35b9708e4c30 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Updating instance_info_cache with network_info: [{"id": "84dc02cc-883a-4f1b-a938-49e678b5f445", "address": "fa:16:3e:80:c4:31", "network": {"id": "26df2dcf-f57c-4dae-8522-0277df741ed3", "bridge": "br-int", "label": "tempest-network-smoke--1584637508", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}, {"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe80:c431", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap84dc02cc-88", "ovs_interfaceid": "84dc02cc-883a-4f1b-a938-49e678b5f445", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:26:17 compute-0 nova_compute[192079]: 2025-10-02 12:26:17.072 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:17 compute-0 nova_compute[192079]: 2025-10-02 12:26:17.338 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759407962.3365452, 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:26:17 compute-0 nova_compute[192079]: 2025-10-02 12:26:17.338 2 INFO nova.compute.manager [-] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] VM Stopped (Lifecycle Event)
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.217 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:80:c4:31 10.100.0.10 2001:db8::f816:3eff:fe80:c431'], port_security=['fa:16:3e:80:c4:31 10.100.0.10 2001:db8::f816:3eff:fe80:c431'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.10/28 2001:db8::f816:3eff:fe80:c431/64', 'neutron:device_id': 'ab7610b5-3462-4dc2-a802-0998246e8cdb', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-26df2dcf-f57c-4dae-8522-0277df741ed3', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'neutron:revision_number': '4', 'neutron:security_group_ids': '2c57d713-64e3-4621-a624-32092d283319', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=e2784fb0-50ac-4c91-ba90-3b5c38b8adf4, chassis=[], tunnel_key=5, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=84dc02cc-883a-4f1b-a938-49e678b5f445) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.218 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 84dc02cc-883a-4f1b-a938-49e678b5f445 in datapath 26df2dcf-f57c-4dae-8522-0277df741ed3 unbound from our chassis
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.219 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 26df2dcf-f57c-4dae-8522-0277df741ed3, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.220 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[748758b3-b0fe-4ca6-b27c-ba21eddc6cb2]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.220 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-26df2dcf-f57c-4dae-8522-0277df741ed3 namespace which is not needed anymore
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.267 2 DEBUG nova.virt.libvirt.vif [None req-8a8e6a8f-0e3d-4595-ae3a-40ce7fca6b9f 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:25:36Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestGettingAddress-server-1815158103',display_name='tempest-TestGettingAddress-server-1815158103',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testgettingaddress-server-1815158103',id=123,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBN27qqZO7DS6SotTIkgadWOrlyFzalcMBya6l3P3FHA92Trdk8QzNk/bIfeVZHQyyH9bzXdJACR3sdrkH4czxiQm1W3dnbgCG/vLQtAxveP29c1TkzsAJfjG23nfB+bI6Q==',key_name='tempest-TestGettingAddress-794970227',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:25:46Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='fd801958556f4c8aab047ecdef6b5ee8',ramdisk_id='',reservation_id='r-bhcbl67j',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-TestGettingAddress-1355720650',owner_user_name='tempest-TestGettingAddress-1355720650-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:25:46Z,user_data=None,user_id='97ce9f1898484e0e9a1f7c84a9f0dfe3',uuid=ab7610b5-3462-4dc2-a802-0998246e8cdb,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "84dc02cc-883a-4f1b-a938-49e678b5f445", "address": "fa:16:3e:80:c4:31", "network": {"id": "26df2dcf-f57c-4dae-8522-0277df741ed3", "bridge": "br-int", "label": "tempest-network-smoke--1584637508", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", 
"version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}, {"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe80:c431", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap84dc02cc-88", "ovs_interfaceid": "84dc02cc-883a-4f1b-a938-49e678b5f445", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.268 2 DEBUG nova.network.os_vif_util [None req-8a8e6a8f-0e3d-4595-ae3a-40ce7fca6b9f 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converting VIF {"id": "84dc02cc-883a-4f1b-a938-49e678b5f445", "address": "fa:16:3e:80:c4:31", "network": {"id": "26df2dcf-f57c-4dae-8522-0277df741ed3", "bridge": "br-int", "label": "tempest-network-smoke--1584637508", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}, {"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe80:c431", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap84dc02cc-88", "ovs_interfaceid": "84dc02cc-883a-4f1b-a938-49e678b5f445", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.270 2 DEBUG nova.network.os_vif_util [None req-8a8e6a8f-0e3d-4595-ae3a-40ce7fca6b9f 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:80:c4:31,bridge_name='br-int',has_traffic_filtering=True,id=84dc02cc-883a-4f1b-a938-49e678b5f445,network=Network(26df2dcf-f57c-4dae-8522-0277df741ed3),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap84dc02cc-88') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.271 2 DEBUG os_vif [None req-8a8e6a8f-0e3d-4595-ae3a-40ce7fca6b9f 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:80:c4:31,bridge_name='br-int',has_traffic_filtering=True,id=84dc02cc-883a-4f1b-a938-49e678b5f445,network=Network(26df2dcf-f57c-4dae-8522-0277df741ed3),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap84dc02cc-88') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.272 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.272 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap84dc02cc-88, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.274 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.275 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.279 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.280 2 INFO os_vif [None req-8a8e6a8f-0e3d-4595-ae3a-40ce7fca6b9f 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:80:c4:31,bridge_name='br-int',has_traffic_filtering=True,id=84dc02cc-883a-4f1b-a938-49e678b5f445,network=Network(26df2dcf-f57c-4dae-8522-0277df741ed3),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap84dc02cc-88')
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.281 2 INFO nova.virt.libvirt.driver [None req-8a8e6a8f-0e3d-4595-ae3a-40ce7fca6b9f 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Deleting instance files /var/lib/nova/instances/ab7610b5-3462-4dc2-a802-0998246e8cdb_del
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.281 2 INFO nova.virt.libvirt.driver [None req-8a8e6a8f-0e3d-4595-ae3a-40ce7fca6b9f 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Deletion of /var/lib/nova/instances/ab7610b5-3462-4dc2-a802-0998246e8cdb_del complete
Oct 02 12:26:18 compute-0 neutron-haproxy-ovnmeta-26df2dcf-f57c-4dae-8522-0277df741ed3[239789]: [NOTICE]   (239793) : haproxy version is 2.8.14-c23fe91
Oct 02 12:26:18 compute-0 neutron-haproxy-ovnmeta-26df2dcf-f57c-4dae-8522-0277df741ed3[239789]: [NOTICE]   (239793) : path to executable is /usr/sbin/haproxy
Oct 02 12:26:18 compute-0 neutron-haproxy-ovnmeta-26df2dcf-f57c-4dae-8522-0277df741ed3[239789]: [WARNING]  (239793) : Exiting Master process...
Oct 02 12:26:18 compute-0 neutron-haproxy-ovnmeta-26df2dcf-f57c-4dae-8522-0277df741ed3[239789]: [ALERT]    (239793) : Current worker (239795) exited with code 143 (Terminated)
Oct 02 12:26:18 compute-0 neutron-haproxy-ovnmeta-26df2dcf-f57c-4dae-8522-0277df741ed3[239789]: [WARNING]  (239793) : All workers exited. Exiting... (0)
Oct 02 12:26:18 compute-0 systemd[1]: libpod-3b8d51259ad8cd53160f2f60e5dc2a0fca47d6bd733bfa5d5b454cc726afa5b2.scope: Deactivated successfully.
Oct 02 12:26:18 compute-0 podman[240157]: 2025-10-02 12:26:18.337641834 +0000 UTC m=+0.041938224 container died 3b8d51259ad8cd53160f2f60e5dc2a0fca47d6bd733bfa5d5b454cc726afa5b2 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-26df2dcf-f57c-4dae-8522-0277df741ed3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_managed=true)
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.351 2 DEBUG nova.compute.manager [None req-cce489e5-f924-4053-8cd6-75a95166021c - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.354 2 DEBUG nova.compute.manager [None req-cce489e5-f924-4053-8cd6-75a95166021c - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Synchronizing instance power state after lifecycle event "Stopped"; current vm_state: active, current task_state: resize_finish, current DB power_state: 1, VM power_state: 4 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:26:18 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-3b8d51259ad8cd53160f2f60e5dc2a0fca47d6bd733bfa5d5b454cc726afa5b2-userdata-shm.mount: Deactivated successfully.
Oct 02 12:26:18 compute-0 systemd[1]: var-lib-containers-storage-overlay-24affa7f1a261e8d3f6c8eadd147a9c8e38c7c7206b4caf8e61c5c2658b341ac-merged.mount: Deactivated successfully.
Oct 02 12:26:18 compute-0 podman[240157]: 2025-10-02 12:26:18.374121628 +0000 UTC m=+0.078418018 container cleanup 3b8d51259ad8cd53160f2f60e5dc2a0fca47d6bd733bfa5d5b454cc726afa5b2 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-26df2dcf-f57c-4dae-8522-0277df741ed3, org.label-schema.build-date=20251001, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:26:18 compute-0 systemd[1]: libpod-conmon-3b8d51259ad8cd53160f2f60e5dc2a0fca47d6bd733bfa5d5b454cc726afa5b2.scope: Deactivated successfully.
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.381 2 DEBUG nova.virt.libvirt.driver [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.382 2 DEBUG nova.virt.libvirt.driver [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.382 2 DEBUG nova.virt.libvirt.driver [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] No VIF found with MAC fa:16:3e:d5:ef:4c, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.383 2 INFO nova.virt.libvirt.driver [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Using config drive
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.392 2 DEBUG oslo_concurrency.lockutils [req-2b8b4a50-3c5d-4a07-9590-6f2ac02c68b1 req-2d5e7dd6-e1bb-4720-a88e-35b9708e4c30 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-ab7610b5-3462-4dc2-a802-0998246e8cdb" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.393 2 INFO nova.compute.manager [None req-cce489e5-f924-4053-8cd6-75a95166021c - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] During sync_power_state the instance has a pending task (resize_finish). Skip.
Oct 02 12:26:18 compute-0 podman[240172]: 2025-10-02 12:26:18.418172059 +0000 UTC m=+0.061436406 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:26:18 compute-0 podman[240205]: 2025-10-02 12:26:18.43658899 +0000 UTC m=+0.039729324 container remove 3b8d51259ad8cd53160f2f60e5dc2a0fca47d6bd733bfa5d5b454cc726afa5b2 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-26df2dcf-f57c-4dae-8522-0277df741ed3, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001)
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.442 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[174d91cc-9d06-4b7f-bc4c-821117222040]: (4, ('Thu Oct  2 12:26:18 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-26df2dcf-f57c-4dae-8522-0277df741ed3 (3b8d51259ad8cd53160f2f60e5dc2a0fca47d6bd733bfa5d5b454cc726afa5b2)\n3b8d51259ad8cd53160f2f60e5dc2a0fca47d6bd733bfa5d5b454cc726afa5b2\nThu Oct  2 12:26:18 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-26df2dcf-f57c-4dae-8522-0277df741ed3 (3b8d51259ad8cd53160f2f60e5dc2a0fca47d6bd733bfa5d5b454cc726afa5b2)\n3b8d51259ad8cd53160f2f60e5dc2a0fca47d6bd733bfa5d5b454cc726afa5b2\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.445 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4e422ed7-7040-4701-8ba4-c9a6a79c78a0]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:18 compute-0 kernel: tapae0f2dc4-de: entered promiscuous mode
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.446 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap26df2dcf-f0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:26:18 compute-0 ovn_controller[94336]: 2025-10-02T12:26:18Z|00443|binding|INFO|Claiming lport ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 for this chassis.
Oct 02 12:26:18 compute-0 ovn_controller[94336]: 2025-10-02T12:26:18Z|00444|binding|INFO|ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0: Claiming fa:16:3e:d5:ef:4c 10.100.0.9
Oct 02 12:26:18 compute-0 podman[240180]: 2025-10-02 12:26:18.448909686 +0000 UTC m=+0.087953718 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, container_name=iscsid, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=iscsid, io.buildah.version=1.41.3, org.label-schema.license=GPLv2)
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.448 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:18 compute-0 NetworkManager[51160]: <info>  [1759407978.4524] manager: (tapae0f2dc4-de): new Tun device (/org/freedesktop/NetworkManager/Devices/224)
Oct 02 12:26:18 compute-0 kernel: tap26df2dcf-f0: left promiscuous mode
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.462 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:18 compute-0 ovn_controller[94336]: 2025-10-02T12:26:18Z|00445|binding|INFO|Setting lport ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 ovn-installed in OVS
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.471 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.472 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.474 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a4007321-bd9f-46b1-b364-072782396419]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.476 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:18 compute-0 systemd-udevd[240252]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:26:18 compute-0 systemd-machined[152150]: New machine qemu-58-instance-00000079.
Oct 02 12:26:18 compute-0 ovn_controller[94336]: 2025-10-02T12:26:18Z|00446|binding|INFO|Setting lport ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 up in Southbound
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.488 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:d5:ef:4c 10.100.0.9'], port_security=['fa:16:3e:d5:ef:4c 10.100.0.9'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.9/28', 'neutron:device_id': '40c8eb3a-547f-435e-8e59-ce9dcddb5f8e', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-a04f937a-375f-4fb0-90fe-5f514a88668f', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'neutron:revision_number': '5', 'neutron:security_group_ids': 'c0383701-0ec7-4f3b-8585-5effc4f5ca5a', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:port_fip': '192.168.122.248'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=50c0aa38-5fd8-41c7-b4bf-85b59722c5c3, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:26:18 compute-0 NetworkManager[51160]: <info>  [1759407978.4939] device (tapae0f2dc4-de): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:26:18 compute-0 NetworkManager[51160]: <info>  [1759407978.4947] device (tapae0f2dc4-de): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:26:18 compute-0 systemd[1]: Started Virtual Machine qemu-58-instance-00000079.
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.510 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[aa6e4518-c60c-485f-987f-88d67bc707de]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.511 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a4ece713-e98e-4c72-8cb8-789195c07a7e]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.525 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d7bd950c-69bb-4184-b8ae-ddabb8ddfba7]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 594230, 'reachable_time': 19064, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 240258, 'error': None, 'target': 'ovnmeta-26df2dcf-f57c-4dae-8522-0277df741ed3', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:18 compute-0 systemd[1]: run-netns-ovnmeta\x2d26df2dcf\x2df57c\x2d4dae\x2d8522\x2d0277df741ed3.mount: Deactivated successfully.
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.527 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-26df2dcf-f57c-4dae-8522-0277df741ed3 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.527 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[a0b7dc7a-1626-4228-bdb3-e0bfa15129ef]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.528 103294 INFO neutron.agent.ovn.metadata.agent [-] Port ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 in datapath a04f937a-375f-4fb0-90fe-5f514a88668f unbound from our chassis
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.529 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network a04f937a-375f-4fb0-90fe-5f514a88668f
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.540 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[31eeacda-54d0-4568-9706-975e6ae7fc35]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.541 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tapa04f937a-31 in ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.543 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tapa04f937a-30 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.543 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9094c76d-81f4-426e-83e6-8fcb3e8232a0]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.543 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[bf0170db-5537-4c25-bad6-de2a17dd42bc]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.554 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[fdf46d62-4628-4d8f-8196-0f26e599b3d6]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.576 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8f2bada3-4701-4010-9b00-f5224e3d0f34]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.591 2 INFO nova.compute.manager [None req-8a8e6a8f-0e3d-4595-ae3a-40ce7fca6b9f 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Took 3.84 seconds to destroy the instance on the hypervisor.
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.592 2 DEBUG oslo.service.loopingcall [None req-8a8e6a8f-0e3d-4595-ae3a-40ce7fca6b9f 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.592 2 DEBUG nova.compute.manager [-] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.592 2 DEBUG nova.network.neutron [-] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.605 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[e2b334e9-525c-4ee1-b534-2d8de62ccc1b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.609 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4da1ef73-dc5c-4a08-9741-22dc04690588]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:18 compute-0 systemd-udevd[240255]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:26:18 compute-0 NetworkManager[51160]: <info>  [1759407978.6121] manager: (tapa04f937a-30): new Veth device (/org/freedesktop/NetworkManager/Devices/225)
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.639 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[05ec5af7-c3d0-433f-bdeb-02a4cc104ada]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.642 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[c57c9e66-efe3-40e1-aa57-611d16633c4a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:18 compute-0 NetworkManager[51160]: <info>  [1759407978.6618] device (tapa04f937a-30): carrier: link connected
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.668 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[1a59b90a-30a7-4a20-8712-73a13671c53c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.682 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c9ea7a00-d959-40f9-9307-9d7df0edb21f]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapa04f937a-31'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:33:93:68'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 143], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 597629, 'reachable_time': 15641, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 240289, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.695 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b09e8ed4-f281-4265-b8c5-870cb5efb2e8]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe33:9368'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 597629, 'tstamp': 597629}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 240290, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.712 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f1227d00-4814-4414-8804-c87345a0849b]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapa04f937a-31'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:33:93:68'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 143], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 597629, 'reachable_time': 15641, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 240291, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.737 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[597e3130-bdd4-462b-bcba-bf9270cb00f7]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.805 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[270e0605-2444-4c0f-8768-42a75436a766]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.807 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapa04f937a-30, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.807 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.807 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapa04f937a-30, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.809 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:18 compute-0 NetworkManager[51160]: <info>  [1759407978.8101] manager: (tapa04f937a-30): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/226)
Oct 02 12:26:18 compute-0 kernel: tapa04f937a-30: entered promiscuous mode
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.812 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.813 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tapa04f937a-30, col_values=(('external_ids', {'iface-id': '38f1ac16-18c6-4b4a-b769-ebc7dd5181d4'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.814 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:18 compute-0 ovn_controller[94336]: 2025-10-02T12:26:18Z|00447|binding|INFO|Releasing lport 38f1ac16-18c6-4b4a-b769-ebc7dd5181d4 from this chassis (sb_readonly=0)
Oct 02 12:26:18 compute-0 nova_compute[192079]: 2025-10-02 12:26:18.829 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.829 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/a04f937a-375f-4fb0-90fe-5f514a88668f.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/a04f937a-375f-4fb0-90fe-5f514a88668f.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.830 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e33b1890-1370-43b2-aa76-92c0b26efe3b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.831 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-a04f937a-375f-4fb0-90fe-5f514a88668f
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/a04f937a-375f-4fb0-90fe-5f514a88668f.pid.haproxy
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID a04f937a-375f-4fb0-90fe-5f514a88668f
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:26:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:18.831 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'env', 'PROCESS_TAG=haproxy-a04f937a-375f-4fb0-90fe-5f514a88668f', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/a04f937a-375f-4fb0-90fe-5f514a88668f.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:26:19 compute-0 nova_compute[192079]: 2025-10-02 12:26:19.042 2 DEBUG nova.compute.manager [req-6102f98e-2745-47c7-9884-aefdf01633b1 req-90a4380a-f256-4789-8013-f53afe8bccf9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Received event network-vif-unplugged-84dc02cc-883a-4f1b-a938-49e678b5f445 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:26:19 compute-0 nova_compute[192079]: 2025-10-02 12:26:19.043 2 DEBUG oslo_concurrency.lockutils [req-6102f98e-2745-47c7-9884-aefdf01633b1 req-90a4380a-f256-4789-8013-f53afe8bccf9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "ab7610b5-3462-4dc2-a802-0998246e8cdb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:26:19 compute-0 nova_compute[192079]: 2025-10-02 12:26:19.043 2 DEBUG oslo_concurrency.lockutils [req-6102f98e-2745-47c7-9884-aefdf01633b1 req-90a4380a-f256-4789-8013-f53afe8bccf9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ab7610b5-3462-4dc2-a802-0998246e8cdb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:26:19 compute-0 nova_compute[192079]: 2025-10-02 12:26:19.044 2 DEBUG oslo_concurrency.lockutils [req-6102f98e-2745-47c7-9884-aefdf01633b1 req-90a4380a-f256-4789-8013-f53afe8bccf9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ab7610b5-3462-4dc2-a802-0998246e8cdb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:26:19 compute-0 nova_compute[192079]: 2025-10-02 12:26:19.044 2 DEBUG nova.compute.manager [req-6102f98e-2745-47c7-9884-aefdf01633b1 req-90a4380a-f256-4789-8013-f53afe8bccf9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] No waiting events found dispatching network-vif-unplugged-84dc02cc-883a-4f1b-a938-49e678b5f445 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:26:19 compute-0 nova_compute[192079]: 2025-10-02 12:26:19.044 2 DEBUG nova.compute.manager [req-6102f98e-2745-47c7-9884-aefdf01633b1 req-90a4380a-f256-4789-8013-f53afe8bccf9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Received event network-vif-unplugged-84dc02cc-883a-4f1b-a938-49e678b5f445 for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:26:19 compute-0 podman[240330]: 2025-10-02 12:26:19.222777086 +0000 UTC m=+0.048563944 container create 9cfa0c0ba1c814fe55d2bccb235eac10ef5929d4bc3a06d8be794d4d6d75f493 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.vendor=CentOS, tcib_managed=true, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001)
Oct 02 12:26:19 compute-0 systemd[1]: Started libpod-conmon-9cfa0c0ba1c814fe55d2bccb235eac10ef5929d4bc3a06d8be794d4d6d75f493.scope.
Oct 02 12:26:19 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:26:19 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/04b7cad8323f843d89f58c824162e8f78bb00b687d5f4e97c6f6ed7c043d9c8b/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:26:19 compute-0 podman[240330]: 2025-10-02 12:26:19.289461924 +0000 UTC m=+0.115248782 container init 9cfa0c0ba1c814fe55d2bccb235eac10ef5929d4bc3a06d8be794d4d6d75f493 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:26:19 compute-0 podman[240330]: 2025-10-02 12:26:19.196869661 +0000 UTC m=+0.022656539 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:26:19 compute-0 podman[240330]: 2025-10-02 12:26:19.295632472 +0000 UTC m=+0.121419330 container start 9cfa0c0ba1c814fe55d2bccb235eac10ef5929d4bc3a06d8be794d4d6d75f493 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:26:19 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[240345]: [NOTICE]   (240349) : New worker (240351) forked
Oct 02 12:26:19 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[240345]: [NOTICE]   (240349) : Loading success.
Oct 02 12:26:19 compute-0 nova_compute[192079]: 2025-10-02 12:26:19.470 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407979.4697583, 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:26:19 compute-0 nova_compute[192079]: 2025-10-02 12:26:19.471 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] VM Resumed (Lifecycle Event)
Oct 02 12:26:19 compute-0 nova_compute[192079]: 2025-10-02 12:26:19.474 2 DEBUG nova.compute.manager [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:26:19 compute-0 nova_compute[192079]: 2025-10-02 12:26:19.479 2 INFO nova.virt.libvirt.driver [-] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Instance running successfully.
Oct 02 12:26:19 compute-0 virtqemud[191807]: argument unsupported: QEMU guest agent is not configured
Oct 02 12:26:19 compute-0 nova_compute[192079]: 2025-10-02 12:26:19.484 2 DEBUG nova.virt.libvirt.guest [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Failed to set time: agent not configured sync_guest_time /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:200
Oct 02 12:26:19 compute-0 nova_compute[192079]: 2025-10-02 12:26:19.484 2 DEBUG nova.virt.libvirt.driver [None req-af579600-37b1-4b8a-aa59-d3b500f64716 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] finish_migration finished successfully. finish_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:11793
Oct 02 12:26:19 compute-0 nova_compute[192079]: 2025-10-02 12:26:19.872 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:26:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:19.875 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=30, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=29) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:26:19 compute-0 nova_compute[192079]: 2025-10-02 12:26:19.876 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:19.877 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 1 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:26:19 compute-0 nova_compute[192079]: 2025-10-02 12:26:19.878 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: active, current task_state: resize_finish, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:26:19 compute-0 nova_compute[192079]: 2025-10-02 12:26:19.927 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] During sync_power_state the instance has a pending task (resize_finish). Skip.
Oct 02 12:26:19 compute-0 nova_compute[192079]: 2025-10-02 12:26:19.928 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407979.47051, 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:26:19 compute-0 nova_compute[192079]: 2025-10-02 12:26:19.928 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] VM Started (Lifecycle Event)
Oct 02 12:26:20 compute-0 nova_compute[192079]: 2025-10-02 12:26:20.000 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:26:20 compute-0 nova_compute[192079]: 2025-10-02 12:26:20.005 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Synchronizing instance power state after lifecycle event "Started"; current vm_state: active, current task_state: resize_finish, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:26:20 compute-0 nova_compute[192079]: 2025-10-02 12:26:20.248 2 DEBUG nova.network.neutron [-] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:26:20 compute-0 nova_compute[192079]: 2025-10-02 12:26:20.378 2 DEBUG nova.compute.manager [req-02717323-49ac-4cdb-bcb6-e5fdcf6cb535 req-f8022aaa-3845-496f-987e-ef7e1679081e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Received event network-vif-deleted-84dc02cc-883a-4f1b-a938-49e678b5f445 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:26:20 compute-0 nova_compute[192079]: 2025-10-02 12:26:20.378 2 INFO nova.compute.manager [req-02717323-49ac-4cdb-bcb6-e5fdcf6cb535 req-f8022aaa-3845-496f-987e-ef7e1679081e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Neutron deleted interface 84dc02cc-883a-4f1b-a938-49e678b5f445; detaching it from the instance and deleting it from the info cache
Oct 02 12:26:20 compute-0 nova_compute[192079]: 2025-10-02 12:26:20.378 2 DEBUG nova.network.neutron [req-02717323-49ac-4cdb-bcb6-e5fdcf6cb535 req-f8022aaa-3845-496f-987e-ef7e1679081e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:26:20 compute-0 nova_compute[192079]: 2025-10-02 12:26:20.384 2 INFO nova.compute.manager [-] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Took 1.79 seconds to deallocate network for instance.
Oct 02 12:26:20 compute-0 nova_compute[192079]: 2025-10-02 12:26:20.410 2 DEBUG nova.compute.manager [req-02717323-49ac-4cdb-bcb6-e5fdcf6cb535 req-f8022aaa-3845-496f-987e-ef7e1679081e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Detach interface failed, port_id=84dc02cc-883a-4f1b-a938-49e678b5f445, reason: Instance ab7610b5-3462-4dc2-a802-0998246e8cdb could not be found. _process_instance_vif_deleted_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10882
Oct 02 12:26:20 compute-0 nova_compute[192079]: 2025-10-02 12:26:20.658 2 DEBUG oslo_concurrency.lockutils [None req-8a8e6a8f-0e3d-4595-ae3a-40ce7fca6b9f 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:26:20 compute-0 nova_compute[192079]: 2025-10-02 12:26:20.658 2 DEBUG oslo_concurrency.lockutils [None req-8a8e6a8f-0e3d-4595-ae3a-40ce7fca6b9f 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:26:20 compute-0 nova_compute[192079]: 2025-10-02 12:26:20.799 2 DEBUG nova.compute.provider_tree [None req-8a8e6a8f-0e3d-4595-ae3a-40ce7fca6b9f 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:26:20 compute-0 nova_compute[192079]: 2025-10-02 12:26:20.863 2 DEBUG nova.scheduler.client.report [None req-8a8e6a8f-0e3d-4595-ae3a-40ce7fca6b9f 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:26:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:20.879 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '30'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:26:21 compute-0 nova_compute[192079]: 2025-10-02 12:26:21.043 2 DEBUG oslo_concurrency.lockutils [None req-8a8e6a8f-0e3d-4595-ae3a-40ce7fca6b9f 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.384s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:26:21 compute-0 nova_compute[192079]: 2025-10-02 12:26:21.182 2 INFO nova.scheduler.client.report [None req-8a8e6a8f-0e3d-4595-ae3a-40ce7fca6b9f 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Deleted allocations for instance ab7610b5-3462-4dc2-a802-0998246e8cdb
Oct 02 12:26:21 compute-0 nova_compute[192079]: 2025-10-02 12:26:21.283 2 DEBUG nova.compute.manager [req-b13baa44-8f3d-41da-bd0d-ba3b931a7df8 req-d4d2043b-933a-4cfe-9d42-8352c46f6e7b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Received event network-vif-plugged-84dc02cc-883a-4f1b-a938-49e678b5f445 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:26:21 compute-0 nova_compute[192079]: 2025-10-02 12:26:21.284 2 DEBUG oslo_concurrency.lockutils [req-b13baa44-8f3d-41da-bd0d-ba3b931a7df8 req-d4d2043b-933a-4cfe-9d42-8352c46f6e7b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "ab7610b5-3462-4dc2-a802-0998246e8cdb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:26:21 compute-0 nova_compute[192079]: 2025-10-02 12:26:21.285 2 DEBUG oslo_concurrency.lockutils [req-b13baa44-8f3d-41da-bd0d-ba3b931a7df8 req-d4d2043b-933a-4cfe-9d42-8352c46f6e7b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ab7610b5-3462-4dc2-a802-0998246e8cdb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:26:21 compute-0 nova_compute[192079]: 2025-10-02 12:26:21.285 2 DEBUG oslo_concurrency.lockutils [req-b13baa44-8f3d-41da-bd0d-ba3b931a7df8 req-d4d2043b-933a-4cfe-9d42-8352c46f6e7b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "ab7610b5-3462-4dc2-a802-0998246e8cdb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:26:21 compute-0 nova_compute[192079]: 2025-10-02 12:26:21.285 2 DEBUG nova.compute.manager [req-b13baa44-8f3d-41da-bd0d-ba3b931a7df8 req-d4d2043b-933a-4cfe-9d42-8352c46f6e7b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] No waiting events found dispatching network-vif-plugged-84dc02cc-883a-4f1b-a938-49e678b5f445 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:26:21 compute-0 nova_compute[192079]: 2025-10-02 12:26:21.286 2 WARNING nova.compute.manager [req-b13baa44-8f3d-41da-bd0d-ba3b931a7df8 req-d4d2043b-933a-4cfe-9d42-8352c46f6e7b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Received unexpected event network-vif-plugged-84dc02cc-883a-4f1b-a938-49e678b5f445 for instance with vm_state deleted and task_state None.
Oct 02 12:26:21 compute-0 nova_compute[192079]: 2025-10-02 12:26:21.286 2 DEBUG nova.compute.manager [req-b13baa44-8f3d-41da-bd0d-ba3b931a7df8 req-d4d2043b-933a-4cfe-9d42-8352c46f6e7b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Received event network-vif-plugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:26:21 compute-0 nova_compute[192079]: 2025-10-02 12:26:21.287 2 DEBUG oslo_concurrency.lockutils [req-b13baa44-8f3d-41da-bd0d-ba3b931a7df8 req-d4d2043b-933a-4cfe-9d42-8352c46f6e7b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:26:21 compute-0 nova_compute[192079]: 2025-10-02 12:26:21.287 2 DEBUG oslo_concurrency.lockutils [req-b13baa44-8f3d-41da-bd0d-ba3b931a7df8 req-d4d2043b-933a-4cfe-9d42-8352c46f6e7b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:26:21 compute-0 nova_compute[192079]: 2025-10-02 12:26:21.287 2 DEBUG oslo_concurrency.lockutils [req-b13baa44-8f3d-41da-bd0d-ba3b931a7df8 req-d4d2043b-933a-4cfe-9d42-8352c46f6e7b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:26:21 compute-0 nova_compute[192079]: 2025-10-02 12:26:21.288 2 DEBUG nova.compute.manager [req-b13baa44-8f3d-41da-bd0d-ba3b931a7df8 req-d4d2043b-933a-4cfe-9d42-8352c46f6e7b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] No waiting events found dispatching network-vif-plugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:26:21 compute-0 nova_compute[192079]: 2025-10-02 12:26:21.288 2 WARNING nova.compute.manager [req-b13baa44-8f3d-41da-bd0d-ba3b931a7df8 req-d4d2043b-933a-4cfe-9d42-8352c46f6e7b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Received unexpected event network-vif-plugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 for instance with vm_state resized and task_state None.
Oct 02 12:26:21 compute-0 nova_compute[192079]: 2025-10-02 12:26:21.288 2 DEBUG nova.compute.manager [req-b13baa44-8f3d-41da-bd0d-ba3b931a7df8 req-d4d2043b-933a-4cfe-9d42-8352c46f6e7b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Received event network-vif-plugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:26:21 compute-0 nova_compute[192079]: 2025-10-02 12:26:21.289 2 DEBUG oslo_concurrency.lockutils [req-b13baa44-8f3d-41da-bd0d-ba3b931a7df8 req-d4d2043b-933a-4cfe-9d42-8352c46f6e7b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:26:21 compute-0 nova_compute[192079]: 2025-10-02 12:26:21.289 2 DEBUG oslo_concurrency.lockutils [req-b13baa44-8f3d-41da-bd0d-ba3b931a7df8 req-d4d2043b-933a-4cfe-9d42-8352c46f6e7b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:26:21 compute-0 nova_compute[192079]: 2025-10-02 12:26:21.289 2 DEBUG oslo_concurrency.lockutils [req-b13baa44-8f3d-41da-bd0d-ba3b931a7df8 req-d4d2043b-933a-4cfe-9d42-8352c46f6e7b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:26:21 compute-0 nova_compute[192079]: 2025-10-02 12:26:21.289 2 DEBUG nova.compute.manager [req-b13baa44-8f3d-41da-bd0d-ba3b931a7df8 req-d4d2043b-933a-4cfe-9d42-8352c46f6e7b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] No waiting events found dispatching network-vif-plugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:26:21 compute-0 nova_compute[192079]: 2025-10-02 12:26:21.290 2 WARNING nova.compute.manager [req-b13baa44-8f3d-41da-bd0d-ba3b931a7df8 req-d4d2043b-933a-4cfe-9d42-8352c46f6e7b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Received unexpected event network-vif-plugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 for instance with vm_state resized and task_state None.
Oct 02 12:26:21 compute-0 nova_compute[192079]: 2025-10-02 12:26:21.668 2 DEBUG oslo_concurrency.lockutils [None req-8a8e6a8f-0e3d-4595-ae3a-40ce7fca6b9f 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "ab7610b5-3462-4dc2-a802-0998246e8cdb" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 8.138s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:26:22 compute-0 nova_compute[192079]: 2025-10-02 12:26:22.074 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:22 compute-0 nova_compute[192079]: 2025-10-02 12:26:22.918 2 DEBUG nova.network.neutron [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Port ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 binding to destination host compute-0.ctlplane.example.com is already ACTIVE migrate_instance_start /usr/lib/python3.9/site-packages/nova/network/neutron.py:3171
Oct 02 12:26:22 compute-0 nova_compute[192079]: 2025-10-02 12:26:22.919 2 DEBUG oslo_concurrency.lockutils [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "refresh_cache-40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:26:22 compute-0 nova_compute[192079]: 2025-10-02 12:26:22.919 2 DEBUG oslo_concurrency.lockutils [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquired lock "refresh_cache-40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:26:22 compute-0 nova_compute[192079]: 2025-10-02 12:26:22.919 2 DEBUG nova.network.neutron [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:26:23 compute-0 nova_compute[192079]: 2025-10-02 12:26:23.275 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:27 compute-0 nova_compute[192079]: 2025-10-02 12:26:27.103 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:27 compute-0 podman[240364]: 2025-10-02 12:26:27.134970847 +0000 UTC m=+0.045590683 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:26:27 compute-0 podman[240362]: 2025-10-02 12:26:27.1588973 +0000 UTC m=+0.073642048 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, container_name=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, 
org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_metadata_agent, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, tcib_managed=true)
Oct 02 12:26:27 compute-0 podman[240363]: 2025-10-02 12:26:27.172792278 +0000 UTC m=+0.073337409 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, config_id=ovn_controller, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=ovn_controller, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 12:26:28 compute-0 nova_compute[192079]: 2025-10-02 12:26:28.246 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:26:28 compute-0 nova_compute[192079]: 2025-10-02 12:26:28.278 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:28 compute-0 nova_compute[192079]: 2025-10-02 12:26:28.509 2 DEBUG nova.network.neutron [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Updating instance_info_cache with network_info: [{"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:26:28 compute-0 nova_compute[192079]: 2025-10-02 12:26:28.551 2 DEBUG oslo_concurrency.lockutils [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Releasing lock "refresh_cache-40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:26:28 compute-0 nova_compute[192079]: 2025-10-02 12:26:28.577 2 DEBUG nova.virt.libvirt.driver [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Creating tmpfile /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/tmpwy8i_p30 to verify with other compute node that the instance is on the same shared storage. check_instance_shared_storage_local /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:9618
Oct 02 12:26:28 compute-0 kernel: tapae0f2dc4-de (unregistering): left promiscuous mode
Oct 02 12:26:28 compute-0 NetworkManager[51160]: <info>  [1759407988.6256] device (tapae0f2dc4-de): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:26:28 compute-0 nova_compute[192079]: 2025-10-02 12:26:28.637 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:28 compute-0 ovn_controller[94336]: 2025-10-02T12:26:28Z|00448|binding|INFO|Releasing lport ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 from this chassis (sb_readonly=0)
Oct 02 12:26:28 compute-0 ovn_controller[94336]: 2025-10-02T12:26:28Z|00449|binding|INFO|Setting lport ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 down in Southbound
Oct 02 12:26:28 compute-0 ovn_controller[94336]: 2025-10-02T12:26:28Z|00450|binding|INFO|Removing iface tapae0f2dc4-de ovn-installed in OVS
Oct 02 12:26:28 compute-0 nova_compute[192079]: 2025-10-02 12:26:28.640 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:28.649 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:d5:ef:4c 10.100.0.9'], port_security=['fa:16:3e:d5:ef:4c 10.100.0.9'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.9/28', 'neutron:device_id': '40c8eb3a-547f-435e-8e59-ce9dcddb5f8e', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-a04f937a-375f-4fb0-90fe-5f514a88668f', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'neutron:revision_number': '6', 'neutron:security_group_ids': 'c0383701-0ec7-4f3b-8585-5effc4f5ca5a', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:port_fip': '192.168.122.248', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=50c0aa38-5fd8-41c7-b4bf-85b59722c5c3, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:26:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:28.652 103294 INFO neutron.agent.ovn.metadata.agent [-] Port ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 in datapath a04f937a-375f-4fb0-90fe-5f514a88668f unbound from our chassis
Oct 02 12:26:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:28.654 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network a04f937a-375f-4fb0-90fe-5f514a88668f, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:26:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:28.656 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7f753683-e5c2-491e-86dd-6da4c8b3c31d]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:28.657 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f namespace which is not needed anymore
Oct 02 12:26:28 compute-0 nova_compute[192079]: 2025-10-02 12:26:28.677 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:28 compute-0 systemd[1]: machine-qemu\x2d58\x2dinstance\x2d00000079.scope: Deactivated successfully.
Oct 02 12:26:28 compute-0 systemd[1]: machine-qemu\x2d58\x2dinstance\x2d00000079.scope: Consumed 10.161s CPU time.
Oct 02 12:26:28 compute-0 systemd-machined[152150]: Machine qemu-58-instance-00000079 terminated.
Oct 02 12:26:28 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[240345]: [NOTICE]   (240349) : haproxy version is 2.8.14-c23fe91
Oct 02 12:26:28 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[240345]: [NOTICE]   (240349) : path to executable is /usr/sbin/haproxy
Oct 02 12:26:28 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[240345]: [WARNING]  (240349) : Exiting Master process...
Oct 02 12:26:28 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[240345]: [ALERT]    (240349) : Current worker (240351) exited with code 143 (Terminated)
Oct 02 12:26:28 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[240345]: [WARNING]  (240349) : All workers exited. Exiting... (0)
Oct 02 12:26:28 compute-0 systemd[1]: libpod-9cfa0c0ba1c814fe55d2bccb235eac10ef5929d4bc3a06d8be794d4d6d75f493.scope: Deactivated successfully.
Oct 02 12:26:28 compute-0 podman[240451]: 2025-10-02 12:26:28.831877463 +0000 UTC m=+0.059464691 container died 9cfa0c0ba1c814fe55d2bccb235eac10ef5929d4bc3a06d8be794d4d6d75f493 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:26:28 compute-0 nova_compute[192079]: 2025-10-02 12:26:28.864 2 INFO nova.virt.libvirt.driver [-] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Instance destroyed successfully.
Oct 02 12:26:28 compute-0 nova_compute[192079]: 2025-10-02 12:26:28.864 2 DEBUG nova.objects.instance [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'resources' on Instance uuid 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:26:28 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-9cfa0c0ba1c814fe55d2bccb235eac10ef5929d4bc3a06d8be794d4d6d75f493-userdata-shm.mount: Deactivated successfully.
Oct 02 12:26:28 compute-0 systemd[1]: var-lib-containers-storage-overlay-04b7cad8323f843d89f58c824162e8f78bb00b687d5f4e97c6f6ed7c043d9c8b-merged.mount: Deactivated successfully.
Oct 02 12:26:28 compute-0 podman[240451]: 2025-10-02 12:26:28.884246311 +0000 UTC m=+0.111833519 container cleanup 9cfa0c0ba1c814fe55d2bccb235eac10ef5929d4bc3a06d8be794d4d6d75f493 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0)
Oct 02 12:26:28 compute-0 systemd[1]: libpod-conmon-9cfa0c0ba1c814fe55d2bccb235eac10ef5929d4bc3a06d8be794d4d6d75f493.scope: Deactivated successfully.
Oct 02 12:26:28 compute-0 nova_compute[192079]: 2025-10-02 12:26:28.894 2 DEBUG nova.virt.libvirt.vif [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:25:11Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerActionsTestJSON-server-300185996',display_name='tempest-ServerActionsTestJSON-server-300185996',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(2),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serveractionstestjson-server-300185996',id=121,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=2,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBJJLom+UJzZg9dduKQv+725QaYDZoMXvP/xlpKnb/K05SGc4dkyLwCDweJ3QifTmxLWqK9Sz5A12yMJbzpa36v5C4bUqj8uiWk/vbR1BAjBdKM9d/Ug8M2nT8LwDBGP/9A==',key_name='tempest-keypair-1006285918',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:26:19Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=192,metadata={},migration_context=<?>,new_flavor=Flavor(2),node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=Flavor(1),os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='e564a4cad5d443dba81ec04d2a05ced9',ramdisk_id='',reservation_id='r-als9bbed',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',old_vm_state='active',owner_project_name='tempest-ServerActionsTestJSON-1646745100',owner_user_name='tempest-ServerActionsTestJSON-1646745100-project-member'},tags=<?>,task_state='resize_reverting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:26:19Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='d54b1826121b47caba89932a78c06ccd',uuid=40c8eb3a-547f-435e-8e59-ce9dcddb5f8e,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='resized') vif={"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": 
[{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:26:28 compute-0 nova_compute[192079]: 2025-10-02 12:26:28.895 2 DEBUG nova.network.os_vif_util [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converting VIF {"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:26:28 compute-0 nova_compute[192079]: 2025-10-02 12:26:28.896 2 DEBUG nova.network.os_vif_util [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:d5:ef:4c,bridge_name='br-int',has_traffic_filtering=True,id=ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapae0f2dc4-de') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:26:28 compute-0 nova_compute[192079]: 2025-10-02 12:26:28.897 2 DEBUG os_vif [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:d5:ef:4c,bridge_name='br-int',has_traffic_filtering=True,id=ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapae0f2dc4-de') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:26:28 compute-0 nova_compute[192079]: 2025-10-02 12:26:28.900 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:28 compute-0 nova_compute[192079]: 2025-10-02 12:26:28.900 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapae0f2dc4-de, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:26:28 compute-0 nova_compute[192079]: 2025-10-02 12:26:28.902 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:28 compute-0 nova_compute[192079]: 2025-10-02 12:26:28.905 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:26:28 compute-0 nova_compute[192079]: 2025-10-02 12:26:28.908 2 INFO os_vif [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:d5:ef:4c,bridge_name='br-int',has_traffic_filtering=True,id=ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapae0f2dc4-de')
Oct 02 12:26:28 compute-0 nova_compute[192079]: 2025-10-02 12:26:28.913 2 DEBUG oslo_concurrency.lockutils [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:26:28 compute-0 nova_compute[192079]: 2025-10-02 12:26:28.913 2 DEBUG oslo_concurrency.lockutils [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:26:28 compute-0 nova_compute[192079]: 2025-10-02 12:26:28.936 2 DEBUG nova.objects.instance [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'migration_context' on Instance uuid 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:26:28 compute-0 podman[240497]: 2025-10-02 12:26:28.957769394 +0000 UTC m=+0.047504396 container remove 9cfa0c0ba1c814fe55d2bccb235eac10ef5929d4bc3a06d8be794d4d6d75f493 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:26:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:28.963 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5535cf77-1c91-4706-a9fc-1f96694f74f8]: (4, ('Thu Oct  2 12:26:28 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f (9cfa0c0ba1c814fe55d2bccb235eac10ef5929d4bc3a06d8be794d4d6d75f493)\n9cfa0c0ba1c814fe55d2bccb235eac10ef5929d4bc3a06d8be794d4d6d75f493\nThu Oct  2 12:26:28 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f (9cfa0c0ba1c814fe55d2bccb235eac10ef5929d4bc3a06d8be794d4d6d75f493)\n9cfa0c0ba1c814fe55d2bccb235eac10ef5929d4bc3a06d8be794d4d6d75f493\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:28.965 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[444244a9-a427-4157-9541-3392f58594fa]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:28.966 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapa04f937a-30, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:26:28 compute-0 nova_compute[192079]: 2025-10-02 12:26:28.968 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:28 compute-0 kernel: tapa04f937a-30: left promiscuous mode
Oct 02 12:26:28 compute-0 nova_compute[192079]: 2025-10-02 12:26:28.991 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:28.995 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d86de91e-f945-4dad-b623-c83483319bad]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:29.033 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0003ae0a-0d6b-4974-b1a5-ed16195a4b52]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:29.035 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[77a5550b-3e21-467f-8b90-83bb244f377f]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:29 compute-0 nova_compute[192079]: 2025-10-02 12:26:29.040 2 DEBUG nova.scheduler.client.report [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Refreshing inventories for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708 _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:804
Oct 02 12:26:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:29.052 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[78ee4c8c-d593-45c3-8f83-457731a58062]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 597622, 'reachable_time': 21014, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 240512, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:29.055 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:26:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:29.055 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[8671b164-4fab-42c6-a445-8343c4b61fd8]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:29 compute-0 systemd[1]: run-netns-ovnmeta\x2da04f937a\x2d375f\x2d4fb0\x2d90fe\x2d5f514a88668f.mount: Deactivated successfully.
Oct 02 12:26:29 compute-0 nova_compute[192079]: 2025-10-02 12:26:29.062 2 DEBUG nova.scheduler.client.report [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Updating ProviderTree inventory for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 from _refresh_and_get_inventory using data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} _refresh_and_get_inventory /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:768
Oct 02 12:26:29 compute-0 nova_compute[192079]: 2025-10-02 12:26:29.062 2 DEBUG nova.compute.provider_tree [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Updating inventory in ProviderTree for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 with inventory: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:176
Oct 02 12:26:29 compute-0 nova_compute[192079]: 2025-10-02 12:26:29.081 2 DEBUG nova.scheduler.client.report [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Refreshing aggregate associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, aggregates: None _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:813
Oct 02 12:26:29 compute-0 nova_compute[192079]: 2025-10-02 12:26:29.105 2 DEBUG nova.scheduler.client.report [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Refreshing trait associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, traits: COMPUTE_SECURITY_UEFI_SECURE_BOOT,COMPUTE_VIOMMU_MODEL_VIRTIO,COMPUTE_VIOMMU_MODEL_AUTO,COMPUTE_IMAGE_TYPE_AKI,COMPUTE_GRAPHICS_MODEL_VIRTIO,COMPUTE_NET_VIF_MODEL_PCNET,HW_CPU_X86_SSE42,COMPUTE_RESCUE_BFV,COMPUTE_VOLUME_EXTEND,COMPUTE_IMAGE_TYPE_QCOW2,COMPUTE_TRUSTED_CERTS,COMPUTE_SOCKET_PCI_NUMA_AFFINITY,COMPUTE_GRAPHICS_MODEL_CIRRUS,HW_CPU_X86_MMX,COMPUTE_STORAGE_BUS_VIRTIO,COMPUTE_NET_ATTACH_INTERFACE_WITH_TAG,COMPUTE_STORAGE_BUS_FDC,COMPUTE_STORAGE_BUS_USB,COMPUTE_NODE,HW_CPU_X86_SSSE3,HW_CPU_X86_SSE2,COMPUTE_GRAPHICS_MODEL_BOCHS,COMPUTE_NET_VIF_MODEL_E1000E,COMPUTE_IMAGE_TYPE_RAW,COMPUTE_NET_VIF_MODEL_NE2K_PCI,COMPUTE_IMAGE_TYPE_AMI,COMPUTE_VIOMMU_MODEL_INTEL,COMPUTE_SECURITY_TPM_2_0,COMPUTE_STORAGE_BUS_SCSI,COMPUTE_IMAGE_TYPE_ARI,COMPUTE_NET_VIF_MODEL_VMXNET3,COMPUTE_SECURITY_TPM_1_2,COMPUTE_NET_VIF_MODEL_E1000,HW_CPU_X86_SSE,COMPUTE_VOLUME_MULTI_ATTACH,COMPUTE_STORAGE_BUS_IDE,COMPUTE_GRAPHICS_MODEL_NONE,COMPUTE_VOLUME_ATTACH_WITH_TAG,COMPUTE_NET_VIF_MODEL_VIRTIO,HW_CPU_X86_SSE41,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_DEVICE_TAGGING,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_ACCELERATORS,COMPUTE_NET_VIF_MODEL_RTL8139,COMPUTE_GRAPHICS_MODEL_VGA,COMPUTE_STORAGE_BUS_SATA,COMPUTE_NET_VIF_MODEL_SPAPR_VLAN _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:825
Oct 02 12:26:29 compute-0 nova_compute[192079]: 2025-10-02 12:26:29.169 2 DEBUG nova.compute.provider_tree [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:26:29 compute-0 nova_compute[192079]: 2025-10-02 12:26:29.449 2 DEBUG nova.scheduler.client.report [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:26:29 compute-0 nova_compute[192079]: 2025-10-02 12:26:29.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:26:30 compute-0 nova_compute[192079]: 2025-10-02 12:26:30.012 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759407975.0120163, ab7610b5-3462-4dc2-a802-0998246e8cdb => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:26:30 compute-0 nova_compute[192079]: 2025-10-02 12:26:30.013 2 INFO nova.compute.manager [-] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] VM Stopped (Lifecycle Event)
Oct 02 12:26:30 compute-0 nova_compute[192079]: 2025-10-02 12:26:30.293 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:26:30 compute-0 nova_compute[192079]: 2025-10-02 12:26:30.607 2 DEBUG nova.compute.manager [req-644f6bb5-fc2f-42e6-a167-803dbc011a78 req-9b3f2a49-4acb-4a4f-820e-0024bd9fc397 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Received event network-vif-unplugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:26:30 compute-0 nova_compute[192079]: 2025-10-02 12:26:30.607 2 DEBUG oslo_concurrency.lockutils [req-644f6bb5-fc2f-42e6-a167-803dbc011a78 req-9b3f2a49-4acb-4a4f-820e-0024bd9fc397 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:26:30 compute-0 nova_compute[192079]: 2025-10-02 12:26:30.607 2 DEBUG oslo_concurrency.lockutils [req-644f6bb5-fc2f-42e6-a167-803dbc011a78 req-9b3f2a49-4acb-4a4f-820e-0024bd9fc397 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:26:30 compute-0 nova_compute[192079]: 2025-10-02 12:26:30.607 2 DEBUG oslo_concurrency.lockutils [req-644f6bb5-fc2f-42e6-a167-803dbc011a78 req-9b3f2a49-4acb-4a4f-820e-0024bd9fc397 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:26:30 compute-0 nova_compute[192079]: 2025-10-02 12:26:30.608 2 DEBUG nova.compute.manager [req-644f6bb5-fc2f-42e6-a167-803dbc011a78 req-9b3f2a49-4acb-4a4f-820e-0024bd9fc397 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] No waiting events found dispatching network-vif-unplugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:26:30 compute-0 nova_compute[192079]: 2025-10-02 12:26:30.608 2 WARNING nova.compute.manager [req-644f6bb5-fc2f-42e6-a167-803dbc011a78 req-9b3f2a49-4acb-4a4f-820e-0024bd9fc397 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Received unexpected event network-vif-unplugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 for instance with vm_state resized and task_state resize_reverting.
Oct 02 12:26:30 compute-0 nova_compute[192079]: 2025-10-02 12:26:30.624 2 DEBUG nova.compute.manager [None req-49d5e6cb-54a6-4d60-8e40-548969cfe9df - - - - - -] [instance: ab7610b5-3462-4dc2-a802-0998246e8cdb] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:26:30 compute-0 nova_compute[192079]: 2025-10-02 12:26:30.994 2 DEBUG oslo_concurrency.lockutils [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.080s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:26:31 compute-0 nova_compute[192079]: 2025-10-02 12:26:31.000 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.707s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:26:31 compute-0 nova_compute[192079]: 2025-10-02 12:26:31.000 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:26:31 compute-0 nova_compute[192079]: 2025-10-02 12:26:31.001 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:26:31 compute-0 nova_compute[192079]: 2025-10-02 12:26:31.189 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:26:31 compute-0 nova_compute[192079]: 2025-10-02 12:26:31.191 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5730MB free_disk=73.31958389282227GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:26:31 compute-0 nova_compute[192079]: 2025-10-02 12:26:31.191 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:26:31 compute-0 nova_compute[192079]: 2025-10-02 12:26:31.192 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:26:31 compute-0 nova_compute[192079]: 2025-10-02 12:26:31.407 2 INFO nova.compute.manager [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Swapping old allocation on dict_keys(['55f2ae21-42ea-47d7-8c73-c3134981d708']) held by migration e7859187-b33d-44dd-b078-3c66a9a847e5 for instance
Oct 02 12:26:31 compute-0 nova_compute[192079]: 2025-10-02 12:26:31.425 2 INFO nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance e7859187-b33d-44dd-b078-3c66a9a847e5 has allocations against this compute host but is not found in the database.
Oct 02 12:26:31 compute-0 nova_compute[192079]: 2025-10-02 12:26:31.426 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:26:31 compute-0 nova_compute[192079]: 2025-10-02 12:26:31.426 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 1 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:26:31 compute-0 nova_compute[192079]: 2025-10-02 12:26:31.426 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=640MB phys_disk=79GB used_disk=1GB total_vcpus=8 used_vcpus=1 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:26:31 compute-0 nova_compute[192079]: 2025-10-02 12:26:31.433 2 DEBUG nova.scheduler.client.report [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Overwriting current allocation {'allocations': {'55f2ae21-42ea-47d7-8c73-c3134981d708': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}, 'generation': 68}}, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'consumer_generation': 1} on consumer 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e move_allocations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:2018
Oct 02 12:26:31 compute-0 nova_compute[192079]: 2025-10-02 12:26:31.490 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:26:31 compute-0 nova_compute[192079]: 2025-10-02 12:26:31.509 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:26:31 compute-0 nova_compute[192079]: 2025-10-02 12:26:31.540 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:26:31 compute-0 nova_compute[192079]: 2025-10-02 12:26:31.540 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.349s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:26:31 compute-0 nova_compute[192079]: 2025-10-02 12:26:31.691 2 DEBUG oslo_concurrency.lockutils [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "refresh_cache-40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:26:31 compute-0 nova_compute[192079]: 2025-10-02 12:26:31.691 2 DEBUG oslo_concurrency.lockutils [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquired lock "refresh_cache-40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:26:31 compute-0 nova_compute[192079]: 2025-10-02 12:26:31.691 2 DEBUG nova.network.neutron [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:26:32 compute-0 nova_compute[192079]: 2025-10-02 12:26:32.105 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:32 compute-0 nova_compute[192079]: 2025-10-02 12:26:32.541 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:26:32 compute-0 nova_compute[192079]: 2025-10-02 12:26:32.542 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:26:32 compute-0 nova_compute[192079]: 2025-10-02 12:26:32.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:26:32 compute-0 nova_compute[192079]: 2025-10-02 12:26:32.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:26:33 compute-0 nova_compute[192079]: 2025-10-02 12:26:33.905 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:34 compute-0 nova_compute[192079]: 2025-10-02 12:26:34.724 2 DEBUG nova.compute.manager [req-83aab4e4-e7b4-4ec8-990a-2f8f7817a9a6 req-b1a85451-1fcb-481f-80a4-44b004b86261 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Received event network-vif-plugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:26:34 compute-0 nova_compute[192079]: 2025-10-02 12:26:34.725 2 DEBUG oslo_concurrency.lockutils [req-83aab4e4-e7b4-4ec8-990a-2f8f7817a9a6 req-b1a85451-1fcb-481f-80a4-44b004b86261 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:26:34 compute-0 nova_compute[192079]: 2025-10-02 12:26:34.725 2 DEBUG oslo_concurrency.lockutils [req-83aab4e4-e7b4-4ec8-990a-2f8f7817a9a6 req-b1a85451-1fcb-481f-80a4-44b004b86261 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:26:34 compute-0 nova_compute[192079]: 2025-10-02 12:26:34.725 2 DEBUG oslo_concurrency.lockutils [req-83aab4e4-e7b4-4ec8-990a-2f8f7817a9a6 req-b1a85451-1fcb-481f-80a4-44b004b86261 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:26:34 compute-0 nova_compute[192079]: 2025-10-02 12:26:34.726 2 DEBUG nova.compute.manager [req-83aab4e4-e7b4-4ec8-990a-2f8f7817a9a6 req-b1a85451-1fcb-481f-80a4-44b004b86261 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] No waiting events found dispatching network-vif-plugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:26:34 compute-0 nova_compute[192079]: 2025-10-02 12:26:34.726 2 WARNING nova.compute.manager [req-83aab4e4-e7b4-4ec8-990a-2f8f7817a9a6 req-b1a85451-1fcb-481f-80a4-44b004b86261 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Received unexpected event network-vif-plugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 for instance with vm_state resized and task_state resize_reverting.
Oct 02 12:26:35 compute-0 nova_compute[192079]: 2025-10-02 12:26:35.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:26:35 compute-0 nova_compute[192079]: 2025-10-02 12:26:35.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:26:36 compute-0 podman[240514]: 2025-10-02 12:26:36.149832129 +0000 UTC m=+0.058524605 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm, container_name=ceilometer_agent_compute, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, 
org.label-schema.license=GPLv2, tcib_managed=true, managed_by=edpm_ansible, org.label-schema.schema-version=1.0)
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.508 2 DEBUG nova.network.neutron [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Updating instance_info_cache with network_info: [{"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.559 2 DEBUG oslo_concurrency.lockutils [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Releasing lock "refresh_cache-40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.560 2 DEBUG nova.virt.libvirt.driver [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Starting finish_revert_migration finish_revert_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:11843
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.568 2 DEBUG nova.virt.libvirt.driver [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Start _get_guest_xml network_info=[{"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum=<?>,container_format='bare',created_at=<?>,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=1,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=<?>,status=<?>,tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.571 2 WARNING nova.virt.libvirt.driver [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.575 2 DEBUG nova.virt.libvirt.host [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.576 2 DEBUG nova.virt.libvirt.host [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.579 2 DEBUG nova.virt.libvirt.host [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.579 2 DEBUG nova.virt.libvirt.host [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.580 2 DEBUG nova.virt.libvirt.driver [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.580 2 DEBUG nova.virt.hardware [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=<?>,container_format='bare',created_at=<?>,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=1,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=<?>,status=<?>,tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.581 2 DEBUG nova.virt.hardware [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.581 2 DEBUG nova.virt.hardware [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.581 2 DEBUG nova.virt.hardware [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.581 2 DEBUG nova.virt.hardware [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.581 2 DEBUG nova.virt.hardware [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.582 2 DEBUG nova.virt.hardware [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.582 2 DEBUG nova.virt.hardware [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.582 2 DEBUG nova.virt.hardware [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.582 2 DEBUG nova.virt.hardware [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.583 2 DEBUG nova.virt.hardware [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.583 2 DEBUG nova.objects.instance [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'vcpu_model' on Instance uuid 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.656 2 DEBUG oslo_concurrency.processutils [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk.config --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.678 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.679 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.679 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.725 2 DEBUG oslo_concurrency.processutils [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk.config --force-share --output=json" returned: 0 in 0.069s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.726 2 DEBUG oslo_concurrency.lockutils [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "/var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.726 2 DEBUG oslo_concurrency.lockutils [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "/var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.727 2 DEBUG oslo_concurrency.lockutils [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "/var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.728 2 DEBUG nova.virt.libvirt.vif [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:25:11Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerActionsTestJSON-server-300185996',display_name='tempest-ServerActionsTestJSON-server-300185996',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serveractionstestjson-server-300185996',id=121,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBJJLom+UJzZg9dduKQv+725QaYDZoMXvP/xlpKnb/K05SGc4dkyLwCDweJ3QifTmxLWqK9Sz5A12yMJbzpa36v5C4bUqj8uiWk/vbR1BAjBdKM9d/Ug8M2nT8LwDBGP/9A==',key_name='tempest-keypair-1006285918',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:26:19Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=MigrationContext,new_flavor=Flavor(2),node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=Flavor(1),os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='e564a4cad5d443dba81ec04d2a05ced9',ramdisk_id='',reservation_id='r-als9bbed',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',old_vm_state='active',owner_project_name='tempest-ServerActionsTestJSON-1646745100',owner_user_name='tempest-ServerActionsTestJSON-1646745100-project-member'},tags=<?>,task_state='resize_reverting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:26:21Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='d54b1826121b47caba89932a78c06ccd',uuid=40c8eb3a-547f-435e-8e59-ce9dcddb5f8e,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='resized') vif={"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": 
"tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.729 2 DEBUG nova.network.os_vif_util [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converting VIF {"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.730 2 DEBUG nova.network.os_vif_util [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:d5:ef:4c,bridge_name='br-int',has_traffic_filtering=True,id=ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapae0f2dc4-de') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.733 2 DEBUG nova.virt.libvirt.driver [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:26:36 compute-0 nova_compute[192079]:   <uuid>40c8eb3a-547f-435e-8e59-ce9dcddb5f8e</uuid>
Oct 02 12:26:36 compute-0 nova_compute[192079]:   <name>instance-00000079</name>
Oct 02 12:26:36 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:26:36 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:26:36 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:26:36 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:       <nova:name>tempest-ServerActionsTestJSON-server-300185996</nova:name>
Oct 02 12:26:36 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:26:36</nova:creationTime>
Oct 02 12:26:36 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:26:36 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:26:36 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:26:36 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:26:36 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:26:36 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:26:36 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:26:36 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:26:36 compute-0 nova_compute[192079]:         <nova:user uuid="d54b1826121b47caba89932a78c06ccd">tempest-ServerActionsTestJSON-1646745100-project-member</nova:user>
Oct 02 12:26:36 compute-0 nova_compute[192079]:         <nova:project uuid="e564a4cad5d443dba81ec04d2a05ced9">tempest-ServerActionsTestJSON-1646745100</nova:project>
Oct 02 12:26:36 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:26:36 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:26:36 compute-0 nova_compute[192079]:         <nova:port uuid="ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0">
Oct 02 12:26:36 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.9" ipVersion="4"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:26:36 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:26:36 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:26:36 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <system>
Oct 02 12:26:36 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:26:36 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:26:36 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:26:36 compute-0 nova_compute[192079]:       <entry name="serial">40c8eb3a-547f-435e-8e59-ce9dcddb5f8e</entry>
Oct 02 12:26:36 compute-0 nova_compute[192079]:       <entry name="uuid">40c8eb3a-547f-435e-8e59-ce9dcddb5f8e</entry>
Oct 02 12:26:36 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     </system>
Oct 02 12:26:36 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:26:36 compute-0 nova_compute[192079]:   <os>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:   </os>
Oct 02 12:26:36 compute-0 nova_compute[192079]:   <features>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:   </features>
Oct 02 12:26:36 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:26:36 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:26:36 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:26:36 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:26:36 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/disk.config"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:26:36 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:d5:ef:4c"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:       <target dev="tapae0f2dc4-de"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:26:36 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e/console.log" append="off"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <video>
Oct 02 12:26:36 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     </video>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <input type="keyboard" bus="usb"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:26:36 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:26:36 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:26:36 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:26:36 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:26:36 compute-0 nova_compute[192079]: </domain>
Oct 02 12:26:36 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.734 2 DEBUG nova.compute.manager [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Preparing to wait for external event network-vif-plugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.735 2 DEBUG oslo_concurrency.lockutils [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.735 2 DEBUG oslo_concurrency.lockutils [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.735 2 DEBUG oslo_concurrency.lockutils [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.735 2 DEBUG nova.virt.libvirt.vif [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:25:11Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerActionsTestJSON-server-300185996',display_name='tempest-ServerActionsTestJSON-server-300185996',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serveractionstestjson-server-300185996',id=121,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBJJLom+UJzZg9dduKQv+725QaYDZoMXvP/xlpKnb/K05SGc4dkyLwCDweJ3QifTmxLWqK9Sz5A12yMJbzpa36v5C4bUqj8uiWk/vbR1BAjBdKM9d/Ug8M2nT8LwDBGP/9A==',key_name='tempest-keypair-1006285918',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:26:19Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=MigrationContext,new_flavor=Flavor(2),node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=Flavor(1),os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='e564a4cad5d443dba81ec04d2a05ced9',ramdisk_id='',reservation_id='r-als9bbed',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',old_vm_state='active',owner_project_name='tempest-ServerActionsTestJSON-1646745100',owner_user_name='tempest-ServerActionsTestJSON-1646745100-project-member'},tags=<?>,task_state='resize_reverting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:26:21Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='d54b1826121b47caba89932a78c06ccd',uuid=40c8eb3a-547f-435e-8e59-ce9dcddb5f8e,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='resized') vif={"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": 
"tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.736 2 DEBUG nova.network.os_vif_util [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converting VIF {"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.736 2 DEBUG nova.network.os_vif_util [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:d5:ef:4c,bridge_name='br-int',has_traffic_filtering=True,id=ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapae0f2dc4-de') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.737 2 DEBUG os_vif [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:d5:ef:4c,bridge_name='br-int',has_traffic_filtering=True,id=ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapae0f2dc4-de') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.738 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.739 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.739 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.740 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "refresh_cache-40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.740 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquired lock "refresh_cache-40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.740 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Forcefully refreshing network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2004
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.740 2 DEBUG nova.objects.instance [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lazy-loading 'info_cache' on Instance uuid 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.743 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.743 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapae0f2dc4-de, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.744 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapae0f2dc4-de, col_values=(('external_ids', {'iface-id': 'ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:d5:ef:4c', 'vm-uuid': '40c8eb3a-547f-435e-8e59-ce9dcddb5f8e'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.745 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:36 compute-0 NetworkManager[51160]: <info>  [1759407996.7472] manager: (tapae0f2dc4-de): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/227)
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.748 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.750 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.752 2 INFO os_vif [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:d5:ef:4c,bridge_name='br-int',has_traffic_filtering=True,id=ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapae0f2dc4-de')
Oct 02 12:26:36 compute-0 kernel: tapae0f2dc4-de: entered promiscuous mode
Oct 02 12:26:36 compute-0 NetworkManager[51160]: <info>  [1759407996.8219] manager: (tapae0f2dc4-de): new Tun device (/org/freedesktop/NetworkManager/Devices/228)
Oct 02 12:26:36 compute-0 ovn_controller[94336]: 2025-10-02T12:26:36Z|00451|binding|INFO|Claiming lport ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 for this chassis.
Oct 02 12:26:36 compute-0 ovn_controller[94336]: 2025-10-02T12:26:36Z|00452|binding|INFO|ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0: Claiming fa:16:3e:d5:ef:4c 10.100.0.9
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.827 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:36 compute-0 ovn_controller[94336]: 2025-10-02T12:26:36Z|00453|binding|INFO|Setting lport ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 ovn-installed in OVS
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.835 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:36 compute-0 nova_compute[192079]: 2025-10-02 12:26:36.837 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:36 compute-0 ovn_controller[94336]: 2025-10-02T12:26:36Z|00454|binding|INFO|Setting lport ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 up in Southbound
Oct 02 12:26:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:36.845 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:d5:ef:4c 10.100.0.9'], port_security=['fa:16:3e:d5:ef:4c 10.100.0.9'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.9/28', 'neutron:device_id': '40c8eb3a-547f-435e-8e59-ce9dcddb5f8e', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-a04f937a-375f-4fb0-90fe-5f514a88668f', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'neutron:revision_number': '7', 'neutron:security_group_ids': 'c0383701-0ec7-4f3b-8585-5effc4f5ca5a', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:port_fip': '192.168.122.248'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=50c0aa38-5fd8-41c7-b4bf-85b59722c5c3, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:26:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:36.846 103294 INFO neutron.agent.ovn.metadata.agent [-] Port ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 in datapath a04f937a-375f-4fb0-90fe-5f514a88668f bound to our chassis
Oct 02 12:26:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:36.847 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network a04f937a-375f-4fb0-90fe-5f514a88668f
Oct 02 12:26:36 compute-0 systemd-udevd[240552]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:26:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:36.858 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0f9a1920-b66d-4a08-9890-4fe7c3671756]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:36.859 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tapa04f937a-31 in ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:26:36 compute-0 systemd-machined[152150]: New machine qemu-59-instance-00000079.
Oct 02 12:26:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:36.862 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tapa04f937a-30 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:26:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:36.862 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d8af69db-5d31-4d17-bf9b-4ebf9a9b7364]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:36 compute-0 NetworkManager[51160]: <info>  [1759407996.8634] device (tapae0f2dc4-de): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:26:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:36.863 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[dbc46bf8-713b-4556-95ee-85c778a69bbc]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:36 compute-0 NetworkManager[51160]: <info>  [1759407996.8656] device (tapae0f2dc4-de): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:26:36 compute-0 systemd[1]: Started Virtual Machine qemu-59-instance-00000079.
Oct 02 12:26:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:36.873 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[89ae774a-8028-46e8-aef2-5a433698f60d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:36.895 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ea1c8180-3624-4365-8751-10ea3fb24daf]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:36.922 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[f22f51b7-136e-4102-9a9a-feb5adaa0ae0]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:36 compute-0 NetworkManager[51160]: <info>  [1759407996.9273] manager: (tapa04f937a-30): new Veth device (/org/freedesktop/NetworkManager/Devices/229)
Oct 02 12:26:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:36.927 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b656c296-efc4-4219-a3fe-4d3b30566671]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:36.953 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[e459baa6-f81e-4c2e-bc91-0b9f51643c13]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:36.956 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[c349d00a-baed-474f-97f7-eeca43cdee6c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:36 compute-0 NetworkManager[51160]: <info>  [1759407996.9774] device (tapa04f937a-30): carrier: link connected
Oct 02 12:26:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:36.983 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[d587d7c1-4f80-4297-af71-a9893b494634]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:36.999 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[dcd6ba9b-2374-45d6-a0ad-d6acf6e57fcb]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapa04f937a-31'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:33:93:68'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 146], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 599460, 'reachable_time': 19278, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 240586, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:37.014 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[def8c5ed-5079-4caf-8e19-e293bdd70006]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe33:9368'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 599460, 'tstamp': 599460}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 240587, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:37.030 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[52eec48e-f16f-4222-95d0-02d49e8e0578]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapa04f937a-31'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:33:93:68'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 146], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 599460, 'reachable_time': 19278, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 240588, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:37.064 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[bf7a30d0-f241-48ed-997d-8a45ea3c410b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:37 compute-0 nova_compute[192079]: 2025-10-02 12:26:37.077 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:37 compute-0 nova_compute[192079]: 2025-10-02 12:26:37.106 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:37.125 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a7e0600c-a150-4d1b-999f-0ceb596e7354]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:37.126 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapa04f937a-30, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:37.127 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:37.127 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapa04f937a-30, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:26:37 compute-0 nova_compute[192079]: 2025-10-02 12:26:37.128 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:37 compute-0 kernel: tapa04f937a-30: entered promiscuous mode
Oct 02 12:26:37 compute-0 NetworkManager[51160]: <info>  [1759407997.1295] manager: (tapa04f937a-30): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/230)
Oct 02 12:26:37 compute-0 nova_compute[192079]: 2025-10-02 12:26:37.131 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:37.132 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tapa04f937a-30, col_values=(('external_ids', {'iface-id': '38f1ac16-18c6-4b4a-b769-ebc7dd5181d4'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:26:37 compute-0 ovn_controller[94336]: 2025-10-02T12:26:37Z|00455|binding|INFO|Releasing lport 38f1ac16-18c6-4b4a-b769-ebc7dd5181d4 from this chassis (sb_readonly=0)
Oct 02 12:26:37 compute-0 nova_compute[192079]: 2025-10-02 12:26:37.133 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:37 compute-0 nova_compute[192079]: 2025-10-02 12:26:37.144 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:37.144 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/a04f937a-375f-4fb0-90fe-5f514a88668f.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/a04f937a-375f-4fb0-90fe-5f514a88668f.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:26:37 compute-0 nova_compute[192079]: 2025-10-02 12:26:37.144 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:37.145 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[66022c05-aa09-4da3-9509-a1c158eb0bdf]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:37.146 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-a04f937a-375f-4fb0-90fe-5f514a88668f
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/a04f937a-375f-4fb0-90fe-5f514a88668f.pid.haproxy
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID a04f937a-375f-4fb0-90fe-5f514a88668f
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:26:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:37.146 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'env', 'PROCESS_TAG=haproxy-a04f937a-375f-4fb0-90fe-5f514a88668f', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/a04f937a-375f-4fb0-90fe-5f514a88668f.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:26:37 compute-0 nova_compute[192079]: 2025-10-02 12:26:37.233 2 DEBUG nova.compute.manager [req-86b34f3d-d03a-4c4f-9d11-054756c42c51 req-194c25b6-614c-4487-9614-4ab9417d98e7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Received event network-vif-plugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:26:37 compute-0 nova_compute[192079]: 2025-10-02 12:26:37.234 2 DEBUG oslo_concurrency.lockutils [req-86b34f3d-d03a-4c4f-9d11-054756c42c51 req-194c25b6-614c-4487-9614-4ab9417d98e7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:26:37 compute-0 nova_compute[192079]: 2025-10-02 12:26:37.234 2 DEBUG oslo_concurrency.lockutils [req-86b34f3d-d03a-4c4f-9d11-054756c42c51 req-194c25b6-614c-4487-9614-4ab9417d98e7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:26:37 compute-0 nova_compute[192079]: 2025-10-02 12:26:37.235 2 DEBUG oslo_concurrency.lockutils [req-86b34f3d-d03a-4c4f-9d11-054756c42c51 req-194c25b6-614c-4487-9614-4ab9417d98e7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:26:37 compute-0 nova_compute[192079]: 2025-10-02 12:26:37.235 2 DEBUG nova.compute.manager [req-86b34f3d-d03a-4c4f-9d11-054756c42c51 req-194c25b6-614c-4487-9614-4ab9417d98e7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Processing event network-vif-plugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:26:37 compute-0 podman[240627]: 2025-10-02 12:26:37.543242173 +0000 UTC m=+0.064918490 container create 56992571fa2538edbae4ddbeace6cf741246c8dfb632bb83c08a31181d96d2ae (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:26:37 compute-0 systemd[1]: Started libpod-conmon-56992571fa2538edbae4ddbeace6cf741246c8dfb632bb83c08a31181d96d2ae.scope.
Oct 02 12:26:37 compute-0 podman[240627]: 2025-10-02 12:26:37.505217796 +0000 UTC m=+0.026894143 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:26:37 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:26:37 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/a35c840b159c74032436bace12789f6512ba5ebe316b1c50f13e8fb87142004d/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:26:37 compute-0 nova_compute[192079]: 2025-10-02 12:26:37.615 2 DEBUG nova.compute.manager [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:26:37 compute-0 nova_compute[192079]: 2025-10-02 12:26:37.617 2 DEBUG nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Removed pending event for 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e due to event _event_emit_delayed /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:438
Oct 02 12:26:37 compute-0 nova_compute[192079]: 2025-10-02 12:26:37.618 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407997.6152294, 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:26:37 compute-0 nova_compute[192079]: 2025-10-02 12:26:37.618 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] VM Started (Lifecycle Event)
Oct 02 12:26:37 compute-0 nova_compute[192079]: 2025-10-02 12:26:37.629 2 INFO nova.virt.libvirt.driver [-] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Instance running successfully.
Oct 02 12:26:37 compute-0 nova_compute[192079]: 2025-10-02 12:26:37.630 2 DEBUG nova.virt.libvirt.driver [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] finish_revert_migration finished successfully. finish_revert_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:11887
Oct 02 12:26:37 compute-0 podman[240627]: 2025-10-02 12:26:37.631876409 +0000 UTC m=+0.153552756 container init 56992571fa2538edbae4ddbeace6cf741246c8dfb632bb83c08a31181d96d2ae (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.vendor=CentOS, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.license=GPLv2)
Oct 02 12:26:37 compute-0 podman[240627]: 2025-10-02 12:26:37.642868577 +0000 UTC m=+0.164544904 container start 56992571fa2538edbae4ddbeace6cf741246c8dfb632bb83c08a31181d96d2ae (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 12:26:37 compute-0 nova_compute[192079]: 2025-10-02 12:26:37.665 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:26:37 compute-0 nova_compute[192079]: 2025-10-02 12:26:37.668 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Synchronizing instance power state after lifecycle event "Started"; current vm_state: resized, current task_state: resize_reverting, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:26:37 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[240641]: [NOTICE]   (240646) : New worker (240648) forked
Oct 02 12:26:37 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[240641]: [NOTICE]   (240646) : Loading success.
Oct 02 12:26:37 compute-0 nova_compute[192079]: 2025-10-02 12:26:37.711 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] During sync_power_state the instance has a pending task (resize_reverting). Skip.
Oct 02 12:26:37 compute-0 nova_compute[192079]: 2025-10-02 12:26:37.712 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407997.616479, 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:26:37 compute-0 nova_compute[192079]: 2025-10-02 12:26:37.712 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] VM Paused (Lifecycle Event)
Oct 02 12:26:37 compute-0 nova_compute[192079]: 2025-10-02 12:26:37.790 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:26:37 compute-0 nova_compute[192079]: 2025-10-02 12:26:37.793 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759407997.6252027, 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:26:37 compute-0 nova_compute[192079]: 2025-10-02 12:26:37.794 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] VM Resumed (Lifecycle Event)
Oct 02 12:26:37 compute-0 nova_compute[192079]: 2025-10-02 12:26:37.824 2 INFO nova.compute.manager [None req-ffbac01b-5631-46c2-9a2e-73af0ae3a897 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Updating instance to original state: 'active'
Oct 02 12:26:37 compute-0 nova_compute[192079]: 2025-10-02 12:26:37.833 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:26:37 compute-0 nova_compute[192079]: 2025-10-02 12:26:37.839 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: resized, current task_state: resize_reverting, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:26:38 compute-0 nova_compute[192079]: 2025-10-02 12:26:38.054 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] During sync_power_state the instance has a pending task (resize_reverting). Skip.
Oct 02 12:26:38 compute-0 nova_compute[192079]: 2025-10-02 12:26:38.141 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Updating instance_info_cache with network_info: [{"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:26:41 compute-0 nova_compute[192079]: 2025-10-02 12:26:41.747 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:42 compute-0 nova_compute[192079]: 2025-10-02 12:26:42.110 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:42 compute-0 nova_compute[192079]: 2025-10-02 12:26:42.245 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Releasing lock "refresh_cache-40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:26:42 compute-0 nova_compute[192079]: 2025-10-02 12:26:42.246 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Updated the network info_cache for instance _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9929
Oct 02 12:26:42 compute-0 nova_compute[192079]: 2025-10-02 12:26:42.247 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:26:43 compute-0 nova_compute[192079]: 2025-10-02 12:26:43.229 2 DEBUG nova.compute.manager [req-83b4d88d-2c41-477f-8619-634b7a6b6947 req-f5742659-24a9-4ce1-958e-d7c90c65f894 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Received event network-vif-plugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:26:43 compute-0 nova_compute[192079]: 2025-10-02 12:26:43.231 2 DEBUG oslo_concurrency.lockutils [req-83b4d88d-2c41-477f-8619-634b7a6b6947 req-f5742659-24a9-4ce1-958e-d7c90c65f894 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:26:43 compute-0 nova_compute[192079]: 2025-10-02 12:26:43.231 2 DEBUG oslo_concurrency.lockutils [req-83b4d88d-2c41-477f-8619-634b7a6b6947 req-f5742659-24a9-4ce1-958e-d7c90c65f894 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:26:43 compute-0 nova_compute[192079]: 2025-10-02 12:26:43.232 2 DEBUG oslo_concurrency.lockutils [req-83b4d88d-2c41-477f-8619-634b7a6b6947 req-f5742659-24a9-4ce1-958e-d7c90c65f894 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:26:43 compute-0 nova_compute[192079]: 2025-10-02 12:26:43.232 2 DEBUG nova.compute.manager [req-83b4d88d-2c41-477f-8619-634b7a6b6947 req-f5742659-24a9-4ce1-958e-d7c90c65f894 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] No waiting events found dispatching network-vif-plugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:26:43 compute-0 nova_compute[192079]: 2025-10-02 12:26:43.232 2 WARNING nova.compute.manager [req-83b4d88d-2c41-477f-8619-634b7a6b6947 req-f5742659-24a9-4ce1-958e-d7c90c65f894 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Received unexpected event network-vif-plugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 for instance with vm_state active and task_state None.
Oct 02 12:26:44 compute-0 podman[240658]: 2025-10-02 12:26:44.151688081 +0000 UTC m=+0.061413425 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, name=ubi9-minimal, distribution-scope=public, vcs-type=git, com.redhat.component=ubi9-minimal-container, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., maintainer=Red Hat, Inc., managed_by=edpm_ansible, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., version=9.6, config_id=edpm, release=1755695350, container_name=openstack_network_exporter, url=https://catalog.redhat.com/en/search?searchType=containers, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, vendor=Red Hat, Inc., io.buildah.version=1.33.7, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', 
'/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., build-date=2025-08-20T13:12:41, io.openshift.tags=minimal rhel9, architecture=x86_64, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, io.openshift.expose-services=)
Oct 02 12:26:44 compute-0 podman[240659]: 2025-10-02 12:26:44.155620958 +0000 UTC m=+0.062782592 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, config_id=multipathd, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=multipathd, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.schema-version=1.0)
Oct 02 12:26:44 compute-0 nova_compute[192079]: 2025-10-02 12:26:44.229 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:26:46 compute-0 nova_compute[192079]: 2025-10-02 12:26:46.751 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:47 compute-0 nova_compute[192079]: 2025-10-02 12:26:47.112 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:49 compute-0 podman[240708]: 2025-10-02 12:26:49.140618654 +0000 UTC m=+0.047673930 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter)
Oct 02 12:26:49 compute-0 podman[240709]: 2025-10-02 12:26:49.149912497 +0000 UTC m=+0.058088464 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=iscsid, managed_by=edpm_ansible, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid)
Oct 02 12:26:51 compute-0 ovn_controller[94336]: 2025-10-02T12:26:51Z|00045|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:d5:ef:4c 10.100.0.9
Oct 02 12:26:51 compute-0 nova_compute[192079]: 2025-10-02 12:26:51.754 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:52 compute-0 nova_compute[192079]: 2025-10-02 12:26:52.112 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:53 compute-0 ovn_controller[94336]: 2025-10-02T12:26:53Z|00456|binding|INFO|Releasing lport 38f1ac16-18c6-4b4a-b769-ebc7dd5181d4 from this chassis (sb_readonly=0)
Oct 02 12:26:53 compute-0 nova_compute[192079]: 2025-10-02 12:26:53.221 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:53 compute-0 nova_compute[192079]: 2025-10-02 12:26:53.613 2 DEBUG oslo_concurrency.lockutils [None req-d7a95f09-8ab2-4b92-b19f-8a4425267d23 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:26:53 compute-0 nova_compute[192079]: 2025-10-02 12:26:53.614 2 DEBUG oslo_concurrency.lockutils [None req-d7a95f09-8ab2-4b92-b19f-8a4425267d23 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:26:53 compute-0 nova_compute[192079]: 2025-10-02 12:26:53.614 2 DEBUG oslo_concurrency.lockutils [None req-d7a95f09-8ab2-4b92-b19f-8a4425267d23 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:26:53 compute-0 nova_compute[192079]: 2025-10-02 12:26:53.614 2 DEBUG oslo_concurrency.lockutils [None req-d7a95f09-8ab2-4b92-b19f-8a4425267d23 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:26:53 compute-0 nova_compute[192079]: 2025-10-02 12:26:53.614 2 DEBUG oslo_concurrency.lockutils [None req-d7a95f09-8ab2-4b92-b19f-8a4425267d23 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:26:53 compute-0 nova_compute[192079]: 2025-10-02 12:26:53.640 2 INFO nova.compute.manager [None req-d7a95f09-8ab2-4b92-b19f-8a4425267d23 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Terminating instance
Oct 02 12:26:53 compute-0 nova_compute[192079]: 2025-10-02 12:26:53.669 2 DEBUG nova.compute.manager [None req-d7a95f09-8ab2-4b92-b19f-8a4425267d23 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:26:53 compute-0 kernel: tapae0f2dc4-de (unregistering): left promiscuous mode
Oct 02 12:26:53 compute-0 NetworkManager[51160]: <info>  [1759408013.7138] device (tapae0f2dc4-de): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:26:53 compute-0 ovn_controller[94336]: 2025-10-02T12:26:53Z|00457|binding|INFO|Releasing lport ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 from this chassis (sb_readonly=0)
Oct 02 12:26:53 compute-0 ovn_controller[94336]: 2025-10-02T12:26:53Z|00458|binding|INFO|Setting lport ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 down in Southbound
Oct 02 12:26:53 compute-0 nova_compute[192079]: 2025-10-02 12:26:53.723 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:53 compute-0 ovn_controller[94336]: 2025-10-02T12:26:53Z|00459|binding|INFO|Removing iface tapae0f2dc4-de ovn-installed in OVS
Oct 02 12:26:53 compute-0 nova_compute[192079]: 2025-10-02 12:26:53.726 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:53 compute-0 nova_compute[192079]: 2025-10-02 12:26:53.744 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:53 compute-0 systemd[1]: machine-qemu\x2d59\x2dinstance\x2d00000079.scope: Deactivated successfully.
Oct 02 12:26:53 compute-0 systemd[1]: machine-qemu\x2d59\x2dinstance\x2d00000079.scope: Consumed 13.297s CPU time.
Oct 02 12:26:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:53.794 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:d5:ef:4c 10.100.0.9'], port_security=['fa:16:3e:d5:ef:4c 10.100.0.9'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.9/28', 'neutron:device_id': '40c8eb3a-547f-435e-8e59-ce9dcddb5f8e', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-a04f937a-375f-4fb0-90fe-5f514a88668f', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'neutron:revision_number': '8', 'neutron:security_group_ids': 'c0383701-0ec7-4f3b-8585-5effc4f5ca5a', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:port_fip': '192.168.122.248', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=50c0aa38-5fd8-41c7-b4bf-85b59722c5c3, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:26:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:53.795 103294 INFO neutron.agent.ovn.metadata.agent [-] Port ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 in datapath a04f937a-375f-4fb0-90fe-5f514a88668f unbound from our chassis
Oct 02 12:26:53 compute-0 systemd-machined[152150]: Machine qemu-59-instance-00000079 terminated.
Oct 02 12:26:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:53.796 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network a04f937a-375f-4fb0-90fe-5f514a88668f, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:26:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:53.797 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[846a95db-5e7a-4dc4-a7e5-4fd4ef4df51b]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:53.798 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f namespace which is not needed anymore
Oct 02 12:26:53 compute-0 kernel: tapae0f2dc4-de: entered promiscuous mode
Oct 02 12:26:53 compute-0 kernel: tapae0f2dc4-de (unregistering): left promiscuous mode
Oct 02 12:26:53 compute-0 NetworkManager[51160]: <info>  [1759408013.8890] manager: (tapae0f2dc4-de): new Tun device (/org/freedesktop/NetworkManager/Devices/231)
Oct 02 12:26:53 compute-0 nova_compute[192079]: 2025-10-02 12:26:53.893 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:53 compute-0 nova_compute[192079]: 2025-10-02 12:26:53.931 2 INFO nova.virt.libvirt.driver [-] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Instance destroyed successfully.
Oct 02 12:26:53 compute-0 nova_compute[192079]: 2025-10-02 12:26:53.931 2 DEBUG nova.objects.instance [None req-d7a95f09-8ab2-4b92-b19f-8a4425267d23 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'resources' on Instance uuid 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:26:53 compute-0 nova_compute[192079]: 2025-10-02 12:26:53.947 2 DEBUG nova.virt.libvirt.vif [None req-d7a95f09-8ab2-4b92-b19f-8a4425267d23 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:25:11Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerActionsTestJSON-server-300185996',display_name='tempest-ServerActionsTestJSON-server-300185996',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serveractionstestjson-server-300185996',id=121,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBJJLom+UJzZg9dduKQv+725QaYDZoMXvP/xlpKnb/K05SGc4dkyLwCDweJ3QifTmxLWqK9Sz5A12yMJbzpa36v5C4bUqj8uiWk/vbR1BAjBdKM9d/Ug8M2nT8LwDBGP/9A==',key_name='tempest-keypair-1006285918',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:26:37Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='e564a4cad5d443dba81ec04d2a05ced9',ramdisk_id='',reservation_id='r-als9bbed',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-ServerActionsTestJSON-1646745100',owner_user_name='tempest-ServerActionsTestJSON-1646745100-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:26:46Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='d54b1826121b47caba89932a78c06ccd',uuid=40c8eb3a-547f-435e-8e59-ce9dcddb5f8e,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], 
"gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:26:53 compute-0 nova_compute[192079]: 2025-10-02 12:26:53.948 2 DEBUG nova.network.os_vif_util [None req-d7a95f09-8ab2-4b92-b19f-8a4425267d23 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converting VIF {"id": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "address": "fa:16:3e:d5:ef:4c", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapae0f2dc4-de", "ovs_interfaceid": "ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:26:53 compute-0 nova_compute[192079]: 2025-10-02 12:26:53.949 2 DEBUG nova.network.os_vif_util [None req-d7a95f09-8ab2-4b92-b19f-8a4425267d23 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:d5:ef:4c,bridge_name='br-int',has_traffic_filtering=True,id=ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapae0f2dc4-de') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:26:53 compute-0 nova_compute[192079]: 2025-10-02 12:26:53.949 2 DEBUG os_vif [None req-d7a95f09-8ab2-4b92-b19f-8a4425267d23 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:d5:ef:4c,bridge_name='br-int',has_traffic_filtering=True,id=ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapae0f2dc4-de') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:26:53 compute-0 nova_compute[192079]: 2025-10-02 12:26:53.952 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:53 compute-0 nova_compute[192079]: 2025-10-02 12:26:53.952 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapae0f2dc4-de, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:26:53 compute-0 nova_compute[192079]: 2025-10-02 12:26:53.954 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:53 compute-0 nova_compute[192079]: 2025-10-02 12:26:53.955 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:53 compute-0 nova_compute[192079]: 2025-10-02 12:26:53.957 2 INFO os_vif [None req-d7a95f09-8ab2-4b92-b19f-8a4425267d23 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:d5:ef:4c,bridge_name='br-int',has_traffic_filtering=True,id=ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapae0f2dc4-de')
Oct 02 12:26:53 compute-0 nova_compute[192079]: 2025-10-02 12:26:53.958 2 INFO nova.virt.libvirt.driver [None req-d7a95f09-8ab2-4b92-b19f-8a4425267d23 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Deleting instance files /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e_del
Oct 02 12:26:53 compute-0 nova_compute[192079]: 2025-10-02 12:26:53.959 2 INFO nova.virt.libvirt.driver [None req-d7a95f09-8ab2-4b92-b19f-8a4425267d23 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Deletion of /var/lib/nova/instances/40c8eb3a-547f-435e-8e59-ce9dcddb5f8e_del complete
Oct 02 12:26:54 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[240641]: [NOTICE]   (240646) : haproxy version is 2.8.14-c23fe91
Oct 02 12:26:54 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[240641]: [NOTICE]   (240646) : path to executable is /usr/sbin/haproxy
Oct 02 12:26:54 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[240641]: [WARNING]  (240646) : Exiting Master process...
Oct 02 12:26:54 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[240641]: [ALERT]    (240646) : Current worker (240648) exited with code 143 (Terminated)
Oct 02 12:26:54 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[240641]: [WARNING]  (240646) : All workers exited. Exiting... (0)
Oct 02 12:26:54 compute-0 systemd[1]: libpod-56992571fa2538edbae4ddbeace6cf741246c8dfb632bb83c08a31181d96d2ae.scope: Deactivated successfully.
Oct 02 12:26:54 compute-0 podman[240777]: 2025-10-02 12:26:54.015741115 +0000 UTC m=+0.135462943 container died 56992571fa2538edbae4ddbeace6cf741246c8dfb632bb83c08a31181d96d2ae (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3)
Oct 02 12:26:54 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-56992571fa2538edbae4ddbeace6cf741246c8dfb632bb83c08a31181d96d2ae-userdata-shm.mount: Deactivated successfully.
Oct 02 12:26:54 compute-0 systemd[1]: var-lib-containers-storage-overlay-a35c840b159c74032436bace12789f6512ba5ebe316b1c50f13e8fb87142004d-merged.mount: Deactivated successfully.
Oct 02 12:26:54 compute-0 podman[240777]: 2025-10-02 12:26:54.047308305 +0000 UTC m=+0.167030123 container cleanup 56992571fa2538edbae4ddbeace6cf741246c8dfb632bb83c08a31181d96d2ae (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:26:54 compute-0 systemd[1]: libpod-conmon-56992571fa2538edbae4ddbeace6cf741246c8dfb632bb83c08a31181d96d2ae.scope: Deactivated successfully.
Oct 02 12:26:54 compute-0 nova_compute[192079]: 2025-10-02 12:26:54.059 2 INFO nova.compute.manager [None req-d7a95f09-8ab2-4b92-b19f-8a4425267d23 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Took 0.39 seconds to destroy the instance on the hypervisor.
Oct 02 12:26:54 compute-0 nova_compute[192079]: 2025-10-02 12:26:54.060 2 DEBUG oslo.service.loopingcall [None req-d7a95f09-8ab2-4b92-b19f-8a4425267d23 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:26:54 compute-0 nova_compute[192079]: 2025-10-02 12:26:54.061 2 DEBUG nova.compute.manager [-] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:26:54 compute-0 nova_compute[192079]: 2025-10-02 12:26:54.061 2 DEBUG nova.network.neutron [-] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:26:54 compute-0 podman[240823]: 2025-10-02 12:26:54.10801313 +0000 UTC m=+0.038752888 container remove 56992571fa2538edbae4ddbeace6cf741246c8dfb632bb83c08a31181d96d2ae (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:26:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:54.114 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[92984dd8-c3f9-4431-b30f-839db68434c9]: (4, ('Thu Oct  2 12:26:53 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f (56992571fa2538edbae4ddbeace6cf741246c8dfb632bb83c08a31181d96d2ae)\n56992571fa2538edbae4ddbeace6cf741246c8dfb632bb83c08a31181d96d2ae\nThu Oct  2 12:26:54 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f (56992571fa2538edbae4ddbeace6cf741246c8dfb632bb83c08a31181d96d2ae)\n56992571fa2538edbae4ddbeace6cf741246c8dfb632bb83c08a31181d96d2ae\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:54.116 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[da06d95e-c1b1-4e92-b8b9-f9fb7f4972a5]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:54.116 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapa04f937a-30, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:26:54 compute-0 nova_compute[192079]: 2025-10-02 12:26:54.119 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:54 compute-0 kernel: tapa04f937a-30: left promiscuous mode
Oct 02 12:26:54 compute-0 nova_compute[192079]: 2025-10-02 12:26:54.130 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:54.133 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d8c0c569-94a0-4311-a47c-98e8ef2cec9a]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:54.163 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7799bf16-81e7-43a7-a629-0a07c703d178]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:54.164 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0acf53e7-90fe-4365-aaed-733522ce31b3]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:54.180 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[82623cb1-1630-4d1b-92bd-18f1f64ab48b]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 599454, 'reachable_time': 40084, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 240838, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:54.183 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:26:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:26:54.183 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[4dbdd30b-7264-4836-b844-666a59acdee5]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:26:54 compute-0 systemd[1]: run-netns-ovnmeta\x2da04f937a\x2d375f\x2d4fb0\x2d90fe\x2d5f514a88668f.mount: Deactivated successfully.
Oct 02 12:26:54 compute-0 nova_compute[192079]: 2025-10-02 12:26:54.599 2 DEBUG nova.compute.manager [req-b97e2d3e-102c-4e99-a8fc-311ce2e5ca4e req-f30d778c-5de6-4c02-bb33-92df51b7e237 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Received event network-vif-unplugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:26:54 compute-0 nova_compute[192079]: 2025-10-02 12:26:54.599 2 DEBUG oslo_concurrency.lockutils [req-b97e2d3e-102c-4e99-a8fc-311ce2e5ca4e req-f30d778c-5de6-4c02-bb33-92df51b7e237 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:26:54 compute-0 nova_compute[192079]: 2025-10-02 12:26:54.600 2 DEBUG oslo_concurrency.lockutils [req-b97e2d3e-102c-4e99-a8fc-311ce2e5ca4e req-f30d778c-5de6-4c02-bb33-92df51b7e237 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:26:54 compute-0 nova_compute[192079]: 2025-10-02 12:26:54.600 2 DEBUG oslo_concurrency.lockutils [req-b97e2d3e-102c-4e99-a8fc-311ce2e5ca4e req-f30d778c-5de6-4c02-bb33-92df51b7e237 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:26:54 compute-0 nova_compute[192079]: 2025-10-02 12:26:54.601 2 DEBUG nova.compute.manager [req-b97e2d3e-102c-4e99-a8fc-311ce2e5ca4e req-f30d778c-5de6-4c02-bb33-92df51b7e237 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] No waiting events found dispatching network-vif-unplugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:26:54 compute-0 nova_compute[192079]: 2025-10-02 12:26:54.601 2 DEBUG nova.compute.manager [req-b97e2d3e-102c-4e99-a8fc-311ce2e5ca4e req-f30d778c-5de6-4c02-bb33-92df51b7e237 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Received event network-vif-unplugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:26:56 compute-0 nova_compute[192079]: 2025-10-02 12:26:56.663 2 DEBUG nova.network.neutron [-] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:26:56 compute-0 nova_compute[192079]: 2025-10-02 12:26:56.756 2 INFO nova.compute.manager [-] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Took 2.70 seconds to deallocate network for instance.
Oct 02 12:26:56 compute-0 nova_compute[192079]: 2025-10-02 12:26:56.812 2 DEBUG nova.compute.manager [req-9f64c22b-6654-49d0-916c-61b11af7e87e req-523f1694-0315-4b10-8677-78ed6b1f7e91 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Received event network-vif-deleted-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:26:56 compute-0 nova_compute[192079]: 2025-10-02 12:26:56.835 2 DEBUG nova.compute.manager [req-7b94502b-3e4d-4fb2-b9dd-5e142a99bad8 req-411d30b7-d01a-44ab-8d56-893f20b3fa58 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Received event network-vif-plugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:26:56 compute-0 nova_compute[192079]: 2025-10-02 12:26:56.835 2 DEBUG oslo_concurrency.lockutils [req-7b94502b-3e4d-4fb2-b9dd-5e142a99bad8 req-411d30b7-d01a-44ab-8d56-893f20b3fa58 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:26:56 compute-0 nova_compute[192079]: 2025-10-02 12:26:56.836 2 DEBUG oslo_concurrency.lockutils [req-7b94502b-3e4d-4fb2-b9dd-5e142a99bad8 req-411d30b7-d01a-44ab-8d56-893f20b3fa58 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:26:56 compute-0 nova_compute[192079]: 2025-10-02 12:26:56.836 2 DEBUG oslo_concurrency.lockutils [req-7b94502b-3e4d-4fb2-b9dd-5e142a99bad8 req-411d30b7-d01a-44ab-8d56-893f20b3fa58 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:26:56 compute-0 nova_compute[192079]: 2025-10-02 12:26:56.836 2 DEBUG nova.compute.manager [req-7b94502b-3e4d-4fb2-b9dd-5e142a99bad8 req-411d30b7-d01a-44ab-8d56-893f20b3fa58 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] No waiting events found dispatching network-vif-plugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:26:56 compute-0 nova_compute[192079]: 2025-10-02 12:26:56.836 2 WARNING nova.compute.manager [req-7b94502b-3e4d-4fb2-b9dd-5e142a99bad8 req-411d30b7-d01a-44ab-8d56-893f20b3fa58 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Received unexpected event network-vif-plugged-ae0f2dc4-de79-412d-aad1-ad7e1dec7aa0 for instance with vm_state active and task_state deleting.
Oct 02 12:26:56 compute-0 nova_compute[192079]: 2025-10-02 12:26:56.907 2 DEBUG oslo_concurrency.lockutils [None req-d7a95f09-8ab2-4b92-b19f-8a4425267d23 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:26:56 compute-0 nova_compute[192079]: 2025-10-02 12:26:56.907 2 DEBUG oslo_concurrency.lockutils [None req-d7a95f09-8ab2-4b92-b19f-8a4425267d23 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:26:57 compute-0 nova_compute[192079]: 2025-10-02 12:26:57.026 2 DEBUG nova.compute.provider_tree [None req-d7a95f09-8ab2-4b92-b19f-8a4425267d23 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:26:57 compute-0 nova_compute[192079]: 2025-10-02 12:26:57.052 2 DEBUG nova.scheduler.client.report [None req-d7a95f09-8ab2-4b92-b19f-8a4425267d23 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:26:57 compute-0 nova_compute[192079]: 2025-10-02 12:26:57.084 2 DEBUG oslo_concurrency.lockutils [None req-d7a95f09-8ab2-4b92-b19f-8a4425267d23 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.177s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:26:57 compute-0 nova_compute[192079]: 2025-10-02 12:26:57.116 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:26:57 compute-0 nova_compute[192079]: 2025-10-02 12:26:57.196 2 INFO nova.scheduler.client.report [None req-d7a95f09-8ab2-4b92-b19f-8a4425267d23 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Deleted allocations for instance 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e
Oct 02 12:26:57 compute-0 nova_compute[192079]: 2025-10-02 12:26:57.499 2 DEBUG oslo_concurrency.lockutils [None req-d7a95f09-8ab2-4b92-b19f-8a4425267d23 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "40c8eb3a-547f-435e-8e59-ce9dcddb5f8e" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 3.885s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:26:58 compute-0 podman[240839]: 2025-10-02 12:26:58.13032416 +0000 UTC m=+0.044357860 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, container_name=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, 
managed_by=edpm_ansible, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_metadata_agent, io.buildah.version=1.41.3)
Oct 02 12:26:58 compute-0 podman[240841]: 2025-10-02 12:26:58.137434393 +0000 UTC m=+0.046135457 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:26:58 compute-0 podman[240840]: 2025-10-02 12:26:58.161787287 +0000 UTC m=+0.073091892 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, container_name=ovn_controller, org.label-schema.build-date=20251001, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, io.buildah.version=1.41.3)
Oct 02 12:26:59 compute-0 nova_compute[192079]: 2025-10-02 12:26:59.016 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:02 compute-0 nova_compute[192079]: 2025-10-02 12:27:02.119 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:02.226 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:27:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:02.226 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:27:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:02.226 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:27:04 compute-0 nova_compute[192079]: 2025-10-02 12:27:04.005 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:04 compute-0 nova_compute[192079]: 2025-10-02 12:27:04.017 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:07 compute-0 nova_compute[192079]: 2025-10-02 12:27:07.119 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:07 compute-0 podman[240908]: 2025-10-02 12:27:07.138427289 +0000 UTC m=+0.057645342 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, container_name=ceilometer_agent_compute, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.license=GPLv2, 
org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_id=edpm)
Oct 02 12:27:07 compute-0 nova_compute[192079]: 2025-10-02 12:27:07.654 2 DEBUG oslo_concurrency.lockutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "2fcfea17-10df-499a-8692-facbbc76266b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:27:07 compute-0 nova_compute[192079]: 2025-10-02 12:27:07.654 2 DEBUG oslo_concurrency.lockutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:27:07 compute-0 nova_compute[192079]: 2025-10-02 12:27:07.685 2 DEBUG nova.compute.manager [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:27:07 compute-0 nova_compute[192079]: 2025-10-02 12:27:07.855 2 DEBUG oslo_concurrency.lockutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:27:07 compute-0 nova_compute[192079]: 2025-10-02 12:27:07.856 2 DEBUG oslo_concurrency.lockutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:27:07 compute-0 nova_compute[192079]: 2025-10-02 12:27:07.863 2 DEBUG nova.virt.hardware [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:27:07 compute-0 nova_compute[192079]: 2025-10-02 12:27:07.864 2 INFO nova.compute.claims [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:27:08 compute-0 nova_compute[192079]: 2025-10-02 12:27:08.149 2 DEBUG nova.compute.provider_tree [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:27:08 compute-0 nova_compute[192079]: 2025-10-02 12:27:08.166 2 DEBUG nova.scheduler.client.report [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:27:08 compute-0 nova_compute[192079]: 2025-10-02 12:27:08.218 2 DEBUG oslo_concurrency.lockutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.362s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:27:08 compute-0 nova_compute[192079]: 2025-10-02 12:27:08.218 2 DEBUG nova.compute.manager [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:27:08 compute-0 nova_compute[192079]: 2025-10-02 12:27:08.295 2 DEBUG nova.compute.manager [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:27:08 compute-0 nova_compute[192079]: 2025-10-02 12:27:08.295 2 DEBUG nova.network.neutron [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:27:08 compute-0 nova_compute[192079]: 2025-10-02 12:27:08.474 2 INFO nova.virt.libvirt.driver [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:27:08 compute-0 nova_compute[192079]: 2025-10-02 12:27:08.570 2 DEBUG nova.policy [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:27:08 compute-0 nova_compute[192079]: 2025-10-02 12:27:08.815 2 DEBUG nova.compute.manager [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:27:08 compute-0 nova_compute[192079]: 2025-10-02 12:27:08.929 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759408013.9289315, 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:27:08 compute-0 nova_compute[192079]: 2025-10-02 12:27:08.930 2 INFO nova.compute.manager [-] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] VM Stopped (Lifecycle Event)
Oct 02 12:27:09 compute-0 nova_compute[192079]: 2025-10-02 12:27:09.022 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:09 compute-0 nova_compute[192079]: 2025-10-02 12:27:09.250 2 DEBUG nova.compute.manager [None req-5f1cd32c-45cc-43d2-8622-1d7fcacecec8 - - - - - -] [instance: 40c8eb3a-547f-435e-8e59-ce9dcddb5f8e] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:27:10 compute-0 nova_compute[192079]: 2025-10-02 12:27:10.105 2 DEBUG nova.compute.manager [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:27:10 compute-0 nova_compute[192079]: 2025-10-02 12:27:10.106 2 DEBUG nova.virt.libvirt.driver [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:27:10 compute-0 nova_compute[192079]: 2025-10-02 12:27:10.106 2 INFO nova.virt.libvirt.driver [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Creating image(s)
Oct 02 12:27:10 compute-0 nova_compute[192079]: 2025-10-02 12:27:10.107 2 DEBUG oslo_concurrency.lockutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "/var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:27:10 compute-0 nova_compute[192079]: 2025-10-02 12:27:10.107 2 DEBUG oslo_concurrency.lockutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "/var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:27:10 compute-0 nova_compute[192079]: 2025-10-02 12:27:10.107 2 DEBUG oslo_concurrency.lockutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "/var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:27:10 compute-0 nova_compute[192079]: 2025-10-02 12:27:10.119 2 DEBUG oslo_concurrency.processutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:27:10 compute-0 nova_compute[192079]: 2025-10-02 12:27:10.174 2 DEBUG oslo_concurrency.processutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:27:10 compute-0 nova_compute[192079]: 2025-10-02 12:27:10.175 2 DEBUG oslo_concurrency.lockutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:27:10 compute-0 nova_compute[192079]: 2025-10-02 12:27:10.176 2 DEBUG oslo_concurrency.lockutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:27:10 compute-0 nova_compute[192079]: 2025-10-02 12:27:10.187 2 DEBUG oslo_concurrency.processutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:27:10 compute-0 nova_compute[192079]: 2025-10-02 12:27:10.242 2 DEBUG oslo_concurrency.processutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:27:10 compute-0 nova_compute[192079]: 2025-10-02 12:27:10.243 2 DEBUG oslo_concurrency.processutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:27:10 compute-0 nova_compute[192079]: 2025-10-02 12:27:10.296 2 DEBUG oslo_concurrency.processutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk 1073741824" returned: 0 in 0.052s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:27:10 compute-0 nova_compute[192079]: 2025-10-02 12:27:10.297 2 DEBUG oslo_concurrency.lockutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.121s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:27:10 compute-0 nova_compute[192079]: 2025-10-02 12:27:10.298 2 DEBUG oslo_concurrency.processutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:27:10 compute-0 nova_compute[192079]: 2025-10-02 12:27:10.360 2 DEBUG oslo_concurrency.processutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.062s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:27:10 compute-0 nova_compute[192079]: 2025-10-02 12:27:10.362 2 DEBUG nova.virt.disk.api [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Checking if we can resize image /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:27:10 compute-0 nova_compute[192079]: 2025-10-02 12:27:10.363 2 DEBUG oslo_concurrency.processutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:27:10 compute-0 nova_compute[192079]: 2025-10-02 12:27:10.429 2 DEBUG oslo_concurrency.processutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk --force-share --output=json" returned: 0 in 0.066s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:27:10 compute-0 nova_compute[192079]: 2025-10-02 12:27:10.431 2 DEBUG nova.virt.disk.api [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Cannot resize image /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:27:10 compute-0 nova_compute[192079]: 2025-10-02 12:27:10.431 2 DEBUG nova.objects.instance [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'migration_context' on Instance uuid 2fcfea17-10df-499a-8692-facbbc76266b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:27:10 compute-0 nova_compute[192079]: 2025-10-02 12:27:10.472 2 DEBUG nova.virt.libvirt.driver [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:27:10 compute-0 nova_compute[192079]: 2025-10-02 12:27:10.473 2 DEBUG nova.virt.libvirt.driver [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Ensure instance console log exists: /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:27:10 compute-0 nova_compute[192079]: 2025-10-02 12:27:10.473 2 DEBUG oslo_concurrency.lockutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:27:10 compute-0 nova_compute[192079]: 2025-10-02 12:27:10.474 2 DEBUG oslo_concurrency.lockutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:27:10 compute-0 nova_compute[192079]: 2025-10-02 12:27:10.474 2 DEBUG oslo_concurrency.lockutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:27:10 compute-0 nova_compute[192079]: 2025-10-02 12:27:10.967 2 DEBUG nova.network.neutron [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Successfully created port: 5f268bcb-29c1-4e4e-a36d-b2ec144d3dca _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:27:12 compute-0 nova_compute[192079]: 2025-10-02 12:27:12.122 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:13 compute-0 nova_compute[192079]: 2025-10-02 12:27:13.095 2 DEBUG nova.network.neutron [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Successfully updated port: 5f268bcb-29c1-4e4e-a36d-b2ec144d3dca _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:27:13 compute-0 nova_compute[192079]: 2025-10-02 12:27:13.124 2 DEBUG oslo_concurrency.lockutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "refresh_cache-2fcfea17-10df-499a-8692-facbbc76266b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:27:13 compute-0 nova_compute[192079]: 2025-10-02 12:27:13.125 2 DEBUG oslo_concurrency.lockutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquired lock "refresh_cache-2fcfea17-10df-499a-8692-facbbc76266b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:27:13 compute-0 nova_compute[192079]: 2025-10-02 12:27:13.125 2 DEBUG nova.network.neutron [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:27:13 compute-0 nova_compute[192079]: 2025-10-02 12:27:13.211 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:13 compute-0 nova_compute[192079]: 2025-10-02 12:27:13.264 2 DEBUG nova.compute.manager [req-0876746e-8cf5-40b8-9de3-9ec2a3bbae05 req-12d2dfc9-c3f8-412f-bb99-43c74d8f5bf9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Received event network-changed-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:27:13 compute-0 nova_compute[192079]: 2025-10-02 12:27:13.265 2 DEBUG nova.compute.manager [req-0876746e-8cf5-40b8-9de3-9ec2a3bbae05 req-12d2dfc9-c3f8-412f-bb99-43c74d8f5bf9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Refreshing instance network info cache due to event network-changed-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:27:13 compute-0 nova_compute[192079]: 2025-10-02 12:27:13.265 2 DEBUG oslo_concurrency.lockutils [req-0876746e-8cf5-40b8-9de3-9ec2a3bbae05 req-12d2dfc9-c3f8-412f-bb99-43c74d8f5bf9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-2fcfea17-10df-499a-8692-facbbc76266b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:27:13 compute-0 nova_compute[192079]: 2025-10-02 12:27:13.429 2 DEBUG nova.network.neutron [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.061 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.844 2 DEBUG nova.network.neutron [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Updating instance_info_cache with network_info: [{"id": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "address": "fa:16:3e:cb:cf:9a", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5f268bcb-29", "ovs_interfaceid": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.888 2 DEBUG oslo_concurrency.lockutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Releasing lock "refresh_cache-2fcfea17-10df-499a-8692-facbbc76266b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.889 2 DEBUG nova.compute.manager [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Instance network_info: |[{"id": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "address": "fa:16:3e:cb:cf:9a", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5f268bcb-29", "ovs_interfaceid": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.890 2 DEBUG oslo_concurrency.lockutils [req-0876746e-8cf5-40b8-9de3-9ec2a3bbae05 req-12d2dfc9-c3f8-412f-bb99-43c74d8f5bf9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-2fcfea17-10df-499a-8692-facbbc76266b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.891 2 DEBUG nova.network.neutron [req-0876746e-8cf5-40b8-9de3-9ec2a3bbae05 req-12d2dfc9-c3f8-412f-bb99-43c74d8f5bf9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Refreshing network info cache for port 5f268bcb-29c1-4e4e-a36d-b2ec144d3dca _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.895 2 DEBUG nova.virt.libvirt.driver [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Start _get_guest_xml network_info=[{"id": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "address": "fa:16:3e:cb:cf:9a", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5f268bcb-29", "ovs_interfaceid": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.900 2 WARNING nova.virt.libvirt.driver [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.908 2 DEBUG nova.virt.libvirt.host [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.908 2 DEBUG nova.virt.libvirt.host [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.912 2 DEBUG nova.virt.libvirt.host [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.913 2 DEBUG nova.virt.libvirt.host [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.914 2 DEBUG nova.virt.libvirt.driver [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.914 2 DEBUG nova.virt.hardware [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.914 2 DEBUG nova.virt.hardware [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.915 2 DEBUG nova.virt.hardware [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.915 2 DEBUG nova.virt.hardware [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.915 2 DEBUG nova.virt.hardware [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.916 2 DEBUG nova.virt.hardware [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.916 2 DEBUG nova.virt.hardware [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.916 2 DEBUG nova.virt.hardware [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.916 2 DEBUG nova.virt.hardware [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.917 2 DEBUG nova.virt.hardware [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.917 2 DEBUG nova.virt.hardware [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.920 2 DEBUG nova.virt.libvirt.vif [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:27:05Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ServerActionsTestJSON-server-1253918640',display_name='tempest-ServerActionsTestJSON-server-1253918640',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serveractionstestjson-server-1253918640',id=125,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBJJLom+UJzZg9dduKQv+725QaYDZoMXvP/xlpKnb/K05SGc4dkyLwCDweJ3QifTmxLWqK9Sz5A12yMJbzpa36v5C4bUqj8uiWk/vbR1BAjBdKM9d/Ug8M2nT8LwDBGP/9A==',key_name='tempest-keypair-1006285918',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='e564a4cad5d443dba81ec04d2a05ced9',ramdisk_id='',reservation_id='r-r8iquef4',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServerActionsTestJSON-1646745100',owner_user_name='tempest-ServerActionsTestJSON-1646745100-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:27:09Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='d54b1826121b47caba89932a78c06ccd',uuid=2fcfea17-10df-499a-8692-facbbc76266b,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "address": "fa:16:3e:cb:cf:9a", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": 
"fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5f268bcb-29", "ovs_interfaceid": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.921 2 DEBUG nova.network.os_vif_util [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converting VIF {"id": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "address": "fa:16:3e:cb:cf:9a", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5f268bcb-29", "ovs_interfaceid": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.921 2 DEBUG nova.network.os_vif_util [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:cb:cf:9a,bridge_name='br-int',has_traffic_filtering=True,id=5f268bcb-29c1-4e4e-a36d-b2ec144d3dca,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5f268bcb-29') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.922 2 DEBUG nova.objects.instance [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'pci_devices' on Instance uuid 2fcfea17-10df-499a-8692-facbbc76266b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.956 2 DEBUG nova.virt.libvirt.driver [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:27:14 compute-0 nova_compute[192079]:   <uuid>2fcfea17-10df-499a-8692-facbbc76266b</uuid>
Oct 02 12:27:14 compute-0 nova_compute[192079]:   <name>instance-0000007d</name>
Oct 02 12:27:14 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:27:14 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:27:14 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:27:14 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:       <nova:name>tempest-ServerActionsTestJSON-server-1253918640</nova:name>
Oct 02 12:27:14 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:27:14</nova:creationTime>
Oct 02 12:27:14 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:27:14 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:27:14 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:27:14 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:27:14 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:27:14 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:27:14 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:27:14 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:27:14 compute-0 nova_compute[192079]:         <nova:user uuid="d54b1826121b47caba89932a78c06ccd">tempest-ServerActionsTestJSON-1646745100-project-member</nova:user>
Oct 02 12:27:14 compute-0 nova_compute[192079]:         <nova:project uuid="e564a4cad5d443dba81ec04d2a05ced9">tempest-ServerActionsTestJSON-1646745100</nova:project>
Oct 02 12:27:14 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:27:14 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:27:14 compute-0 nova_compute[192079]:         <nova:port uuid="5f268bcb-29c1-4e4e-a36d-b2ec144d3dca">
Oct 02 12:27:14 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.4" ipVersion="4"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:27:14 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:27:14 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:27:14 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <system>
Oct 02 12:27:14 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:27:14 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:27:14 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:27:14 compute-0 nova_compute[192079]:       <entry name="serial">2fcfea17-10df-499a-8692-facbbc76266b</entry>
Oct 02 12:27:14 compute-0 nova_compute[192079]:       <entry name="uuid">2fcfea17-10df-499a-8692-facbbc76266b</entry>
Oct 02 12:27:14 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     </system>
Oct 02 12:27:14 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:27:14 compute-0 nova_compute[192079]:   <os>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:   </os>
Oct 02 12:27:14 compute-0 nova_compute[192079]:   <features>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:   </features>
Oct 02 12:27:14 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:27:14 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:27:14 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:27:14 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:27:14 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk.config"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:27:14 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:cb:cf:9a"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:       <target dev="tap5f268bcb-29"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:27:14 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/console.log" append="off"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <video>
Oct 02 12:27:14 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     </video>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:27:14 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:27:14 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:27:14 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:27:14 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:27:14 compute-0 nova_compute[192079]: </domain>
Oct 02 12:27:14 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.957 2 DEBUG nova.compute.manager [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Preparing to wait for external event network-vif-plugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.957 2 DEBUG oslo_concurrency.lockutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "2fcfea17-10df-499a-8692-facbbc76266b-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.957 2 DEBUG oslo_concurrency.lockutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.957 2 DEBUG oslo_concurrency.lockutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.958 2 DEBUG nova.virt.libvirt.vif [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:27:05Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ServerActionsTestJSON-server-1253918640',display_name='tempest-ServerActionsTestJSON-server-1253918640',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serveractionstestjson-server-1253918640',id=125,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBJJLom+UJzZg9dduKQv+725QaYDZoMXvP/xlpKnb/K05SGc4dkyLwCDweJ3QifTmxLWqK9Sz5A12yMJbzpa36v5C4bUqj8uiWk/vbR1BAjBdKM9d/Ug8M2nT8LwDBGP/9A==',key_name='tempest-keypair-1006285918',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='e564a4cad5d443dba81ec04d2a05ced9',ramdisk_id='',reservation_id='r-r8iquef4',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServerActionsTestJSON-1646745100',owner_user_name='tempest-ServerActionsTestJSON-1646745100-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:27:09Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='d54b1826121b47caba89932a78c06ccd',uuid=2fcfea17-10df-499a-8692-facbbc76266b,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "address": "fa:16:3e:cb:cf:9a", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", 
"type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5f268bcb-29", "ovs_interfaceid": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.958 2 DEBUG nova.network.os_vif_util [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converting VIF {"id": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "address": "fa:16:3e:cb:cf:9a", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5f268bcb-29", "ovs_interfaceid": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.959 2 DEBUG nova.network.os_vif_util [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:cb:cf:9a,bridge_name='br-int',has_traffic_filtering=True,id=5f268bcb-29c1-4e4e-a36d-b2ec144d3dca,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5f268bcb-29') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.959 2 DEBUG os_vif [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:cb:cf:9a,bridge_name='br-int',has_traffic_filtering=True,id=5f268bcb-29c1-4e4e-a36d-b2ec144d3dca,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5f268bcb-29') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.960 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.960 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.960 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.962 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.962 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap5f268bcb-29, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.963 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap5f268bcb-29, col_values=(('external_ids', {'iface-id': '5f268bcb-29c1-4e4e-a36d-b2ec144d3dca', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:cb:cf:9a', 'vm-uuid': '2fcfea17-10df-499a-8692-facbbc76266b'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.964 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:14 compute-0 NetworkManager[51160]: <info>  [1759408034.9657] manager: (tap5f268bcb-29): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/232)
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.967 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.970 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:14 compute-0 nova_compute[192079]: 2025-10-02 12:27:14.971 2 INFO os_vif [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:cb:cf:9a,bridge_name='br-int',has_traffic_filtering=True,id=5f268bcb-29c1-4e4e-a36d-b2ec144d3dca,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5f268bcb-29')
Oct 02 12:27:15 compute-0 nova_compute[192079]: 2025-10-02 12:27:15.068 2 DEBUG nova.virt.libvirt.driver [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:27:15 compute-0 nova_compute[192079]: 2025-10-02 12:27:15.068 2 DEBUG nova.virt.libvirt.driver [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:27:15 compute-0 nova_compute[192079]: 2025-10-02 12:27:15.069 2 DEBUG nova.virt.libvirt.driver [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] No VIF found with MAC fa:16:3e:cb:cf:9a, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:27:15 compute-0 nova_compute[192079]: 2025-10-02 12:27:15.069 2 INFO nova.virt.libvirt.driver [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Using config drive
Oct 02 12:27:15 compute-0 podman[240946]: 2025-10-02 12:27:15.0715285 +0000 UTC m=+0.062400931 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, distribution-scope=public, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, io.openshift.tags=minimal rhel9, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., url=https://catalog.redhat.com/en/search?searchType=containers, vendor=Red Hat, Inc., version=9.6, architecture=x86_64, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., io.openshift.expose-services=, release=1755695350, managed_by=edpm_ansible, name=ubi9-minimal, com.redhat.component=ubi9-minimal-container, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, maintainer=Red Hat, Inc., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, vcs-type=git, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., build-date=2025-08-20T13:12:41, container_name=openstack_network_exporter, io.buildah.version=1.33.7)
Oct 02 12:27:15 compute-0 podman[240947]: 2025-10-02 12:27:15.093070558 +0000 UTC m=+0.078662945 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=multipathd, org.label-schema.license=GPLv2, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_id=multipathd, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, io.buildah.version=1.41.3)
Oct 02 12:27:15 compute-0 nova_compute[192079]: 2025-10-02 12:27:15.892 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:16 compute-0 nova_compute[192079]: 2025-10-02 12:27:16.592 2 INFO nova.virt.libvirt.driver [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Creating config drive at /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk.config
Oct 02 12:27:16 compute-0 nova_compute[192079]: 2025-10-02 12:27:16.598 2 DEBUG oslo_concurrency.processutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpdqtjxz1s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:27:16 compute-0 nova_compute[192079]: 2025-10-02 12:27:16.723 2 DEBUG oslo_concurrency.processutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpdqtjxz1s" returned: 0 in 0.125s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:27:16 compute-0 kernel: tap5f268bcb-29: entered promiscuous mode
Oct 02 12:27:16 compute-0 NetworkManager[51160]: <info>  [1759408036.7825] manager: (tap5f268bcb-29): new Tun device (/org/freedesktop/NetworkManager/Devices/233)
Oct 02 12:27:16 compute-0 ovn_controller[94336]: 2025-10-02T12:27:16Z|00460|binding|INFO|Claiming lport 5f268bcb-29c1-4e4e-a36d-b2ec144d3dca for this chassis.
Oct 02 12:27:16 compute-0 ovn_controller[94336]: 2025-10-02T12:27:16Z|00461|binding|INFO|5f268bcb-29c1-4e4e-a36d-b2ec144d3dca: Claiming fa:16:3e:cb:cf:9a 10.100.0.4
Oct 02 12:27:16 compute-0 nova_compute[192079]: 2025-10-02 12:27:16.785 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:16 compute-0 ovn_controller[94336]: 2025-10-02T12:27:16Z|00462|binding|INFO|Setting lport 5f268bcb-29c1-4e4e-a36d-b2ec144d3dca ovn-installed in OVS
Oct 02 12:27:16 compute-0 nova_compute[192079]: 2025-10-02 12:27:16.800 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:16 compute-0 nova_compute[192079]: 2025-10-02 12:27:16.801 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:16 compute-0 systemd-udevd[241002]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:27:16 compute-0 NetworkManager[51160]: <info>  [1759408036.8244] device (tap5f268bcb-29): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:27:16 compute-0 NetworkManager[51160]: <info>  [1759408036.8250] device (tap5f268bcb-29): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:27:16 compute-0 systemd-machined[152150]: New machine qemu-60-instance-0000007d.
Oct 02 12:27:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:16.841 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:cb:cf:9a 10.100.0.4'], port_security=['fa:16:3e:cb:cf:9a 10.100.0.4'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.4/28', 'neutron:device_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-a04f937a-375f-4fb0-90fe-5f514a88668f', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'neutron:revision_number': '2', 'neutron:security_group_ids': 'c0383701-0ec7-4f3b-8585-5effc4f5ca5a', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=50c0aa38-5fd8-41c7-b4bf-85b59722c5c3, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=5f268bcb-29c1-4e4e-a36d-b2ec144d3dca) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:27:16 compute-0 ovn_controller[94336]: 2025-10-02T12:27:16Z|00463|binding|INFO|Setting lport 5f268bcb-29c1-4e4e-a36d-b2ec144d3dca up in Southbound
Oct 02 12:27:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:16.842 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 5f268bcb-29c1-4e4e-a36d-b2ec144d3dca in datapath a04f937a-375f-4fb0-90fe-5f514a88668f bound to our chassis
Oct 02 12:27:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:16.843 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network a04f937a-375f-4fb0-90fe-5f514a88668f
Oct 02 12:27:16 compute-0 systemd[1]: Started Virtual Machine qemu-60-instance-0000007d.
Oct 02 12:27:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:16.857 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[07ca9ca8-cca7-41d0-93ff-18fe8069db41]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:16.859 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tapa04f937a-31 in ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:27:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:16.861 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tapa04f937a-30 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:27:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:16.861 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[58fdbf94-f725-4b97-9d83-0b67c7b251aa]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:16.863 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[532b036a-8783-4b6e-be51-72be3ccc6583]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:16.873 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[03f69a82-623a-42b9-a1e9-881ae8ff840e]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:16.899 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e4d8397c-2969-4036-b06e-d168f5b09a54]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:16.926 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[8114018f-33cb-4ab8-9145-e7d14b0cebb1]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:16 compute-0 NetworkManager[51160]: <info>  [1759408036.9344] manager: (tapa04f937a-30): new Veth device (/org/freedesktop/NetworkManager/Devices/234)
Oct 02 12:27:16 compute-0 systemd-udevd[241006]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:27:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:16.935 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[122b15fd-d63f-4121-8d69-194acdcf1daf]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:16.974 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[6b8f23a6-f1b4-40e6-88b5-137bf8e18392]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:16.977 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[0ebbe5df-f770-48bc-a7fb-7c7cd9a94817]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:17 compute-0 NetworkManager[51160]: <info>  [1759408037.0009] device (tapa04f937a-30): carrier: link connected
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:17.008 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[dcde67b9-30c1-4124-819a-b4d11db1b249]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:17.026 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a074df04-58db-4063-8ed9-f7fd95fd427a]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapa04f937a-31'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:33:93:68'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 149], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 603462, 'reachable_time': 19042, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 241044, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:17.050 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[982cdacc-8b87-4887-954b-0ec349b8fcde]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe33:9368'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 603462, 'tstamp': 603462}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 241046, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:17.081 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4153a53e-a253-4963-adb8-c4dbb1f48cbd]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapa04f937a-31'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:33:93:68'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 149], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 603462, 'reachable_time': 19042, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 241047, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.106 12 DEBUG ceilometer.compute.discovery [-] instance data: {'id': '2fcfea17-10df-499a-8692-facbbc76266b', 'name': 'tempest-ServerActionsTestJSON-server-1253918640', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'os_type': 'hvm', 'architecture': 'x86_64', 'OS-EXT-SRV-ATTR:instance_name': 'instance-0000007d', 'OS-EXT-SRV-ATTR:host': 'compute-0.ctlplane.example.com', 'OS-EXT-STS:vm_state': 'paused', 'tenant_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'hostId': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'status': 'paused', 'metadata': {}} discover_libvirt_polling /usr/lib/python3.9/site-packages/ceilometer/compute/discovery.py:228
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.107 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets.error in the context of pollsters
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.109 12 DEBUG ceilometer.compute.virt.libvirt.inspector [-] No delta meter predecessor for 2fcfea17-10df-499a-8692-facbbc76266b / tap5f268bcb-29 inspect_vnics /usr/lib/python3.9/site-packages/ceilometer/compute/virt/libvirt/inspector.py:136
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.109 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/network.incoming.packets.error volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '59289569-135d-4044-b83a-1e2a226b2e3b', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets.error', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_name': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_name': None, 'resource_id': 'instance-0000007d-2fcfea17-10df-499a-8692-facbbc76266b-tap5f268bcb-29', 'timestamp': '2025-10-02T12:27:17.107287', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestJSON-server-1253918640', 'name': 'tap5f268bcb-29', 'instance_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'instance_type': 'm1.nano', 'host': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'paused', 'state': 'paused', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:cb:cf:9a', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap5f268bcb-29'}, 'message_id': '222c9540-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6034.794351669, 'message_signature': 'a263654c0ec04a8eb22eddd4c4c050f972ebe7b4216b556edc3adc9b8361938b'}]}, 'timestamp': '2025-10-02 12:27:17.110511', '_unique_id': '45c38afec8a04c7ba99d0cb7ee79def8'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.111 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.112 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.requests in the context of pollsters
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:17.112 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[373fc88f-9d35-457b-addf-de60ae3bcdfd]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.124 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:17.188 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[fc817d50-25de-4377-b191-b7122021dd31]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:17.190 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapa04f937a-30, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:17.190 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:17.191 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapa04f937a-30, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:27:17 compute-0 NetworkManager[51160]: <info>  [1759408037.2275] manager: (tapa04f937a-30): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/235)
Oct 02 12:27:17 compute-0 kernel: tapa04f937a-30: entered promiscuous mode
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.228 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:17.230 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tapa04f937a-30, col_values=(('external_ids', {'iface-id': '38f1ac16-18c6-4b4a-b769-ebc7dd5181d4'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.231 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:17 compute-0 ovn_controller[94336]: 2025-10-02T12:27:17Z|00464|binding|INFO|Releasing lport 38f1ac16-18c6-4b4a-b769-ebc7dd5181d4 from this chassis (sb_readonly=0)
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.232 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:17.233 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/a04f937a-375f-4fb0-90fe-5f514a88668f.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/a04f937a-375f-4fb0-90fe-5f514a88668f.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:17.234 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[624b2799-ae90-4f37-a831-be45d5a20896]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:17.235 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-a04f937a-375f-4fb0-90fe-5f514a88668f
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/a04f937a-375f-4fb0-90fe-5f514a88668f.pid.haproxy
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID a04f937a-375f-4fb0-90fe-5f514a88668f
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:27:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:17.235 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'env', 'PROCESS_TAG=haproxy-a04f937a-375f-4fb0-90fe-5f514a88668f', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/a04f937a-375f-4fb0-90fe-5f514a88668f.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.242 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.448 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408037.44774, 2fcfea17-10df-499a-8692-facbbc76266b => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.448 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] VM Started (Lifecycle Event)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.468 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/disk.device.write.requests volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.469 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/disk.device.write.requests volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'ddb6f9c9-25ac-4fb0-8c7a-6bfc33e10f8f', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 0, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_name': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_name': None, 'resource_id': '2fcfea17-10df-499a-8692-facbbc76266b-vda', 'timestamp': '2025-10-02T12:27:17.112723', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestJSON-server-1253918640', 'name': 'instance-0000007d', 'instance_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'instance_type': 'm1.nano', 'host': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'paused', 'state': 'paused', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '226359ea-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6034.799801497, 'message_signature': '9f1af75fcadab0f3f183040b034c83014bde28c00ca572fc9ea5ff2b5c9117ad'}, {'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 0, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_name': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_name': None, 
'resource_id': '2fcfea17-10df-499a-8692-facbbc76266b-sda', 'timestamp': '2025-10-02T12:27:17.112723', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestJSON-server-1253918640', 'name': 'instance-0000007d', 'instance_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'instance_type': 'm1.nano', 'host': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'paused', 'state': 'paused', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '2263669c-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6034.799801497, 'message_signature': 'a8a3de134cc3fa675fe3da69d9425b35b78e1ef3f4a3048a07f5e51e199c40ec'}]}, 'timestamp': '2025-10-02 12:27:17.469708', '_unique_id': 'a3aed89a86e84e02b909872e07ef046d'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.470 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.471 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.latency in the context of pollsters
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.471 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/disk.device.read.latency volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.472 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/disk.device.read.latency volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '40d355f1-79fd-42e6-815f-e4f2f46c0e68', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 0, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_name': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_name': None, 'resource_id': '2fcfea17-10df-499a-8692-facbbc76266b-vda', 'timestamp': '2025-10-02T12:27:17.471744', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestJSON-server-1253918640', 'name': 'instance-0000007d', 'instance_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'instance_type': 'm1.nano', 'host': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'paused', 'state': 'paused', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '2263c240-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6034.799801497, 'message_signature': 'cf03fe4a1190f622c548e4bfd0badbc2de47b4df16cc4e5b5155a391defce518'}, {'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 0, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_name': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_name': None, 
'resource_id': '2fcfea17-10df-499a-8692-facbbc76266b-sda', 'timestamp': '2025-10-02T12:27:17.471744', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestJSON-server-1253918640', 'name': 'instance-0000007d', 'instance_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'instance_type': 'm1.nano', 'host': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'paused', 'state': 'paused', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '2263ceac-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6034.799801497, 'message_signature': '7725b8556b2532584629124a0ffe3b5f96eb0db7d425ccd5aae58e782080b61b'}]}, 'timestamp': '2025-10-02 12:27:17.472366', '_unique_id': '5bb856666d4f4166b6c9cc525ac6f628'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.bytes in the context of pollsters
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.473 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/disk.device.read.bytes volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.474 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/disk.device.read.bytes volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '3002e446-125f-4ebe-a681-564d8cf0f9bb', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_name': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_name': None, 'resource_id': '2fcfea17-10df-499a-8692-facbbc76266b-vda', 'timestamp': '2025-10-02T12:27:17.473862', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestJSON-server-1253918640', 'name': 'instance-0000007d', 'instance_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'instance_type': 'm1.nano', 'host': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'paused', 'state': 'paused', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '22641560-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6034.799801497, 'message_signature': '95735eacf15ffea898e794f0d3558431aeb5b5c26124e8b2f13a95a8a0be9459'}, {'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_name': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_name': None, 'resource_id': 
'2fcfea17-10df-499a-8692-facbbc76266b-sda', 'timestamp': '2025-10-02T12:27:17.473862', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestJSON-server-1253918640', 'name': 'instance-0000007d', 'instance_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'instance_type': 'm1.nano', 'host': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'paused', 'state': 'paused', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '22642096-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6034.799801497, 'message_signature': '4ee956f093c9b2c4020cb8469d7a248eff00dc0221d5c848526eb19cdb4595c4'}]}, 'timestamp': '2025-10-02 12:27:17.474456', '_unique_id': '2807ca8a3cc849dc80eb7402e75d78c8'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.475 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.476 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets.drop in the context of pollsters
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.476 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/network.incoming.packets.drop volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '26075176-ffe5-48d6-bf5d-d8c727c57ed4', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets.drop', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_name': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_name': None, 'resource_id': 'instance-0000007d-2fcfea17-10df-499a-8692-facbbc76266b-tap5f268bcb-29', 'timestamp': '2025-10-02T12:27:17.476198', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestJSON-server-1253918640', 'name': 'tap5f268bcb-29', 'instance_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'instance_type': 'm1.nano', 'host': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'paused', 'state': 'paused', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:cb:cf:9a', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap5f268bcb-29'}, 'message_id': '2264706e-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6034.794351669, 'message_signature': 'b005788fcb8e3f9106bbb54eea412445717f15d104b275df5b094bd14676dabd'}]}, 'timestamp': '2025-10-02 12:27:17.476514', '_unique_id': '3f02659cad4049d4969fca01f9e6f03c'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes.delta in the context of pollsters
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.477 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/network.incoming.bytes.delta volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'af577155-aaef-45d7-b043-9d4c43be0dab', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.bytes.delta', 'counter_type': 'delta', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_name': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_name': None, 'resource_id': 'instance-0000007d-2fcfea17-10df-499a-8692-facbbc76266b-tap5f268bcb-29', 'timestamp': '2025-10-02T12:27:17.477926', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestJSON-server-1253918640', 'name': 'tap5f268bcb-29', 'instance_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'instance_type': 'm1.nano', 'host': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'paused', 'state': 'paused', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:cb:cf:9a', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap5f268bcb-29'}, 'message_id': '2264b402-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6034.794351669, 'message_signature': 'f22eede35253a36eabc846e8623d36f661232a1f23f2fc2d0bb696182fadf8d2'}]}, 'timestamp': '2025-10-02 12:27:17.478231', '_unique_id': 'cee3672da3b042c789e2ce8e54f30f5b'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.478 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.480 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.requests in the context of pollsters
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.480 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/disk.device.read.requests volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.480 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/disk.device.read.requests volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '2498be0b-1d81-46d1-8549-e787d16131c7', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 0, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_name': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_name': None, 'resource_id': '2fcfea17-10df-499a-8692-facbbc76266b-vda', 'timestamp': '2025-10-02T12:27:17.480172', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestJSON-server-1253918640', 'name': 'instance-0000007d', 'instance_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'instance_type': 'm1.nano', 'host': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'paused', 'state': 'paused', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '22650b5a-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6034.799801497, 'message_signature': '68401d735f5f50b3fe8d4c7e903c16655c4a261e85e5f9469ea4f4e2e3a9e5b8'}, {'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 0, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_name': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_name': None, 
'resource_id': '2fcfea17-10df-499a-8692-facbbc76266b-sda', 'timestamp': '2025-10-02T12:27:17.480172', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestJSON-server-1253918640', 'name': 'instance-0000007d', 'instance_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'instance_type': 'm1.nano', 'host': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'paused', 'state': 'paused', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '226515d2-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6034.799801497, 'message_signature': '145f82e09878c94ddad02a3b1041d34a36912b142f27496c4f9228c2b48ff157'}]}, 'timestamp': '2025-10-02 12:27:17.480727', '_unique_id': '55c0a2b55d7a4aadbd3e827565279a10'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.481 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.482 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes.rate in the context of pollsters
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.482 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for IncomingBytesRatePollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.482 12 ERROR ceilometer.polling.manager [-] Prevent pollster network.incoming.bytes.rate from polling [<NovaLikeServer: tempest-ServerActionsTestJSON-server-1253918640>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-ServerActionsTestJSON-server-1253918640>]
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.482 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.iops in the context of pollsters
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.482 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for PerDeviceDiskIOPSPollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.482 12 ERROR ceilometer.polling.manager [-] Prevent pollster disk.device.iops from polling [<NovaLikeServer: tempest-ServerActionsTestJSON-server-1253918640>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-ServerActionsTestJSON-server-1253918640>]
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.483 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets in the context of pollsters
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.483 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/network.incoming.packets volume: 1 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'd61ff049-3ff7-4d50-acd0-2399c81a6f86', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 1, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_name': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_name': None, 'resource_id': 'instance-0000007d-2fcfea17-10df-499a-8692-facbbc76266b-tap5f268bcb-29', 'timestamp': '2025-10-02T12:27:17.483229', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestJSON-server-1253918640', 'name': 'tap5f268bcb-29', 'instance_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'instance_type': 'm1.nano', 'host': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'paused', 'state': 'paused', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:cb:cf:9a', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap5f268bcb-29'}, 'message_id': '22658274-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6034.794351669, 'message_signature': '475f3c566ef0b0ecba906fceefcf0431d98908caac2b507b5a2aee33ee14ef38'}]}, 'timestamp': '2025-10-02 12:27:17.483518', '_unique_id': 'c79fa234e33e493c9a1811bbea0bc0ed'}: kombu.exceptions.OperationalError: [Errno 111] Connection 
refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.484 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.485 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.latency in the context of pollsters
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.485 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for PerDeviceDiskLatencyPollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.485 12 ERROR ceilometer.polling.manager [-] Prevent pollster disk.device.latency from polling [<NovaLikeServer: tempest-ServerActionsTestJSON-server-1253918640>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-ServerActionsTestJSON-server-1253918640>]
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.485 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets.drop in the context of pollsters
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.485 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/network.outgoing.packets.drop volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '4312f7cb-752a-4060-8edf-18f941353be5', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets.drop', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_name': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_name': None, 'resource_id': 'instance-0000007d-2fcfea17-10df-499a-8692-facbbc76266b-tap5f268bcb-29', 'timestamp': '2025-10-02T12:27:17.485628', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestJSON-server-1253918640', 'name': 'tap5f268bcb-29', 'instance_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'instance_type': 'm1.nano', 'host': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'paused', 'state': 'paused', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:cb:cf:9a', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap5f268bcb-29'}, 'message_id': '2265e106-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6034.794351669, 'message_signature': 'e341002fa7bea2634af6f043ad970de201fba8e084ad2878ffd5c5c2606de0f8'}]}, 'timestamp': '2025-10-02 12:27:17.485956', '_unique_id': '278fd4dc353444da8882c74a0e779130'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.486 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.487 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes in the context of pollsters
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.487 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/network.incoming.bytes volume: 110 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'd239e9a6-5631-4394-b1ea-739cb0623cdd', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 110, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_name': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_name': None, 'resource_id': 'instance-0000007d-2fcfea17-10df-499a-8692-facbbc76266b-tap5f268bcb-29', 'timestamp': '2025-10-02T12:27:17.487714', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestJSON-server-1253918640', 'name': 'tap5f268bcb-29', 'instance_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'instance_type': 'm1.nano', 'host': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'paused', 'state': 'paused', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:cb:cf:9a', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap5f268bcb-29'}, 'message_id': '2266320a-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6034.794351669, 'message_signature': '55429740631c1ccf4809673e713d64287ecbc63fae99c0b851bada1231bc8ba1'}]}, 'timestamp': '2025-10-02 12:27:17.488046', '_unique_id': '553344e9180140f082de15ac16f10c78'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.488 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.489 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets.error in the context of pollsters
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.489 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/network.outgoing.packets.error volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '1f95039b-338c-4b81-9ccb-5fad636b99f7', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets.error', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_name': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_name': None, 'resource_id': 'instance-0000007d-2fcfea17-10df-499a-8692-facbbc76266b-tap5f268bcb-29', 'timestamp': '2025-10-02T12:27:17.489470', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestJSON-server-1253918640', 'name': 'tap5f268bcb-29', 'instance_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'instance_type': 'm1.nano', 'host': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'paused', 'state': 'paused', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:cb:cf:9a', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap5f268bcb-29'}, 'message_id': '22667684-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6034.794351669, 'message_signature': 'cc0c01f779481f39a1d93dbf64bdf92ff8cc95a158bb99be8afd3d059344a724'}]}, 'timestamp': '2025-10-02 12:27:17.489779', '_unique_id': 'b53ee50224694d03a9f440d7687957b9'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.490 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.491 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.bytes in the context of pollsters
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.491 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/disk.device.write.bytes volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.491 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/disk.device.write.bytes volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '65d15da3-19e5-4f23-987c-a78a6be3f4d0', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_name': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_name': None, 'resource_id': '2fcfea17-10df-499a-8692-facbbc76266b-vda', 'timestamp': '2025-10-02T12:27:17.491300', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestJSON-server-1253918640', 'name': 'instance-0000007d', 'instance_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'instance_type': 'm1.nano', 'host': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'paused', 'state': 'paused', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '2266be28-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6034.799801497, 'message_signature': '077a28f0a02d0606093f06324cb0f31e292736dcfbec4a11ad9419d9f76fa5e4'}, {'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_name': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_name': None, 'resource_id': 
'2fcfea17-10df-499a-8692-facbbc76266b-sda', 'timestamp': '2025-10-02T12:27:17.491300', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestJSON-server-1253918640', 'name': 'instance-0000007d', 'instance_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'instance_type': 'm1.nano', 'host': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'paused', 'state': 'paused', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '2266c922-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6034.799801497, 'message_signature': '3060a6a80ba828d3da63b86edf6c0446bff3acac5f0b063fc81ecb59df88b6e1'}]}, 'timestamp': '2025-10-02 12:27:17.491875', '_unique_id': '78f7cf1a219441a68988cd582d3b12d2'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.492 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.493 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets in the context of pollsters
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.493 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/network.outgoing.packets volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '1d0a4dcc-772e-4a4c-ae1a-7b0e87b6c1c6', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_name': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_name': None, 'resource_id': 'instance-0000007d-2fcfea17-10df-499a-8692-facbbc76266b-tap5f268bcb-29', 'timestamp': '2025-10-02T12:27:17.493369', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestJSON-server-1253918640', 'name': 'tap5f268bcb-29', 'instance_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'instance_type': 'm1.nano', 'host': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'paused', 'state': 'paused', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:cb:cf:9a', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap5f268bcb-29'}, 'message_id': '22670ed2-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6034.794351669, 'message_signature': '9f83d86125c1f9262073ab8798fe6b5d3f7fd88b72223b865d816554d03d68ec'}]}, 'timestamp': '2025-10-02 12:27:17.493674', '_unique_id': '7ba53d18bfef423e800c39cacc418910'}: kombu.exceptions.OperationalError: [Errno 111] Connection 
refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.494 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.495 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.capacity in the context of pollsters
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.504 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/disk.device.capacity volume: 1073741824 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.504 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/disk.device.capacity volume: 509952 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'dec793b4-849b-457c-a20b-0c7033225703', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 1073741824, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_name': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_name': None, 'resource_id': '2fcfea17-10df-499a-8692-facbbc76266b-vda', 'timestamp': '2025-10-02T12:27:17.495128', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestJSON-server-1253918640', 'name': 'instance-0000007d', 'instance_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'instance_type': 'm1.nano', 'host': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'paused', 'state': 'paused', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '2268c740-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6035.182210608, 'message_signature': '22d4f2311cd4b852a66e490205547e4f21d279a25ebac8b073cc5ec3ccd0ee7b'}, {'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 509952, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_name': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_name': None, 'resource_id': 
'2fcfea17-10df-499a-8692-facbbc76266b-sda', 'timestamp': '2025-10-02T12:27:17.495128', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestJSON-server-1253918640', 'name': 'instance-0000007d', 'instance_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'instance_type': 'm1.nano', 'host': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'paused', 'state': 'paused', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '2268d2f8-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6035.182210608, 'message_signature': '813f4fee2ca894eb717451091607e47089324a0a45320c3085f4b2ae58c19fac'}]}, 'timestamp': '2025-10-02 12:27:17.505211', '_unique_id': '853d02670d964157b0ad7e92c78faf1a'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.506 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes in the context of pollsters
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/network.outgoing.bytes volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '5c081e61-fb2e-427a-a116-9cd440c4ea4e', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_name': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_name': None, 'resource_id': 'instance-0000007d-2fcfea17-10df-499a-8692-facbbc76266b-tap5f268bcb-29', 'timestamp': '2025-10-02T12:27:17.507082', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestJSON-server-1253918640', 'name': 'tap5f268bcb-29', 'instance_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'instance_type': 'm1.nano', 'host': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'paused', 'state': 'paused', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:cb:cf:9a', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap5f268bcb-29'}, 'message_id': '2269271c-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6034.794351669, 'message_signature': 'b6dfadc42cc00ea55251df8e8588408b3f1f8a5890f30523f1225dd2a05a03d6'}]}, 'timestamp': '2025-10-02 12:27:17.507370', '_unique_id': '2faa05630b9949bfb24aa16b0f2f59ca'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.507 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.508 12 INFO ceilometer.polling.manager [-] Polling pollster cpu in the context of pollsters
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.523 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/cpu volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '3eef73ea-b936-4edd-88e1-9e7ef4481a40', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'cpu', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 0, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_name': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_name': None, 'resource_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'timestamp': '2025-10-02T12:27:17.508670', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestJSON-server-1253918640', 'name': 'instance-0000007d', 'instance_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'instance_type': 'm1.nano', 'host': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'paused', 'state': 'paused', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'cpu_number': 1}, 'message_id': '226bbbbc-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6035.210902311, 'message_signature': 'e3ab05ebdb101b1e36ae0793d6dfb8e045c100bd780e23495b7ac028df47d4a6'}]}, 'timestamp': '2025-10-02 12:27:17.524311', '_unique_id': '33696c64044c454c85831d02c62d45c3'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.524 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.525 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.latency in the context of pollsters
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.525 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/disk.device.write.latency volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/disk.device.write.latency volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'c8bd3dc6-1149-4071-aa89-72980354e9ac', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 0, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_name': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_name': None, 'resource_id': '2fcfea17-10df-499a-8692-facbbc76266b-vda', 'timestamp': '2025-10-02T12:27:17.525700', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestJSON-server-1253918640', 'name': 'instance-0000007d', 'instance_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'instance_type': 'm1.nano', 'host': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'paused', 'state': 'paused', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '226bfdb6-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6034.799801497, 'message_signature': '208482ef7231268a0d51aa6b7dfda129a428891cd0c0939bf8f5c543264f3494'}, {'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 0, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_name': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_name': None, 
'resource_id': '2fcfea17-10df-499a-8692-facbbc76266b-sda', 'timestamp': '2025-10-02T12:27:17.525700', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestJSON-server-1253918640', 'name': 'instance-0000007d', 'instance_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'instance_type': 'm1.nano', 'host': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'paused', 'state': 'paused', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '226c0a0e-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6034.799801497, 'message_signature': 'e88e053ac8944f363c96293ccadd40d2c0112e05478497415a76753f92d7ccb8'}]}, 'timestamp': '2025-10-02 12:27:17.526280', '_unique_id': '1640b6bd52bc4ed09aac745d9beb3682'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.526 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.527 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes.rate in the context of pollsters
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.527 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for OutgoingBytesRatePollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.527 12 ERROR ceilometer.polling.manager [-] Prevent pollster network.outgoing.bytes.rate from polling [<NovaLikeServer: tempest-ServerActionsTestJSON-server-1253918640>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-ServerActionsTestJSON-server-1253918640>]
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.527 12 INFO ceilometer.polling.manager [-] Polling pollster memory.usage in the context of pollsters
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.527 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/memory.usage volume: Unavailable _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.528 12 WARNING ceilometer.compute.pollsters [-] memory.usage statistic in not available for instance 2fcfea17-10df-499a-8692-facbbc76266b: ceilometer.compute.pollsters.NoVolumeException
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.528 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes.delta in the context of pollsters
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.528 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/network.outgoing.bytes.delta volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '8bef95c1-4137-4995-ab4e-e3f3fe2ad64b', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.bytes.delta', 'counter_type': 'delta', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_name': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_name': None, 'resource_id': 'instance-0000007d-2fcfea17-10df-499a-8692-facbbc76266b-tap5f268bcb-29', 'timestamp': '2025-10-02T12:27:17.528210', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestJSON-server-1253918640', 'name': 'tap5f268bcb-29', 'instance_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'instance_type': 'm1.nano', 'host': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'paused', 'state': 'paused', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:cb:cf:9a', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap5f268bcb-29'}, 'message_id': '226c5fe0-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6034.794351669, 'message_signature': 'f72499e2bc47d51dea737ade5528deaeb6cfb47a8a3da5fa23a828fc7a809ea1'}]}, 'timestamp': '2025-10-02 12:27:17.528493', '_unique_id': 'cf890e26808c4af8bfb827d958f5dc33'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.usage in the context of pollsters
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.529 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/disk.device.usage volume: 196624 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.530 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/disk.device.usage volume: 509952 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '0a881f70-e5e2-4380-9044-0aa456e62f4e', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 196624, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_name': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_name': None, 'resource_id': '2fcfea17-10df-499a-8692-facbbc76266b-vda', 'timestamp': '2025-10-02T12:27:17.529738', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestJSON-server-1253918640', 'name': 'instance-0000007d', 'instance_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'instance_type': 'm1.nano', 'host': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'paused', 'state': 'paused', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '226c9dca-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6035.182210608, 'message_signature': 'eea84ff441f9a63375f2ec6ba39248d120ee6046d58568c28fc238f94529641f'}, {'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 509952, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_name': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_name': None, 'resource_id': 
'2fcfea17-10df-499a-8692-facbbc76266b-sda', 'timestamp': '2025-10-02T12:27:17.529738', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestJSON-server-1253918640', 'name': 'instance-0000007d', 'instance_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'instance_type': 'm1.nano', 'host': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'paused', 'state': 'paused', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '226caa68-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6035.182210608, 'message_signature': 'ea56a675bc673ebbdfc7550899205cdc49939ab20a9cdcf9e16614358c2d1dd5'}]}, 'timestamp': '2025-10-02 12:27:17.530427', '_unique_id': '8c26d50900d345fba6e8eb1385867ef3'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.531 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.allocation in the context of pollsters
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.532 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/disk.device.allocation volume: 204800 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.532 12 DEBUG ceilometer.compute.pollsters [-] 2fcfea17-10df-499a-8692-facbbc76266b/disk.device.allocation volume: 512000 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '31f737d3-d417-43d8-9102-2650b76c377f', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 204800, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_name': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_name': None, 'resource_id': '2fcfea17-10df-499a-8692-facbbc76266b-vda', 'timestamp': '2025-10-02T12:27:17.532077', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestJSON-server-1253918640', 'name': 'instance-0000007d', 'instance_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'instance_type': 'm1.nano', 'host': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'paused', 'state': 'paused', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '226cfb1c-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6035.182210608, 'message_signature': 'b8cc7933fa5bd173b1098d4da6c63ac14b0bd94fd104614499bf4c575165d19c'}, {'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 512000, 'user_id': 'd54b1826121b47caba89932a78c06ccd', 'user_name': None, 'project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'project_name': None, 'resource_id': 
'2fcfea17-10df-499a-8692-facbbc76266b-sda', 'timestamp': '2025-10-02T12:27:17.532077', 'resource_metadata': {'display_name': 'tempest-ServerActionsTestJSON-server-1253918640', 'name': 'instance-0000007d', 'instance_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'instance_type': 'm1.nano', 'host': '2cc122857deffce0731024d5ffa3934b29ad06b49c589b2ede122362', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'paused', 'state': 'paused', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '226d06a2-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6035.182210608, 'message_signature': '26a14337a84e4944a311d7fd6ace597df8100d0800cf72fccb0ca0c5048ddb1e'}]}, 'timestamp': '2025-10-02 12:27:17.532737', '_unique_id': '0c2c1dae808944eda29f9248d95fb30f'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:27:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:27:17.533 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.537 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.539 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408037.450665, 2fcfea17-10df-499a-8692-facbbc76266b => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.539 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] VM Paused (Lifecycle Event)
Oct 02 12:27:17 compute-0 podman[241078]: 2025-10-02 12:27:17.596557706 +0000 UTC m=+0.048743130 container create 98e9159928f908e03b232c4892233de63e6ddfbb4e38ab9c6b1a45fb984b5b71 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_managed=true, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2)
Oct 02 12:27:17 compute-0 systemd[1]: Started libpod-conmon-98e9159928f908e03b232c4892233de63e6ddfbb4e38ab9c6b1a45fb984b5b71.scope.
Oct 02 12:27:17 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:27:17 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/13eb36f7ba48232c72605ffe00abf08809a2d1b90d9a54e07f9bb8c6367051f0/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:27:17 compute-0 podman[241078]: 2025-10-02 12:27:17.572846289 +0000 UTC m=+0.025031733 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:27:17 compute-0 podman[241078]: 2025-10-02 12:27:17.672511755 +0000 UTC m=+0.124697199 container init 98e9159928f908e03b232c4892233de63e6ddfbb4e38ab9c6b1a45fb984b5b71 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0)
Oct 02 12:27:17 compute-0 podman[241078]: 2025-10-02 12:27:17.678505609 +0000 UTC m=+0.130691033 container start 98e9159928f908e03b232c4892233de63e6ddfbb4e38ab9c6b1a45fb984b5b71 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0)
Oct 02 12:27:17 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[241093]: [NOTICE]   (241097) : New worker (241099) forked
Oct 02 12:27:17 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[241093]: [NOTICE]   (241097) : Loading success.
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.732 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.752 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.856 2 DEBUG nova.compute.manager [req-8e8037bd-6abb-4bd1-8c86-e6abd8f10b5b req-50860373-e5da-4697-a914-ed99f51f87bf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Received event network-vif-plugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.856 2 DEBUG oslo_concurrency.lockutils [req-8e8037bd-6abb-4bd1-8c86-e6abd8f10b5b req-50860373-e5da-4697-a914-ed99f51f87bf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "2fcfea17-10df-499a-8692-facbbc76266b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.857 2 DEBUG oslo_concurrency.lockutils [req-8e8037bd-6abb-4bd1-8c86-e6abd8f10b5b req-50860373-e5da-4697-a914-ed99f51f87bf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.857 2 DEBUG oslo_concurrency.lockutils [req-8e8037bd-6abb-4bd1-8c86-e6abd8f10b5b req-50860373-e5da-4697-a914-ed99f51f87bf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.857 2 DEBUG nova.compute.manager [req-8e8037bd-6abb-4bd1-8c86-e6abd8f10b5b req-50860373-e5da-4697-a914-ed99f51f87bf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Processing event network-vif-plugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.859 2 DEBUG nova.compute.manager [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.859 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.863 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408037.862872, 2fcfea17-10df-499a-8692-facbbc76266b => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.863 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] VM Resumed (Lifecycle Event)
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.865 2 DEBUG nova.virt.libvirt.driver [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.868 2 INFO nova.virt.libvirt.driver [-] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Instance spawned successfully.
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.868 2 DEBUG nova.virt.libvirt.driver [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.924 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.930 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.933 2 DEBUG nova.virt.libvirt.driver [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.933 2 DEBUG nova.virt.libvirt.driver [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.934 2 DEBUG nova.virt.libvirt.driver [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.934 2 DEBUG nova.virt.libvirt.driver [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.934 2 DEBUG nova.virt.libvirt.driver [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.935 2 DEBUG nova.virt.libvirt.driver [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:27:17 compute-0 nova_compute[192079]: 2025-10-02 12:27:17.997 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:27:18 compute-0 nova_compute[192079]: 2025-10-02 12:27:18.255 2 INFO nova.compute.manager [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Took 8.15 seconds to spawn the instance on the hypervisor.
Oct 02 12:27:18 compute-0 nova_compute[192079]: 2025-10-02 12:27:18.256 2 DEBUG nova.compute.manager [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:27:18 compute-0 nova_compute[192079]: 2025-10-02 12:27:18.495 2 DEBUG nova.network.neutron [req-0876746e-8cf5-40b8-9de3-9ec2a3bbae05 req-12d2dfc9-c3f8-412f-bb99-43c74d8f5bf9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Updated VIF entry in instance network info cache for port 5f268bcb-29c1-4e4e-a36d-b2ec144d3dca. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:27:18 compute-0 nova_compute[192079]: 2025-10-02 12:27:18.496 2 DEBUG nova.network.neutron [req-0876746e-8cf5-40b8-9de3-9ec2a3bbae05 req-12d2dfc9-c3f8-412f-bb99-43c74d8f5bf9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Updating instance_info_cache with network_info: [{"id": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "address": "fa:16:3e:cb:cf:9a", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5f268bcb-29", "ovs_interfaceid": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:27:18 compute-0 nova_compute[192079]: 2025-10-02 12:27:18.533 2 INFO nova.compute.manager [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Took 10.74 seconds to build instance.
Oct 02 12:27:18 compute-0 nova_compute[192079]: 2025-10-02 12:27:18.555 2 DEBUG oslo_concurrency.lockutils [req-0876746e-8cf5-40b8-9de3-9ec2a3bbae05 req-12d2dfc9-c3f8-412f-bb99-43c74d8f5bf9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-2fcfea17-10df-499a-8692-facbbc76266b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:27:18 compute-0 nova_compute[192079]: 2025-10-02 12:27:18.604 2 DEBUG oslo_concurrency.lockutils [None req-540799a9-5f5d-48ec-bf8e-ab3cd9a6daff d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 10.950s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:27:19 compute-0 nova_compute[192079]: 2025-10-02 12:27:19.964 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:20 compute-0 podman[241109]: 2025-10-02 12:27:20.042090684 +0000 UTC m=+0.051659640 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_id=iscsid, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, managed_by=edpm_ansible, org.label-schema.build-date=20251001)
Oct 02 12:27:20 compute-0 podman[241108]: 2025-10-02 12:27:20.067855546 +0000 UTC m=+0.077913475 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter)
Oct 02 12:27:20 compute-0 nova_compute[192079]: 2025-10-02 12:27:20.076 2 DEBUG nova.compute.manager [req-89405d61-69cd-4cbe-a57f-38a2ee2295ef req-f9025aa0-ad06-4b4f-ad21-c3cfded90b2a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Received event network-vif-plugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:27:20 compute-0 nova_compute[192079]: 2025-10-02 12:27:20.077 2 DEBUG oslo_concurrency.lockutils [req-89405d61-69cd-4cbe-a57f-38a2ee2295ef req-f9025aa0-ad06-4b4f-ad21-c3cfded90b2a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "2fcfea17-10df-499a-8692-facbbc76266b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:27:20 compute-0 nova_compute[192079]: 2025-10-02 12:27:20.077 2 DEBUG oslo_concurrency.lockutils [req-89405d61-69cd-4cbe-a57f-38a2ee2295ef req-f9025aa0-ad06-4b4f-ad21-c3cfded90b2a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:27:20 compute-0 nova_compute[192079]: 2025-10-02 12:27:20.077 2 DEBUG oslo_concurrency.lockutils [req-89405d61-69cd-4cbe-a57f-38a2ee2295ef req-f9025aa0-ad06-4b4f-ad21-c3cfded90b2a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:27:20 compute-0 nova_compute[192079]: 2025-10-02 12:27:20.077 2 DEBUG nova.compute.manager [req-89405d61-69cd-4cbe-a57f-38a2ee2295ef req-f9025aa0-ad06-4b4f-ad21-c3cfded90b2a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] No waiting events found dispatching network-vif-plugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:27:20 compute-0 nova_compute[192079]: 2025-10-02 12:27:20.077 2 WARNING nova.compute.manager [req-89405d61-69cd-4cbe-a57f-38a2ee2295ef req-f9025aa0-ad06-4b4f-ad21-c3cfded90b2a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Received unexpected event network-vif-plugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca for instance with vm_state active and task_state None.
Oct 02 12:27:20 compute-0 nova_compute[192079]: 2025-10-02 12:27:20.970 2 DEBUG oslo_concurrency.lockutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "957c4e10-f195-4d5e-97c0-0928296aba31" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:27:20 compute-0 nova_compute[192079]: 2025-10-02 12:27:20.971 2 DEBUG oslo_concurrency.lockutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "957c4e10-f195-4d5e-97c0-0928296aba31" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:27:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:20.991 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=31, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=30) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:27:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:20.993 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 1 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:27:20 compute-0 nova_compute[192079]: 2025-10-02 12:27:20.993 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:21 compute-0 nova_compute[192079]: 2025-10-02 12:27:21.064 2 DEBUG nova.compute.manager [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:27:21 compute-0 nova_compute[192079]: 2025-10-02 12:27:21.250 2 DEBUG oslo_concurrency.lockutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:27:21 compute-0 nova_compute[192079]: 2025-10-02 12:27:21.250 2 DEBUG oslo_concurrency.lockutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:27:21 compute-0 nova_compute[192079]: 2025-10-02 12:27:21.259 2 DEBUG nova.virt.hardware [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:27:21 compute-0 nova_compute[192079]: 2025-10-02 12:27:21.259 2 INFO nova.compute.claims [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:27:21 compute-0 nova_compute[192079]: 2025-10-02 12:27:21.530 2 DEBUG nova.compute.provider_tree [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:27:21 compute-0 nova_compute[192079]: 2025-10-02 12:27:21.554 2 DEBUG nova.scheduler.client.report [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:27:21 compute-0 nova_compute[192079]: 2025-10-02 12:27:21.583 2 DEBUG oslo_concurrency.lockutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.333s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:27:21 compute-0 nova_compute[192079]: 2025-10-02 12:27:21.584 2 DEBUG nova.compute.manager [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:27:21 compute-0 nova_compute[192079]: 2025-10-02 12:27:21.700 2 DEBUG nova.compute.manager [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:27:21 compute-0 nova_compute[192079]: 2025-10-02 12:27:21.701 2 DEBUG nova.network.neutron [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:27:21 compute-0 nova_compute[192079]: 2025-10-02 12:27:21.747 2 INFO nova.virt.libvirt.driver [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:27:21 compute-0 nova_compute[192079]: 2025-10-02 12:27:21.779 2 DEBUG nova.compute.manager [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:27:21 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:21.995 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '31'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:27:22 compute-0 nova_compute[192079]: 2025-10-02 12:27:22.030 2 DEBUG nova.compute.manager [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:27:22 compute-0 nova_compute[192079]: 2025-10-02 12:27:22.031 2 DEBUG nova.virt.libvirt.driver [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:27:22 compute-0 nova_compute[192079]: 2025-10-02 12:27:22.031 2 INFO nova.virt.libvirt.driver [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Creating image(s)
Oct 02 12:27:22 compute-0 nova_compute[192079]: 2025-10-02 12:27:22.032 2 DEBUG oslo_concurrency.lockutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "/var/lib/nova/instances/957c4e10-f195-4d5e-97c0-0928296aba31/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:27:22 compute-0 nova_compute[192079]: 2025-10-02 12:27:22.032 2 DEBUG oslo_concurrency.lockutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "/var/lib/nova/instances/957c4e10-f195-4d5e-97c0-0928296aba31/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:27:22 compute-0 nova_compute[192079]: 2025-10-02 12:27:22.033 2 DEBUG oslo_concurrency.lockutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "/var/lib/nova/instances/957c4e10-f195-4d5e-97c0-0928296aba31/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:27:22 compute-0 nova_compute[192079]: 2025-10-02 12:27:22.051 2 DEBUG oslo_concurrency.processutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:27:22 compute-0 nova_compute[192079]: 2025-10-02 12:27:22.107 2 DEBUG oslo_concurrency.processutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:27:22 compute-0 nova_compute[192079]: 2025-10-02 12:27:22.108 2 DEBUG oslo_concurrency.lockutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:27:22 compute-0 nova_compute[192079]: 2025-10-02 12:27:22.109 2 DEBUG oslo_concurrency.lockutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:27:22 compute-0 nova_compute[192079]: 2025-10-02 12:27:22.119 2 DEBUG oslo_concurrency.processutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:27:22 compute-0 nova_compute[192079]: 2025-10-02 12:27:22.134 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:22 compute-0 nova_compute[192079]: 2025-10-02 12:27:22.173 2 DEBUG oslo_concurrency.processutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:27:22 compute-0 nova_compute[192079]: 2025-10-02 12:27:22.174 2 DEBUG oslo_concurrency.processutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/957c4e10-f195-4d5e-97c0-0928296aba31/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:27:22 compute-0 nova_compute[192079]: 2025-10-02 12:27:22.205 2 DEBUG oslo_concurrency.processutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/957c4e10-f195-4d5e-97c0-0928296aba31/disk 1073741824" returned: 0 in 0.031s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:27:22 compute-0 nova_compute[192079]: 2025-10-02 12:27:22.206 2 DEBUG oslo_concurrency.lockutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.097s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:27:22 compute-0 nova_compute[192079]: 2025-10-02 12:27:22.206 2 DEBUG oslo_concurrency.processutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:27:22 compute-0 nova_compute[192079]: 2025-10-02 12:27:22.261 2 DEBUG oslo_concurrency.processutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:27:22 compute-0 nova_compute[192079]: 2025-10-02 12:27:22.262 2 DEBUG nova.virt.disk.api [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Checking if we can resize image /var/lib/nova/instances/957c4e10-f195-4d5e-97c0-0928296aba31/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:27:22 compute-0 nova_compute[192079]: 2025-10-02 12:27:22.263 2 DEBUG oslo_concurrency.processutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/957c4e10-f195-4d5e-97c0-0928296aba31/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:27:22 compute-0 nova_compute[192079]: 2025-10-02 12:27:22.318 2 DEBUG oslo_concurrency.processutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/957c4e10-f195-4d5e-97c0-0928296aba31/disk --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:27:22 compute-0 nova_compute[192079]: 2025-10-02 12:27:22.319 2 DEBUG nova.virt.disk.api [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Cannot resize image /var/lib/nova/instances/957c4e10-f195-4d5e-97c0-0928296aba31/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:27:22 compute-0 nova_compute[192079]: 2025-10-02 12:27:22.320 2 DEBUG nova.objects.instance [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'migration_context' on Instance uuid 957c4e10-f195-4d5e-97c0-0928296aba31 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:27:22 compute-0 nova_compute[192079]: 2025-10-02 12:27:22.459 2 DEBUG nova.virt.libvirt.driver [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:27:22 compute-0 nova_compute[192079]: 2025-10-02 12:27:22.459 2 DEBUG nova.virt.libvirt.driver [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Ensure instance console log exists: /var/lib/nova/instances/957c4e10-f195-4d5e-97c0-0928296aba31/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:27:22 compute-0 nova_compute[192079]: 2025-10-02 12:27:22.460 2 DEBUG oslo_concurrency.lockutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:27:22 compute-0 nova_compute[192079]: 2025-10-02 12:27:22.460 2 DEBUG oslo_concurrency.lockutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:27:22 compute-0 nova_compute[192079]: 2025-10-02 12:27:22.460 2 DEBUG oslo_concurrency.lockutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:27:22 compute-0 nova_compute[192079]: 2025-10-02 12:27:22.596 2 DEBUG nova.policy [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1faa7e121a0e43ad8cb4ae5b2cfcc6a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '76c7dd40d83e4e3ca71abbebf57921b6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:27:24 compute-0 nova_compute[192079]: 2025-10-02 12:27:24.607 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:24 compute-0 nova_compute[192079]: 2025-10-02 12:27:24.966 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:25 compute-0 nova_compute[192079]: 2025-10-02 12:27:25.655 2 DEBUG nova.network.neutron [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Successfully created port: f52dd1e9-092b-4ab8-946d-dd1aea183ec4 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:27:26 compute-0 nova_compute[192079]: 2025-10-02 12:27:26.339 2 DEBUG nova.compute.manager [req-fc933b2f-ecfa-488d-991f-366670c875ac req-42015be0-3113-4610-aa00-ade6dec3da3f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Received event network-changed-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:27:26 compute-0 nova_compute[192079]: 2025-10-02 12:27:26.339 2 DEBUG nova.compute.manager [req-fc933b2f-ecfa-488d-991f-366670c875ac req-42015be0-3113-4610-aa00-ade6dec3da3f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Refreshing instance network info cache due to event network-changed-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:27:26 compute-0 nova_compute[192079]: 2025-10-02 12:27:26.340 2 DEBUG oslo_concurrency.lockutils [req-fc933b2f-ecfa-488d-991f-366670c875ac req-42015be0-3113-4610-aa00-ade6dec3da3f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-2fcfea17-10df-499a-8692-facbbc76266b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:27:26 compute-0 nova_compute[192079]: 2025-10-02 12:27:26.340 2 DEBUG oslo_concurrency.lockutils [req-fc933b2f-ecfa-488d-991f-366670c875ac req-42015be0-3113-4610-aa00-ade6dec3da3f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-2fcfea17-10df-499a-8692-facbbc76266b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:27:26 compute-0 nova_compute[192079]: 2025-10-02 12:27:26.340 2 DEBUG nova.network.neutron [req-fc933b2f-ecfa-488d-991f-366670c875ac req-42015be0-3113-4610-aa00-ade6dec3da3f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Refreshing network info cache for port 5f268bcb-29c1-4e4e-a36d-b2ec144d3dca _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:27:27 compute-0 nova_compute[192079]: 2025-10-02 12:27:27.127 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:28 compute-0 nova_compute[192079]: 2025-10-02 12:27:28.619 2 DEBUG nova.network.neutron [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Successfully updated port: f52dd1e9-092b-4ab8-946d-dd1aea183ec4 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:27:28 compute-0 nova_compute[192079]: 2025-10-02 12:27:28.644 2 DEBUG oslo_concurrency.lockutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "refresh_cache-957c4e10-f195-4d5e-97c0-0928296aba31" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:27:28 compute-0 nova_compute[192079]: 2025-10-02 12:27:28.645 2 DEBUG oslo_concurrency.lockutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquired lock "refresh_cache-957c4e10-f195-4d5e-97c0-0928296aba31" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:27:28 compute-0 nova_compute[192079]: 2025-10-02 12:27:28.645 2 DEBUG nova.network.neutron [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:27:28 compute-0 nova_compute[192079]: 2025-10-02 12:27:28.739 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:27:28 compute-0 nova_compute[192079]: 2025-10-02 12:27:28.781 2 DEBUG nova.compute.manager [req-cda82cf9-d4da-4123-b05a-d858061f4db6 req-c2b07c98-c4cb-497f-8c8d-bf38bf6745cc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Received event network-changed-f52dd1e9-092b-4ab8-946d-dd1aea183ec4 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:27:28 compute-0 nova_compute[192079]: 2025-10-02 12:27:28.782 2 DEBUG nova.compute.manager [req-cda82cf9-d4da-4123-b05a-d858061f4db6 req-c2b07c98-c4cb-497f-8c8d-bf38bf6745cc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Refreshing instance network info cache due to event network-changed-f52dd1e9-092b-4ab8-946d-dd1aea183ec4. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:27:28 compute-0 nova_compute[192079]: 2025-10-02 12:27:28.782 2 DEBUG oslo_concurrency.lockutils [req-cda82cf9-d4da-4123-b05a-d858061f4db6 req-c2b07c98-c4cb-497f-8c8d-bf38bf6745cc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-957c4e10-f195-4d5e-97c0-0928296aba31" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:27:28 compute-0 nova_compute[192079]: 2025-10-02 12:27:28.965 2 DEBUG nova.network.neutron [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:27:29 compute-0 nova_compute[192079]: 2025-10-02 12:27:29.118 2 DEBUG nova.network.neutron [req-fc933b2f-ecfa-488d-991f-366670c875ac req-42015be0-3113-4610-aa00-ade6dec3da3f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Updated VIF entry in instance network info cache for port 5f268bcb-29c1-4e4e-a36d-b2ec144d3dca. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:27:29 compute-0 nova_compute[192079]: 2025-10-02 12:27:29.118 2 DEBUG nova.network.neutron [req-fc933b2f-ecfa-488d-991f-366670c875ac req-42015be0-3113-4610-aa00-ade6dec3da3f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Updating instance_info_cache with network_info: [{"id": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "address": "fa:16:3e:cb:cf:9a", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5f268bcb-29", "ovs_interfaceid": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:27:29 compute-0 nova_compute[192079]: 2025-10-02 12:27:29.146 2 DEBUG oslo_concurrency.lockutils [req-fc933b2f-ecfa-488d-991f-366670c875ac req-42015be0-3113-4610-aa00-ade6dec3da3f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-2fcfea17-10df-499a-8692-facbbc76266b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:27:29 compute-0 podman[241164]: 2025-10-02 12:27:29.147905564 +0000 UTC m=+0.053037666 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, 
org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, container_name=ovn_metadata_agent, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible)
Oct 02 12:27:29 compute-0 podman[241165]: 2025-10-02 12:27:29.166556733 +0000 UTC m=+0.071298685 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.license=GPLv2, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:27:29 compute-0 podman[241166]: 2025-10-02 12:27:29.195158002 +0000 UTC m=+0.089111590 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 12:27:29 compute-0 nova_compute[192079]: 2025-10-02 12:27:29.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:27:29 compute-0 nova_compute[192079]: 2025-10-02 12:27:29.710 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:27:29 compute-0 nova_compute[192079]: 2025-10-02 12:27:29.710 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:27:29 compute-0 nova_compute[192079]: 2025-10-02 12:27:29.711 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:27:29 compute-0 nova_compute[192079]: 2025-10-02 12:27:29.711 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:27:29 compute-0 nova_compute[192079]: 2025-10-02 12:27:29.801 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:27:29 compute-0 nova_compute[192079]: 2025-10-02 12:27:29.916 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk --force-share --output=json" returned: 0 in 0.115s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:27:29 compute-0 nova_compute[192079]: 2025-10-02 12:27:29.917 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:27:29 compute-0 nova_compute[192079]: 2025-10-02 12:27:29.967 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:29 compute-0 nova_compute[192079]: 2025-10-02 12:27:29.996 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk --force-share --output=json" returned: 0 in 0.079s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.133 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.135 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5531MB free_disk=73.34712219238281GB free_vcpus=7 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.135 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.135 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.296 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance 2fcfea17-10df-499a-8692-facbbc76266b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.297 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance 957c4e10-f195-4d5e-97c0-0928296aba31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.297 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 2 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.298 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=768MB phys_disk=79GB used_disk=2GB total_vcpus=8 used_vcpus=2 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.470 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.554 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.587 2 DEBUG nova.network.neutron [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Updating instance_info_cache with network_info: [{"id": "f52dd1e9-092b-4ab8-946d-dd1aea183ec4", "address": "fa:16:3e:94:1a:4b", "network": {"id": "1a3d22d6-addb-4c33-bccc-61618673b1b6", "bridge": "br-int", "label": "tempest-network-smoke--81262139", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf52dd1e9-09", "ovs_interfaceid": "f52dd1e9-092b-4ab8-946d-dd1aea183ec4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.618 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.619 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.484s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.619 2 DEBUG oslo_concurrency.lockutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Releasing lock "refresh_cache-957c4e10-f195-4d5e-97c0-0928296aba31" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.619 2 DEBUG nova.compute.manager [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Instance network_info: |[{"id": "f52dd1e9-092b-4ab8-946d-dd1aea183ec4", "address": "fa:16:3e:94:1a:4b", "network": {"id": "1a3d22d6-addb-4c33-bccc-61618673b1b6", "bridge": "br-int", "label": "tempest-network-smoke--81262139", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf52dd1e9-09", "ovs_interfaceid": "f52dd1e9-092b-4ab8-946d-dd1aea183ec4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.620 2 DEBUG oslo_concurrency.lockutils [req-cda82cf9-d4da-4123-b05a-d858061f4db6 req-c2b07c98-c4cb-497f-8c8d-bf38bf6745cc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-957c4e10-f195-4d5e-97c0-0928296aba31" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.620 2 DEBUG nova.network.neutron [req-cda82cf9-d4da-4123-b05a-d858061f4db6 req-c2b07c98-c4cb-497f-8c8d-bf38bf6745cc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Refreshing network info cache for port f52dd1e9-092b-4ab8-946d-dd1aea183ec4 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.622 2 DEBUG nova.virt.libvirt.driver [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Start _get_guest_xml network_info=[{"id": "f52dd1e9-092b-4ab8-946d-dd1aea183ec4", "address": "fa:16:3e:94:1a:4b", "network": {"id": "1a3d22d6-addb-4c33-bccc-61618673b1b6", "bridge": "br-int", "label": "tempest-network-smoke--81262139", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf52dd1e9-09", "ovs_interfaceid": "f52dd1e9-092b-4ab8-946d-dd1aea183ec4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.626 2 WARNING nova.virt.libvirt.driver [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.631 2 DEBUG nova.virt.libvirt.host [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.631 2 DEBUG nova.virt.libvirt.host [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.634 2 DEBUG nova.virt.libvirt.host [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.635 2 DEBUG nova.virt.libvirt.host [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.636 2 DEBUG nova.virt.libvirt.driver [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.636 2 DEBUG nova.virt.hardware [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.637 2 DEBUG nova.virt.hardware [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.637 2 DEBUG nova.virt.hardware [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.637 2 DEBUG nova.virt.hardware [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.638 2 DEBUG nova.virt.hardware [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.638 2 DEBUG nova.virt.hardware [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.638 2 DEBUG nova.virt.hardware [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.638 2 DEBUG nova.virt.hardware [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.638 2 DEBUG nova.virt.hardware [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.639 2 DEBUG nova.virt.hardware [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.639 2 DEBUG nova.virt.hardware [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.643 2 DEBUG nova.virt.libvirt.vif [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:27:18Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestNetworkAdvancedServerOps-server-491875560',display_name='tempest-TestNetworkAdvancedServerOps-server-491875560',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkadvancedserverops-server-491875560',id=127,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBBFtPcDKg7oBwtPeK2kB3C5slIvpeY9IQucVLArPcU8FLh7VlTWx62ZUGaRO0OLeoqO638ZiwAuYZfHu9NjsOskJMQqY4NwG01wMMMi/eVKSBjzkTyhN0wIh//zV9tpx5Q==',key_name='tempest-TestNetworkAdvancedServerOps-1596699922',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='76c7dd40d83e4e3ca71abbebf57921b6',ramdisk_id='',reservation_id='r-86f6iqbj',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestNetworkAdvancedServerOps-597114071',owner_user_name='tempest-TestNetworkAdvancedServerOps-597114071-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:27:21Z,user_data=None,user_id='1faa7e121a0e43ad8cb4ae5b2cfcc6a2',uuid=957c4e10-f195-4d5e-97c0-0928296aba31,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "f52dd1e9-092b-4ab8-946d-dd1aea183ec4", "address": "fa:16:3e:94:1a:4b", "network": {"id": "1a3d22d6-addb-4c33-bccc-61618673b1b6", "bridge": "br-int", "label": "tempest-network-smoke--81262139", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": 
true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf52dd1e9-09", "ovs_interfaceid": "f52dd1e9-092b-4ab8-946d-dd1aea183ec4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.643 2 DEBUG nova.network.os_vif_util [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converting VIF {"id": "f52dd1e9-092b-4ab8-946d-dd1aea183ec4", "address": "fa:16:3e:94:1a:4b", "network": {"id": "1a3d22d6-addb-4c33-bccc-61618673b1b6", "bridge": "br-int", "label": "tempest-network-smoke--81262139", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf52dd1e9-09", "ovs_interfaceid": "f52dd1e9-092b-4ab8-946d-dd1aea183ec4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.643 2 DEBUG nova.network.os_vif_util [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:94:1a:4b,bridge_name='br-int',has_traffic_filtering=True,id=f52dd1e9-092b-4ab8-946d-dd1aea183ec4,network=Network(1a3d22d6-addb-4c33-bccc-61618673b1b6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf52dd1e9-09') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.644 2 DEBUG nova.objects.instance [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'pci_devices' on Instance uuid 957c4e10-f195-4d5e-97c0-0928296aba31 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.658 2 DEBUG nova.virt.libvirt.driver [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:27:30 compute-0 nova_compute[192079]:   <uuid>957c4e10-f195-4d5e-97c0-0928296aba31</uuid>
Oct 02 12:27:30 compute-0 nova_compute[192079]:   <name>instance-0000007f</name>
Oct 02 12:27:30 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:27:30 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:27:30 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:27:30 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:       <nova:name>tempest-TestNetworkAdvancedServerOps-server-491875560</nova:name>
Oct 02 12:27:30 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:27:30</nova:creationTime>
Oct 02 12:27:30 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:27:30 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:27:30 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:27:30 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:27:30 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:27:30 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:27:30 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:27:30 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:27:30 compute-0 nova_compute[192079]:         <nova:user uuid="1faa7e121a0e43ad8cb4ae5b2cfcc6a2">tempest-TestNetworkAdvancedServerOps-597114071-project-member</nova:user>
Oct 02 12:27:30 compute-0 nova_compute[192079]:         <nova:project uuid="76c7dd40d83e4e3ca71abbebf57921b6">tempest-TestNetworkAdvancedServerOps-597114071</nova:project>
Oct 02 12:27:30 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:27:30 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:27:30 compute-0 nova_compute[192079]:         <nova:port uuid="f52dd1e9-092b-4ab8-946d-dd1aea183ec4">
Oct 02 12:27:30 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.5" ipVersion="4"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:27:30 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:27:30 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:27:30 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <system>
Oct 02 12:27:30 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:27:30 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:27:30 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:27:30 compute-0 nova_compute[192079]:       <entry name="serial">957c4e10-f195-4d5e-97c0-0928296aba31</entry>
Oct 02 12:27:30 compute-0 nova_compute[192079]:       <entry name="uuid">957c4e10-f195-4d5e-97c0-0928296aba31</entry>
Oct 02 12:27:30 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     </system>
Oct 02 12:27:30 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:27:30 compute-0 nova_compute[192079]:   <os>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:   </os>
Oct 02 12:27:30 compute-0 nova_compute[192079]:   <features>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:   </features>
Oct 02 12:27:30 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:27:30 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:27:30 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:27:30 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/957c4e10-f195-4d5e-97c0-0928296aba31/disk"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:27:30 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/957c4e10-f195-4d5e-97c0-0928296aba31/disk.config"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:27:30 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:94:1a:4b"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:       <target dev="tapf52dd1e9-09"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:27:30 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/957c4e10-f195-4d5e-97c0-0928296aba31/console.log" append="off"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <video>
Oct 02 12:27:30 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     </video>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:27:30 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:27:30 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:27:30 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:27:30 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:27:30 compute-0 nova_compute[192079]: </domain>
Oct 02 12:27:30 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.658 2 DEBUG nova.compute.manager [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Preparing to wait for external event network-vif-plugged-f52dd1e9-092b-4ab8-946d-dd1aea183ec4 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.658 2 DEBUG oslo_concurrency.lockutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "957c4e10-f195-4d5e-97c0-0928296aba31-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.659 2 DEBUG oslo_concurrency.lockutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "957c4e10-f195-4d5e-97c0-0928296aba31-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.659 2 DEBUG oslo_concurrency.lockutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "957c4e10-f195-4d5e-97c0-0928296aba31-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.659 2 DEBUG nova.virt.libvirt.vif [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:27:18Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestNetworkAdvancedServerOps-server-491875560',display_name='tempest-TestNetworkAdvancedServerOps-server-491875560',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkadvancedserverops-server-491875560',id=127,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBBFtPcDKg7oBwtPeK2kB3C5slIvpeY9IQucVLArPcU8FLh7VlTWx62ZUGaRO0OLeoqO638ZiwAuYZfHu9NjsOskJMQqY4NwG01wMMMi/eVKSBjzkTyhN0wIh//zV9tpx5Q==',key_name='tempest-TestNetworkAdvancedServerOps-1596699922',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='76c7dd40d83e4e3ca71abbebf57921b6',ramdisk_id='',reservation_id='r-86f6iqbj',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestNetworkAdvancedServerOps-597114071',owner_user_name='tempest-TestNetworkAdvancedServerOps-597114071-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:27:21Z,user_data=None,user_id='1faa7e121a0e43ad8cb4ae5b2cfcc6a2',uuid=957c4e10-f195-4d5e-97c0-0928296aba31,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "f52dd1e9-092b-4ab8-946d-dd1aea183ec4", "address": "fa:16:3e:94:1a:4b", "network": {"id": "1a3d22d6-addb-4c33-bccc-61618673b1b6", "bridge": "br-int", "label": "tempest-network-smoke--81262139", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf52dd1e9-09", "ovs_interfaceid": "f52dd1e9-092b-4ab8-946d-dd1aea183ec4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.659 2 DEBUG nova.network.os_vif_util [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converting VIF {"id": "f52dd1e9-092b-4ab8-946d-dd1aea183ec4", "address": "fa:16:3e:94:1a:4b", "network": {"id": "1a3d22d6-addb-4c33-bccc-61618673b1b6", "bridge": "br-int", "label": "tempest-network-smoke--81262139", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf52dd1e9-09", "ovs_interfaceid": "f52dd1e9-092b-4ab8-946d-dd1aea183ec4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.660 2 DEBUG nova.network.os_vif_util [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:94:1a:4b,bridge_name='br-int',has_traffic_filtering=True,id=f52dd1e9-092b-4ab8-946d-dd1aea183ec4,network=Network(1a3d22d6-addb-4c33-bccc-61618673b1b6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf52dd1e9-09') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.660 2 DEBUG os_vif [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:94:1a:4b,bridge_name='br-int',has_traffic_filtering=True,id=f52dd1e9-092b-4ab8-946d-dd1aea183ec4,network=Network(1a3d22d6-addb-4c33-bccc-61618673b1b6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf52dd1e9-09') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.661 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.661 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.661 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.664 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.665 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapf52dd1e9-09, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.665 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapf52dd1e9-09, col_values=(('external_ids', {'iface-id': 'f52dd1e9-092b-4ab8-946d-dd1aea183ec4', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:94:1a:4b', 'vm-uuid': '957c4e10-f195-4d5e-97c0-0928296aba31'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.666 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:30 compute-0 NetworkManager[51160]: <info>  [1759408050.6675] manager: (tapf52dd1e9-09): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/236)
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.668 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.677 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.679 2 INFO os_vif [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:94:1a:4b,bridge_name='br-int',has_traffic_filtering=True,id=f52dd1e9-092b-4ab8-946d-dd1aea183ec4,network=Network(1a3d22d6-addb-4c33-bccc-61618673b1b6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf52dd1e9-09')
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.754 2 DEBUG nova.virt.libvirt.driver [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.754 2 DEBUG nova.virt.libvirt.driver [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.754 2 DEBUG nova.virt.libvirt.driver [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] No VIF found with MAC fa:16:3e:94:1a:4b, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:27:30 compute-0 nova_compute[192079]: 2025-10-02 12:27:30.755 2 INFO nova.virt.libvirt.driver [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Using config drive
Oct 02 12:27:31 compute-0 nova_compute[192079]: 2025-10-02 12:27:31.046 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:31 compute-0 nova_compute[192079]: 2025-10-02 12:27:31.581 2 INFO nova.virt.libvirt.driver [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Creating config drive at /var/lib/nova/instances/957c4e10-f195-4d5e-97c0-0928296aba31/disk.config
Oct 02 12:27:31 compute-0 nova_compute[192079]: 2025-10-02 12:27:31.586 2 DEBUG oslo_concurrency.processutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/957c4e10-f195-4d5e-97c0-0928296aba31/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpmtpxlb6i execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:27:31 compute-0 nova_compute[192079]: 2025-10-02 12:27:31.710 2 DEBUG oslo_concurrency.processutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/957c4e10-f195-4d5e-97c0-0928296aba31/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpmtpxlb6i" returned: 0 in 0.124s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:27:31 compute-0 kernel: tapf52dd1e9-09: entered promiscuous mode
Oct 02 12:27:31 compute-0 NetworkManager[51160]: <info>  [1759408051.7710] manager: (tapf52dd1e9-09): new Tun device (/org/freedesktop/NetworkManager/Devices/237)
Oct 02 12:27:31 compute-0 ovn_controller[94336]: 2025-10-02T12:27:31Z|00465|binding|INFO|Claiming lport f52dd1e9-092b-4ab8-946d-dd1aea183ec4 for this chassis.
Oct 02 12:27:31 compute-0 ovn_controller[94336]: 2025-10-02T12:27:31Z|00466|binding|INFO|f52dd1e9-092b-4ab8-946d-dd1aea183ec4: Claiming fa:16:3e:94:1a:4b 10.100.0.5
Oct 02 12:27:31 compute-0 nova_compute[192079]: 2025-10-02 12:27:31.773 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:31.782 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:94:1a:4b 10.100.0.5'], port_security=['fa:16:3e:94:1a:4b 10.100.0.5'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.5/28', 'neutron:device_id': '957c4e10-f195-4d5e-97c0-0928296aba31', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-1a3d22d6-addb-4c33-bccc-61618673b1b6', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '76c7dd40d83e4e3ca71abbebf57921b6', 'neutron:revision_number': '2', 'neutron:security_group_ids': '09e396bd-52a1-49cf-8120-7997324047ec', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=da127e9e-be4d-48d5-98c6-9ea13304c295, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=f52dd1e9-092b-4ab8-946d-dd1aea183ec4) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:27:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:31.783 103294 INFO neutron.agent.ovn.metadata.agent [-] Port f52dd1e9-092b-4ab8-946d-dd1aea183ec4 in datapath 1a3d22d6-addb-4c33-bccc-61618673b1b6 bound to our chassis
Oct 02 12:27:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:31.785 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 1a3d22d6-addb-4c33-bccc-61618673b1b6
Oct 02 12:27:31 compute-0 ovn_controller[94336]: 2025-10-02T12:27:31Z|00467|binding|INFO|Setting lport f52dd1e9-092b-4ab8-946d-dd1aea183ec4 ovn-installed in OVS
Oct 02 12:27:31 compute-0 ovn_controller[94336]: 2025-10-02T12:27:31Z|00468|binding|INFO|Setting lport f52dd1e9-092b-4ab8-946d-dd1aea183ec4 up in Southbound
Oct 02 12:27:31 compute-0 nova_compute[192079]: 2025-10-02 12:27:31.789 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:31 compute-0 nova_compute[192079]: 2025-10-02 12:27:31.791 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:31.798 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[fb465946-56b8-4dc5-b738-812c276fdfd7]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:31.800 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap1a3d22d6-a1 in ovnmeta-1a3d22d6-addb-4c33-bccc-61618673b1b6 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:27:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:31.802 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap1a3d22d6-a0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:27:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:31.802 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7ec0412f-bbf5-411c-9855-e9307abc17e3]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:31.803 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[47dafae0-6e0a-40e1-9722-eb1d174b3dc4]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:31.814 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[aa2dded7-75cc-4745-8d84-59d4d49eee16]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:31 compute-0 systemd-machined[152150]: New machine qemu-61-instance-0000007f.
Oct 02 12:27:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:31.829 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6a705101-5293-45eb-b07b-0066671005d9]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:31 compute-0 systemd[1]: Started Virtual Machine qemu-61-instance-0000007f.
Oct 02 12:27:31 compute-0 systemd-udevd[241276]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:27:31 compute-0 NetworkManager[51160]: <info>  [1759408051.8539] device (tapf52dd1e9-09): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:27:31 compute-0 NetworkManager[51160]: <info>  [1759408051.8548] device (tapf52dd1e9-09): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:27:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:31.861 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[ca2cc050-e42a-4da6-9a3c-d7fbd2a42666]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:31 compute-0 NetworkManager[51160]: <info>  [1759408051.8661] manager: (tap1a3d22d6-a0): new Veth device (/org/freedesktop/NetworkManager/Devices/238)
Oct 02 12:27:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:31.866 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3fd661cd-bdb3-438c-a40b-675f80d817f4]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:31.903 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[bdd9533f-49f7-40cf-9c87-0f0fb1c337fb]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:31.906 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[b199e263-fb09-4f2f-9622-5b09ee00b00f]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:31 compute-0 NetworkManager[51160]: <info>  [1759408051.9263] device (tap1a3d22d6-a0): carrier: link connected
Oct 02 12:27:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:31.931 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[09c3ca66-7c3f-400a-9066-1f70fddbc231]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:31.947 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3dd998ed-347d-4252-869c-1360474d7ca7]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap1a3d22d6-a1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:94:a2:c3'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 151], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 604955, 'reachable_time': 15043, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 241305, 'error': None, 'target': 'ovnmeta-1a3d22d6-addb-4c33-bccc-61618673b1b6', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:31.962 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[bf7d7fe9-96b8-4c83-90a6-a9373c6ef483]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe94:a2c3'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 604955, 'tstamp': 604955}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 241306, 'error': None, 'target': 'ovnmeta-1a3d22d6-addb-4c33-bccc-61618673b1b6', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:31.976 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9cd19394-a029-4318-bd23-d923d99e8f2e]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap1a3d22d6-a1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:94:a2:c3'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 2, 'tx_packets': 1, 'rx_bytes': 196, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 2, 'tx_packets': 1, 'rx_bytes': 196, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 151], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 604955, 'reachable_time': 15043, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 2, 'inoctets': 168, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 2, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 168, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 2, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 241307, 'error': None, 'target': 'ovnmeta-1a3d22d6-addb-4c33-bccc-61618673b1b6', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:32.002 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f4d84027-d844-44df-9480-b4a64355a44b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:32.062 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d30ebfc5-27fc-44f1-8453-5754fce91c69]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:32.063 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap1a3d22d6-a0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:32.064 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:32.064 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap1a3d22d6-a0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:27:32 compute-0 nova_compute[192079]: 2025-10-02 12:27:32.066 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:32 compute-0 NetworkManager[51160]: <info>  [1759408052.0668] manager: (tap1a3d22d6-a0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/239)
Oct 02 12:27:32 compute-0 kernel: tap1a3d22d6-a0: entered promiscuous mode
Oct 02 12:27:32 compute-0 nova_compute[192079]: 2025-10-02 12:27:32.069 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:32.070 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap1a3d22d6-a0, col_values=(('external_ids', {'iface-id': '35f4d497-b349-45ac-9eba-6dd608a41834'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:27:32 compute-0 nova_compute[192079]: 2025-10-02 12:27:32.072 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:32 compute-0 ovn_controller[94336]: 2025-10-02T12:27:32Z|00469|binding|INFO|Releasing lport 35f4d497-b349-45ac-9eba-6dd608a41834 from this chassis (sb_readonly=0)
Oct 02 12:27:32 compute-0 nova_compute[192079]: 2025-10-02 12:27:32.073 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:32.076 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/1a3d22d6-addb-4c33-bccc-61618673b1b6.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/1a3d22d6-addb-4c33-bccc-61618673b1b6.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:27:32 compute-0 nova_compute[192079]: 2025-10-02 12:27:32.086 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:32.088 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a1b3a6ac-c4e2-4204-a972-6a1ea8af4726]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:32.090 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-1a3d22d6-addb-4c33-bccc-61618673b1b6
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/1a3d22d6-addb-4c33-bccc-61618673b1b6.pid.haproxy
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 1a3d22d6-addb-4c33-bccc-61618673b1b6
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:27:32 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:32.092 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-1a3d22d6-addb-4c33-bccc-61618673b1b6', 'env', 'PROCESS_TAG=haproxy-1a3d22d6-addb-4c33-bccc-61618673b1b6', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/1a3d22d6-addb-4c33-bccc-61618673b1b6.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:27:32 compute-0 nova_compute[192079]: 2025-10-02 12:27:32.128 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:32 compute-0 podman[241345]: 2025-10-02 12:27:32.463051611 +0000 UTC m=+0.050797215 container create 38f79e3ed94cadb851b9b2f2092c06f9e8444180964b550f52592e6a444629a9 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-1a3d22d6-addb-4c33-bccc-61618673b1b6, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:27:32 compute-0 systemd[1]: Started libpod-conmon-38f79e3ed94cadb851b9b2f2092c06f9e8444180964b550f52592e6a444629a9.scope.
Oct 02 12:27:32 compute-0 podman[241345]: 2025-10-02 12:27:32.433486856 +0000 UTC m=+0.021232480 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:27:32 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:27:32 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/3ee338042088adfab8e08da45a3be044e875fd8789c17be6cb34786200b97f01/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:27:32 compute-0 podman[241345]: 2025-10-02 12:27:32.558685419 +0000 UTC m=+0.146431053 container init 38f79e3ed94cadb851b9b2f2092c06f9e8444180964b550f52592e6a444629a9 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-1a3d22d6-addb-4c33-bccc-61618673b1b6, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2)
Oct 02 12:27:32 compute-0 podman[241345]: 2025-10-02 12:27:32.564137046 +0000 UTC m=+0.151882660 container start 38f79e3ed94cadb851b9b2f2092c06f9e8444180964b550f52592e6a444629a9 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-1a3d22d6-addb-4c33-bccc-61618673b1b6, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:27:32 compute-0 neutron-haproxy-ovnmeta-1a3d22d6-addb-4c33-bccc-61618673b1b6[241362]: [NOTICE]   (241366) : New worker (241368) forked
Oct 02 12:27:32 compute-0 neutron-haproxy-ovnmeta-1a3d22d6-addb-4c33-bccc-61618673b1b6[241362]: [NOTICE]   (241366) : Loading success.
Oct 02 12:27:32 compute-0 ovn_controller[94336]: 2025-10-02T12:27:32Z|00046|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:cb:cf:9a 10.100.0.4
Oct 02 12:27:32 compute-0 ovn_controller[94336]: 2025-10-02T12:27:32Z|00047|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:cb:cf:9a 10.100.0.4
Oct 02 12:27:32 compute-0 nova_compute[192079]: 2025-10-02 12:27:32.619 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:27:32 compute-0 nova_compute[192079]: 2025-10-02 12:27:32.620 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:27:32 compute-0 nova_compute[192079]: 2025-10-02 12:27:32.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:27:32 compute-0 nova_compute[192079]: 2025-10-02 12:27:32.808 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408052.8075092, 957c4e10-f195-4d5e-97c0-0928296aba31 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:27:32 compute-0 nova_compute[192079]: 2025-10-02 12:27:32.809 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] VM Started (Lifecycle Event)
Oct 02 12:27:32 compute-0 nova_compute[192079]: 2025-10-02 12:27:32.849 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:27:32 compute-0 nova_compute[192079]: 2025-10-02 12:27:32.853 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408052.808122, 957c4e10-f195-4d5e-97c0-0928296aba31 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:27:32 compute-0 nova_compute[192079]: 2025-10-02 12:27:32.853 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] VM Paused (Lifecycle Event)
Oct 02 12:27:32 compute-0 nova_compute[192079]: 2025-10-02 12:27:32.888 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:27:32 compute-0 nova_compute[192079]: 2025-10-02 12:27:32.891 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:27:32 compute-0 nova_compute[192079]: 2025-10-02 12:27:32.925 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:27:33 compute-0 nova_compute[192079]: 2025-10-02 12:27:33.318 2 DEBUG nova.compute.manager [req-fdd57bf6-f984-451e-afe3-6a880a180e5f req-9680a4fe-6339-4dd3-82dc-3cf7f3604fe6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Received event network-vif-plugged-f52dd1e9-092b-4ab8-946d-dd1aea183ec4 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:27:33 compute-0 nova_compute[192079]: 2025-10-02 12:27:33.319 2 DEBUG oslo_concurrency.lockutils [req-fdd57bf6-f984-451e-afe3-6a880a180e5f req-9680a4fe-6339-4dd3-82dc-3cf7f3604fe6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "957c4e10-f195-4d5e-97c0-0928296aba31-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:27:33 compute-0 nova_compute[192079]: 2025-10-02 12:27:33.320 2 DEBUG oslo_concurrency.lockutils [req-fdd57bf6-f984-451e-afe3-6a880a180e5f req-9680a4fe-6339-4dd3-82dc-3cf7f3604fe6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "957c4e10-f195-4d5e-97c0-0928296aba31-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:27:33 compute-0 nova_compute[192079]: 2025-10-02 12:27:33.320 2 DEBUG oslo_concurrency.lockutils [req-fdd57bf6-f984-451e-afe3-6a880a180e5f req-9680a4fe-6339-4dd3-82dc-3cf7f3604fe6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "957c4e10-f195-4d5e-97c0-0928296aba31-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:27:33 compute-0 nova_compute[192079]: 2025-10-02 12:27:33.321 2 DEBUG nova.compute.manager [req-fdd57bf6-f984-451e-afe3-6a880a180e5f req-9680a4fe-6339-4dd3-82dc-3cf7f3604fe6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Processing event network-vif-plugged-f52dd1e9-092b-4ab8-946d-dd1aea183ec4 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:27:33 compute-0 nova_compute[192079]: 2025-10-02 12:27:33.322 2 DEBUG nova.compute.manager [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:27:33 compute-0 nova_compute[192079]: 2025-10-02 12:27:33.327 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408053.326781, 957c4e10-f195-4d5e-97c0-0928296aba31 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:27:33 compute-0 nova_compute[192079]: 2025-10-02 12:27:33.327 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] VM Resumed (Lifecycle Event)
Oct 02 12:27:33 compute-0 nova_compute[192079]: 2025-10-02 12:27:33.329 2 DEBUG nova.virt.libvirt.driver [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:27:33 compute-0 nova_compute[192079]: 2025-10-02 12:27:33.332 2 INFO nova.virt.libvirt.driver [-] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Instance spawned successfully.
Oct 02 12:27:33 compute-0 nova_compute[192079]: 2025-10-02 12:27:33.333 2 DEBUG nova.virt.libvirt.driver [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:27:33 compute-0 nova_compute[192079]: 2025-10-02 12:27:33.375 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:27:33 compute-0 nova_compute[192079]: 2025-10-02 12:27:33.380 2 DEBUG nova.virt.libvirt.driver [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:27:33 compute-0 nova_compute[192079]: 2025-10-02 12:27:33.380 2 DEBUG nova.virt.libvirt.driver [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:27:33 compute-0 nova_compute[192079]: 2025-10-02 12:27:33.381 2 DEBUG nova.virt.libvirt.driver [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:27:33 compute-0 nova_compute[192079]: 2025-10-02 12:27:33.381 2 DEBUG nova.virt.libvirt.driver [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:27:33 compute-0 nova_compute[192079]: 2025-10-02 12:27:33.382 2 DEBUG nova.virt.libvirt.driver [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:27:33 compute-0 nova_compute[192079]: 2025-10-02 12:27:33.382 2 DEBUG nova.virt.libvirt.driver [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:27:33 compute-0 nova_compute[192079]: 2025-10-02 12:27:33.386 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:27:33 compute-0 nova_compute[192079]: 2025-10-02 12:27:33.452 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:27:33 compute-0 nova_compute[192079]: 2025-10-02 12:27:33.496 2 DEBUG nova.network.neutron [req-cda82cf9-d4da-4123-b05a-d858061f4db6 req-c2b07c98-c4cb-497f-8c8d-bf38bf6745cc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Updated VIF entry in instance network info cache for port f52dd1e9-092b-4ab8-946d-dd1aea183ec4. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:27:33 compute-0 nova_compute[192079]: 2025-10-02 12:27:33.497 2 DEBUG nova.network.neutron [req-cda82cf9-d4da-4123-b05a-d858061f4db6 req-c2b07c98-c4cb-497f-8c8d-bf38bf6745cc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Updating instance_info_cache with network_info: [{"id": "f52dd1e9-092b-4ab8-946d-dd1aea183ec4", "address": "fa:16:3e:94:1a:4b", "network": {"id": "1a3d22d6-addb-4c33-bccc-61618673b1b6", "bridge": "br-int", "label": "tempest-network-smoke--81262139", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf52dd1e9-09", "ovs_interfaceid": "f52dd1e9-092b-4ab8-946d-dd1aea183ec4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:27:33 compute-0 nova_compute[192079]: 2025-10-02 12:27:33.607 2 INFO nova.compute.manager [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Took 11.58 seconds to spawn the instance on the hypervisor.
Oct 02 12:27:33 compute-0 nova_compute[192079]: 2025-10-02 12:27:33.608 2 DEBUG nova.compute.manager [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:27:33 compute-0 nova_compute[192079]: 2025-10-02 12:27:33.611 2 DEBUG oslo_concurrency.lockutils [req-cda82cf9-d4da-4123-b05a-d858061f4db6 req-c2b07c98-c4cb-497f-8c8d-bf38bf6745cc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-957c4e10-f195-4d5e-97c0-0928296aba31" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:27:33 compute-0 nova_compute[192079]: 2025-10-02 12:27:33.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:27:34 compute-0 nova_compute[192079]: 2025-10-02 12:27:34.088 2 INFO nova.compute.manager [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Took 12.89 seconds to build instance.
Oct 02 12:27:34 compute-0 nova_compute[192079]: 2025-10-02 12:27:34.252 2 DEBUG oslo_concurrency.lockutils [None req-536f2308-fad8-4e98-a523-af781fc5b3fb 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "957c4e10-f195-4d5e-97c0-0928296aba31" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 13.281s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:27:35 compute-0 nova_compute[192079]: 2025-10-02 12:27:35.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:27:35 compute-0 nova_compute[192079]: 2025-10-02 12:27:35.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:27:35 compute-0 nova_compute[192079]: 2025-10-02 12:27:35.667 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:36 compute-0 nova_compute[192079]: 2025-10-02 12:27:36.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:27:36 compute-0 nova_compute[192079]: 2025-10-02 12:27:36.668 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:27:36 compute-0 nova_compute[192079]: 2025-10-02 12:27:36.668 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:27:36 compute-0 nova_compute[192079]: 2025-10-02 12:27:36.694 2 DEBUG nova.compute.manager [req-aecd637a-a220-4cbe-a755-3dac1b75c036 req-34222be2-3a1a-452f-addb-261002d4eec3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Received event network-vif-plugged-f52dd1e9-092b-4ab8-946d-dd1aea183ec4 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:27:36 compute-0 nova_compute[192079]: 2025-10-02 12:27:36.695 2 DEBUG oslo_concurrency.lockutils [req-aecd637a-a220-4cbe-a755-3dac1b75c036 req-34222be2-3a1a-452f-addb-261002d4eec3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "957c4e10-f195-4d5e-97c0-0928296aba31-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:27:36 compute-0 nova_compute[192079]: 2025-10-02 12:27:36.696 2 DEBUG oslo_concurrency.lockutils [req-aecd637a-a220-4cbe-a755-3dac1b75c036 req-34222be2-3a1a-452f-addb-261002d4eec3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "957c4e10-f195-4d5e-97c0-0928296aba31-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:27:36 compute-0 nova_compute[192079]: 2025-10-02 12:27:36.696 2 DEBUG oslo_concurrency.lockutils [req-aecd637a-a220-4cbe-a755-3dac1b75c036 req-34222be2-3a1a-452f-addb-261002d4eec3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "957c4e10-f195-4d5e-97c0-0928296aba31-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:27:36 compute-0 nova_compute[192079]: 2025-10-02 12:27:36.697 2 DEBUG nova.compute.manager [req-aecd637a-a220-4cbe-a755-3dac1b75c036 req-34222be2-3a1a-452f-addb-261002d4eec3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] No waiting events found dispatching network-vif-plugged-f52dd1e9-092b-4ab8-946d-dd1aea183ec4 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:27:36 compute-0 nova_compute[192079]: 2025-10-02 12:27:36.697 2 WARNING nova.compute.manager [req-aecd637a-a220-4cbe-a755-3dac1b75c036 req-34222be2-3a1a-452f-addb-261002d4eec3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Received unexpected event network-vif-plugged-f52dd1e9-092b-4ab8-946d-dd1aea183ec4 for instance with vm_state active and task_state None.
Oct 02 12:27:37 compute-0 nova_compute[192079]: 2025-10-02 12:27:37.021 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "refresh_cache-2fcfea17-10df-499a-8692-facbbc76266b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:27:37 compute-0 nova_compute[192079]: 2025-10-02 12:27:37.021 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquired lock "refresh_cache-2fcfea17-10df-499a-8692-facbbc76266b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:27:37 compute-0 nova_compute[192079]: 2025-10-02 12:27:37.022 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Forcefully refreshing network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2004
Oct 02 12:27:37 compute-0 nova_compute[192079]: 2025-10-02 12:27:37.022 2 DEBUG nova.objects.instance [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lazy-loading 'info_cache' on Instance uuid 2fcfea17-10df-499a-8692-facbbc76266b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:27:37 compute-0 nova_compute[192079]: 2025-10-02 12:27:37.132 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:38 compute-0 podman[241377]: 2025-10-02 12:27:38.149123804 +0000 UTC m=+0.066790441 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm, container_name=ceilometer_agent_compute, managed_by=edpm_ansible, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, 
tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:27:40 compute-0 nova_compute[192079]: 2025-10-02 12:27:40.669 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:41 compute-0 nova_compute[192079]: 2025-10-02 12:27:41.143 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Updating instance_info_cache with network_info: [{"id": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "address": "fa:16:3e:cb:cf:9a", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5f268bcb-29", "ovs_interfaceid": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:27:41 compute-0 nova_compute[192079]: 2025-10-02 12:27:41.826 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Releasing lock "refresh_cache-2fcfea17-10df-499a-8692-facbbc76266b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:27:41 compute-0 nova_compute[192079]: 2025-10-02 12:27:41.826 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Updated the network info_cache for instance _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9929
Oct 02 12:27:41 compute-0 nova_compute[192079]: 2025-10-02 12:27:41.827 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:27:42 compute-0 nova_compute[192079]: 2025-10-02 12:27:42.134 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:43 compute-0 nova_compute[192079]: 2025-10-02 12:27:43.586 2 DEBUG oslo_concurrency.lockutils [None req-e9fcf56a-1466-44a7-993f-738c7e77372d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "2fcfea17-10df-499a-8692-facbbc76266b" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:27:43 compute-0 nova_compute[192079]: 2025-10-02 12:27:43.587 2 DEBUG oslo_concurrency.lockutils [None req-e9fcf56a-1466-44a7-993f-738c7e77372d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:27:43 compute-0 nova_compute[192079]: 2025-10-02 12:27:43.587 2 DEBUG nova.compute.manager [None req-e9fcf56a-1466-44a7-993f-738c7e77372d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:27:43 compute-0 nova_compute[192079]: 2025-10-02 12:27:43.592 2 DEBUG nova.compute.manager [None req-e9fcf56a-1466-44a7-993f-738c7e77372d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 do_stop_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3338
Oct 02 12:27:43 compute-0 nova_compute[192079]: 2025-10-02 12:27:43.593 2 DEBUG nova.objects.instance [None req-e9fcf56a-1466-44a7-993f-738c7e77372d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'flavor' on Instance uuid 2fcfea17-10df-499a-8692-facbbc76266b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:27:43 compute-0 nova_compute[192079]: 2025-10-02 12:27:43.874 2 DEBUG nova.compute.manager [req-65882bcd-4865-4bff-821c-6408cdfe3c14 req-59483f9d-f642-4f99-b968-e44b92762c30 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Received event network-changed-f52dd1e9-092b-4ab8-946d-dd1aea183ec4 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:27:43 compute-0 nova_compute[192079]: 2025-10-02 12:27:43.875 2 DEBUG nova.compute.manager [req-65882bcd-4865-4bff-821c-6408cdfe3c14 req-59483f9d-f642-4f99-b968-e44b92762c30 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Refreshing instance network info cache due to event network-changed-f52dd1e9-092b-4ab8-946d-dd1aea183ec4. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:27:43 compute-0 nova_compute[192079]: 2025-10-02 12:27:43.876 2 DEBUG oslo_concurrency.lockutils [req-65882bcd-4865-4bff-821c-6408cdfe3c14 req-59483f9d-f642-4f99-b968-e44b92762c30 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-957c4e10-f195-4d5e-97c0-0928296aba31" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:27:43 compute-0 nova_compute[192079]: 2025-10-02 12:27:43.876 2 DEBUG oslo_concurrency.lockutils [req-65882bcd-4865-4bff-821c-6408cdfe3c14 req-59483f9d-f642-4f99-b968-e44b92762c30 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-957c4e10-f195-4d5e-97c0-0928296aba31" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:27:43 compute-0 nova_compute[192079]: 2025-10-02 12:27:43.876 2 DEBUG nova.network.neutron [req-65882bcd-4865-4bff-821c-6408cdfe3c14 req-59483f9d-f642-4f99-b968-e44b92762c30 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Refreshing network info cache for port f52dd1e9-092b-4ab8-946d-dd1aea183ec4 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:27:44 compute-0 nova_compute[192079]: 2025-10-02 12:27:44.575 2 DEBUG nova.objects.instance [None req-e9fcf56a-1466-44a7-993f-738c7e77372d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'info_cache' on Instance uuid 2fcfea17-10df-499a-8692-facbbc76266b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:27:45 compute-0 nova_compute[192079]: 2025-10-02 12:27:45.672 2 DEBUG nova.virt.libvirt.driver [None req-e9fcf56a-1466-44a7-993f-738c7e77372d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Shutting down instance from state 1 _clean_shutdown /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4071
Oct 02 12:27:45 compute-0 nova_compute[192079]: 2025-10-02 12:27:45.674 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:46 compute-0 podman[241414]: 2025-10-02 12:27:46.1546145 +0000 UTC m=+0.060908452 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, name=ubi9-minimal, build-date=2025-08-20T13:12:41, com.redhat.component=ubi9-minimal-container, vendor=Red Hat, Inc., container_name=openstack_network_exporter, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, version=9.6, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., url=https://catalog.redhat.com/en/search?searchType=containers, config_id=edpm, maintainer=Red Hat, Inc., io.buildah.version=1.33.7, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., io.openshift.tags=minimal rhel9, distribution-scope=public, release=1755695350, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, managed_by=edpm_ansible, vcs-type=git, architecture=x86_64, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.openshift.expose-services=)
Oct 02 12:27:46 compute-0 podman[241415]: 2025-10-02 12:27:46.183691463 +0000 UTC m=+0.090908630 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_id=multipathd, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=multipathd, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:27:47 compute-0 nova_compute[192079]: 2025-10-02 12:27:47.136 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:48 compute-0 kernel: tap5f268bcb-29 (unregistering): left promiscuous mode
Oct 02 12:27:48 compute-0 NetworkManager[51160]: <info>  [1759408068.0534] device (tap5f268bcb-29): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:27:48 compute-0 ovn_controller[94336]: 2025-10-02T12:27:48Z|00470|binding|INFO|Releasing lport 5f268bcb-29c1-4e4e-a36d-b2ec144d3dca from this chassis (sb_readonly=0)
Oct 02 12:27:48 compute-0 ovn_controller[94336]: 2025-10-02T12:27:48Z|00471|binding|INFO|Setting lport 5f268bcb-29c1-4e4e-a36d-b2ec144d3dca down in Southbound
Oct 02 12:27:48 compute-0 nova_compute[192079]: 2025-10-02 12:27:48.062 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:48 compute-0 ovn_controller[94336]: 2025-10-02T12:27:48Z|00472|binding|INFO|Removing iface tap5f268bcb-29 ovn-installed in OVS
Oct 02 12:27:48 compute-0 nova_compute[192079]: 2025-10-02 12:27:48.075 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:48 compute-0 systemd[1]: machine-qemu\x2d60\x2dinstance\x2d0000007d.scope: Deactivated successfully.
Oct 02 12:27:48 compute-0 systemd[1]: machine-qemu\x2d60\x2dinstance\x2d0000007d.scope: Consumed 13.891s CPU time.
Oct 02 12:27:48 compute-0 systemd-machined[152150]: Machine qemu-60-instance-0000007d terminated.
Oct 02 12:27:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:48.320 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:cb:cf:9a 10.100.0.4'], port_security=['fa:16:3e:cb:cf:9a 10.100.0.4'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.4/28', 'neutron:device_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-a04f937a-375f-4fb0-90fe-5f514a88668f', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'neutron:revision_number': '4', 'neutron:security_group_ids': 'c0383701-0ec7-4f3b-8585-5effc4f5ca5a', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com', 'neutron:port_fip': '192.168.122.248'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=50c0aa38-5fd8-41c7-b4bf-85b59722c5c3, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=5f268bcb-29c1-4e4e-a36d-b2ec144d3dca) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:27:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:48.322 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 5f268bcb-29c1-4e4e-a36d-b2ec144d3dca in datapath a04f937a-375f-4fb0-90fe-5f514a88668f unbound from our chassis
Oct 02 12:27:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:48.324 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network a04f937a-375f-4fb0-90fe-5f514a88668f, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:27:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:48.325 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7b4f22e8-eeea-44e8-84c9-13091aa6f1c8]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:48.325 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f namespace which is not needed anymore
Oct 02 12:27:48 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[241093]: [NOTICE]   (241097) : haproxy version is 2.8.14-c23fe91
Oct 02 12:27:48 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[241093]: [NOTICE]   (241097) : path to executable is /usr/sbin/haproxy
Oct 02 12:27:48 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[241093]: [WARNING]  (241097) : Exiting Master process...
Oct 02 12:27:48 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[241093]: [ALERT]    (241097) : Current worker (241099) exited with code 143 (Terminated)
Oct 02 12:27:48 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[241093]: [WARNING]  (241097) : All workers exited. Exiting... (0)
Oct 02 12:27:48 compute-0 systemd[1]: libpod-98e9159928f908e03b232c4892233de63e6ddfbb4e38ab9c6b1a45fb984b5b71.scope: Deactivated successfully.
Oct 02 12:27:48 compute-0 podman[241493]: 2025-10-02 12:27:48.465711374 +0000 UTC m=+0.063574546 container died 98e9159928f908e03b232c4892233de63e6ddfbb4e38ab9c6b1a45fb984b5b71 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS)
Oct 02 12:27:48 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-98e9159928f908e03b232c4892233de63e6ddfbb4e38ab9c6b1a45fb984b5b71-userdata-shm.mount: Deactivated successfully.
Oct 02 12:27:48 compute-0 systemd[1]: var-lib-containers-storage-overlay-13eb36f7ba48232c72605ffe00abf08809a2d1b90d9a54e07f9bb8c6367051f0-merged.mount: Deactivated successfully.
Oct 02 12:27:48 compute-0 podman[241493]: 2025-10-02 12:27:48.566720731 +0000 UTC m=+0.164583893 container cleanup 98e9159928f908e03b232c4892233de63e6ddfbb4e38ab9c6b1a45fb984b5b71 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:27:48 compute-0 systemd[1]: libpod-conmon-98e9159928f908e03b232c4892233de63e6ddfbb4e38ab9c6b1a45fb984b5b71.scope: Deactivated successfully.
Oct 02 12:27:48 compute-0 podman[241524]: 2025-10-02 12:27:48.640958646 +0000 UTC m=+0.054714453 container remove 98e9159928f908e03b232c4892233de63e6ddfbb4e38ab9c6b1a45fb984b5b71 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:27:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:48.649 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ac315551-8a6c-48ac-9c24-3c3a3be41d6b]: (4, ('Thu Oct  2 12:27:48 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f (98e9159928f908e03b232c4892233de63e6ddfbb4e38ab9c6b1a45fb984b5b71)\n98e9159928f908e03b232c4892233de63e6ddfbb4e38ab9c6b1a45fb984b5b71\nThu Oct  2 12:27:48 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f (98e9159928f908e03b232c4892233de63e6ddfbb4e38ab9c6b1a45fb984b5b71)\n98e9159928f908e03b232c4892233de63e6ddfbb4e38ab9c6b1a45fb984b5b71\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:48.652 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9a73a5e0-91d5-47f7-9f91-1a9de9cf0a2c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:48.653 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapa04f937a-30, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:27:48 compute-0 nova_compute[192079]: 2025-10-02 12:27:48.654 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:48 compute-0 kernel: tapa04f937a-30: left promiscuous mode
Oct 02 12:27:48 compute-0 nova_compute[192079]: 2025-10-02 12:27:48.670 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:48.673 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[08c06809-c211-465b-a5c5-7b71fb4b2874]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:48 compute-0 nova_compute[192079]: 2025-10-02 12:27:48.689 2 INFO nova.virt.libvirt.driver [None req-e9fcf56a-1466-44a7-993f-738c7e77372d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Instance shutdown successfully after 3 seconds.
Oct 02 12:27:48 compute-0 nova_compute[192079]: 2025-10-02 12:27:48.695 2 INFO nova.virt.libvirt.driver [-] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Instance destroyed successfully.
Oct 02 12:27:48 compute-0 nova_compute[192079]: 2025-10-02 12:27:48.695 2 DEBUG nova.objects.instance [None req-e9fcf56a-1466-44a7-993f-738c7e77372d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'numa_topology' on Instance uuid 2fcfea17-10df-499a-8692-facbbc76266b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:27:48 compute-0 nova_compute[192079]: 2025-10-02 12:27:48.697 2 DEBUG nova.network.neutron [req-65882bcd-4865-4bff-821c-6408cdfe3c14 req-59483f9d-f642-4f99-b968-e44b92762c30 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Updated VIF entry in instance network info cache for port f52dd1e9-092b-4ab8-946d-dd1aea183ec4. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:27:48 compute-0 nova_compute[192079]: 2025-10-02 12:27:48.698 2 DEBUG nova.network.neutron [req-65882bcd-4865-4bff-821c-6408cdfe3c14 req-59483f9d-f642-4f99-b968-e44b92762c30 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Updating instance_info_cache with network_info: [{"id": "f52dd1e9-092b-4ab8-946d-dd1aea183ec4", "address": "fa:16:3e:94:1a:4b", "network": {"id": "1a3d22d6-addb-4c33-bccc-61618673b1b6", "bridge": "br-int", "label": "tempest-network-smoke--81262139", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf52dd1e9-09", "ovs_interfaceid": "f52dd1e9-092b-4ab8-946d-dd1aea183ec4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:27:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:48.713 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f15d9f8b-3bb4-480b-a925-8ad59bec1d46]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:48.715 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[898ada5b-8040-4ff9-87f8-38fe1c54f3d0]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:48.738 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6b603d5a-54db-4fbd-8c98-9b51d7218c98]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 603455, 'reachable_time': 34845, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 241543, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:48.741 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:27:48 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:27:48.742 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[ecf1123f-d506-4698-b04c-e45d3e19d81e]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:27:48 compute-0 systemd[1]: run-netns-ovnmeta\x2da04f937a\x2d375f\x2d4fb0\x2d90fe\x2d5f514a88668f.mount: Deactivated successfully.
Oct 02 12:27:48 compute-0 nova_compute[192079]: 2025-10-02 12:27:48.846 2 DEBUG nova.compute.manager [None req-e9fcf56a-1466-44a7-993f-738c7e77372d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:27:48 compute-0 ovn_controller[94336]: 2025-10-02T12:27:48Z|00048|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:94:1a:4b 10.100.0.5
Oct 02 12:27:48 compute-0 ovn_controller[94336]: 2025-10-02T12:27:48Z|00049|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:94:1a:4b 10.100.0.5
Oct 02 12:27:49 compute-0 nova_compute[192079]: 2025-10-02 12:27:49.104 2 DEBUG oslo_concurrency.lockutils [req-65882bcd-4865-4bff-821c-6408cdfe3c14 req-59483f9d-f642-4f99-b968-e44b92762c30 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-957c4e10-f195-4d5e-97c0-0928296aba31" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:27:49 compute-0 nova_compute[192079]: 2025-10-02 12:27:49.706 2 DEBUG oslo_concurrency.lockutils [None req-e9fcf56a-1466-44a7-993f-738c7e77372d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 6.119s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:27:50 compute-0 podman[241545]: 2025-10-02 12:27:50.171078471 +0000 UTC m=+0.075199054 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>)
Oct 02 12:27:50 compute-0 podman[241544]: 2025-10-02 12:27:50.173344192 +0000 UTC m=+0.079670545 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, tcib_managed=true, config_id=iscsid, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=iscsid, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible)
Oct 02 12:27:50 compute-0 nova_compute[192079]: 2025-10-02 12:27:50.300 2 DEBUG nova.compute.manager [req-3a85eeb9-af95-440d-97fc-b48117ed2155 req-e7abfc4e-632b-4197-b115-cba88424d15d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Received event network-vif-unplugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:27:50 compute-0 nova_compute[192079]: 2025-10-02 12:27:50.301 2 DEBUG oslo_concurrency.lockutils [req-3a85eeb9-af95-440d-97fc-b48117ed2155 req-e7abfc4e-632b-4197-b115-cba88424d15d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "2fcfea17-10df-499a-8692-facbbc76266b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:27:50 compute-0 rsyslogd[1013]: imjournal: journal files changed, reloading...  [v8.2506.0-2.el9 try https://www.rsyslog.com/e/0 ]
Oct 02 12:27:50 compute-0 nova_compute[192079]: 2025-10-02 12:27:50.302 2 DEBUG oslo_concurrency.lockutils [req-3a85eeb9-af95-440d-97fc-b48117ed2155 req-e7abfc4e-632b-4197-b115-cba88424d15d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:27:50 compute-0 rsyslogd[1013]: imjournal: journal files changed, reloading...  [v8.2506.0-2.el9 try https://www.rsyslog.com/e/0 ]
Oct 02 12:27:50 compute-0 nova_compute[192079]: 2025-10-02 12:27:50.302 2 DEBUG oslo_concurrency.lockutils [req-3a85eeb9-af95-440d-97fc-b48117ed2155 req-e7abfc4e-632b-4197-b115-cba88424d15d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:27:50 compute-0 nova_compute[192079]: 2025-10-02 12:27:50.303 2 DEBUG nova.compute.manager [req-3a85eeb9-af95-440d-97fc-b48117ed2155 req-e7abfc4e-632b-4197-b115-cba88424d15d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] No waiting events found dispatching network-vif-unplugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:27:50 compute-0 nova_compute[192079]: 2025-10-02 12:27:50.303 2 WARNING nova.compute.manager [req-3a85eeb9-af95-440d-97fc-b48117ed2155 req-e7abfc4e-632b-4197-b115-cba88424d15d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Received unexpected event network-vif-unplugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca for instance with vm_state stopped and task_state None.
Oct 02 12:27:50 compute-0 nova_compute[192079]: 2025-10-02 12:27:50.677 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:52 compute-0 nova_compute[192079]: 2025-10-02 12:27:52.140 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:53 compute-0 nova_compute[192079]: 2025-10-02 12:27:53.617 2 DEBUG nova.compute.manager [req-0602387d-2621-4467-a869-84d0deedcebb req-d53dc63d-6309-4025-b9d8-568ca8b3f451 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Received event network-vif-plugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:27:53 compute-0 nova_compute[192079]: 2025-10-02 12:27:53.617 2 DEBUG oslo_concurrency.lockutils [req-0602387d-2621-4467-a869-84d0deedcebb req-d53dc63d-6309-4025-b9d8-568ca8b3f451 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "2fcfea17-10df-499a-8692-facbbc76266b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:27:53 compute-0 nova_compute[192079]: 2025-10-02 12:27:53.618 2 DEBUG oslo_concurrency.lockutils [req-0602387d-2621-4467-a869-84d0deedcebb req-d53dc63d-6309-4025-b9d8-568ca8b3f451 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:27:53 compute-0 nova_compute[192079]: 2025-10-02 12:27:53.618 2 DEBUG oslo_concurrency.lockutils [req-0602387d-2621-4467-a869-84d0deedcebb req-d53dc63d-6309-4025-b9d8-568ca8b3f451 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:27:53 compute-0 nova_compute[192079]: 2025-10-02 12:27:53.619 2 DEBUG nova.compute.manager [req-0602387d-2621-4467-a869-84d0deedcebb req-d53dc63d-6309-4025-b9d8-568ca8b3f451 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] No waiting events found dispatching network-vif-plugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:27:53 compute-0 nova_compute[192079]: 2025-10-02 12:27:53.619 2 WARNING nova.compute.manager [req-0602387d-2621-4467-a869-84d0deedcebb req-d53dc63d-6309-4025-b9d8-568ca8b3f451 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Received unexpected event network-vif-plugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca for instance with vm_state stopped and task_state None.
Oct 02 12:27:55 compute-0 nova_compute[192079]: 2025-10-02 12:27:55.181 2 INFO nova.compute.manager [None req-941f8ed0-ac14-4988-969c-fecc1e40203e 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Get console output
Oct 02 12:27:55 compute-0 nova_compute[192079]: 2025-10-02 12:27:55.187 55 INFO nova.privsep.libvirt [-] Ignored error while reading from instance console pty: can't concat NoneType to bytes
Oct 02 12:27:55 compute-0 nova_compute[192079]: 2025-10-02 12:27:55.679 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:56 compute-0 nova_compute[192079]: 2025-10-02 12:27:56.029 2 DEBUG nova.objects.instance [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'flavor' on Instance uuid 2fcfea17-10df-499a-8692-facbbc76266b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:27:57 compute-0 nova_compute[192079]: 2025-10-02 12:27:57.142 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:27:57 compute-0 nova_compute[192079]: 2025-10-02 12:27:57.981 2 DEBUG nova.objects.instance [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'info_cache' on Instance uuid 2fcfea17-10df-499a-8692-facbbc76266b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:27:59 compute-0 nova_compute[192079]: 2025-10-02 12:27:59.167 2 DEBUG oslo_concurrency.lockutils [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "refresh_cache-2fcfea17-10df-499a-8692-facbbc76266b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:27:59 compute-0 nova_compute[192079]: 2025-10-02 12:27:59.168 2 DEBUG oslo_concurrency.lockutils [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquired lock "refresh_cache-2fcfea17-10df-499a-8692-facbbc76266b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:27:59 compute-0 nova_compute[192079]: 2025-10-02 12:27:59.168 2 DEBUG nova.network.neutron [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:28:00 compute-0 nova_compute[192079]: 2025-10-02 12:28:00.086 2 INFO nova.compute.manager [None req-e50f67d2-77f1-4393-804f-da9a0b99eacc 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Pausing
Oct 02 12:28:00 compute-0 nova_compute[192079]: 2025-10-02 12:28:00.087 2 DEBUG nova.objects.instance [None req-e50f67d2-77f1-4393-804f-da9a0b99eacc 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'flavor' on Instance uuid 957c4e10-f195-4d5e-97c0-0928296aba31 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:28:00 compute-0 podman[241590]: 2025-10-02 12:28:00.12682893 +0000 UTC m=+0.044840344 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_id=ovn_metadata_agent, container_name=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0)
Oct 02 12:28:00 compute-0 podman[241592]: 2025-10-02 12:28:00.164894349 +0000 UTC m=+0.075259684 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:28:00 compute-0 podman[241591]: 2025-10-02 12:28:00.175725725 +0000 UTC m=+0.085918626 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, config_id=ovn_controller, container_name=ovn_controller, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.license=GPLv2, tcib_managed=true, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:28:00 compute-0 nova_compute[192079]: 2025-10-02 12:28:00.681 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:01 compute-0 nova_compute[192079]: 2025-10-02 12:28:01.398 2 DEBUG nova.network.neutron [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Updating instance_info_cache with network_info: [{"id": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "address": "fa:16:3e:cb:cf:9a", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5f268bcb-29", "ovs_interfaceid": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:28:01 compute-0 nova_compute[192079]: 2025-10-02 12:28:01.400 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408081.400713, 957c4e10-f195-4d5e-97c0-0928296aba31 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:28:01 compute-0 nova_compute[192079]: 2025-10-02 12:28:01.401 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] VM Paused (Lifecycle Event)
Oct 02 12:28:01 compute-0 nova_compute[192079]: 2025-10-02 12:28:01.402 2 DEBUG nova.compute.manager [None req-e50f67d2-77f1-4393-804f-da9a0b99eacc 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:28:01 compute-0 nova_compute[192079]: 2025-10-02 12:28:01.487 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:28:01 compute-0 nova_compute[192079]: 2025-10-02 12:28:01.490 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: active, current task_state: pausing, current DB power_state: 1, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:28:01 compute-0 nova_compute[192079]: 2025-10-02 12:28:01.584 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] During sync_power_state the instance has a pending task (pausing). Skip.
Oct 02 12:28:01 compute-0 nova_compute[192079]: 2025-10-02 12:28:01.646 2 DEBUG oslo_concurrency.lockutils [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Releasing lock "refresh_cache-2fcfea17-10df-499a-8692-facbbc76266b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:28:01 compute-0 nova_compute[192079]: 2025-10-02 12:28:01.723 2 INFO nova.virt.libvirt.driver [-] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Instance destroyed successfully.
Oct 02 12:28:01 compute-0 nova_compute[192079]: 2025-10-02 12:28:01.723 2 DEBUG nova.objects.instance [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'numa_topology' on Instance uuid 2fcfea17-10df-499a-8692-facbbc76266b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:28:02 compute-0 nova_compute[192079]: 2025-10-02 12:28:02.144 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:02.229 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:28:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:02.229 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:28:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:02.230 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:28:02 compute-0 nova_compute[192079]: 2025-10-02 12:28:02.441 2 DEBUG nova.objects.instance [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'resources' on Instance uuid 2fcfea17-10df-499a-8692-facbbc76266b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.106 2 DEBUG nova.virt.libvirt.vif [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:27:05Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerActionsTestJSON-server-1253918640',display_name='tempest-ServerActionsTestJSON-server-1253918640',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serveractionstestjson-server-1253918640',id=125,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBJJLom+UJzZg9dduKQv+725QaYDZoMXvP/xlpKnb/K05SGc4dkyLwCDweJ3QifTmxLWqK9Sz5A12yMJbzpa36v5C4bUqj8uiWk/vbR1BAjBdKM9d/Ug8M2nT8LwDBGP/9A==',key_name='tempest-keypair-1006285918',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:27:18Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=4,progress=0,project_id='e564a4cad5d443dba81ec04d2a05ced9',ramdisk_id='',reservation_id='r-r8iquef4',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=<?>,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-ServerActionsTestJSON-1646745100',owner_user_name='tempest-ServerActionsTestJSON-1646745100-project-member'},tags=<?>,task_state='powering-on',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:27:49Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='d54b1826121b47caba89932a78c06ccd',uuid=2fcfea17-10df-499a-8692-facbbc76266b,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='stopped') vif={"id": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "address": "fa:16:3e:cb:cf:9a", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": 
{"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5f268bcb-29", "ovs_interfaceid": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.107 2 DEBUG nova.network.os_vif_util [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converting VIF {"id": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "address": "fa:16:3e:cb:cf:9a", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5f268bcb-29", "ovs_interfaceid": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.108 2 DEBUG nova.network.os_vif_util [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:cb:cf:9a,bridge_name='br-int',has_traffic_filtering=True,id=5f268bcb-29c1-4e4e-a36d-b2ec144d3dca,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5f268bcb-29') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.109 2 DEBUG os_vif [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:cb:cf:9a,bridge_name='br-int',has_traffic_filtering=True,id=5f268bcb-29c1-4e4e-a36d-b2ec144d3dca,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5f268bcb-29') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.112 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.113 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap5f268bcb-29, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.115 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.117 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.120 2 INFO os_vif [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:cb:cf:9a,bridge_name='br-int',has_traffic_filtering=True,id=5f268bcb-29c1-4e4e-a36d-b2ec144d3dca,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5f268bcb-29')
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.130 2 DEBUG nova.virt.libvirt.driver [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Start _get_guest_xml network_info=[{"id": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "address": "fa:16:3e:cb:cf:9a", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5f268bcb-29", "ovs_interfaceid": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum=<?>,container_format='bare',created_at=<?>,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=1,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=<?>,status=<?>,tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.135 2 WARNING nova.virt.libvirt.driver [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.142 2 DEBUG nova.virt.libvirt.host [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.143 2 DEBUG nova.virt.libvirt.host [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.146 2 DEBUG nova.virt.libvirt.host [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.147 2 DEBUG nova.virt.libvirt.host [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.148 2 DEBUG nova.virt.libvirt.driver [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.148 2 DEBUG nova.virt.hardware [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=<?>,container_format='bare',created_at=<?>,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=1,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=<?>,status=<?>,tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.149 2 DEBUG nova.virt.hardware [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.149 2 DEBUG nova.virt.hardware [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.149 2 DEBUG nova.virt.hardware [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.149 2 DEBUG nova.virt.hardware [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.150 2 DEBUG nova.virt.hardware [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.150 2 DEBUG nova.virt.hardware [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.150 2 DEBUG nova.virt.hardware [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.150 2 DEBUG nova.virt.hardware [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.151 2 DEBUG nova.virt.hardware [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.151 2 DEBUG nova.virt.hardware [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.151 2 DEBUG nova.objects.instance [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'vcpu_model' on Instance uuid 2fcfea17-10df-499a-8692-facbbc76266b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.229 2 DEBUG oslo_concurrency.processutils [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk.config --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.318 2 DEBUG oslo_concurrency.processutils [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk.config --force-share --output=json" returned: 0 in 0.089s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.320 2 DEBUG oslo_concurrency.lockutils [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "/var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.321 2 DEBUG oslo_concurrency.lockutils [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "/var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.322 2 DEBUG oslo_concurrency.lockutils [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "/var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.323 2 DEBUG nova.virt.libvirt.vif [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:27:05Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerActionsTestJSON-server-1253918640',display_name='tempest-ServerActionsTestJSON-server-1253918640',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serveractionstestjson-server-1253918640',id=125,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBJJLom+UJzZg9dduKQv+725QaYDZoMXvP/xlpKnb/K05SGc4dkyLwCDweJ3QifTmxLWqK9Sz5A12yMJbzpa36v5C4bUqj8uiWk/vbR1BAjBdKM9d/Ug8M2nT8LwDBGP/9A==',key_name='tempest-keypair-1006285918',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:27:18Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=4,progress=0,project_id='e564a4cad5d443dba81ec04d2a05ced9',ramdisk_id='',reservation_id='r-r8iquef4',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=<?>,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-ServerActionsTestJSON-1646745100',owner_user_name='tempest-ServerActionsTestJSON-1646745100-project-member'},tags=<?>,task_state='powering-on',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:27:49Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='d54b1826121b47caba89932a78c06ccd',uuid=2fcfea17-10df-499a-8692-facbbc76266b,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='stopped') vif={"id": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "address": "fa:16:3e:cb:cf:9a", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], 
"gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5f268bcb-29", "ovs_interfaceid": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.323 2 DEBUG nova.network.os_vif_util [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converting VIF {"id": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "address": "fa:16:3e:cb:cf:9a", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5f268bcb-29", "ovs_interfaceid": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.324 2 DEBUG nova.network.os_vif_util [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:cb:cf:9a,bridge_name='br-int',has_traffic_filtering=True,id=5f268bcb-29c1-4e4e-a36d-b2ec144d3dca,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5f268bcb-29') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.325 2 DEBUG nova.objects.instance [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'pci_devices' on Instance uuid 2fcfea17-10df-499a-8692-facbbc76266b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.338 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759408068.337456, 2fcfea17-10df-499a-8692-facbbc76266b => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.339 2 INFO nova.compute.manager [-] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] VM Stopped (Lifecycle Event)
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.375 2 DEBUG nova.virt.libvirt.driver [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:28:03 compute-0 nova_compute[192079]:   <uuid>2fcfea17-10df-499a-8692-facbbc76266b</uuid>
Oct 02 12:28:03 compute-0 nova_compute[192079]:   <name>instance-0000007d</name>
Oct 02 12:28:03 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:28:03 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:28:03 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:28:03 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:       <nova:name>tempest-ServerActionsTestJSON-server-1253918640</nova:name>
Oct 02 12:28:03 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:28:03</nova:creationTime>
Oct 02 12:28:03 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:28:03 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:28:03 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:28:03 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:28:03 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:28:03 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:28:03 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:28:03 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:28:03 compute-0 nova_compute[192079]:         <nova:user uuid="d54b1826121b47caba89932a78c06ccd">tempest-ServerActionsTestJSON-1646745100-project-member</nova:user>
Oct 02 12:28:03 compute-0 nova_compute[192079]:         <nova:project uuid="e564a4cad5d443dba81ec04d2a05ced9">tempest-ServerActionsTestJSON-1646745100</nova:project>
Oct 02 12:28:03 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:28:03 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:28:03 compute-0 nova_compute[192079]:         <nova:port uuid="5f268bcb-29c1-4e4e-a36d-b2ec144d3dca">
Oct 02 12:28:03 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.4" ipVersion="4"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:28:03 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:28:03 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:28:03 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <system>
Oct 02 12:28:03 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:28:03 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:28:03 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:28:03 compute-0 nova_compute[192079]:       <entry name="serial">2fcfea17-10df-499a-8692-facbbc76266b</entry>
Oct 02 12:28:03 compute-0 nova_compute[192079]:       <entry name="uuid">2fcfea17-10df-499a-8692-facbbc76266b</entry>
Oct 02 12:28:03 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     </system>
Oct 02 12:28:03 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:28:03 compute-0 nova_compute[192079]:   <os>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:   </os>
Oct 02 12:28:03 compute-0 nova_compute[192079]:   <features>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:   </features>
Oct 02 12:28:03 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:28:03 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:28:03 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:28:03 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:28:03 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk.config"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:28:03 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:cb:cf:9a"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:       <target dev="tap5f268bcb-29"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:28:03 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/console.log" append="off"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <video>
Oct 02 12:28:03 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     </video>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <input type="keyboard" bus="usb"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:28:03 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:28:03 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:28:03 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:28:03 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:28:03 compute-0 nova_compute[192079]: </domain>
Oct 02 12:28:03 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.377 2 DEBUG oslo_concurrency.processutils [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.399 2 DEBUG nova.compute.manager [None req-7b652046-a95f-45ef-9671-77984b52f6ee - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.452 2 DEBUG oslo_concurrency.processutils [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk --force-share --output=json" returned: 0 in 0.075s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.453 2 DEBUG oslo_concurrency.processutils [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.528 2 DEBUG oslo_concurrency.processutils [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk --force-share --output=json" returned: 0 in 0.075s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.529 2 DEBUG nova.objects.instance [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'trusted_certs' on Instance uuid 2fcfea17-10df-499a-8692-facbbc76266b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.579 2 DEBUG oslo_concurrency.processutils [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.647 2 DEBUG oslo_concurrency.processutils [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.068s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.648 2 DEBUG nova.virt.disk.api [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Checking if we can resize image /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.649 2 DEBUG oslo_concurrency.processutils [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.703 2 DEBUG oslo_concurrency.processutils [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk --force-share --output=json" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.704 2 DEBUG nova.virt.disk.api [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Cannot resize image /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.705 2 DEBUG nova.objects.instance [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'migration_context' on Instance uuid 2fcfea17-10df-499a-8692-facbbc76266b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.746 2 DEBUG nova.virt.libvirt.vif [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:27:05Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerActionsTestJSON-server-1253918640',display_name='tempest-ServerActionsTestJSON-server-1253918640',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serveractionstestjson-server-1253918640',id=125,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBJJLom+UJzZg9dduKQv+725QaYDZoMXvP/xlpKnb/K05SGc4dkyLwCDweJ3QifTmxLWqK9Sz5A12yMJbzpa36v5C4bUqj8uiWk/vbR1BAjBdKM9d/Ug8M2nT8LwDBGP/9A==',key_name='tempest-keypair-1006285918',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:27:18Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=<?>,power_state=4,progress=0,project_id='e564a4cad5d443dba81ec04d2a05ced9',ramdisk_id='',reservation_id='r-r8iquef4',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=<?>,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-ServerActionsTestJSON-1646745100',owner_user_name='tempest-ServerActionsTestJSON-1646745100-project-member'},tags=<?>,task_state='powering-on',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:27:49Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='d54b1826121b47caba89932a78c06ccd',uuid=2fcfea17-10df-499a-8692-facbbc76266b,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='stopped') vif={"id": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "address": "fa:16:3e:cb:cf:9a", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": 
[], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5f268bcb-29", "ovs_interfaceid": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.747 2 DEBUG nova.network.os_vif_util [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converting VIF {"id": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "address": "fa:16:3e:cb:cf:9a", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5f268bcb-29", "ovs_interfaceid": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.748 2 DEBUG nova.network.os_vif_util [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:cb:cf:9a,bridge_name='br-int',has_traffic_filtering=True,id=5f268bcb-29c1-4e4e-a36d-b2ec144d3dca,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5f268bcb-29') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.748 2 DEBUG os_vif [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:cb:cf:9a,bridge_name='br-int',has_traffic_filtering=True,id=5f268bcb-29c1-4e4e-a36d-b2ec144d3dca,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5f268bcb-29') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.749 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.749 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.750 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.753 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.753 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap5f268bcb-29, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.754 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap5f268bcb-29, col_values=(('external_ids', {'iface-id': '5f268bcb-29c1-4e4e-a36d-b2ec144d3dca', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:cb:cf:9a', 'vm-uuid': '2fcfea17-10df-499a-8692-facbbc76266b'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:28:03 compute-0 NetworkManager[51160]: <info>  [1759408083.7567] manager: (tap5f268bcb-29): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/240)
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.755 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.760 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.762 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.763 2 INFO os_vif [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:cb:cf:9a,bridge_name='br-int',has_traffic_filtering=True,id=5f268bcb-29c1-4e4e-a36d-b2ec144d3dca,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5f268bcb-29')
Oct 02 12:28:03 compute-0 kernel: tap5f268bcb-29: entered promiscuous mode
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.866 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:03 compute-0 NetworkManager[51160]: <info>  [1759408083.8669] manager: (tap5f268bcb-29): new Tun device (/org/freedesktop/NetworkManager/Devices/241)
Oct 02 12:28:03 compute-0 ovn_controller[94336]: 2025-10-02T12:28:03Z|00473|binding|INFO|Claiming lport 5f268bcb-29c1-4e4e-a36d-b2ec144d3dca for this chassis.
Oct 02 12:28:03 compute-0 ovn_controller[94336]: 2025-10-02T12:28:03Z|00474|binding|INFO|5f268bcb-29c1-4e4e-a36d-b2ec144d3dca: Claiming fa:16:3e:cb:cf:9a 10.100.0.4
Oct 02 12:28:03 compute-0 ovn_controller[94336]: 2025-10-02T12:28:03Z|00475|binding|INFO|Setting lport 5f268bcb-29c1-4e4e-a36d-b2ec144d3dca ovn-installed in OVS
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.878 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:03 compute-0 nova_compute[192079]: 2025-10-02 12:28:03.880 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:03 compute-0 systemd-udevd[241688]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:28:03 compute-0 systemd-machined[152150]: New machine qemu-62-instance-0000007d.
Oct 02 12:28:03 compute-0 NetworkManager[51160]: <info>  [1759408083.9092] device (tap5f268bcb-29): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:28:03 compute-0 NetworkManager[51160]: <info>  [1759408083.9105] device (tap5f268bcb-29): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:28:03 compute-0 systemd[1]: Started Virtual Machine qemu-62-instance-0000007d.
Oct 02 12:28:03 compute-0 ovn_controller[94336]: 2025-10-02T12:28:03Z|00476|binding|INFO|Setting lport 5f268bcb-29c1-4e4e-a36d-b2ec144d3dca up in Southbound
Oct 02 12:28:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:03.926 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:cb:cf:9a 10.100.0.4'], port_security=['fa:16:3e:cb:cf:9a 10.100.0.4'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.4/28', 'neutron:device_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-a04f937a-375f-4fb0-90fe-5f514a88668f', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'neutron:revision_number': '5', 'neutron:security_group_ids': 'c0383701-0ec7-4f3b-8585-5effc4f5ca5a', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:port_fip': '192.168.122.248'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=50c0aa38-5fd8-41c7-b4bf-85b59722c5c3, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=5f268bcb-29c1-4e4e-a36d-b2ec144d3dca) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:28:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:03.928 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 5f268bcb-29c1-4e4e-a36d-b2ec144d3dca in datapath a04f937a-375f-4fb0-90fe-5f514a88668f bound to our chassis
Oct 02 12:28:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:03.931 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network a04f937a-375f-4fb0-90fe-5f514a88668f
Oct 02 12:28:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:03.940 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f5fa1d25-46eb-4e6a-be41-ccab3306f6ba]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:03.941 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tapa04f937a-31 in ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:28:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:03.943 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tapa04f937a-30 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:28:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:03.943 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[38f6cb75-04b8-438b-96b5-e56ed70d5cc8]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:03.945 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b328be4a-bfc8-4787-9291-8cdb3833c67c]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:03.956 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[e6a410f6-aeac-4f18-920b-a642a89da34c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:03 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:03.968 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b1f321d9-784a-4c10-9480-5a29a6cc1939]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:03.999 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[52cd0500-0a0d-4849-8ef1-c9fdffee6d1f]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:04.006 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8b284f40-31b3-4a9f-9800-2c421905e7b7]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:04 compute-0 systemd-udevd[241691]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:28:04 compute-0 NetworkManager[51160]: <info>  [1759408084.0068] manager: (tapa04f937a-30): new Veth device (/org/freedesktop/NetworkManager/Devices/242)
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:04.038 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[6785b88f-d5ee-437d-bfbe-557d162f7454]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:04.042 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[20f709d5-619e-4d24-b825-7509af03ed11]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:04 compute-0 NetworkManager[51160]: <info>  [1759408084.0722] device (tapa04f937a-30): carrier: link connected
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:04.082 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[6756ba57-981c-4ea6-a2c3-3a9697108367]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:04.099 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[55f78cde-5647-4662-a7ec-9accd0f9e814]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapa04f937a-31'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:33:93:68'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 154], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 608170, 'reachable_time': 36853, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 241722, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:04.121 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[bca1fef5-4b36-4aeb-aba6-02c782263858]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe33:9368'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 608170, 'tstamp': 608170}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 241723, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:04.147 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[47ff6f3b-6468-4630-9235-e99566cccd82]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapa04f937a-31'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:33:93:68'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 154], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 608170, 'reachable_time': 36853, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 241724, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:04.181 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b74faf5b-0934-4d20-b06c-0d364c888558]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:04.232 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1f3ca7f0-96a6-41a9-9bea-8ec2e62f68da]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:04.233 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapa04f937a-30, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:04.234 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:04.234 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapa04f937a-30, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:28:04 compute-0 nova_compute[192079]: 2025-10-02 12:28:04.235 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:04 compute-0 NetworkManager[51160]: <info>  [1759408084.2363] manager: (tapa04f937a-30): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/243)
Oct 02 12:28:04 compute-0 kernel: tapa04f937a-30: entered promiscuous mode
Oct 02 12:28:04 compute-0 nova_compute[192079]: 2025-10-02 12:28:04.238 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:04.239 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tapa04f937a-30, col_values=(('external_ids', {'iface-id': '38f1ac16-18c6-4b4a-b769-ebc7dd5181d4'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:28:04 compute-0 ovn_controller[94336]: 2025-10-02T12:28:04Z|00477|binding|INFO|Releasing lport 38f1ac16-18c6-4b4a-b769-ebc7dd5181d4 from this chassis (sb_readonly=0)
Oct 02 12:28:04 compute-0 nova_compute[192079]: 2025-10-02 12:28:04.240 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:04 compute-0 nova_compute[192079]: 2025-10-02 12:28:04.263 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:04.264 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/a04f937a-375f-4fb0-90fe-5f514a88668f.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/a04f937a-375f-4fb0-90fe-5f514a88668f.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:04.265 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[81c950fd-7a48-4e17-90c9-e6d7e4056153]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:04.266 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-a04f937a-375f-4fb0-90fe-5f514a88668f
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/a04f937a-375f-4fb0-90fe-5f514a88668f.pid.haproxy
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID a04f937a-375f-4fb0-90fe-5f514a88668f
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:28:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:04.266 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'env', 'PROCESS_TAG=haproxy-a04f937a-375f-4fb0-90fe-5f514a88668f', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/a04f937a-375f-4fb0-90fe-5f514a88668f.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:28:04 compute-0 nova_compute[192079]: 2025-10-02 12:28:04.633 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408084.632877, 2fcfea17-10df-499a-8692-facbbc76266b => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:28:04 compute-0 nova_compute[192079]: 2025-10-02 12:28:04.633 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] VM Resumed (Lifecycle Event)
Oct 02 12:28:04 compute-0 nova_compute[192079]: 2025-10-02 12:28:04.635 2 DEBUG nova.compute.manager [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:28:04 compute-0 nova_compute[192079]: 2025-10-02 12:28:04.637 2 INFO nova.virt.libvirt.driver [-] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Instance rebooted successfully.
Oct 02 12:28:04 compute-0 nova_compute[192079]: 2025-10-02 12:28:04.637 2 DEBUG nova.compute.manager [None req-c0d8ef2b-357c-4532-b4b7-fd19b449cad5 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:28:04 compute-0 nova_compute[192079]: 2025-10-02 12:28:04.667 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:28:04 compute-0 nova_compute[192079]: 2025-10-02 12:28:04.670 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: stopped, current task_state: powering-on, current DB power_state: 4, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:28:04 compute-0 podman[241763]: 2025-10-02 12:28:04.599782398 +0000 UTC m=+0.018713312 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:28:04 compute-0 nova_compute[192079]: 2025-10-02 12:28:04.738 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] During sync_power_state the instance has a pending task (powering-on). Skip.
Oct 02 12:28:04 compute-0 nova_compute[192079]: 2025-10-02 12:28:04.738 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408084.633397, 2fcfea17-10df-499a-8692-facbbc76266b => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:28:04 compute-0 nova_compute[192079]: 2025-10-02 12:28:04.738 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] VM Started (Lifecycle Event)
Oct 02 12:28:04 compute-0 nova_compute[192079]: 2025-10-02 12:28:04.781 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:28:04 compute-0 nova_compute[192079]: 2025-10-02 12:28:04.784 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Synchronizing instance power state after lifecycle event "Started"; current vm_state: active, current task_state: None, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:28:04 compute-0 podman[241763]: 2025-10-02 12:28:04.877100336 +0000 UTC m=+0.296031230 container create 838a3870af09cd9fb759cf40d7013cfdfd1cb07859df224b0e19f01e1943528b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:28:05 compute-0 systemd[1]: Started libpod-conmon-838a3870af09cd9fb759cf40d7013cfdfd1cb07859df224b0e19f01e1943528b.scope.
Oct 02 12:28:05 compute-0 nova_compute[192079]: 2025-10-02 12:28:05.033 2 DEBUG nova.compute.manager [req-43f9daf2-2208-424b-88b5-a666b8fd0563 req-712259ab-2e3a-4c54-8604-0a438d18ba53 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Received event network-vif-plugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:28:05 compute-0 nova_compute[192079]: 2025-10-02 12:28:05.034 2 DEBUG oslo_concurrency.lockutils [req-43f9daf2-2208-424b-88b5-a666b8fd0563 req-712259ab-2e3a-4c54-8604-0a438d18ba53 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "2fcfea17-10df-499a-8692-facbbc76266b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:28:05 compute-0 nova_compute[192079]: 2025-10-02 12:28:05.034 2 DEBUG oslo_concurrency.lockutils [req-43f9daf2-2208-424b-88b5-a666b8fd0563 req-712259ab-2e3a-4c54-8604-0a438d18ba53 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:28:05 compute-0 nova_compute[192079]: 2025-10-02 12:28:05.035 2 DEBUG oslo_concurrency.lockutils [req-43f9daf2-2208-424b-88b5-a666b8fd0563 req-712259ab-2e3a-4c54-8604-0a438d18ba53 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:28:05 compute-0 nova_compute[192079]: 2025-10-02 12:28:05.035 2 DEBUG nova.compute.manager [req-43f9daf2-2208-424b-88b5-a666b8fd0563 req-712259ab-2e3a-4c54-8604-0a438d18ba53 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] No waiting events found dispatching network-vif-plugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:28:05 compute-0 nova_compute[192079]: 2025-10-02 12:28:05.035 2 WARNING nova.compute.manager [req-43f9daf2-2208-424b-88b5-a666b8fd0563 req-712259ab-2e3a-4c54-8604-0a438d18ba53 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Received unexpected event network-vif-plugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca for instance with vm_state active and task_state None.
Oct 02 12:28:05 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:28:05 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/b69977cb9eef18700f012774546f689fab048f2be5e667dcacfacacb8d993795/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:28:05 compute-0 podman[241763]: 2025-10-02 12:28:05.079509439 +0000 UTC m=+0.498440343 container init 838a3870af09cd9fb759cf40d7013cfdfd1cb07859df224b0e19f01e1943528b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:28:05 compute-0 podman[241763]: 2025-10-02 12:28:05.084901896 +0000 UTC m=+0.503832790 container start 838a3870af09cd9fb759cf40d7013cfdfd1cb07859df224b0e19f01e1943528b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:28:05 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[241779]: [NOTICE]   (241783) : New worker (241785) forked
Oct 02 12:28:05 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[241779]: [NOTICE]   (241783) : Loading success.
Oct 02 12:28:07 compute-0 nova_compute[192079]: 2025-10-02 12:28:07.145 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:07 compute-0 nova_compute[192079]: 2025-10-02 12:28:07.929 2 DEBUG nova.compute.manager [req-f7e78993-4534-4020-855e-b50db98cd4a1 req-51934053-8674-44ce-b7c5-2ada0ad9f4cc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Received event network-vif-plugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:28:07 compute-0 nova_compute[192079]: 2025-10-02 12:28:07.929 2 DEBUG oslo_concurrency.lockutils [req-f7e78993-4534-4020-855e-b50db98cd4a1 req-51934053-8674-44ce-b7c5-2ada0ad9f4cc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "2fcfea17-10df-499a-8692-facbbc76266b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:28:07 compute-0 nova_compute[192079]: 2025-10-02 12:28:07.929 2 DEBUG oslo_concurrency.lockutils [req-f7e78993-4534-4020-855e-b50db98cd4a1 req-51934053-8674-44ce-b7c5-2ada0ad9f4cc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:28:07 compute-0 nova_compute[192079]: 2025-10-02 12:28:07.930 2 DEBUG oslo_concurrency.lockutils [req-f7e78993-4534-4020-855e-b50db98cd4a1 req-51934053-8674-44ce-b7c5-2ada0ad9f4cc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:28:07 compute-0 nova_compute[192079]: 2025-10-02 12:28:07.930 2 DEBUG nova.compute.manager [req-f7e78993-4534-4020-855e-b50db98cd4a1 req-51934053-8674-44ce-b7c5-2ada0ad9f4cc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] No waiting events found dispatching network-vif-plugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:28:07 compute-0 nova_compute[192079]: 2025-10-02 12:28:07.930 2 WARNING nova.compute.manager [req-f7e78993-4534-4020-855e-b50db98cd4a1 req-51934053-8674-44ce-b7c5-2ada0ad9f4cc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Received unexpected event network-vif-plugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca for instance with vm_state active and task_state None.
Oct 02 12:28:08 compute-0 nova_compute[192079]: 2025-10-02 12:28:08.758 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:09 compute-0 nova_compute[192079]: 2025-10-02 12:28:09.130 2 INFO nova.compute.manager [None req-04f591fa-b706-4c25-a481-ed166005a024 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Get console output
Oct 02 12:28:09 compute-0 nova_compute[192079]: 2025-10-02 12:28:09.135 55 INFO nova.privsep.libvirt [-] Ignored error while reading from instance console pty: can't concat NoneType to bytes
Oct 02 12:28:09 compute-0 podman[241795]: 2025-10-02 12:28:09.155083133 +0000 UTC m=+0.068607373 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, 
tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=edpm, io.buildah.version=1.41.3)
Oct 02 12:28:09 compute-0 nova_compute[192079]: 2025-10-02 12:28:09.433 2 INFO nova.compute.manager [None req-59abd0ce-0cc3-4a6c-b313-b1803a9d5888 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Unpausing
Oct 02 12:28:09 compute-0 nova_compute[192079]: 2025-10-02 12:28:09.434 2 DEBUG nova.objects.instance [None req-59abd0ce-0cc3-4a6c-b313-b1803a9d5888 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'flavor' on Instance uuid 957c4e10-f195-4d5e-97c0-0928296aba31 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:28:09 compute-0 nova_compute[192079]: 2025-10-02 12:28:09.487 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408089.4867988, 957c4e10-f195-4d5e-97c0-0928296aba31 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:28:09 compute-0 nova_compute[192079]: 2025-10-02 12:28:09.487 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] VM Resumed (Lifecycle Event)
Oct 02 12:28:09 compute-0 virtqemud[191807]: argument unsupported: QEMU guest agent is not configured
Oct 02 12:28:09 compute-0 nova_compute[192079]: 2025-10-02 12:28:09.492 2 DEBUG nova.virt.libvirt.guest [None req-59abd0ce-0cc3-4a6c-b313-b1803a9d5888 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Failed to set time: agent not configured sync_guest_time /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:200
Oct 02 12:28:09 compute-0 nova_compute[192079]: 2025-10-02 12:28:09.492 2 DEBUG nova.compute.manager [None req-59abd0ce-0cc3-4a6c-b313-b1803a9d5888 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:28:09 compute-0 nova_compute[192079]: 2025-10-02 12:28:09.547 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:28:09 compute-0 nova_compute[192079]: 2025-10-02 12:28:09.550 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: paused, current task_state: unpausing, current DB power_state: 3, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:28:09 compute-0 nova_compute[192079]: 2025-10-02 12:28:09.605 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] During sync_power_state the instance has a pending task (unpausing). Skip.
Oct 02 12:28:12 compute-0 nova_compute[192079]: 2025-10-02 12:28:12.149 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:13 compute-0 nova_compute[192079]: 2025-10-02 12:28:13.761 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:14 compute-0 nova_compute[192079]: 2025-10-02 12:28:14.248 2 INFO nova.compute.manager [None req-9646eff0-f00f-4ab9-9de7-fe8712ad17af 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Get console output
Oct 02 12:28:14 compute-0 nova_compute[192079]: 2025-10-02 12:28:14.253 55 INFO nova.privsep.libvirt [-] Ignored error while reading from instance console pty: can't concat NoneType to bytes
Oct 02 12:28:15 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:15.445 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=32, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=31) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:28:15 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:15.448 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 4 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:28:15 compute-0 nova_compute[192079]: 2025-10-02 12:28:15.483 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:17 compute-0 podman[241826]: 2025-10-02 12:28:17.145658899 +0000 UTC m=+0.052818362 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.openshift.expose-services=, io.buildah.version=1.33.7, vendor=Red Hat, Inc., name=ubi9-minimal, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, vcs-type=git, container_name=openstack_network_exporter, maintainer=Red Hat, Inc., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, version=9.6, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, io.openshift.tags=minimal rhel9, config_id=edpm, architecture=x86_64, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, distribution-scope=public, url=https://catalog.redhat.com/en/search?searchType=containers, com.redhat.component=ubi9-minimal-container, managed_by=edpm_ansible, release=1755695350, summary=Provides the latest release of the minimal Red 
Hat Universal Base Image 9., build-date=2025-08-20T13:12:41, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly.)
Oct 02 12:28:17 compute-0 nova_compute[192079]: 2025-10-02 12:28:17.150 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:17 compute-0 podman[241827]: 2025-10-02 12:28:17.158686714 +0000 UTC m=+0.063196095 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_id=multipathd, container_name=multipathd, managed_by=edpm_ansible, org.label-schema.license=GPLv2, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:28:17 compute-0 nova_compute[192079]: 2025-10-02 12:28:17.757 2 DEBUG nova.objects.instance [None req-7fbae1f5-c04d-475a-97e9-e444a77d21cb d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'pci_devices' on Instance uuid 2fcfea17-10df-499a-8692-facbbc76266b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:28:17 compute-0 nova_compute[192079]: 2025-10-02 12:28:17.929 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408097.9292548, 2fcfea17-10df-499a-8692-facbbc76266b => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:28:17 compute-0 nova_compute[192079]: 2025-10-02 12:28:17.930 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] VM Paused (Lifecycle Event)
Oct 02 12:28:17 compute-0 nova_compute[192079]: 2025-10-02 12:28:17.992 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:28:17 compute-0 nova_compute[192079]: 2025-10-02 12:28:17.999 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: active, current task_state: suspending, current DB power_state: 1, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.021 2 DEBUG oslo_concurrency.lockutils [None req-55c289b9-7de5-4436-be66-8faf3c6143b5 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "957c4e10-f195-4d5e-97c0-0928296aba31" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.022 2 DEBUG oslo_concurrency.lockutils [None req-55c289b9-7de5-4436-be66-8faf3c6143b5 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "957c4e10-f195-4d5e-97c0-0928296aba31" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.023 2 DEBUG oslo_concurrency.lockutils [None req-55c289b9-7de5-4436-be66-8faf3c6143b5 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "957c4e10-f195-4d5e-97c0-0928296aba31-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.023 2 DEBUG oslo_concurrency.lockutils [None req-55c289b9-7de5-4436-be66-8faf3c6143b5 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "957c4e10-f195-4d5e-97c0-0928296aba31-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.024 2 DEBUG oslo_concurrency.lockutils [None req-55c289b9-7de5-4436-be66-8faf3c6143b5 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "957c4e10-f195-4d5e-97c0-0928296aba31-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.124 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] During sync_power_state the instance has a pending task (suspending). Skip.
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.127 2 INFO nova.compute.manager [None req-55c289b9-7de5-4436-be66-8faf3c6143b5 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Terminating instance
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.145 2 DEBUG nova.compute.manager [None req-55c289b9-7de5-4436-be66-8faf3c6143b5 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:28:18 compute-0 kernel: tapf52dd1e9-09 (unregistering): left promiscuous mode
Oct 02 12:28:18 compute-0 NetworkManager[51160]: <info>  [1759408098.1673] device (tapf52dd1e9-09): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:28:18 compute-0 ovn_controller[94336]: 2025-10-02T12:28:18Z|00478|binding|INFO|Releasing lport f52dd1e9-092b-4ab8-946d-dd1aea183ec4 from this chassis (sb_readonly=0)
Oct 02 12:28:18 compute-0 ovn_controller[94336]: 2025-10-02T12:28:18Z|00479|binding|INFO|Setting lport f52dd1e9-092b-4ab8-946d-dd1aea183ec4 down in Southbound
Oct 02 12:28:18 compute-0 ovn_controller[94336]: 2025-10-02T12:28:18Z|00480|binding|INFO|Removing iface tapf52dd1e9-09 ovn-installed in OVS
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.178 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.193 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:18 compute-0 systemd[1]: machine-qemu\x2d61\x2dinstance\x2d0000007f.scope: Deactivated successfully.
Oct 02 12:28:18 compute-0 systemd[1]: machine-qemu\x2d61\x2dinstance\x2d0000007f.scope: Consumed 14.839s CPU time.
Oct 02 12:28:18 compute-0 systemd-machined[152150]: Machine qemu-61-instance-0000007f terminated.
Oct 02 12:28:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:18.310 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:94:1a:4b 10.100.0.5'], port_security=['fa:16:3e:94:1a:4b 10.100.0.5'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.5/28', 'neutron:device_id': '957c4e10-f195-4d5e-97c0-0928296aba31', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-1a3d22d6-addb-4c33-bccc-61618673b1b6', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '76c7dd40d83e4e3ca71abbebf57921b6', 'neutron:revision_number': '4', 'neutron:security_group_ids': '09e396bd-52a1-49cf-8120-7997324047ec', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=da127e9e-be4d-48d5-98c6-9ea13304c295, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=f52dd1e9-092b-4ab8-946d-dd1aea183ec4) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.313 2 DEBUG nova.compute.manager [req-1d8225dc-2caa-4574-9331-a85664939e38 req-7954d7a1-5798-44be-8b18-f9144ea18c79 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Received event network-changed-f52dd1e9-092b-4ab8-946d-dd1aea183ec4 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:28:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:18.313 103294 INFO neutron.agent.ovn.metadata.agent [-] Port f52dd1e9-092b-4ab8-946d-dd1aea183ec4 in datapath 1a3d22d6-addb-4c33-bccc-61618673b1b6 unbound from our chassis
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.314 2 DEBUG nova.compute.manager [req-1d8225dc-2caa-4574-9331-a85664939e38 req-7954d7a1-5798-44be-8b18-f9144ea18c79 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Refreshing instance network info cache due to event network-changed-f52dd1e9-092b-4ab8-946d-dd1aea183ec4. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:28:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:18.314 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 1a3d22d6-addb-4c33-bccc-61618673b1b6, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.314 2 DEBUG oslo_concurrency.lockutils [req-1d8225dc-2caa-4574-9331-a85664939e38 req-7954d7a1-5798-44be-8b18-f9144ea18c79 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-957c4e10-f195-4d5e-97c0-0928296aba31" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.314 2 DEBUG oslo_concurrency.lockutils [req-1d8225dc-2caa-4574-9331-a85664939e38 req-7954d7a1-5798-44be-8b18-f9144ea18c79 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-957c4e10-f195-4d5e-97c0-0928296aba31" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.314 2 DEBUG nova.network.neutron [req-1d8225dc-2caa-4574-9331-a85664939e38 req-7954d7a1-5798-44be-8b18-f9144ea18c79 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Refreshing network info cache for port f52dd1e9-092b-4ab8-946d-dd1aea183ec4 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:28:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:18.315 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[fd7442df-5520-475f-9606-20bff74fc6ad]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:18.316 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-1a3d22d6-addb-4c33-bccc-61618673b1b6 namespace which is not needed anymore
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.413 2 INFO nova.virt.libvirt.driver [-] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Instance destroyed successfully.
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.413 2 DEBUG nova.objects.instance [None req-55c289b9-7de5-4436-be66-8faf3c6143b5 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'resources' on Instance uuid 957c4e10-f195-4d5e-97c0-0928296aba31 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.442 2 DEBUG nova.virt.libvirt.vif [None req-55c289b9-7de5-4436-be66-8faf3c6143b5 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:27:18Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestNetworkAdvancedServerOps-server-491875560',display_name='tempest-TestNetworkAdvancedServerOps-server-491875560',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkadvancedserverops-server-491875560',id=127,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBBFtPcDKg7oBwtPeK2kB3C5slIvpeY9IQucVLArPcU8FLh7VlTWx62ZUGaRO0OLeoqO638ZiwAuYZfHu9NjsOskJMQqY4NwG01wMMMi/eVKSBjzkTyhN0wIh//zV9tpx5Q==',key_name='tempest-TestNetworkAdvancedServerOps-1596699922',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:27:33Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='76c7dd40d83e4e3ca71abbebf57921b6',ramdisk_id='',reservation_id='r-86f6iqbj',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-TestNetworkAdvancedServerOps-597114071',owner_user_name='tempest-TestNetworkAdvancedServerOps-597114071-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:28:09Z,user_data=None,user_id='1faa7e121a0e43ad8cb4ae5b2cfcc6a2',uuid=957c4e10-f195-4d5e-97c0-0928296aba31,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "f52dd1e9-092b-4ab8-946d-dd1aea183ec4", "address": "fa:16:3e:94:1a:4b", "network": {"id": "1a3d22d6-addb-4c33-bccc-61618673b1b6", "bridge": "br-int", "label": "tempest-network-smoke--81262139", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", 
"type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf52dd1e9-09", "ovs_interfaceid": "f52dd1e9-092b-4ab8-946d-dd1aea183ec4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.443 2 DEBUG nova.network.os_vif_util [None req-55c289b9-7de5-4436-be66-8faf3c6143b5 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converting VIF {"id": "f52dd1e9-092b-4ab8-946d-dd1aea183ec4", "address": "fa:16:3e:94:1a:4b", "network": {"id": "1a3d22d6-addb-4c33-bccc-61618673b1b6", "bridge": "br-int", "label": "tempest-network-smoke--81262139", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf52dd1e9-09", "ovs_interfaceid": "f52dd1e9-092b-4ab8-946d-dd1aea183ec4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.444 2 DEBUG nova.network.os_vif_util [None req-55c289b9-7de5-4436-be66-8faf3c6143b5 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:94:1a:4b,bridge_name='br-int',has_traffic_filtering=True,id=f52dd1e9-092b-4ab8-946d-dd1aea183ec4,network=Network(1a3d22d6-addb-4c33-bccc-61618673b1b6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf52dd1e9-09') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.444 2 DEBUG os_vif [None req-55c289b9-7de5-4436-be66-8faf3c6143b5 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:94:1a:4b,bridge_name='br-int',has_traffic_filtering=True,id=f52dd1e9-092b-4ab8-946d-dd1aea183ec4,network=Network(1a3d22d6-addb-4c33-bccc-61618673b1b6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf52dd1e9-09') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.446 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.446 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapf52dd1e9-09, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.448 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.449 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.451 2 INFO os_vif [None req-55c289b9-7de5-4436-be66-8faf3c6143b5 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:94:1a:4b,bridge_name='br-int',has_traffic_filtering=True,id=f52dd1e9-092b-4ab8-946d-dd1aea183ec4,network=Network(1a3d22d6-addb-4c33-bccc-61618673b1b6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf52dd1e9-09')
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.452 2 INFO nova.virt.libvirt.driver [None req-55c289b9-7de5-4436-be66-8faf3c6143b5 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Deleting instance files /var/lib/nova/instances/957c4e10-f195-4d5e-97c0-0928296aba31_del
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.453 2 INFO nova.virt.libvirt.driver [None req-55c289b9-7de5-4436-be66-8faf3c6143b5 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Deletion of /var/lib/nova/instances/957c4e10-f195-4d5e-97c0-0928296aba31_del complete
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.561 2 INFO nova.compute.manager [None req-55c289b9-7de5-4436-be66-8faf3c6143b5 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Took 0.42 seconds to destroy the instance on the hypervisor.
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.562 2 DEBUG oslo.service.loopingcall [None req-55c289b9-7de5-4436-be66-8faf3c6143b5 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.562 2 DEBUG nova.compute.manager [-] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.563 2 DEBUG nova.network.neutron [-] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:28:18 compute-0 neutron-haproxy-ovnmeta-1a3d22d6-addb-4c33-bccc-61618673b1b6[241362]: [NOTICE]   (241366) : haproxy version is 2.8.14-c23fe91
Oct 02 12:28:18 compute-0 neutron-haproxy-ovnmeta-1a3d22d6-addb-4c33-bccc-61618673b1b6[241362]: [NOTICE]   (241366) : path to executable is /usr/sbin/haproxy
Oct 02 12:28:18 compute-0 neutron-haproxy-ovnmeta-1a3d22d6-addb-4c33-bccc-61618673b1b6[241362]: [WARNING]  (241366) : Exiting Master process...
Oct 02 12:28:18 compute-0 neutron-haproxy-ovnmeta-1a3d22d6-addb-4c33-bccc-61618673b1b6[241362]: [WARNING]  (241366) : Exiting Master process...
Oct 02 12:28:18 compute-0 neutron-haproxy-ovnmeta-1a3d22d6-addb-4c33-bccc-61618673b1b6[241362]: [ALERT]    (241366) : Current worker (241368) exited with code 143 (Terminated)
Oct 02 12:28:18 compute-0 neutron-haproxy-ovnmeta-1a3d22d6-addb-4c33-bccc-61618673b1b6[241362]: [WARNING]  (241366) : All workers exited. Exiting... (0)
Oct 02 12:28:18 compute-0 kernel: tap5f268bcb-29 (unregistering): left promiscuous mode
Oct 02 12:28:18 compute-0 NetworkManager[51160]: <info>  [1759408098.7857] device (tap5f268bcb-29): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:28:18 compute-0 systemd[1]: libpod-38f79e3ed94cadb851b9b2f2092c06f9e8444180964b550f52592e6a444629a9.scope: Deactivated successfully.
Oct 02 12:28:18 compute-0 podman[241909]: 2025-10-02 12:28:18.794298716 +0000 UTC m=+0.381143831 container died 38f79e3ed94cadb851b9b2f2092c06f9e8444180964b550f52592e6a444629a9 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-1a3d22d6-addb-4c33-bccc-61618673b1b6, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001)
Oct 02 12:28:18 compute-0 ovn_controller[94336]: 2025-10-02T12:28:18Z|00481|binding|INFO|Releasing lport 5f268bcb-29c1-4e4e-a36d-b2ec144d3dca from this chassis (sb_readonly=0)
Oct 02 12:28:18 compute-0 ovn_controller[94336]: 2025-10-02T12:28:18Z|00482|binding|INFO|Setting lport 5f268bcb-29c1-4e4e-a36d-b2ec144d3dca down in Southbound
Oct 02 12:28:18 compute-0 ovn_controller[94336]: 2025-10-02T12:28:18Z|00483|binding|INFO|Removing iface tap5f268bcb-29 ovn-installed in OVS
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.832 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:18.837 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:cb:cf:9a 10.100.0.4'], port_security=['fa:16:3e:cb:cf:9a 10.100.0.4'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.4/28', 'neutron:device_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-a04f937a-375f-4fb0-90fe-5f514a88668f', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'neutron:revision_number': '6', 'neutron:security_group_ids': 'c0383701-0ec7-4f3b-8585-5effc4f5ca5a', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:port_fip': '192.168.122.248', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=50c0aa38-5fd8-41c7-b4bf-85b59722c5c3, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=5f268bcb-29c1-4e4e-a36d-b2ec144d3dca) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.840 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:18 compute-0 systemd[1]: var-lib-containers-storage-overlay-3ee338042088adfab8e08da45a3be044e875fd8789c17be6cb34786200b97f01-merged.mount: Deactivated successfully.
Oct 02 12:28:18 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-38f79e3ed94cadb851b9b2f2092c06f9e8444180964b550f52592e6a444629a9-userdata-shm.mount: Deactivated successfully.
Oct 02 12:28:18 compute-0 podman[241909]: 2025-10-02 12:28:18.869236352 +0000 UTC m=+0.456081447 container cleanup 38f79e3ed94cadb851b9b2f2092c06f9e8444180964b550f52592e6a444629a9 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-1a3d22d6-addb-4c33-bccc-61618673b1b6, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, tcib_managed=true, org.label-schema.schema-version=1.0)
Oct 02 12:28:18 compute-0 systemd[1]: libpod-conmon-38f79e3ed94cadb851b9b2f2092c06f9e8444180964b550f52592e6a444629a9.scope: Deactivated successfully.
Oct 02 12:28:18 compute-0 systemd[1]: machine-qemu\x2d62\x2dinstance\x2d0000007d.scope: Deactivated successfully.
Oct 02 12:28:18 compute-0 systemd[1]: machine-qemu\x2d62\x2dinstance\x2d0000007d.scope: Consumed 13.133s CPU time.
Oct 02 12:28:18 compute-0 systemd-machined[152150]: Machine qemu-62-instance-0000007d terminated.
Oct 02 12:28:18 compute-0 podman[241944]: 2025-10-02 12:28:18.930871523 +0000 UTC m=+0.042564112 container remove 38f79e3ed94cadb851b9b2f2092c06f9e8444180964b550f52592e6a444629a9 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-1a3d22d6-addb-4c33-bccc-61618673b1b6, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2)
Oct 02 12:28:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:18.936 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[949fd638-a8e1-4c19-a859-2a3e96dac109]: (4, ('Thu Oct  2 12:28:18 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-1a3d22d6-addb-4c33-bccc-61618673b1b6 (38f79e3ed94cadb851b9b2f2092c06f9e8444180964b550f52592e6a444629a9)\n38f79e3ed94cadb851b9b2f2092c06f9e8444180964b550f52592e6a444629a9\nThu Oct  2 12:28:18 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-1a3d22d6-addb-4c33-bccc-61618673b1b6 (38f79e3ed94cadb851b9b2f2092c06f9e8444180964b550f52592e6a444629a9)\n38f79e3ed94cadb851b9b2f2092c06f9e8444180964b550f52592e6a444629a9\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:18.938 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f3e1af4e-1c4c-4e47-8767-380884d2b2cd]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:18.939 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap1a3d22d6-a0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.941 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:18 compute-0 kernel: tap1a3d22d6-a0: left promiscuous mode
Oct 02 12:28:18 compute-0 nova_compute[192079]: 2025-10-02 12:28:18.956 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:18.961 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[19ee3065-4838-447a-9f7e-6377ea68788b]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:18.984 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1ed27c1c-c113-4eb1-b4de-b2e2b1788d36]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:18.985 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2abca4f3-746c-4a03-ad38-28222edf1fcf]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:19.003 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[cbae4105-c3d1-4026-a9e3-f58bf69c7042]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 604948, 'reachable_time': 43782, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 241967, 'error': None, 'target': 'ovnmeta-1a3d22d6-addb-4c33-bccc-61618673b1b6', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:19.005 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-1a3d22d6-addb-4c33-bccc-61618673b1b6 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:28:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:19.006 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[e5d84ce2-ded0-4073-a007-07a227e98fc1]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:19.006 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 5f268bcb-29c1-4e4e-a36d-b2ec144d3dca in datapath a04f937a-375f-4fb0-90fe-5f514a88668f unbound from our chassis
Oct 02 12:28:19 compute-0 systemd[1]: run-netns-ovnmeta\x2d1a3d22d6\x2daddb\x2d4c33\x2dbccc\x2d61618673b1b6.mount: Deactivated successfully.
Oct 02 12:28:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:19.007 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network a04f937a-375f-4fb0-90fe-5f514a88668f, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:28:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:19.008 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4b745ca5-9195-4f31-bab4-94e7211dbd59]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:19.008 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f namespace which is not needed anymore
Oct 02 12:28:19 compute-0 nova_compute[192079]: 2025-10-02 12:28:19.028 2 DEBUG nova.compute.manager [None req-7fbae1f5-c04d-475a-97e9-e444a77d21cb d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:28:19 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[241779]: [NOTICE]   (241783) : haproxy version is 2.8.14-c23fe91
Oct 02 12:28:19 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[241779]: [NOTICE]   (241783) : path to executable is /usr/sbin/haproxy
Oct 02 12:28:19 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[241779]: [WARNING]  (241783) : Exiting Master process...
Oct 02 12:28:19 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[241779]: [ALERT]    (241783) : Current worker (241785) exited with code 143 (Terminated)
Oct 02 12:28:19 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[241779]: [WARNING]  (241783) : All workers exited. Exiting... (0)
Oct 02 12:28:19 compute-0 systemd[1]: libpod-838a3870af09cd9fb759cf40d7013cfdfd1cb07859df224b0e19f01e1943528b.scope: Deactivated successfully.
Oct 02 12:28:19 compute-0 podman[241998]: 2025-10-02 12:28:19.131490427 +0000 UTC m=+0.040128605 container died 838a3870af09cd9fb759cf40d7013cfdfd1cb07859df224b0e19f01e1943528b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:28:19 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-838a3870af09cd9fb759cf40d7013cfdfd1cb07859df224b0e19f01e1943528b-userdata-shm.mount: Deactivated successfully.
Oct 02 12:28:19 compute-0 systemd[1]: var-lib-containers-storage-overlay-b69977cb9eef18700f012774546f689fab048f2be5e667dcacfacacb8d993795-merged.mount: Deactivated successfully.
Oct 02 12:28:19 compute-0 podman[241998]: 2025-10-02 12:28:19.207840442 +0000 UTC m=+0.116478620 container cleanup 838a3870af09cd9fb759cf40d7013cfdfd1cb07859df224b0e19f01e1943528b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_managed=true)
Oct 02 12:28:19 compute-0 systemd[1]: libpod-conmon-838a3870af09cd9fb759cf40d7013cfdfd1cb07859df224b0e19f01e1943528b.scope: Deactivated successfully.
Oct 02 12:28:19 compute-0 podman[242028]: 2025-10-02 12:28:19.262907624 +0000 UTC m=+0.036660602 container remove 838a3870af09cd9fb759cf40d7013cfdfd1cb07859df224b0e19f01e1943528b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:28:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:19.267 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[257f39fa-6a3d-4924-a7c4-34fab1cd5062]: (4, ('Thu Oct  2 12:28:19 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f (838a3870af09cd9fb759cf40d7013cfdfd1cb07859df224b0e19f01e1943528b)\n838a3870af09cd9fb759cf40d7013cfdfd1cb07859df224b0e19f01e1943528b\nThu Oct  2 12:28:19 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f (838a3870af09cd9fb759cf40d7013cfdfd1cb07859df224b0e19f01e1943528b)\n838a3870af09cd9fb759cf40d7013cfdfd1cb07859df224b0e19f01e1943528b\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:19.268 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3bb4f3ff-b7b0-418b-ab7a-68e8a4bc6501]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:19.269 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapa04f937a-30, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:28:19 compute-0 nova_compute[192079]: 2025-10-02 12:28:19.271 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:19 compute-0 kernel: tapa04f937a-30: left promiscuous mode
Oct 02 12:28:19 compute-0 nova_compute[192079]: 2025-10-02 12:28:19.287 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:19 compute-0 nova_compute[192079]: 2025-10-02 12:28:19.290 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:19.292 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f3b03220-3f11-439d-9cce-b25080fd85ac]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:19.318 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[147b2566-3c39-45f0-a747-1fb27058cac9]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:19.319 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[df00cc19-2c3c-4b4c-92dc-31a1de2d0d20]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:19.335 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d9418813-9790-4438-9d83-116507174d7b]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 608162, 'reachable_time': 35479, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 242046, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:19.337 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:28:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:19.337 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[4a117bef-7d44-4560-ae5d-48335bf068a9]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:19.450 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '32'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:28:19 compute-0 systemd[1]: run-netns-ovnmeta\x2da04f937a\x2d375f\x2d4fb0\x2d90fe\x2d5f514a88668f.mount: Deactivated successfully.
Oct 02 12:28:21 compute-0 podman[242047]: 2025-10-02 12:28:21.148287691 +0000 UTC m=+0.063606326 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:28:21 compute-0 podman[242048]: 2025-10-02 12:28:21.164211616 +0000 UTC m=+0.073321202 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=iscsid, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.schema-version=1.0)
Oct 02 12:28:21 compute-0 nova_compute[192079]: 2025-10-02 12:28:21.400 2 DEBUG nova.compute.manager [req-770987f5-fc78-4ce3-bf5f-f50c99976127 req-6994aaea-5626-4f6f-85b0-9bbddfcf4ef4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Received event network-vif-unplugged-f52dd1e9-092b-4ab8-946d-dd1aea183ec4 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:28:21 compute-0 nova_compute[192079]: 2025-10-02 12:28:21.401 2 DEBUG oslo_concurrency.lockutils [req-770987f5-fc78-4ce3-bf5f-f50c99976127 req-6994aaea-5626-4f6f-85b0-9bbddfcf4ef4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "957c4e10-f195-4d5e-97c0-0928296aba31-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:28:21 compute-0 nova_compute[192079]: 2025-10-02 12:28:21.401 2 DEBUG oslo_concurrency.lockutils [req-770987f5-fc78-4ce3-bf5f-f50c99976127 req-6994aaea-5626-4f6f-85b0-9bbddfcf4ef4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "957c4e10-f195-4d5e-97c0-0928296aba31-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:28:21 compute-0 nova_compute[192079]: 2025-10-02 12:28:21.402 2 DEBUG oslo_concurrency.lockutils [req-770987f5-fc78-4ce3-bf5f-f50c99976127 req-6994aaea-5626-4f6f-85b0-9bbddfcf4ef4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "957c4e10-f195-4d5e-97c0-0928296aba31-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:28:21 compute-0 nova_compute[192079]: 2025-10-02 12:28:21.402 2 DEBUG nova.compute.manager [req-770987f5-fc78-4ce3-bf5f-f50c99976127 req-6994aaea-5626-4f6f-85b0-9bbddfcf4ef4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] No waiting events found dispatching network-vif-unplugged-f52dd1e9-092b-4ab8-946d-dd1aea183ec4 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:28:21 compute-0 nova_compute[192079]: 2025-10-02 12:28:21.402 2 DEBUG nova.compute.manager [req-770987f5-fc78-4ce3-bf5f-f50c99976127 req-6994aaea-5626-4f6f-85b0-9bbddfcf4ef4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Received event network-vif-unplugged-f52dd1e9-092b-4ab8-946d-dd1aea183ec4 for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:28:21 compute-0 nova_compute[192079]: 2025-10-02 12:28:21.403 2 DEBUG nova.compute.manager [req-770987f5-fc78-4ce3-bf5f-f50c99976127 req-6994aaea-5626-4f6f-85b0-9bbddfcf4ef4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Received event network-vif-plugged-f52dd1e9-092b-4ab8-946d-dd1aea183ec4 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:28:21 compute-0 nova_compute[192079]: 2025-10-02 12:28:21.403 2 DEBUG oslo_concurrency.lockutils [req-770987f5-fc78-4ce3-bf5f-f50c99976127 req-6994aaea-5626-4f6f-85b0-9bbddfcf4ef4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "957c4e10-f195-4d5e-97c0-0928296aba31-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:28:21 compute-0 nova_compute[192079]: 2025-10-02 12:28:21.403 2 DEBUG oslo_concurrency.lockutils [req-770987f5-fc78-4ce3-bf5f-f50c99976127 req-6994aaea-5626-4f6f-85b0-9bbddfcf4ef4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "957c4e10-f195-4d5e-97c0-0928296aba31-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:28:21 compute-0 nova_compute[192079]: 2025-10-02 12:28:21.404 2 DEBUG oslo_concurrency.lockutils [req-770987f5-fc78-4ce3-bf5f-f50c99976127 req-6994aaea-5626-4f6f-85b0-9bbddfcf4ef4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "957c4e10-f195-4d5e-97c0-0928296aba31-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:28:21 compute-0 nova_compute[192079]: 2025-10-02 12:28:21.404 2 DEBUG nova.compute.manager [req-770987f5-fc78-4ce3-bf5f-f50c99976127 req-6994aaea-5626-4f6f-85b0-9bbddfcf4ef4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] No waiting events found dispatching network-vif-plugged-f52dd1e9-092b-4ab8-946d-dd1aea183ec4 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:28:21 compute-0 nova_compute[192079]: 2025-10-02 12:28:21.405 2 WARNING nova.compute.manager [req-770987f5-fc78-4ce3-bf5f-f50c99976127 req-6994aaea-5626-4f6f-85b0-9bbddfcf4ef4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Received unexpected event network-vif-plugged-f52dd1e9-092b-4ab8-946d-dd1aea183ec4 for instance with vm_state active and task_state deleting.
Oct 02 12:28:21 compute-0 nova_compute[192079]: 2025-10-02 12:28:21.754 2 DEBUG nova.network.neutron [-] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:28:21 compute-0 nova_compute[192079]: 2025-10-02 12:28:21.783 2 DEBUG nova.compute.manager [req-aed116f8-20b9-49b7-825d-0c475d0cb80f req-8c1a9f12-1cc1-47e0-9080-27ffc9836a66 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Received event network-vif-unplugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:28:21 compute-0 nova_compute[192079]: 2025-10-02 12:28:21.783 2 DEBUG oslo_concurrency.lockutils [req-aed116f8-20b9-49b7-825d-0c475d0cb80f req-8c1a9f12-1cc1-47e0-9080-27ffc9836a66 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "2fcfea17-10df-499a-8692-facbbc76266b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:28:21 compute-0 nova_compute[192079]: 2025-10-02 12:28:21.784 2 DEBUG oslo_concurrency.lockutils [req-aed116f8-20b9-49b7-825d-0c475d0cb80f req-8c1a9f12-1cc1-47e0-9080-27ffc9836a66 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:28:21 compute-0 nova_compute[192079]: 2025-10-02 12:28:21.784 2 DEBUG oslo_concurrency.lockutils [req-aed116f8-20b9-49b7-825d-0c475d0cb80f req-8c1a9f12-1cc1-47e0-9080-27ffc9836a66 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:28:21 compute-0 nova_compute[192079]: 2025-10-02 12:28:21.784 2 DEBUG nova.compute.manager [req-aed116f8-20b9-49b7-825d-0c475d0cb80f req-8c1a9f12-1cc1-47e0-9080-27ffc9836a66 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] No waiting events found dispatching network-vif-unplugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:28:21 compute-0 nova_compute[192079]: 2025-10-02 12:28:21.784 2 WARNING nova.compute.manager [req-aed116f8-20b9-49b7-825d-0c475d0cb80f req-8c1a9f12-1cc1-47e0-9080-27ffc9836a66 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Received unexpected event network-vif-unplugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca for instance with vm_state suspended and task_state None.
Oct 02 12:28:21 compute-0 nova_compute[192079]: 2025-10-02 12:28:21.784 2 DEBUG nova.compute.manager [req-aed116f8-20b9-49b7-825d-0c475d0cb80f req-8c1a9f12-1cc1-47e0-9080-27ffc9836a66 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Received event network-vif-plugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:28:21 compute-0 nova_compute[192079]: 2025-10-02 12:28:21.784 2 DEBUG oslo_concurrency.lockutils [req-aed116f8-20b9-49b7-825d-0c475d0cb80f req-8c1a9f12-1cc1-47e0-9080-27ffc9836a66 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "2fcfea17-10df-499a-8692-facbbc76266b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:28:21 compute-0 nova_compute[192079]: 2025-10-02 12:28:21.785 2 DEBUG oslo_concurrency.lockutils [req-aed116f8-20b9-49b7-825d-0c475d0cb80f req-8c1a9f12-1cc1-47e0-9080-27ffc9836a66 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:28:21 compute-0 nova_compute[192079]: 2025-10-02 12:28:21.785 2 DEBUG oslo_concurrency.lockutils [req-aed116f8-20b9-49b7-825d-0c475d0cb80f req-8c1a9f12-1cc1-47e0-9080-27ffc9836a66 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:28:21 compute-0 nova_compute[192079]: 2025-10-02 12:28:21.785 2 DEBUG nova.compute.manager [req-aed116f8-20b9-49b7-825d-0c475d0cb80f req-8c1a9f12-1cc1-47e0-9080-27ffc9836a66 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] No waiting events found dispatching network-vif-plugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:28:21 compute-0 nova_compute[192079]: 2025-10-02 12:28:21.785 2 WARNING nova.compute.manager [req-aed116f8-20b9-49b7-825d-0c475d0cb80f req-8c1a9f12-1cc1-47e0-9080-27ffc9836a66 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Received unexpected event network-vif-plugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca for instance with vm_state suspended and task_state None.
Oct 02 12:28:21 compute-0 nova_compute[192079]: 2025-10-02 12:28:21.798 2 INFO nova.compute.manager [-] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Took 3.24 seconds to deallocate network for instance.
Oct 02 12:28:21 compute-0 nova_compute[192079]: 2025-10-02 12:28:21.895 2 DEBUG oslo_concurrency.lockutils [None req-55c289b9-7de5-4436-be66-8faf3c6143b5 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:28:21 compute-0 nova_compute[192079]: 2025-10-02 12:28:21.896 2 DEBUG oslo_concurrency.lockutils [None req-55c289b9-7de5-4436-be66-8faf3c6143b5 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:28:22 compute-0 nova_compute[192079]: 2025-10-02 12:28:22.036 2 DEBUG nova.compute.provider_tree [None req-55c289b9-7de5-4436-be66-8faf3c6143b5 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:28:22 compute-0 nova_compute[192079]: 2025-10-02 12:28:22.058 2 DEBUG nova.scheduler.client.report [None req-55c289b9-7de5-4436-be66-8faf3c6143b5 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:28:22 compute-0 nova_compute[192079]: 2025-10-02 12:28:22.114 2 DEBUG oslo_concurrency.lockutils [None req-55c289b9-7de5-4436-be66-8faf3c6143b5 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.218s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:28:22 compute-0 nova_compute[192079]: 2025-10-02 12:28:22.152 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:22 compute-0 nova_compute[192079]: 2025-10-02 12:28:22.179 2 INFO nova.scheduler.client.report [None req-55c289b9-7de5-4436-be66-8faf3c6143b5 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Deleted allocations for instance 957c4e10-f195-4d5e-97c0-0928296aba31
Oct 02 12:28:22 compute-0 nova_compute[192079]: 2025-10-02 12:28:22.363 2 DEBUG oslo_concurrency.lockutils [None req-55c289b9-7de5-4436-be66-8faf3c6143b5 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "957c4e10-f195-4d5e-97c0-0928296aba31" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 4.340s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:28:23 compute-0 nova_compute[192079]: 2025-10-02 12:28:23.101 2 DEBUG nova.network.neutron [req-1d8225dc-2caa-4574-9331-a85664939e38 req-7954d7a1-5798-44be-8b18-f9144ea18c79 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Updated VIF entry in instance network info cache for port f52dd1e9-092b-4ab8-946d-dd1aea183ec4. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:28:23 compute-0 nova_compute[192079]: 2025-10-02 12:28:23.102 2 DEBUG nova.network.neutron [req-1d8225dc-2caa-4574-9331-a85664939e38 req-7954d7a1-5798-44be-8b18-f9144ea18c79 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Updating instance_info_cache with network_info: [{"id": "f52dd1e9-092b-4ab8-946d-dd1aea183ec4", "address": "fa:16:3e:94:1a:4b", "network": {"id": "1a3d22d6-addb-4c33-bccc-61618673b1b6", "bridge": "br-int", "label": "tempest-network-smoke--81262139", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf52dd1e9-09", "ovs_interfaceid": "f52dd1e9-092b-4ab8-946d-dd1aea183ec4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:28:23 compute-0 nova_compute[192079]: 2025-10-02 12:28:23.133 2 DEBUG oslo_concurrency.lockutils [req-1d8225dc-2caa-4574-9331-a85664939e38 req-7954d7a1-5798-44be-8b18-f9144ea18c79 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-957c4e10-f195-4d5e-97c0-0928296aba31" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:28:23 compute-0 nova_compute[192079]: 2025-10-02 12:28:23.449 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:23 compute-0 nova_compute[192079]: 2025-10-02 12:28:23.512 2 INFO nova.compute.manager [None req-a8e98d38-ea22-4324-b69f-4d391267f597 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Resuming
Oct 02 12:28:23 compute-0 nova_compute[192079]: 2025-10-02 12:28:23.513 2 DEBUG nova.objects.instance [None req-a8e98d38-ea22-4324-b69f-4d391267f597 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'flavor' on Instance uuid 2fcfea17-10df-499a-8692-facbbc76266b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:28:23 compute-0 nova_compute[192079]: 2025-10-02 12:28:23.714 2 DEBUG oslo_concurrency.lockutils [None req-a8e98d38-ea22-4324-b69f-4d391267f597 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "refresh_cache-2fcfea17-10df-499a-8692-facbbc76266b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:28:23 compute-0 nova_compute[192079]: 2025-10-02 12:28:23.714 2 DEBUG oslo_concurrency.lockutils [None req-a8e98d38-ea22-4324-b69f-4d391267f597 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquired lock "refresh_cache-2fcfea17-10df-499a-8692-facbbc76266b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:28:23 compute-0 nova_compute[192079]: 2025-10-02 12:28:23.714 2 DEBUG nova.network.neutron [None req-a8e98d38-ea22-4324-b69f-4d391267f597 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:28:23 compute-0 nova_compute[192079]: 2025-10-02 12:28:23.768 2 DEBUG nova.compute.manager [req-7823d81e-a236-4ee4-b631-d6964fd59187 req-aab3abe9-32ca-48e7-91f5-8e11d05d6a74 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Received event network-vif-deleted-f52dd1e9-092b-4ab8-946d-dd1aea183ec4 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:28:27 compute-0 nova_compute[192079]: 2025-10-02 12:28:27.155 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:28 compute-0 nova_compute[192079]: 2025-10-02 12:28:28.453 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:29 compute-0 nova_compute[192079]: 2025-10-02 12:28:29.874 2 DEBUG nova.network.neutron [None req-a8e98d38-ea22-4324-b69f-4d391267f597 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Updating instance_info_cache with network_info: [{"id": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "address": "fa:16:3e:cb:cf:9a", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5f268bcb-29", "ovs_interfaceid": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:28:29 compute-0 nova_compute[192079]: 2025-10-02 12:28:29.914 2 DEBUG oslo_concurrency.lockutils [None req-a8e98d38-ea22-4324-b69f-4d391267f597 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Releasing lock "refresh_cache-2fcfea17-10df-499a-8692-facbbc76266b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:28:29 compute-0 nova_compute[192079]: 2025-10-02 12:28:29.919 2 DEBUG nova.virt.libvirt.vif [None req-a8e98d38-ea22-4324-b69f-4d391267f597 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:27:05Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerActionsTestJSON-server-1253918640',display_name='tempest-ServerActionsTestJSON-server-1253918640',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serveractionstestjson-server-1253918640',id=125,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBJJLom+UJzZg9dduKQv+725QaYDZoMXvP/xlpKnb/K05SGc4dkyLwCDweJ3QifTmxLWqK9Sz5A12yMJbzpa36v5C4bUqj8uiWk/vbR1BAjBdKM9d/Ug8M2nT8LwDBGP/9A==',key_name='tempest-keypair-1006285918',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:27:18Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=<?>,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=4,progress=0,project_id='e564a4cad5d443dba81ec04d2a05ced9',ramdisk_id='',reservation_id='r-r8iquef4',resources=<?>,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',old_vm_state='active',owner_project_name='tempest-ServerActionsTestJSON-1646745100',owner_user_name='tempest-ServerActionsTestJSON-1646745100-project-member'},tags=<?>,task_state='resuming',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:28:19Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='d54b1826121b47caba89932a78c06ccd',uuid=2fcfea17-10df-499a-8692-facbbc76266b,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='suspended') vif={"id": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "address": "fa:16:3e:cb:cf:9a", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": 
"10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5f268bcb-29", "ovs_interfaceid": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:28:29 compute-0 nova_compute[192079]: 2025-10-02 12:28:29.920 2 DEBUG nova.network.os_vif_util [None req-a8e98d38-ea22-4324-b69f-4d391267f597 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converting VIF {"id": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "address": "fa:16:3e:cb:cf:9a", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5f268bcb-29", "ovs_interfaceid": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:28:29 compute-0 nova_compute[192079]: 2025-10-02 12:28:29.922 2 DEBUG nova.network.os_vif_util [None req-a8e98d38-ea22-4324-b69f-4d391267f597 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:cb:cf:9a,bridge_name='br-int',has_traffic_filtering=True,id=5f268bcb-29c1-4e4e-a36d-b2ec144d3dca,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5f268bcb-29') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:28:29 compute-0 nova_compute[192079]: 2025-10-02 12:28:29.922 2 DEBUG os_vif [None req-a8e98d38-ea22-4324-b69f-4d391267f597 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:cb:cf:9a,bridge_name='br-int',has_traffic_filtering=True,id=5f268bcb-29c1-4e4e-a36d-b2ec144d3dca,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5f268bcb-29') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:28:29 compute-0 nova_compute[192079]: 2025-10-02 12:28:29.923 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:29 compute-0 nova_compute[192079]: 2025-10-02 12:28:29.924 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:28:29 compute-0 nova_compute[192079]: 2025-10-02 12:28:29.925 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:28:29 compute-0 nova_compute[192079]: 2025-10-02 12:28:29.928 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:29 compute-0 nova_compute[192079]: 2025-10-02 12:28:29.929 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap5f268bcb-29, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:28:29 compute-0 nova_compute[192079]: 2025-10-02 12:28:29.929 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap5f268bcb-29, col_values=(('external_ids', {'iface-id': '5f268bcb-29c1-4e4e-a36d-b2ec144d3dca', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:cb:cf:9a', 'vm-uuid': '2fcfea17-10df-499a-8692-facbbc76266b'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:28:29 compute-0 nova_compute[192079]: 2025-10-02 12:28:29.931 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:28:29 compute-0 nova_compute[192079]: 2025-10-02 12:28:29.932 2 INFO os_vif [None req-a8e98d38-ea22-4324-b69f-4d391267f597 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:cb:cf:9a,bridge_name='br-int',has_traffic_filtering=True,id=5f268bcb-29c1-4e4e-a36d-b2ec144d3dca,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5f268bcb-29')
Oct 02 12:28:29 compute-0 nova_compute[192079]: 2025-10-02 12:28:29.955 2 DEBUG nova.objects.instance [None req-a8e98d38-ea22-4324-b69f-4d391267f597 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'numa_topology' on Instance uuid 2fcfea17-10df-499a-8692-facbbc76266b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:28:30 compute-0 kernel: tap5f268bcb-29: entered promiscuous mode
Oct 02 12:28:30 compute-0 NetworkManager[51160]: <info>  [1759408110.0782] manager: (tap5f268bcb-29): new Tun device (/org/freedesktop/NetworkManager/Devices/244)
Oct 02 12:28:30 compute-0 nova_compute[192079]: 2025-10-02 12:28:30.079 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:30 compute-0 ovn_controller[94336]: 2025-10-02T12:28:30Z|00484|binding|INFO|Claiming lport 5f268bcb-29c1-4e4e-a36d-b2ec144d3dca for this chassis.
Oct 02 12:28:30 compute-0 ovn_controller[94336]: 2025-10-02T12:28:30Z|00485|binding|INFO|5f268bcb-29c1-4e4e-a36d-b2ec144d3dca: Claiming fa:16:3e:cb:cf:9a 10.100.0.4
Oct 02 12:28:30 compute-0 systemd-udevd[242101]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:28:30 compute-0 ovn_controller[94336]: 2025-10-02T12:28:30Z|00486|binding|INFO|Setting lport 5f268bcb-29c1-4e4e-a36d-b2ec144d3dca ovn-installed in OVS
Oct 02 12:28:30 compute-0 nova_compute[192079]: 2025-10-02 12:28:30.106 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:30 compute-0 nova_compute[192079]: 2025-10-02 12:28:30.111 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:30 compute-0 NetworkManager[51160]: <info>  [1759408110.1189] device (tap5f268bcb-29): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:28:30 compute-0 NetworkManager[51160]: <info>  [1759408110.1199] device (tap5f268bcb-29): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:28:30 compute-0 ovn_controller[94336]: 2025-10-02T12:28:30Z|00487|binding|INFO|Setting lport 5f268bcb-29c1-4e4e-a36d-b2ec144d3dca up in Southbound
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:30.127 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:cb:cf:9a 10.100.0.4'], port_security=['fa:16:3e:cb:cf:9a 10.100.0.4'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.4/28', 'neutron:device_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-a04f937a-375f-4fb0-90fe-5f514a88668f', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'neutron:revision_number': '7', 'neutron:security_group_ids': 'c0383701-0ec7-4f3b-8585-5effc4f5ca5a', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:port_fip': '192.168.122.248'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=50c0aa38-5fd8-41c7-b4bf-85b59722c5c3, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=5f268bcb-29c1-4e4e-a36d-b2ec144d3dca) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:30.128 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 5f268bcb-29c1-4e4e-a36d-b2ec144d3dca in datapath a04f937a-375f-4fb0-90fe-5f514a88668f bound to our chassis
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:30.130 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network a04f937a-375f-4fb0-90fe-5f514a88668f
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:30.142 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d0c4b4e6-839b-45e8-943f-2066e6a45620]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:30.143 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tapa04f937a-31 in ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:30.146 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tapa04f937a-30 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:30.146 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3182955f-1f70-42eb-a3fb-db603f7410fd]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:30.147 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[56931284-8b27-44cc-aa85-3539d0093eda]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:30 compute-0 systemd-machined[152150]: New machine qemu-63-instance-0000007d.
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:30.160 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[0f9b6549-5901-4c03-b54c-9eac0201827a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:30 compute-0 systemd[1]: Started Virtual Machine qemu-63-instance-0000007d.
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:30.190 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[bcc768d2-3479-4f94-9dda-50edb80d04c9]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:30.220 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[b6e17485-dd09-42df-882e-6424803e03f5]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:30 compute-0 NetworkManager[51160]: <info>  [1759408110.2265] manager: (tapa04f937a-30): new Veth device (/org/freedesktop/NetworkManager/Devices/245)
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:30.226 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6b1fedf9-e445-4195-9d39-d84908d72f7e]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:30 compute-0 systemd-udevd[242104]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:28:30 compute-0 podman[242107]: 2025-10-02 12:28:30.236813619 +0000 UTC m=+0.071401330 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, container_name=ovn_metadata_agent)
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:30.257 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[0e9f5452-1b0f-4950-a14b-0135b405c4e1]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:30.260 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[2d479c01-6c60-4c48-ba92-4caabdd87334]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:30 compute-0 podman[242116]: 2025-10-02 12:28:30.276163223 +0000 UTC m=+0.090693957 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:28:30 compute-0 NetworkManager[51160]: <info>  [1759408110.2825] device (tapa04f937a-30): carrier: link connected
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:30.287 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[f27e96a0-f1c3-4334-b256-657490d433c3]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:30.301 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[bdb9b481-a7d5-4780-8ac0-03e69e49fa5b]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapa04f937a-31'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:33:93:68'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 158], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 610791, 'reachable_time': 34313, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 242187, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:30.314 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3463d095-ba8a-4f39-b154-304358ec8aef]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe33:9368'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 610791, 'tstamp': 610791}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 242194, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:30.328 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a2689fb1-e379-4a77-89c2-a3db8854dc61]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapa04f937a-31'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:33:93:68'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 158], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 610791, 'reachable_time': 34313, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 242198, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:30 compute-0 podman[242161]: 2025-10-02 12:28:30.384106198 +0000 UTC m=+0.119540462 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, tcib_managed=true, container_name=ovn_controller, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3)
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:30.392 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c239cdf1-e6b9-42af-b0f8-76b3092decbd]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:30.441 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4bec1e1a-104b-4549-ba4a-6acc3e922f13]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:30.442 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapa04f937a-30, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:30.442 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:30.443 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapa04f937a-30, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:28:30 compute-0 nova_compute[192079]: 2025-10-02 12:28:30.444 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:30 compute-0 NetworkManager[51160]: <info>  [1759408110.4465] manager: (tapa04f937a-30): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/246)
Oct 02 12:28:30 compute-0 kernel: tapa04f937a-30: entered promiscuous mode
Oct 02 12:28:30 compute-0 nova_compute[192079]: 2025-10-02 12:28:30.447 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:30.448 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tapa04f937a-30, col_values=(('external_ids', {'iface-id': '38f1ac16-18c6-4b4a-b769-ebc7dd5181d4'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:28:30 compute-0 nova_compute[192079]: 2025-10-02 12:28:30.449 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:30 compute-0 ovn_controller[94336]: 2025-10-02T12:28:30Z|00488|binding|INFO|Releasing lport 38f1ac16-18c6-4b4a-b769-ebc7dd5181d4 from this chassis (sb_readonly=0)
Oct 02 12:28:30 compute-0 nova_compute[192079]: 2025-10-02 12:28:30.451 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:30.451 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/a04f937a-375f-4fb0-90fe-5f514a88668f.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/a04f937a-375f-4fb0-90fe-5f514a88668f.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:30.460 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d5595c9a-071d-4ec2-8914-8675f1309713]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:30.461 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-a04f937a-375f-4fb0-90fe-5f514a88668f
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/a04f937a-375f-4fb0-90fe-5f514a88668f.pid.haproxy
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID a04f937a-375f-4fb0-90fe-5f514a88668f
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:28:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:30.461 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'env', 'PROCESS_TAG=haproxy-a04f937a-375f-4fb0-90fe-5f514a88668f', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/a04f937a-375f-4fb0-90fe-5f514a88668f.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:28:30 compute-0 nova_compute[192079]: 2025-10-02 12:28:30.463 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:30 compute-0 podman[242242]: 2025-10-02 12:28:30.803369509 +0000 UTC m=+0.047557729 container create 98945682a70c9fb033cb7aaf4f0000f072198f761f13f76091ef2a964c29e46f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0)
Oct 02 12:28:30 compute-0 systemd[1]: Started libpod-conmon-98945682a70c9fb033cb7aaf4f0000f072198f761f13f76091ef2a964c29e46f.scope.
Oct 02 12:28:30 compute-0 podman[242242]: 2025-10-02 12:28:30.775304033 +0000 UTC m=+0.019492273 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:28:30 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:28:30 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/bee1876cdb990a1d925f97438e4d29a211450d48021c415244f78110b792a322/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:28:30 compute-0 podman[242242]: 2025-10-02 12:28:30.898567177 +0000 UTC m=+0.142755397 container init 98945682a70c9fb033cb7aaf4f0000f072198f761f13f76091ef2a964c29e46f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.schema-version=1.0, tcib_managed=true, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001)
Oct 02 12:28:30 compute-0 podman[242242]: 2025-10-02 12:28:30.903624265 +0000 UTC m=+0.147812485 container start 98945682a70c9fb033cb7aaf4f0000f072198f761f13f76091ef2a964c29e46f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:28:30 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[242257]: [NOTICE]   (242261) : New worker (242263) forked
Oct 02 12:28:30 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[242257]: [NOTICE]   (242261) : Loading success.
Oct 02 12:28:31 compute-0 nova_compute[192079]: 2025-10-02 12:28:31.140 2 DEBUG nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Removed pending event for 2fcfea17-10df-499a-8692-facbbc76266b due to event _event_emit_delayed /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:438
Oct 02 12:28:31 compute-0 nova_compute[192079]: 2025-10-02 12:28:31.141 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408111.1388764, 2fcfea17-10df-499a-8692-facbbc76266b => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:28:31 compute-0 nova_compute[192079]: 2025-10-02 12:28:31.141 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] VM Started (Lifecycle Event)
Oct 02 12:28:31 compute-0 nova_compute[192079]: 2025-10-02 12:28:31.182 2 DEBUG nova.compute.manager [None req-a8e98d38-ea22-4324-b69f-4d391267f597 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:28:31 compute-0 nova_compute[192079]: 2025-10-02 12:28:31.182 2 DEBUG nova.objects.instance [None req-a8e98d38-ea22-4324-b69f-4d391267f597 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'pci_devices' on Instance uuid 2fcfea17-10df-499a-8692-facbbc76266b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:28:31 compute-0 ovn_controller[94336]: 2025-10-02T12:28:31Z|00050|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:cb:cf:9a 10.100.0.4
Oct 02 12:28:31 compute-0 nova_compute[192079]: 2025-10-02 12:28:31.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:28:31 compute-0 nova_compute[192079]: 2025-10-02 12:28:31.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:28:31 compute-0 nova_compute[192079]: 2025-10-02 12:28:31.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:28:31 compute-0 nova_compute[192079]: 2025-10-02 12:28:31.692 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:28:31 compute-0 nova_compute[192079]: 2025-10-02 12:28:31.695 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Synchronizing instance power state after lifecycle event "Started"; current vm_state: suspended, current task_state: resuming, current DB power_state: 4, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:28:31 compute-0 nova_compute[192079]: 2025-10-02 12:28:31.786 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:28:31 compute-0 nova_compute[192079]: 2025-10-02 12:28:31.787 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:28:31 compute-0 nova_compute[192079]: 2025-10-02 12:28:31.787 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:28:31 compute-0 nova_compute[192079]: 2025-10-02 12:28:31.787 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:28:31 compute-0 nova_compute[192079]: 2025-10-02 12:28:31.791 2 INFO nova.virt.libvirt.driver [-] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Instance running successfully.
Oct 02 12:28:31 compute-0 virtqemud[191807]: argument unsupported: QEMU guest agent is not configured
Oct 02 12:28:31 compute-0 nova_compute[192079]: 2025-10-02 12:28:31.794 2 DEBUG nova.virt.libvirt.guest [None req-a8e98d38-ea22-4324-b69f-4d391267f597 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Failed to set time: agent not configured sync_guest_time /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:200
Oct 02 12:28:31 compute-0 nova_compute[192079]: 2025-10-02 12:28:31.794 2 DEBUG nova.compute.manager [None req-a8e98d38-ea22-4324-b69f-4d391267f597 d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:28:31 compute-0 nova_compute[192079]: 2025-10-02 12:28:31.795 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] During sync_power_state the instance has a pending task (resuming). Skip.
Oct 02 12:28:31 compute-0 nova_compute[192079]: 2025-10-02 12:28:31.795 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408111.1460795, 2fcfea17-10df-499a-8692-facbbc76266b => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:28:31 compute-0 nova_compute[192079]: 2025-10-02 12:28:31.796 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] VM Resumed (Lifecycle Event)
Oct 02 12:28:31 compute-0 nova_compute[192079]: 2025-10-02 12:28:31.838 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:28:31 compute-0 nova_compute[192079]: 2025-10-02 12:28:31.841 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: suspended, current task_state: resuming, current DB power_state: 4, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:28:31 compute-0 nova_compute[192079]: 2025-10-02 12:28:31.880 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] During sync_power_state the instance has a pending task (resuming). Skip.
Oct 02 12:28:31 compute-0 nova_compute[192079]: 2025-10-02 12:28:31.909 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:28:31 compute-0 nova_compute[192079]: 2025-10-02 12:28:31.987 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk --force-share --output=json" returned: 0 in 0.077s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:28:31 compute-0 nova_compute[192079]: 2025-10-02 12:28:31.987 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:28:32 compute-0 nova_compute[192079]: 2025-10-02 12:28:32.038 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b/disk --force-share --output=json" returned: 0 in 0.050s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:28:32 compute-0 nova_compute[192079]: 2025-10-02 12:28:32.157 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:32 compute-0 nova_compute[192079]: 2025-10-02 12:28:32.175 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:28:32 compute-0 nova_compute[192079]: 2025-10-02 12:28:32.176 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5512MB free_disk=73.31896209716797GB free_vcpus=7 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:28:32 compute-0 nova_compute[192079]: 2025-10-02 12:28:32.176 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:28:32 compute-0 nova_compute[192079]: 2025-10-02 12:28:32.176 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:28:32 compute-0 nova_compute[192079]: 2025-10-02 12:28:32.399 2 DEBUG nova.compute.manager [req-339527b5-4db5-42c7-b371-c0ba38d726e2 req-006c8b0f-87f2-4778-bd9e-4876af7c43be 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Received event network-vif-plugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:28:32 compute-0 nova_compute[192079]: 2025-10-02 12:28:32.400 2 DEBUG oslo_concurrency.lockutils [req-339527b5-4db5-42c7-b371-c0ba38d726e2 req-006c8b0f-87f2-4778-bd9e-4876af7c43be 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "2fcfea17-10df-499a-8692-facbbc76266b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:28:32 compute-0 nova_compute[192079]: 2025-10-02 12:28:32.400 2 DEBUG oslo_concurrency.lockutils [req-339527b5-4db5-42c7-b371-c0ba38d726e2 req-006c8b0f-87f2-4778-bd9e-4876af7c43be 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:28:32 compute-0 nova_compute[192079]: 2025-10-02 12:28:32.400 2 DEBUG oslo_concurrency.lockutils [req-339527b5-4db5-42c7-b371-c0ba38d726e2 req-006c8b0f-87f2-4778-bd9e-4876af7c43be 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:28:32 compute-0 nova_compute[192079]: 2025-10-02 12:28:32.400 2 DEBUG nova.compute.manager [req-339527b5-4db5-42c7-b371-c0ba38d726e2 req-006c8b0f-87f2-4778-bd9e-4876af7c43be 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] No waiting events found dispatching network-vif-plugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:28:32 compute-0 nova_compute[192079]: 2025-10-02 12:28:32.401 2 WARNING nova.compute.manager [req-339527b5-4db5-42c7-b371-c0ba38d726e2 req-006c8b0f-87f2-4778-bd9e-4876af7c43be 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Received unexpected event network-vif-plugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca for instance with vm_state active and task_state None.
Oct 02 12:28:32 compute-0 nova_compute[192079]: 2025-10-02 12:28:32.469 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance 2fcfea17-10df-499a-8692-facbbc76266b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:28:32 compute-0 nova_compute[192079]: 2025-10-02 12:28:32.469 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 1 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:28:32 compute-0 nova_compute[192079]: 2025-10-02 12:28:32.469 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=640MB phys_disk=79GB used_disk=1GB total_vcpus=8 used_vcpus=1 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:28:32 compute-0 nova_compute[192079]: 2025-10-02 12:28:32.603 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:28:32 compute-0 nova_compute[192079]: 2025-10-02 12:28:32.638 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:28:32 compute-0 nova_compute[192079]: 2025-10-02 12:28:32.685 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:28:32 compute-0 nova_compute[192079]: 2025-10-02 12:28:32.686 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.509s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:28:33 compute-0 ovn_controller[94336]: 2025-10-02T12:28:33Z|00489|binding|INFO|Releasing lport 38f1ac16-18c6-4b4a-b769-ebc7dd5181d4 from this chassis (sb_readonly=0)
Oct 02 12:28:33 compute-0 nova_compute[192079]: 2025-10-02 12:28:33.260 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:33 compute-0 nova_compute[192079]: 2025-10-02 12:28:33.411 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759408098.4100895, 957c4e10-f195-4d5e-97c0-0928296aba31 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:28:33 compute-0 nova_compute[192079]: 2025-10-02 12:28:33.411 2 INFO nova.compute.manager [-] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] VM Stopped (Lifecycle Event)
Oct 02 12:28:33 compute-0 nova_compute[192079]: 2025-10-02 12:28:33.441 2 DEBUG nova.compute.manager [None req-986b4b8c-a99d-4b96-a079-3036310bfdf1 - - - - - -] [instance: 957c4e10-f195-4d5e-97c0-0928296aba31] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:28:33 compute-0 nova_compute[192079]: 2025-10-02 12:28:33.455 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:34 compute-0 nova_compute[192079]: 2025-10-02 12:28:34.634 2 DEBUG nova.compute.manager [req-4eae26b4-2feb-4797-bf55-558aaefb3622 req-111b6f07-92f9-44ee-9fe8-772faac795f0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Received event network-vif-plugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:28:34 compute-0 nova_compute[192079]: 2025-10-02 12:28:34.635 2 DEBUG oslo_concurrency.lockutils [req-4eae26b4-2feb-4797-bf55-558aaefb3622 req-111b6f07-92f9-44ee-9fe8-772faac795f0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "2fcfea17-10df-499a-8692-facbbc76266b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:28:34 compute-0 nova_compute[192079]: 2025-10-02 12:28:34.635 2 DEBUG oslo_concurrency.lockutils [req-4eae26b4-2feb-4797-bf55-558aaefb3622 req-111b6f07-92f9-44ee-9fe8-772faac795f0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:28:34 compute-0 nova_compute[192079]: 2025-10-02 12:28:34.636 2 DEBUG oslo_concurrency.lockutils [req-4eae26b4-2feb-4797-bf55-558aaefb3622 req-111b6f07-92f9-44ee-9fe8-772faac795f0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:28:34 compute-0 nova_compute[192079]: 2025-10-02 12:28:34.636 2 DEBUG nova.compute.manager [req-4eae26b4-2feb-4797-bf55-558aaefb3622 req-111b6f07-92f9-44ee-9fe8-772faac795f0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] No waiting events found dispatching network-vif-plugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:28:34 compute-0 nova_compute[192079]: 2025-10-02 12:28:34.637 2 WARNING nova.compute.manager [req-4eae26b4-2feb-4797-bf55-558aaefb3622 req-111b6f07-92f9-44ee-9fe8-772faac795f0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Received unexpected event network-vif-plugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca for instance with vm_state active and task_state None.
Oct 02 12:28:34 compute-0 nova_compute[192079]: 2025-10-02 12:28:34.686 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:28:34 compute-0 nova_compute[192079]: 2025-10-02 12:28:34.687 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:28:35 compute-0 nova_compute[192079]: 2025-10-02 12:28:35.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.159 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.385 2 DEBUG oslo_concurrency.lockutils [None req-be730d84-58cc-4f43-9779-5f6ad244ca7d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "2fcfea17-10df-499a-8692-facbbc76266b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.386 2 DEBUG oslo_concurrency.lockutils [None req-be730d84-58cc-4f43-9779-5f6ad244ca7d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.386 2 DEBUG oslo_concurrency.lockutils [None req-be730d84-58cc-4f43-9779-5f6ad244ca7d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "2fcfea17-10df-499a-8692-facbbc76266b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.386 2 DEBUG oslo_concurrency.lockutils [None req-be730d84-58cc-4f43-9779-5f6ad244ca7d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.386 2 DEBUG oslo_concurrency.lockutils [None req-be730d84-58cc-4f43-9779-5f6ad244ca7d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.398 2 INFO nova.compute.manager [None req-be730d84-58cc-4f43-9779-5f6ad244ca7d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Terminating instance
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.408 2 DEBUG nova.compute.manager [None req-be730d84-58cc-4f43-9779-5f6ad244ca7d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:28:37 compute-0 kernel: tap5f268bcb-29 (unregistering): left promiscuous mode
Oct 02 12:28:37 compute-0 NetworkManager[51160]: <info>  [1759408117.4506] device (tap5f268bcb-29): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.460 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:37 compute-0 ovn_controller[94336]: 2025-10-02T12:28:37Z|00490|binding|INFO|Releasing lport 5f268bcb-29c1-4e4e-a36d-b2ec144d3dca from this chassis (sb_readonly=0)
Oct 02 12:28:37 compute-0 ovn_controller[94336]: 2025-10-02T12:28:37Z|00491|binding|INFO|Setting lport 5f268bcb-29c1-4e4e-a36d-b2ec144d3dca down in Southbound
Oct 02 12:28:37 compute-0 ovn_controller[94336]: 2025-10-02T12:28:37Z|00492|binding|INFO|Removing iface tap5f268bcb-29 ovn-installed in OVS
Oct 02 12:28:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:37.474 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:cb:cf:9a 10.100.0.4'], port_security=['fa:16:3e:cb:cf:9a 10.100.0.4'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.4/28', 'neutron:device_id': '2fcfea17-10df-499a-8692-facbbc76266b', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-a04f937a-375f-4fb0-90fe-5f514a88668f', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'e564a4cad5d443dba81ec04d2a05ced9', 'neutron:revision_number': '8', 'neutron:security_group_ids': 'c0383701-0ec7-4f3b-8585-5effc4f5ca5a', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:port_fip': '192.168.122.248', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=50c0aa38-5fd8-41c7-b4bf-85b59722c5c3, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=5f268bcb-29c1-4e4e-a36d-b2ec144d3dca) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:28:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:37.476 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 5f268bcb-29c1-4e4e-a36d-b2ec144d3dca in datapath a04f937a-375f-4fb0-90fe-5f514a88668f unbound from our chassis
Oct 02 12:28:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:37.478 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network a04f937a-375f-4fb0-90fe-5f514a88668f, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:28:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:37.479 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[237b8db4-6da9-4f37-9cbc-ae4dbb20f6d4]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:37.480 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f namespace which is not needed anymore
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.483 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:37 compute-0 systemd[1]: machine-qemu\x2d63\x2dinstance\x2d0000007d.scope: Deactivated successfully.
Oct 02 12:28:37 compute-0 systemd[1]: machine-qemu\x2d63\x2dinstance\x2d0000007d.scope: Consumed 1.221s CPU time.
Oct 02 12:28:37 compute-0 systemd-machined[152150]: Machine qemu-63-instance-0000007d terminated.
Oct 02 12:28:37 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[242257]: [NOTICE]   (242261) : haproxy version is 2.8.14-c23fe91
Oct 02 12:28:37 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[242257]: [NOTICE]   (242261) : path to executable is /usr/sbin/haproxy
Oct 02 12:28:37 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[242257]: [WARNING]  (242261) : Exiting Master process...
Oct 02 12:28:37 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[242257]: [WARNING]  (242261) : Exiting Master process...
Oct 02 12:28:37 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[242257]: [ALERT]    (242261) : Current worker (242263) exited with code 143 (Terminated)
Oct 02 12:28:37 compute-0 neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f[242257]: [WARNING]  (242261) : All workers exited. Exiting... (0)
Oct 02 12:28:37 compute-0 systemd[1]: libpod-98945682a70c9fb033cb7aaf4f0000f072198f761f13f76091ef2a964c29e46f.scope: Deactivated successfully.
Oct 02 12:28:37 compute-0 podman[242303]: 2025-10-02 12:28:37.625836249 +0000 UTC m=+0.046779187 container died 98945682a70c9fb033cb7aaf4f0000f072198f761f13f76091ef2a964c29e46f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.633 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.638 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:37 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-98945682a70c9fb033cb7aaf4f0000f072198f761f13f76091ef2a964c29e46f-userdata-shm.mount: Deactivated successfully.
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:28:37 compute-0 systemd[1]: var-lib-containers-storage-overlay-bee1876cdb990a1d925f97438e4d29a211450d48021c415244f78110b792a322-merged.mount: Deactivated successfully.
Oct 02 12:28:37 compute-0 podman[242303]: 2025-10-02 12:28:37.676241304 +0000 UTC m=+0.097184232 container cleanup 98945682a70c9fb033cb7aaf4f0000f072198f761f13f76091ef2a964c29e46f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.681 2 INFO nova.virt.libvirt.driver [-] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Instance destroyed successfully.
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.682 2 DEBUG nova.objects.instance [None req-be730d84-58cc-4f43-9779-5f6ad244ca7d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lazy-loading 'resources' on Instance uuid 2fcfea17-10df-499a-8692-facbbc76266b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:28:37 compute-0 systemd[1]: libpod-conmon-98945682a70c9fb033cb7aaf4f0000f072198f761f13f76091ef2a964c29e46f.scope: Deactivated successfully.
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.698 2 DEBUG nova.virt.libvirt.vif [None req-be730d84-58cc-4f43-9779-5f6ad244ca7d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:27:05Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServerActionsTestJSON-server-1253918640',display_name='tempest-ServerActionsTestJSON-server-1253918640',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serveractionstestjson-server-1253918640',id=125,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBJJLom+UJzZg9dduKQv+725QaYDZoMXvP/xlpKnb/K05SGc4dkyLwCDweJ3QifTmxLWqK9Sz5A12yMJbzpa36v5C4bUqj8uiWk/vbR1BAjBdKM9d/Ug8M2nT8LwDBGP/9A==',key_name='tempest-keypair-1006285918',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:27:18Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='e564a4cad5d443dba81ec04d2a05ced9',ramdisk_id='',reservation_id='r-r8iquef4',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-ServerActionsTestJSON-1646745100',owner_user_name='tempest-ServerActionsTestJSON-1646745100-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:28:31Z,user_data='IyEvYmluL3NoCmVjaG8gIlByaW50aW5nIGNpcnJvcyB1c2VyIGF1dGhvcml6ZWQga2V5cyIKY2F0IH5jaXJyb3MvLnNzaC9hdXRob3JpemVkX2tleXMgfHwgdHJ1ZQo=',user_id='d54b1826121b47caba89932a78c06ccd',uuid=2fcfea17-10df-499a-8692-facbbc76266b,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "address": "fa:16:3e:cb:cf:9a", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], 
"gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5f268bcb-29", "ovs_interfaceid": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.699 2 DEBUG nova.network.os_vif_util [None req-be730d84-58cc-4f43-9779-5f6ad244ca7d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converting VIF {"id": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "address": "fa:16:3e:cb:cf:9a", "network": {"id": "a04f937a-375f-4fb0-90fe-5f514a88668f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1926715354-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e564a4cad5d443dba81ec04d2a05ced9", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5f268bcb-29", "ovs_interfaceid": "5f268bcb-29c1-4e4e-a36d-b2ec144d3dca", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.700 2 DEBUG nova.network.os_vif_util [None req-be730d84-58cc-4f43-9779-5f6ad244ca7d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:cb:cf:9a,bridge_name='br-int',has_traffic_filtering=True,id=5f268bcb-29c1-4e4e-a36d-b2ec144d3dca,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5f268bcb-29') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.700 2 DEBUG os_vif [None req-be730d84-58cc-4f43-9779-5f6ad244ca7d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:cb:cf:9a,bridge_name='br-int',has_traffic_filtering=True,id=5f268bcb-29c1-4e4e-a36d-b2ec144d3dca,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5f268bcb-29') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.702 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.702 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap5f268bcb-29, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.706 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.708 2 INFO os_vif [None req-be730d84-58cc-4f43-9779-5f6ad244ca7d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:cb:cf:9a,bridge_name='br-int',has_traffic_filtering=True,id=5f268bcb-29c1-4e4e-a36d-b2ec144d3dca,network=Network(a04f937a-375f-4fb0-90fe-5f514a88668f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5f268bcb-29')
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.709 2 INFO nova.virt.libvirt.driver [None req-be730d84-58cc-4f43-9779-5f6ad244ca7d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Deleting instance files /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b_del
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.710 2 INFO nova.virt.libvirt.driver [None req-be730d84-58cc-4f43-9779-5f6ad244ca7d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Deletion of /var/lib/nova/instances/2fcfea17-10df-499a-8692-facbbc76266b_del complete
Oct 02 12:28:37 compute-0 podman[242349]: 2025-10-02 12:28:37.744806706 +0000 UTC m=+0.042481690 container remove 98945682a70c9fb033cb7aaf4f0000f072198f761f13f76091ef2a964c29e46f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.license=GPLv2)
Oct 02 12:28:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:37.749 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3fbe2cc5-63d2-43af-8178-920a70200df7]: (4, ('Thu Oct  2 12:28:37 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f (98945682a70c9fb033cb7aaf4f0000f072198f761f13f76091ef2a964c29e46f)\n98945682a70c9fb033cb7aaf4f0000f072198f761f13f76091ef2a964c29e46f\nThu Oct  2 12:28:37 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f (98945682a70c9fb033cb7aaf4f0000f072198f761f13f76091ef2a964c29e46f)\n98945682a70c9fb033cb7aaf4f0000f072198f761f13f76091ef2a964c29e46f\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:37.751 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[87ac8ae1-fb73-4e06-887d-effde35b59a0]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:37.752 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapa04f937a-30, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:28:37 compute-0 kernel: tapa04f937a-30: left promiscuous mode
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.753 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.765 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:37.769 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a3e0339c-8e13-4b63-af85-9a80e88354fc]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.773 2 DEBUG nova.compute.manager [req-e762ceb8-b8ea-4deb-9bf0-1488110a37a3 req-55154ffd-8eb4-4bf3-8272-11b5120b40de 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Received event network-vif-unplugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.774 2 DEBUG oslo_concurrency.lockutils [req-e762ceb8-b8ea-4deb-9bf0-1488110a37a3 req-55154ffd-8eb4-4bf3-8272-11b5120b40de 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "2fcfea17-10df-499a-8692-facbbc76266b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.774 2 DEBUG oslo_concurrency.lockutils [req-e762ceb8-b8ea-4deb-9bf0-1488110a37a3 req-55154ffd-8eb4-4bf3-8272-11b5120b40de 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.774 2 DEBUG oslo_concurrency.lockutils [req-e762ceb8-b8ea-4deb-9bf0-1488110a37a3 req-55154ffd-8eb4-4bf3-8272-11b5120b40de 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.775 2 DEBUG nova.compute.manager [req-e762ceb8-b8ea-4deb-9bf0-1488110a37a3 req-55154ffd-8eb4-4bf3-8272-11b5120b40de 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] No waiting events found dispatching network-vif-unplugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.775 2 DEBUG nova.compute.manager [req-e762ceb8-b8ea-4deb-9bf0-1488110a37a3 req-55154ffd-8eb4-4bf3-8272-11b5120b40de 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Received event network-vif-unplugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:28:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:37.793 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5279daf8-40a7-4468-af5b-898172cc9044]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:37.796 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f2172be3-9358-427c-8cae-fd700ca15ba3]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.800 2 INFO nova.compute.manager [None req-be730d84-58cc-4f43-9779-5f6ad244ca7d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Took 0.39 seconds to destroy the instance on the hypervisor.
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.800 2 DEBUG oslo.service.loopingcall [None req-be730d84-58cc-4f43-9779-5f6ad244ca7d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.801 2 DEBUG nova.compute.manager [-] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:28:37 compute-0 nova_compute[192079]: 2025-10-02 12:28:37.801 2 DEBUG nova.network.neutron [-] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:28:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:37.815 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[57307b02-ed8a-4c1c-a5e1-01fd1a714922]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 610784, 'reachable_time': 36550, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 242365, 'error': None, 'target': 'ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:37.818 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-a04f937a-375f-4fb0-90fe-5f514a88668f deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:28:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:28:37.818 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[d5a82764-7808-4189-a1ee-6e908c60e1a3]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:28:37 compute-0 systemd[1]: run-netns-ovnmeta\x2da04f937a\x2d375f\x2d4fb0\x2d90fe\x2d5f514a88668f.mount: Deactivated successfully.
Oct 02 12:28:38 compute-0 nova_compute[192079]: 2025-10-02 12:28:38.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:28:38 compute-0 nova_compute[192079]: 2025-10-02 12:28:38.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:28:38 compute-0 nova_compute[192079]: 2025-10-02 12:28:38.701 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:28:38 compute-0 nova_compute[192079]: 2025-10-02 12:28:38.701 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:28:39 compute-0 nova_compute[192079]: 2025-10-02 12:28:39.390 2 DEBUG nova.network.neutron [-] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:28:39 compute-0 nova_compute[192079]: 2025-10-02 12:28:39.417 2 INFO nova.compute.manager [-] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Took 1.62 seconds to deallocate network for instance.
Oct 02 12:28:39 compute-0 nova_compute[192079]: 2025-10-02 12:28:39.599 2 DEBUG nova.compute.manager [req-d80e27c5-e078-4971-89ed-cc37199bf1cb req-f59a3f6e-6f5c-4f4a-9c02-9934d2db6129 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Received event network-vif-deleted-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:28:39 compute-0 nova_compute[192079]: 2025-10-02 12:28:39.660 2 DEBUG oslo_concurrency.lockutils [None req-be730d84-58cc-4f43-9779-5f6ad244ca7d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:28:39 compute-0 nova_compute[192079]: 2025-10-02 12:28:39.661 2 DEBUG oslo_concurrency.lockutils [None req-be730d84-58cc-4f43-9779-5f6ad244ca7d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:28:39 compute-0 nova_compute[192079]: 2025-10-02 12:28:39.805 2 DEBUG nova.compute.provider_tree [None req-be730d84-58cc-4f43-9779-5f6ad244ca7d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:28:39 compute-0 nova_compute[192079]: 2025-10-02 12:28:39.837 2 DEBUG nova.scheduler.client.report [None req-be730d84-58cc-4f43-9779-5f6ad244ca7d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:28:39 compute-0 nova_compute[192079]: 2025-10-02 12:28:39.874 2 DEBUG oslo_concurrency.lockutils [None req-be730d84-58cc-4f43-9779-5f6ad244ca7d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.213s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:28:39 compute-0 nova_compute[192079]: 2025-10-02 12:28:39.911 2 DEBUG nova.compute.manager [req-8df1e34b-de42-4bd1-9add-5377cd6c9b19 req-08cdd1b5-2c79-4b7c-a268-1c745b0ad3b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Received event network-vif-plugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:28:39 compute-0 nova_compute[192079]: 2025-10-02 12:28:39.911 2 DEBUG oslo_concurrency.lockutils [req-8df1e34b-de42-4bd1-9add-5377cd6c9b19 req-08cdd1b5-2c79-4b7c-a268-1c745b0ad3b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "2fcfea17-10df-499a-8692-facbbc76266b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:28:39 compute-0 nova_compute[192079]: 2025-10-02 12:28:39.911 2 DEBUG oslo_concurrency.lockutils [req-8df1e34b-de42-4bd1-9add-5377cd6c9b19 req-08cdd1b5-2c79-4b7c-a268-1c745b0ad3b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:28:39 compute-0 nova_compute[192079]: 2025-10-02 12:28:39.912 2 DEBUG oslo_concurrency.lockutils [req-8df1e34b-de42-4bd1-9add-5377cd6c9b19 req-08cdd1b5-2c79-4b7c-a268-1c745b0ad3b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:28:39 compute-0 nova_compute[192079]: 2025-10-02 12:28:39.912 2 DEBUG nova.compute.manager [req-8df1e34b-de42-4bd1-9add-5377cd6c9b19 req-08cdd1b5-2c79-4b7c-a268-1c745b0ad3b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] No waiting events found dispatching network-vif-plugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:28:39 compute-0 nova_compute[192079]: 2025-10-02 12:28:39.912 2 WARNING nova.compute.manager [req-8df1e34b-de42-4bd1-9add-5377cd6c9b19 req-08cdd1b5-2c79-4b7c-a268-1c745b0ad3b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Received unexpected event network-vif-plugged-5f268bcb-29c1-4e4e-a36d-b2ec144d3dca for instance with vm_state deleted and task_state None.
Oct 02 12:28:39 compute-0 nova_compute[192079]: 2025-10-02 12:28:39.942 2 INFO nova.scheduler.client.report [None req-be730d84-58cc-4f43-9779-5f6ad244ca7d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Deleted allocations for instance 2fcfea17-10df-499a-8692-facbbc76266b
Oct 02 12:28:40 compute-0 nova_compute[192079]: 2025-10-02 12:28:40.046 2 DEBUG oslo_concurrency.lockutils [None req-be730d84-58cc-4f43-9779-5f6ad244ca7d d54b1826121b47caba89932a78c06ccd e564a4cad5d443dba81ec04d2a05ced9 - - default default] Lock "2fcfea17-10df-499a-8692-facbbc76266b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 2.660s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:28:40 compute-0 podman[242366]: 2025-10-02 12:28:40.162070828 +0000 UTC m=+0.066287161 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=edpm, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, 
org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, tcib_managed=true)
Oct 02 12:28:42 compute-0 nova_compute[192079]: 2025-10-02 12:28:42.162 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:42 compute-0 nova_compute[192079]: 2025-10-02 12:28:42.696 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:28:42 compute-0 nova_compute[192079]: 2025-10-02 12:28:42.705 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:46 compute-0 nova_compute[192079]: 2025-10-02 12:28:46.687 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:46 compute-0 nova_compute[192079]: 2025-10-02 12:28:46.893 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:47 compute-0 nova_compute[192079]: 2025-10-02 12:28:47.163 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:47 compute-0 nova_compute[192079]: 2025-10-02 12:28:47.708 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:48 compute-0 podman[242387]: 2025-10-02 12:28:48.157360412 +0000 UTC m=+0.064485421 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.openshift.tags=minimal rhel9, architecture=x86_64, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, release=1755695350, build-date=2025-08-20T13:12:41, distribution-scope=public, version=9.6, config_id=edpm, vcs-type=git, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, container_name=openstack_network_exporter, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., maintainer=Red Hat, Inc., name=ubi9-minimal, io.buildah.version=1.33.7, url=https://catalog.redhat.com/en/search?searchType=containers, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., vendor=Red Hat, Inc., managed_by=edpm_ansible, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., com.redhat.component=ubi9-minimal-container, io.openshift.expose-services=, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal)
Oct 02 12:28:48 compute-0 podman[242388]: 2025-10-02 12:28:48.165792132 +0000 UTC m=+0.070247318 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, config_id=multipathd, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_managed=true, container_name=multipathd)
Oct 02 12:28:52 compute-0 podman[242428]: 2025-10-02 12:28:52.146704893 +0000 UTC m=+0.059951788 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_id=iscsid, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=iscsid, org.label-schema.license=GPLv2)
Oct 02 12:28:52 compute-0 nova_compute[192079]: 2025-10-02 12:28:52.165 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:52 compute-0 podman[242427]: 2025-10-02 12:28:52.191873445 +0000 UTC m=+0.101418708 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:28:52 compute-0 nova_compute[192079]: 2025-10-02 12:28:52.677 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759408117.6754618, 2fcfea17-10df-499a-8692-facbbc76266b => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:28:52 compute-0 nova_compute[192079]: 2025-10-02 12:28:52.677 2 INFO nova.compute.manager [-] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] VM Stopped (Lifecycle Event)
Oct 02 12:28:52 compute-0 nova_compute[192079]: 2025-10-02 12:28:52.762 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:52 compute-0 nova_compute[192079]: 2025-10-02 12:28:52.945 2 DEBUG nova.compute.manager [None req-0764bf60-a21c-4063-813e-3c22a2a1b984 - - - - - -] [instance: 2fcfea17-10df-499a-8692-facbbc76266b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:28:57 compute-0 nova_compute[192079]: 2025-10-02 12:28:57.203 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:28:57 compute-0 nova_compute[192079]: 2025-10-02 12:28:57.764 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:29:01 compute-0 podman[242472]: 2025-10-02 12:29:01.129762722 +0000 UTC m=+0.044085864 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, container_name=ovn_metadata_agent, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, io.buildah.version=1.41.3)
Oct 02 12:29:01 compute-0 podman[242474]: 2025-10-02 12:29:01.145789799 +0000 UTC m=+0.054653802 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:29:01 compute-0 podman[242473]: 2025-10-02 12:29:01.174066861 +0000 UTC m=+0.083642464 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_controller, org.label-schema.vendor=CentOS, tcib_managed=true, config_id=ovn_controller, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:29:02 compute-0 nova_compute[192079]: 2025-10-02 12:29:02.205 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:29:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:29:02.230 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:29:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:29:02.231 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:29:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:29:02.231 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:29:02 compute-0 nova_compute[192079]: 2025-10-02 12:29:02.528 2 DEBUG oslo_concurrency.lockutils [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Acquiring lock "f0ae2d2a-f52d-4b63-a236-8886d694c795" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:29:02 compute-0 nova_compute[192079]: 2025-10-02 12:29:02.528 2 DEBUG oslo_concurrency.lockutils [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lock "f0ae2d2a-f52d-4b63-a236-8886d694c795" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:29:02 compute-0 nova_compute[192079]: 2025-10-02 12:29:02.593 2 DEBUG nova.compute.manager [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:29:02 compute-0 nova_compute[192079]: 2025-10-02 12:29:02.766 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:29:02 compute-0 nova_compute[192079]: 2025-10-02 12:29:02.823 2 DEBUG oslo_concurrency.lockutils [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:29:02 compute-0 nova_compute[192079]: 2025-10-02 12:29:02.824 2 DEBUG oslo_concurrency.lockutils [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:29:02 compute-0 nova_compute[192079]: 2025-10-02 12:29:02.833 2 DEBUG nova.virt.hardware [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:29:02 compute-0 nova_compute[192079]: 2025-10-02 12:29:02.833 2 INFO nova.compute.claims [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.058 2 DEBUG nova.compute.provider_tree [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.074 2 DEBUG nova.scheduler.client.report [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.108 2 DEBUG oslo_concurrency.lockutils [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.284s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.109 2 DEBUG nova.compute.manager [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.196 2 DEBUG nova.compute.manager [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Not allocating networking since 'none' was specified. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1948
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.220 2 INFO nova.virt.libvirt.driver [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.270 2 DEBUG nova.compute.manager [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.445 2 DEBUG nova.compute.manager [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.447 2 DEBUG nova.virt.libvirt.driver [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.447 2 INFO nova.virt.libvirt.driver [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Creating image(s)
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.448 2 DEBUG oslo_concurrency.lockutils [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Acquiring lock "/var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.449 2 DEBUG oslo_concurrency.lockutils [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lock "/var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.451 2 DEBUG oslo_concurrency.lockutils [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lock "/var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.002s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.476 2 DEBUG oslo_concurrency.processutils [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.531 2 DEBUG oslo_concurrency.processutils [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.532 2 DEBUG oslo_concurrency.lockutils [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.533 2 DEBUG oslo_concurrency.lockutils [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.543 2 DEBUG oslo_concurrency.processutils [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.601 2 DEBUG oslo_concurrency.processutils [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.058s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.602 2 DEBUG oslo_concurrency.processutils [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.721 2 DEBUG oslo_concurrency.processutils [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/disk 1073741824" returned: 0 in 0.119s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.722 2 DEBUG oslo_concurrency.lockutils [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.189s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.722 2 DEBUG oslo_concurrency.processutils [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.798 2 DEBUG oslo_concurrency.processutils [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.075s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.799 2 DEBUG nova.virt.disk.api [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Checking if we can resize image /var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.799 2 DEBUG oslo_concurrency.processutils [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.877 2 DEBUG oslo_concurrency.processutils [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/disk --force-share --output=json" returned: 0 in 0.078s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.878 2 DEBUG nova.virt.disk.api [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Cannot resize image /var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.879 2 DEBUG nova.objects.instance [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lazy-loading 'migration_context' on Instance uuid f0ae2d2a-f52d-4b63-a236-8886d694c795 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.899 2 DEBUG nova.virt.libvirt.driver [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.899 2 DEBUG nova.virt.libvirt.driver [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Ensure instance console log exists: /var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.900 2 DEBUG oslo_concurrency.lockutils [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.900 2 DEBUG oslo_concurrency.lockutils [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.900 2 DEBUG oslo_concurrency.lockutils [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.902 2 DEBUG nova.virt.libvirt.driver [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Start _get_guest_xml network_info=[] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.907 2 WARNING nova.virt.libvirt.driver [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.910 2 DEBUG nova.virt.libvirt.host [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.911 2 DEBUG nova.virt.libvirt.host [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.915 2 DEBUG nova.virt.libvirt.host [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.916 2 DEBUG nova.virt.libvirt.host [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.919 2 DEBUG nova.virt.libvirt.driver [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.919 2 DEBUG nova.virt.hardware [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.920 2 DEBUG nova.virt.hardware [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.921 2 DEBUG nova.virt.hardware [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.921 2 DEBUG nova.virt.hardware [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.921 2 DEBUG nova.virt.hardware [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.922 2 DEBUG nova.virt.hardware [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.922 2 DEBUG nova.virt.hardware [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.923 2 DEBUG nova.virt.hardware [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.924 2 DEBUG nova.virt.hardware [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.924 2 DEBUG nova.virt.hardware [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.925 2 DEBUG nova.virt.hardware [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.932 2 DEBUG nova.objects.instance [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lazy-loading 'pci_devices' on Instance uuid f0ae2d2a-f52d-4b63-a236-8886d694c795 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:29:03 compute-0 nova_compute[192079]: 2025-10-02 12:29:03.955 2 DEBUG nova.virt.libvirt.driver [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:29:03 compute-0 nova_compute[192079]:   <uuid>f0ae2d2a-f52d-4b63-a236-8886d694c795</uuid>
Oct 02 12:29:03 compute-0 nova_compute[192079]:   <name>instance-00000082</name>
Oct 02 12:29:03 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:29:03 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:29:03 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:29:03 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:       <nova:name>tempest-ServerShowV257Test-server-216308627</nova:name>
Oct 02 12:29:03 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:29:03</nova:creationTime>
Oct 02 12:29:03 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:29:03 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:29:03 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:29:03 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:29:03 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:29:03 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:29:03 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:29:03 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:29:03 compute-0 nova_compute[192079]:         <nova:user uuid="49bbeba56f4d4f45ab5d7c1bec480a84">tempest-ServerShowV257Test-1770639198-project-member</nova:user>
Oct 02 12:29:03 compute-0 nova_compute[192079]:         <nova:project uuid="97ac0fd972e743f1aa6700bb6734c60a">tempest-ServerShowV257Test-1770639198</nova:project>
Oct 02 12:29:03 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:29:03 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:       <nova:ports/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:29:03 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:29:03 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <system>
Oct 02 12:29:03 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:29:03 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:29:03 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:29:03 compute-0 nova_compute[192079]:       <entry name="serial">f0ae2d2a-f52d-4b63-a236-8886d694c795</entry>
Oct 02 12:29:03 compute-0 nova_compute[192079]:       <entry name="uuid">f0ae2d2a-f52d-4b63-a236-8886d694c795</entry>
Oct 02 12:29:03 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     </system>
Oct 02 12:29:03 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:29:03 compute-0 nova_compute[192079]:   <os>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:   </os>
Oct 02 12:29:03 compute-0 nova_compute[192079]:   <features>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:   </features>
Oct 02 12:29:03 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:29:03 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:29:03 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:29:03 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/disk"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:29:03 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.config"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:29:03 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/console.log" append="off"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <video>
Oct 02 12:29:03 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     </video>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:29:03 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:29:03 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:29:03 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:29:03 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:29:03 compute-0 nova_compute[192079]: </domain>
Oct 02 12:29:03 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:29:04 compute-0 nova_compute[192079]: 2025-10-02 12:29:04.032 2 DEBUG nova.virt.libvirt.driver [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:29:04 compute-0 nova_compute[192079]: 2025-10-02 12:29:04.033 2 DEBUG nova.virt.libvirt.driver [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:29:04 compute-0 nova_compute[192079]: 2025-10-02 12:29:04.034 2 INFO nova.virt.libvirt.driver [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Using config drive
Oct 02 12:29:04 compute-0 nova_compute[192079]: 2025-10-02 12:29:04.321 2 INFO nova.virt.libvirt.driver [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Creating config drive at /var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.config
Oct 02 12:29:04 compute-0 nova_compute[192079]: 2025-10-02 12:29:04.327 2 DEBUG oslo_concurrency.processutils [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpnsf9vd2n execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:29:04 compute-0 nova_compute[192079]: 2025-10-02 12:29:04.451 2 DEBUG oslo_concurrency.processutils [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpnsf9vd2n" returned: 0 in 0.124s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:29:04 compute-0 systemd-machined[152150]: New machine qemu-64-instance-00000082.
Oct 02 12:29:04 compute-0 systemd[1]: Started Virtual Machine qemu-64-instance-00000082.
Oct 02 12:29:05 compute-0 nova_compute[192079]: 2025-10-02 12:29:05.294 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408145.2942252, f0ae2d2a-f52d-4b63-a236-8886d694c795 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:29:05 compute-0 nova_compute[192079]: 2025-10-02 12:29:05.295 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] VM Resumed (Lifecycle Event)
Oct 02 12:29:05 compute-0 nova_compute[192079]: 2025-10-02 12:29:05.298 2 DEBUG nova.compute.manager [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:29:05 compute-0 nova_compute[192079]: 2025-10-02 12:29:05.299 2 DEBUG nova.virt.libvirt.driver [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:29:05 compute-0 nova_compute[192079]: 2025-10-02 12:29:05.303 2 INFO nova.virt.libvirt.driver [-] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Instance spawned successfully.
Oct 02 12:29:05 compute-0 nova_compute[192079]: 2025-10-02 12:29:05.303 2 DEBUG nova.virt.libvirt.driver [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:29:05 compute-0 nova_compute[192079]: 2025-10-02 12:29:05.352 2 DEBUG nova.virt.libvirt.driver [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:29:05 compute-0 nova_compute[192079]: 2025-10-02 12:29:05.352 2 DEBUG nova.virt.libvirt.driver [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:29:05 compute-0 nova_compute[192079]: 2025-10-02 12:29:05.352 2 DEBUG nova.virt.libvirt.driver [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:29:05 compute-0 nova_compute[192079]: 2025-10-02 12:29:05.353 2 DEBUG nova.virt.libvirt.driver [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:29:05 compute-0 nova_compute[192079]: 2025-10-02 12:29:05.353 2 DEBUG nova.virt.libvirt.driver [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:29:05 compute-0 nova_compute[192079]: 2025-10-02 12:29:05.354 2 DEBUG nova.virt.libvirt.driver [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:29:05 compute-0 nova_compute[192079]: 2025-10-02 12:29:05.357 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:29:05 compute-0 nova_compute[192079]: 2025-10-02 12:29:05.361 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:29:05 compute-0 nova_compute[192079]: 2025-10-02 12:29:05.437 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:29:05 compute-0 nova_compute[192079]: 2025-10-02 12:29:05.437 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408145.297967, f0ae2d2a-f52d-4b63-a236-8886d694c795 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:29:05 compute-0 nova_compute[192079]: 2025-10-02 12:29:05.438 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] VM Started (Lifecycle Event)
Oct 02 12:29:05 compute-0 nova_compute[192079]: 2025-10-02 12:29:05.471 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:29:05 compute-0 nova_compute[192079]: 2025-10-02 12:29:05.475 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:29:05 compute-0 nova_compute[192079]: 2025-10-02 12:29:05.532 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:29:05 compute-0 nova_compute[192079]: 2025-10-02 12:29:05.558 2 INFO nova.compute.manager [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Took 2.11 seconds to spawn the instance on the hypervisor.
Oct 02 12:29:05 compute-0 nova_compute[192079]: 2025-10-02 12:29:05.559 2 DEBUG nova.compute.manager [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:29:05 compute-0 nova_compute[192079]: 2025-10-02 12:29:05.731 2 INFO nova.compute.manager [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Took 2.99 seconds to build instance.
Oct 02 12:29:05 compute-0 nova_compute[192079]: 2025-10-02 12:29:05.798 2 DEBUG oslo_concurrency.lockutils [None req-475b4175-744f-4199-8364-5ca0f1ad7e0d 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lock "f0ae2d2a-f52d-4b63-a236-8886d694c795" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 3.270s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:29:07 compute-0 nova_compute[192079]: 2025-10-02 12:29:07.206 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:29:07 compute-0 nova_compute[192079]: 2025-10-02 12:29:07.768 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:29:08 compute-0 nova_compute[192079]: 2025-10-02 12:29:08.637 2 INFO nova.compute.manager [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Rebuilding instance
Oct 02 12:29:09 compute-0 nova_compute[192079]: 2025-10-02 12:29:09.378 2 DEBUG nova.compute.manager [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:29:09 compute-0 nova_compute[192079]: 2025-10-02 12:29:09.607 2 DEBUG nova.objects.instance [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lazy-loading 'pci_requests' on Instance uuid f0ae2d2a-f52d-4b63-a236-8886d694c795 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:29:09 compute-0 nova_compute[192079]: 2025-10-02 12:29:09.629 2 DEBUG nova.objects.instance [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lazy-loading 'pci_devices' on Instance uuid f0ae2d2a-f52d-4b63-a236-8886d694c795 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:29:09 compute-0 nova_compute[192079]: 2025-10-02 12:29:09.656 2 DEBUG nova.objects.instance [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lazy-loading 'resources' on Instance uuid f0ae2d2a-f52d-4b63-a236-8886d694c795 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:29:09 compute-0 nova_compute[192079]: 2025-10-02 12:29:09.697 2 DEBUG nova.objects.instance [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lazy-loading 'migration_context' on Instance uuid f0ae2d2a-f52d-4b63-a236-8886d694c795 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:29:09 compute-0 nova_compute[192079]: 2025-10-02 12:29:09.738 2 DEBUG nova.objects.instance [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Trying to apply a migration context that does not seem to be set for this instance apply_migration_context /usr/lib/python3.9/site-packages/nova/objects/instance.py:1032
Oct 02 12:29:09 compute-0 nova_compute[192079]: 2025-10-02 12:29:09.742 2 DEBUG nova.virt.libvirt.driver [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Shutting down instance from state 1 _clean_shutdown /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4071
Oct 02 12:29:11 compute-0 podman[242584]: 2025-10-02 12:29:11.1850229 +0000 UTC m=+0.080438627 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, config_id=edpm, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=ceilometer_agent_compute, maintainer=OpenStack Kubernetes Operator team, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', 
'/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3)
Oct 02 12:29:12 compute-0 nova_compute[192079]: 2025-10-02 12:29:12.209 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:29:12 compute-0 nova_compute[192079]: 2025-10-02 12:29:12.769 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.107 12 DEBUG ceilometer.compute.discovery [-] instance data: {'id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795', 'name': 'tempest-ServerShowV257Test-server-216308627', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'os_type': 'hvm', 'architecture': 'x86_64', 'OS-EXT-SRV-ATTR:instance_name': 'instance-00000082', 'OS-EXT-SRV-ATTR:host': 'compute-0.ctlplane.example.com', 'OS-EXT-STS:vm_state': 'running', 'tenant_id': '97ac0fd972e743f1aa6700bb6734c60a', 'user_id': '49bbeba56f4d4f45ab5d7c1bec480a84', 'hostId': 'ae061e3860926dfe3b7eeab0b01d2bb892527c2f3abf856edfcbefd4', 'status': 'active', 'metadata': {}} discover_libvirt_polling /usr/lib/python3.9/site-packages/ceilometer/compute/discovery.py:228
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.107 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.iops in the context of pollsters
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.107 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for PerDeviceDiskIOPSPollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.107 12 ERROR ceilometer.polling.manager [-] Prevent pollster disk.device.iops from polling [<NovaLikeServer: tempest-ServerShowV257Test-server-216308627>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-ServerShowV257Test-server-216308627>]
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.108 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets.drop in the context of pollsters
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.110 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets in the context of pollsters
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.110 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.allocation in the context of pollsters
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.122 12 DEBUG ceilometer.compute.pollsters [-] f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.device.allocation volume: 14622720 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.122 12 DEBUG ceilometer.compute.pollsters [-] f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.device.allocation volume: 487424 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '8f4b2118-81c4-47a3-85c0-9d11c832862e', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 14622720, 'user_id': '49bbeba56f4d4f45ab5d7c1bec480a84', 'user_name': None, 'project_id': '97ac0fd972e743f1aa6700bb6734c60a', 'project_name': None, 'resource_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795-vda', 'timestamp': '2025-10-02T12:29:17.110492', 'resource_metadata': {'display_name': 'tempest-ServerShowV257Test-server-216308627', 'name': 'instance-00000082', 'instance_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795', 'instance_type': 'm1.nano', 'host': 'ae061e3860926dfe3b7eeab0b01d2bb892527c2f3abf856edfcbefd4', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '69b4fdda-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6154.797574451, 'message_signature': '2a9bcbb35c60cdbb6372a3cae14701bbc037c9930dc9f8bf227c9ce370b097d3'}, {'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 487424, 'user_id': '49bbeba56f4d4f45ab5d7c1bec480a84', 'user_name': None, 'project_id': '97ac0fd972e743f1aa6700bb6734c60a', 'project_name': None, 'resource_id': 
'f0ae2d2a-f52d-4b63-a236-8886d694c795-sda', 'timestamp': '2025-10-02T12:29:17.110492', 'resource_metadata': {'display_name': 'tempest-ServerShowV257Test-server-216308627', 'name': 'instance-00000082', 'instance_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795', 'instance_type': 'm1.nano', 'host': 'ae061e3860926dfe3b7eeab0b01d2bb892527c2f3abf856edfcbefd4', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '69b50942-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6154.797574451, 'message_signature': '8700d902ebb2afbaa6586e530b0374d220be4cc73314badfcf266466679eb8ee'}]}, 'timestamp': '2025-10-02 12:29:17.122924', '_unique_id': '6ce1ad2e85c247dd900f1bbb26e5604e'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes.delta in the context of pollsters
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.124 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.usage in the context of pollsters
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.125 12 DEBUG ceilometer.compute.pollsters [-] f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.device.usage volume: 13959168 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.125 12 DEBUG ceilometer.compute.pollsters [-] f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.device.usage volume: 485376 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '073e0f1b-4fc1-415d-b205-d4df9d54dd80', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 13959168, 'user_id': '49bbeba56f4d4f45ab5d7c1bec480a84', 'user_name': None, 'project_id': '97ac0fd972e743f1aa6700bb6734c60a', 'project_name': None, 'resource_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795-vda', 'timestamp': '2025-10-02T12:29:17.125071', 'resource_metadata': {'display_name': 'tempest-ServerShowV257Test-server-216308627', 'name': 'instance-00000082', 'instance_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795', 'instance_type': 'm1.nano', 'host': 'ae061e3860926dfe3b7eeab0b01d2bb892527c2f3abf856edfcbefd4', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '69b567d4-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6154.797574451, 'message_signature': '30b5def5dc572f56f322749d93a819225c4189808b5792bd9eff21da502fb480'}, {'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 485376, 'user_id': '49bbeba56f4d4f45ab5d7c1bec480a84', 'user_name': None, 'project_id': '97ac0fd972e743f1aa6700bb6734c60a', 'project_name': None, 'resource_id': 
'f0ae2d2a-f52d-4b63-a236-8886d694c795-sda', 'timestamp': '2025-10-02T12:29:17.125071', 'resource_metadata': {'display_name': 'tempest-ServerShowV257Test-server-216308627', 'name': 'instance-00000082', 'instance_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795', 'instance_type': 'm1.nano', 'host': 'ae061e3860926dfe3b7eeab0b01d2bb892527c2f3abf856edfcbefd4', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '69b5722e-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6154.797574451, 'message_signature': '29085bfd060f20ce126c9e5e1743af60a93187991d6b8082d8e814cc073bdbec'}]}, 'timestamp': '2025-10-02 12:29:17.125586', '_unique_id': 'ca730ca16cae420e8eb8cca89a059fae'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.126 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.requests in the context of pollsters
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.145 12 DEBUG ceilometer.compute.pollsters [-] f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.device.write.requests volume: 111 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.145 12 DEBUG ceilometer.compute.pollsters [-] f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.device.write.requests volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'ef8fd6bc-4a63-4af0-b5b4-acfae15f1c80', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 111, 'user_id': '49bbeba56f4d4f45ab5d7c1bec480a84', 'user_name': None, 'project_id': '97ac0fd972e743f1aa6700bb6734c60a', 'project_name': None, 'resource_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795-vda', 'timestamp': '2025-10-02T12:29:17.126884', 'resource_metadata': {'display_name': 'tempest-ServerShowV257Test-server-216308627', 'name': 'instance-00000082', 'instance_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795', 'instance_type': 'm1.nano', 'host': 'ae061e3860926dfe3b7eeab0b01d2bb892527c2f3abf856edfcbefd4', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '69b87a82-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6154.813955588, 'message_signature': 'fb0a986f476a97f7e8d2ca6c6359a5cda1a79b3fd4ea516ee35266c3351a1306'}, {'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 0, 'user_id': '49bbeba56f4d4f45ab5d7c1bec480a84', 'user_name': None, 'project_id': '97ac0fd972e743f1aa6700bb6734c60a', 'project_name': None, 
'resource_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795-sda', 'timestamp': '2025-10-02T12:29:17.126884', 'resource_metadata': {'display_name': 'tempest-ServerShowV257Test-server-216308627', 'name': 'instance-00000082', 'instance_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795', 'instance_type': 'm1.nano', 'host': 'ae061e3860926dfe3b7eeab0b01d2bb892527c2f3abf856edfcbefd4', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '69b88752-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6154.813955588, 'message_signature': '1d5dab2e67df42ce01d4be08537c3c8b6426db4d63c849f7728027ec7cd6f043'}]}, 'timestamp': '2025-10-02 12:29:17.145841', '_unique_id': '39ed15ba480b429caadca4bdbb7492bd'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.146 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.147 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.latency in the context of pollsters
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.147 12 DEBUG ceilometer.compute.pollsters [-] f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.device.write.latency volume: 737610173 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 DEBUG ceilometer.compute.pollsters [-] f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.device.write.latency volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '2d18b7dd-198d-41a8-a4f6-88c591549a22', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 737610173, 'user_id': '49bbeba56f4d4f45ab5d7c1bec480a84', 'user_name': None, 'project_id': '97ac0fd972e743f1aa6700bb6734c60a', 'project_name': None, 'resource_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795-vda', 'timestamp': '2025-10-02T12:29:17.147748', 'resource_metadata': {'display_name': 'tempest-ServerShowV257Test-server-216308627', 'name': 'instance-00000082', 'instance_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795', 'instance_type': 'm1.nano', 'host': 'ae061e3860926dfe3b7eeab0b01d2bb892527c2f3abf856edfcbefd4', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '69b8de28-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6154.813955588, 'message_signature': '4c26268166ee1dc810596ba3c7fe6ae1d10bb379d95e0e49df258b1ae2cbc0a4'}, {'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 0, 'user_id': '49bbeba56f4d4f45ab5d7c1bec480a84', 'user_name': None, 'project_id': '97ac0fd972e743f1aa6700bb6734c60a', 'project_name': None, 
'resource_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795-sda', 'timestamp': '2025-10-02T12:29:17.147748', 'resource_metadata': {'display_name': 'tempest-ServerShowV257Test-server-216308627', 'name': 'instance-00000082', 'instance_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795', 'instance_type': 'm1.nano', 'host': 'ae061e3860926dfe3b7eeab0b01d2bb892527c2f3abf856edfcbefd4', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '69b8e9fe-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6154.813955588, 'message_signature': '93c78a1fdd272bbf7d2fc5dea477654379b2e90cb3dae223e3b9da4c96436fd0'}]}, 'timestamp': '2025-10-02 12:29:17.148317', '_unique_id': '1ff6dae53c5e48e0953c7a2845708dbd'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.148 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.149 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes in the context of pollsters
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.149 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.capacity in the context of pollsters
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.149 12 DEBUG ceilometer.compute.pollsters [-] f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.device.capacity volume: 1073741824 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 DEBUG ceilometer.compute.pollsters [-] f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.device.capacity volume: 485376 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '5189b6be-baab-45c9-ad1c-7579ea97db9d', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 1073741824, 'user_id': '49bbeba56f4d4f45ab5d7c1bec480a84', 'user_name': None, 'project_id': '97ac0fd972e743f1aa6700bb6734c60a', 'project_name': None, 'resource_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795-vda', 'timestamp': '2025-10-02T12:29:17.149786', 'resource_metadata': {'display_name': 'tempest-ServerShowV257Test-server-216308627', 'name': 'instance-00000082', 'instance_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795', 'instance_type': 'm1.nano', 'host': 'ae061e3860926dfe3b7eeab0b01d2bb892527c2f3abf856edfcbefd4', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '69b92d2e-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6154.797574451, 'message_signature': 'd8dcf638a52a10c6097dffc01b4c5eae812e95caa46e3beb2e3e3c23213fc247'}, {'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 485376, 'user_id': '49bbeba56f4d4f45ab5d7c1bec480a84', 'user_name': None, 'project_id': '97ac0fd972e743f1aa6700bb6734c60a', 'project_name': None, 'resource_id': 
'f0ae2d2a-f52d-4b63-a236-8886d694c795-sda', 'timestamp': '2025-10-02T12:29:17.149786', 'resource_metadata': {'display_name': 'tempest-ServerShowV257Test-server-216308627', 'name': 'instance-00000082', 'instance_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795', 'instance_type': 'm1.nano', 'host': 'ae061e3860926dfe3b7eeab0b01d2bb892527c2f3abf856edfcbefd4', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '69b93878-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6154.797574451, 'message_signature': '65efc14fe515ab5ec005fffe8ee643fb68a0e9b8c231df66e6e46d08aa38f241'}]}, 'timestamp': '2025-10-02 12:29:17.150327', '_unique_id': '93c1b52fab354c768047bef7b2cfc11a'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.150 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.151 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets.drop in the context of pollsters
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.151 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets.error in the context of pollsters
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.151 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets in the context of pollsters
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.151 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes.rate in the context of pollsters
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.151 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for IncomingBytesRatePollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.151 12 ERROR ceilometer.polling.manager [-] Prevent pollster network.incoming.bytes.rate from polling [<NovaLikeServer: tempest-ServerShowV257Test-server-216308627>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-ServerShowV257Test-server-216308627>]
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.152 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes.rate in the context of pollsters
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.152 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for OutgoingBytesRatePollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.152 12 ERROR ceilometer.polling.manager [-] Prevent pollster network.outgoing.bytes.rate from polling [<NovaLikeServer: tempest-ServerShowV257Test-server-216308627>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-ServerShowV257Test-server-216308627>]
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.152 12 INFO ceilometer.polling.manager [-] Polling pollster cpu in the context of pollsters
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.165 12 DEBUG ceilometer.compute.pollsters [-] f0ae2d2a-f52d-4b63-a236-8886d694c795/cpu volume: 11200000000 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'a7b2a734-2047-4657-9929-9cda59e0c993', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'cpu', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 11200000000, 'user_id': '49bbeba56f4d4f45ab5d7c1bec480a84', 'user_name': None, 'project_id': '97ac0fd972e743f1aa6700bb6734c60a', 'project_name': None, 'resource_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795', 'timestamp': '2025-10-02T12:29:17.152656', 'resource_metadata': {'display_name': 'tempest-ServerShowV257Test-server-216308627', 'name': 'instance-00000082', 'instance_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795', 'instance_type': 'm1.nano', 'host': 'ae061e3860926dfe3b7eeab0b01d2bb892527c2f3abf856edfcbefd4', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'cpu_number': 1}, 'message_id': '69bb9cc6-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6154.852718756, 'message_signature': '8b479a3ca2cab8820ba12e9d1ef76257b658f8ffee834576075a57f352032532'}]}, 'timestamp': '2025-10-02 12:29:17.166067', '_unique_id': '5dfaa94f89944e6d8d79656a2fba1826'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.166 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.167 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets.error in the context of pollsters
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.167 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.bytes in the context of pollsters
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.167 12 DEBUG ceilometer.compute.pollsters [-] f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.device.read.bytes volume: 25349632 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.167 12 DEBUG ceilometer.compute.pollsters [-] f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.device.read.bytes volume: 55474 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '8f5ffcb0-bcae-442f-9ac9-ec044654b6b2', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 25349632, 'user_id': '49bbeba56f4d4f45ab5d7c1bec480a84', 'user_name': None, 'project_id': '97ac0fd972e743f1aa6700bb6734c60a', 'project_name': None, 'resource_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795-vda', 'timestamp': '2025-10-02T12:29:17.167464', 'resource_metadata': {'display_name': 'tempest-ServerShowV257Test-server-216308627', 'name': 'instance-00000082', 'instance_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795', 'instance_type': 'm1.nano', 'host': 'ae061e3860926dfe3b7eeab0b01d2bb892527c2f3abf856edfcbefd4', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '69bbdeac-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6154.813955588, 'message_signature': '4493c219a7b8a7ca27671330a89d92a304f1b71e5910f932ee1f632c15d17dfb'}, {'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 55474, 'user_id': '49bbeba56f4d4f45ab5d7c1bec480a84', 'user_name': None, 'project_id': '97ac0fd972e743f1aa6700bb6734c60a', 'project_name': None, 
'resource_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795-sda', 'timestamp': '2025-10-02T12:29:17.167464', 'resource_metadata': {'display_name': 'tempest-ServerShowV257Test-server-216308627', 'name': 'instance-00000082', 'instance_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795', 'instance_type': 'm1.nano', 'host': 'ae061e3860926dfe3b7eeab0b01d2bb892527c2f3abf856edfcbefd4', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '69bbe6d6-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6154.813955588, 'message_signature': '5390fd972e296419cec29bbe896660e709ee45576a6840fdae8823ae2249952c'}]}, 'timestamp': '2025-10-02 12:29:17.167889', '_unique_id': '8113cff4346744e8a7e8a5bcda7739ae'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.168 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes.delta in the context of pollsters
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.169 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.latency in the context of pollsters
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.169 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for PerDeviceDiskLatencyPollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.169 12 ERROR ceilometer.polling.manager [-] Prevent pollster disk.device.latency from polling [<NovaLikeServer: tempest-ServerShowV257Test-server-216308627>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-ServerShowV257Test-server-216308627>]
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.169 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.latency in the context of pollsters
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.169 12 DEBUG ceilometer.compute.pollsters [-] f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.device.read.latency volume: 804633738 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.169 12 DEBUG ceilometer.compute.pollsters [-] f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.device.read.latency volume: 43207738 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '3f874895-a9ed-4f5d-a9ac-02e84f9114eb', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 804633738, 'user_id': '49bbeba56f4d4f45ab5d7c1bec480a84', 'user_name': None, 'project_id': '97ac0fd972e743f1aa6700bb6734c60a', 'project_name': None, 'resource_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795-vda', 'timestamp': '2025-10-02T12:29:17.169430', 'resource_metadata': {'display_name': 'tempest-ServerShowV257Test-server-216308627', 'name': 'instance-00000082', 'instance_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795', 'instance_type': 'm1.nano', 'host': 'ae061e3860926dfe3b7eeab0b01d2bb892527c2f3abf856edfcbefd4', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '69bc2b3c-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6154.813955588, 'message_signature': '997afa12914bf29913820084f68b48febe2ecb1c43c250530787ed497c0a7337'}, {'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 43207738, 'user_id': '49bbeba56f4d4f45ab5d7c1bec480a84', 'user_name': None, 'project_id': '97ac0fd972e743f1aa6700bb6734c60a', 'project_name': None, 
'resource_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795-sda', 'timestamp': '2025-10-02T12:29:17.169430', 'resource_metadata': {'display_name': 'tempest-ServerShowV257Test-server-216308627', 'name': 'instance-00000082', 'instance_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795', 'instance_type': 'm1.nano', 'host': 'ae061e3860926dfe3b7eeab0b01d2bb892527c2f3abf856edfcbefd4', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '69bc3334-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6154.813955588, 'message_signature': '1ad936ad968d25e49a5b72c4eb6530fbc39543f9b355824b46d0d0f051d5ac15'}]}, 'timestamp': '2025-10-02 12:29:17.169850', '_unique_id': '98b551fb91da4770a2d73dc23423e5e1'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.170 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 INFO ceilometer.polling.manager [-] Polling pollster memory.usage in the context of pollsters
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 DEBUG ceilometer.compute.pollsters [-] f0ae2d2a-f52d-4b63-a236-8886d694c795/memory.usage volume: 40.34375 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'a1740c64-a57e-45de-86a4-a1d52d3628a1', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'memory.usage', 'counter_type': 'gauge', 'counter_unit': 'MB', 'counter_volume': 40.34375, 'user_id': '49bbeba56f4d4f45ab5d7c1bec480a84', 'user_name': None, 'project_id': '97ac0fd972e743f1aa6700bb6734c60a', 'project_name': None, 'resource_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795', 'timestamp': '2025-10-02T12:29:17.171217', 'resource_metadata': {'display_name': 'tempest-ServerShowV257Test-server-216308627', 'name': 'instance-00000082', 'instance_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795', 'instance_type': 'm1.nano', 'host': 'ae061e3860926dfe3b7eeab0b01d2bb892527c2f3abf856edfcbefd4', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1}, 'message_id': '69bc7146-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6154.852718756, 'message_signature': '9c9b7f39f3beb69a11d984aac2a87f65f218fe8aba0a3b38a8b510b26fc58fec'}]}, 'timestamp': '2025-10-02 12:29:17.171437', '_unique_id': 'b596fe8bd1a749fd9be5a1d27ac61e3e'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.171 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.172 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.requests in the context of pollsters
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.172 12 DEBUG ceilometer.compute.pollsters [-] f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.device.read.requests volume: 838 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.172 12 DEBUG ceilometer.compute.pollsters [-] f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.device.read.requests volume: 20 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '7c710de4-664d-4c3f-aee9-45b793b07f97', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 838, 'user_id': '49bbeba56f4d4f45ab5d7c1bec480a84', 'user_name': None, 'project_id': '97ac0fd972e743f1aa6700bb6734c60a', 'project_name': None, 'resource_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795-vda', 'timestamp': '2025-10-02T12:29:17.172624', 'resource_metadata': {'display_name': 'tempest-ServerShowV257Test-server-216308627', 'name': 'instance-00000082', 'instance_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795', 'instance_type': 'm1.nano', 'host': 'ae061e3860926dfe3b7eeab0b01d2bb892527c2f3abf856edfcbefd4', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '69bca968-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6154.813955588, 'message_signature': '5ccb6bd6f8dc0899a8d95fb8c3282d6dbe2f0c6f024493746b26c975b3cef6a9'}, {'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 20, 'user_id': '49bbeba56f4d4f45ab5d7c1bec480a84', 'user_name': None, 'project_id': '97ac0fd972e743f1aa6700bb6734c60a', 'project_name': None, 
'resource_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795-sda', 'timestamp': '2025-10-02T12:29:17.172624', 'resource_metadata': {'display_name': 'tempest-ServerShowV257Test-server-216308627', 'name': 'instance-00000082', 'instance_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795', 'instance_type': 'm1.nano', 'host': 'ae061e3860926dfe3b7eeab0b01d2bb892527c2f3abf856edfcbefd4', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '69bcb2e6-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6154.813955588, 'message_signature': '5873fc62a29eafedd8c7f1a966aea3536193970a73edda8af273d6120c8d94da'}]}, 'timestamp': '2025-10-02 12:29:17.173124', '_unique_id': '07403cc312604cb99da7fe6620817003'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.173 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.174 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes in the context of pollsters
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.174 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.bytes in the context of pollsters
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.174 12 DEBUG ceilometer.compute.pollsters [-] f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.device.write.bytes volume: 14356480 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.174 12 DEBUG ceilometer.compute.pollsters [-] f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.device.write.bytes volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '842fd213-5676-4ecd-835b-6f316705bf0c', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 14356480, 'user_id': '49bbeba56f4d4f45ab5d7c1bec480a84', 'user_name': None, 'project_id': '97ac0fd972e743f1aa6700bb6734c60a', 'project_name': None, 'resource_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795-vda', 'timestamp': '2025-10-02T12:29:17.174437', 'resource_metadata': {'display_name': 'tempest-ServerShowV257Test-server-216308627', 'name': 'instance-00000082', 'instance_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795', 'instance_type': 'm1.nano', 'host': 'ae061e3860926dfe3b7eeab0b01d2bb892527c2f3abf856edfcbefd4', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '69bcef36-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6154.813955588, 'message_signature': '3e1873e7fe1128a62751a0def703aa4d0e33d5afc2f96345d0a1880d24e7e00b'}, {'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '49bbeba56f4d4f45ab5d7c1bec480a84', 'user_name': None, 'project_id': '97ac0fd972e743f1aa6700bb6734c60a', 'project_name': None, 
'resource_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795-sda', 'timestamp': '2025-10-02T12:29:17.174437', 'resource_metadata': {'display_name': 'tempest-ServerShowV257Test-server-216308627', 'name': 'instance-00000082', 'instance_id': 'f0ae2d2a-f52d-4b63-a236-8886d694c795', 'instance_type': 'm1.nano', 'host': 'ae061e3860926dfe3b7eeab0b01d2bb892527c2f3abf856edfcbefd4', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '69bcf814-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6154.813955588, 'message_signature': '8a28d6d4e0d5cf5f713743d6f2e82db9d1509a824fff969c3169b1bc82b9b248'}]}, 'timestamp': '2025-10-02 12:29:17.174880', '_unique_id': 'c21f1b5b36694e9b89a0929a3b597b7e'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:29:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:29:17.175 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:29:17 compute-0 nova_compute[192079]: 2025-10-02 12:29:17.210 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:29:17 compute-0 nova_compute[192079]: 2025-10-02 12:29:17.771 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:29:19 compute-0 podman[242617]: 2025-10-02 12:29:19.158836167 +0000 UTC m=+0.071440869 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, config_id=multipathd, container_name=multipathd, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:29:19 compute-0 podman[242616]: 2025-10-02 12:29:19.164177813 +0000 UTC m=+0.071639315 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, distribution-scope=public, name=ubi9-minimal, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.buildah.version=1.33.7, managed_by=edpm_ansible, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, vendor=Red Hat, Inc., io.openshift.expose-services=, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., vcs-type=git, url=https://catalog.redhat.com/en/search?searchType=containers, io.openshift.tags=minimal rhel9, release=1755695350, version=9.6, build-date=2025-08-20T13:12:41, com.redhat.component=ubi9-minimal-container, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, container_name=openstack_network_exporter, maintainer=Red Hat, Inc., summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., config_id=edpm, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., architecture=x86_64)
Oct 02 12:29:19 compute-0 nova_compute[192079]: 2025-10-02 12:29:19.797 2 DEBUG nova.virt.libvirt.driver [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Instance in state 1 after 10 seconds - resending shutdown _clean_shutdown /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4101
Oct 02 12:29:20 compute-0 nova_compute[192079]: 2025-10-02 12:29:20.256 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:29:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:29:20.256 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=33, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=32) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:29:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:29:20.258 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 5 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:29:22 compute-0 systemd[1]: machine-qemu\x2d64\x2dinstance\x2d00000082.scope: Deactivated successfully.
Oct 02 12:29:22 compute-0 systemd[1]: machine-qemu\x2d64\x2dinstance\x2d00000082.scope: Consumed 13.834s CPU time.
Oct 02 12:29:22 compute-0 systemd-machined[152150]: Machine qemu-64-instance-00000082 terminated.
Oct 02 12:29:22 compute-0 nova_compute[192079]: 2025-10-02 12:29:22.213 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:29:22 compute-0 nova_compute[192079]: 2025-10-02 12:29:22.773 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:29:22 compute-0 nova_compute[192079]: 2025-10-02 12:29:22.812 2 INFO nova.virt.libvirt.driver [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Instance shutdown successfully after 13 seconds.
Oct 02 12:29:22 compute-0 nova_compute[192079]: 2025-10-02 12:29:22.819 2 INFO nova.virt.libvirt.driver [-] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Instance destroyed successfully.
Oct 02 12:29:22 compute-0 nova_compute[192079]: 2025-10-02 12:29:22.824 2 INFO nova.virt.libvirt.driver [-] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Instance destroyed successfully.
Oct 02 12:29:22 compute-0 nova_compute[192079]: 2025-10-02 12:29:22.825 2 INFO nova.virt.libvirt.driver [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Deleting instance files /var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795_del
Oct 02 12:29:22 compute-0 nova_compute[192079]: 2025-10-02 12:29:22.826 2 INFO nova.virt.libvirt.driver [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Deletion of /var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795_del complete
Oct 02 12:29:23 compute-0 podman[242665]: 2025-10-02 12:29:23.139736218 +0000 UTC m=+0.052776051 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:29:23 compute-0 podman[242666]: 2025-10-02 12:29:23.147812809 +0000 UTC m=+0.057677045 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.license=GPLv2, config_id=iscsid, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.168 2 DEBUG nova.virt.libvirt.driver [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.169 2 INFO nova.virt.libvirt.driver [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Creating image(s)
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.170 2 DEBUG oslo_concurrency.lockutils [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Acquiring lock "/var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.170 2 DEBUG oslo_concurrency.lockutils [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lock "/var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.171 2 DEBUG oslo_concurrency.lockutils [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lock "/var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.188 2 DEBUG oslo_concurrency.processutils [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.250 2 DEBUG oslo_concurrency.processutils [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2 --force-share --output=json" returned: 0 in 0.063s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.251 2 DEBUG oslo_concurrency.lockutils [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Acquiring lock "d7f074efa852dc950deac120296f6eecf48a40d2" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.252 2 DEBUG oslo_concurrency.lockutils [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lock "d7f074efa852dc950deac120296f6eecf48a40d2" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.264 2 DEBUG oslo_concurrency.processutils [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.327 2 DEBUG oslo_concurrency.processutils [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2 --force-share --output=json" returned: 0 in 0.063s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.329 2 DEBUG oslo_concurrency.processutils [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2,backing_fmt=raw /var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.517 2 DEBUG oslo_concurrency.processutils [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2,backing_fmt=raw /var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/disk 1073741824" returned: 0 in 0.188s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.519 2 DEBUG oslo_concurrency.lockutils [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lock "d7f074efa852dc950deac120296f6eecf48a40d2" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.267s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.520 2 DEBUG oslo_concurrency.processutils [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.579 2 DEBUG oslo_concurrency.processutils [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2 --force-share --output=json" returned: 0 in 0.058s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.580 2 DEBUG nova.virt.disk.api [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Checking if we can resize image /var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.581 2 DEBUG oslo_concurrency.processutils [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.637 2 DEBUG oslo_concurrency.processutils [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/disk --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.638 2 DEBUG nova.virt.disk.api [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Cannot resize image /var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.638 2 DEBUG nova.virt.libvirt.driver [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.639 2 DEBUG nova.virt.libvirt.driver [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Ensure instance console log exists: /var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.639 2 DEBUG oslo_concurrency.lockutils [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.640 2 DEBUG oslo_concurrency.lockutils [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.640 2 DEBUG oslo_concurrency.lockutils [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.641 2 DEBUG nova.virt.libvirt.driver [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Start _get_guest_xml network_info=[] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:28Z,direct_url=<?>,disk_format='qcow2',id=062d9f80-76b6-42ce-bee7-0fb82a008353,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img_alt',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:29Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.645 2 WARNING nova.virt.libvirt.driver [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.: NotImplementedError
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.653 2 DEBUG nova.virt.libvirt.host [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.654 2 DEBUG nova.virt.libvirt.host [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.663 2 DEBUG nova.virt.libvirt.host [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.664 2 DEBUG nova.virt.libvirt.host [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.667 2 DEBUG nova.virt.libvirt.driver [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.667 2 DEBUG nova.virt.hardware [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:28Z,direct_url=<?>,disk_format='qcow2',id=062d9f80-76b6-42ce-bee7-0fb82a008353,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img_alt',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:29Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.668 2 DEBUG nova.virt.hardware [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.669 2 DEBUG nova.virt.hardware [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.669 2 DEBUG nova.virt.hardware [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.669 2 DEBUG nova.virt.hardware [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.670 2 DEBUG nova.virt.hardware [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.670 2 DEBUG nova.virt.hardware [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.671 2 DEBUG nova.virt.hardware [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.671 2 DEBUG nova.virt.hardware [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.672 2 DEBUG nova.virt.hardware [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.672 2 DEBUG nova.virt.hardware [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.673 2 DEBUG nova.objects.instance [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lazy-loading 'vcpu_model' on Instance uuid f0ae2d2a-f52d-4b63-a236-8886d694c795 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:29:23 compute-0 nova_compute[192079]: 2025-10-02 12:29:23.769 2 DEBUG nova.virt.libvirt.driver [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:29:23 compute-0 nova_compute[192079]:   <uuid>f0ae2d2a-f52d-4b63-a236-8886d694c795</uuid>
Oct 02 12:29:23 compute-0 nova_compute[192079]:   <name>instance-00000082</name>
Oct 02 12:29:23 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:29:23 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:29:23 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:29:23 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:       <nova:name>tempest-ServerShowV257Test-server-216308627</nova:name>
Oct 02 12:29:23 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:29:23</nova:creationTime>
Oct 02 12:29:23 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:29:23 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:29:23 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:29:23 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:29:23 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:29:23 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:29:23 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:29:23 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:29:23 compute-0 nova_compute[192079]:         <nova:user uuid="49bbeba56f4d4f45ab5d7c1bec480a84">tempest-ServerShowV257Test-1770639198-project-member</nova:user>
Oct 02 12:29:23 compute-0 nova_compute[192079]:         <nova:project uuid="97ac0fd972e743f1aa6700bb6734c60a">tempest-ServerShowV257Test-1770639198</nova:project>
Oct 02 12:29:23 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:29:23 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="062d9f80-76b6-42ce-bee7-0fb82a008353"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:       <nova:ports/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:29:23 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:29:23 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <system>
Oct 02 12:29:23 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:29:23 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:29:23 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:29:23 compute-0 nova_compute[192079]:       <entry name="serial">f0ae2d2a-f52d-4b63-a236-8886d694c795</entry>
Oct 02 12:29:23 compute-0 nova_compute[192079]:       <entry name="uuid">f0ae2d2a-f52d-4b63-a236-8886d694c795</entry>
Oct 02 12:29:23 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     </system>
Oct 02 12:29:23 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:29:23 compute-0 nova_compute[192079]:   <os>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:   </os>
Oct 02 12:29:23 compute-0 nova_compute[192079]:   <features>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:   </features>
Oct 02 12:29:23 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:29:23 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:29:23 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:29:23 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/disk"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:29:23 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.config"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:29:23 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/console.log" append="off"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <video>
Oct 02 12:29:23 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     </video>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:29:23 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:29:23 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:29:23 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:29:23 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:29:23 compute-0 nova_compute[192079]: </domain>
Oct 02 12:29:23 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:29:24 compute-0 nova_compute[192079]: 2025-10-02 12:29:24.251 2 DEBUG nova.virt.libvirt.driver [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:29:24 compute-0 nova_compute[192079]: 2025-10-02 12:29:24.251 2 DEBUG nova.virt.libvirt.driver [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:29:24 compute-0 nova_compute[192079]: 2025-10-02 12:29:24.252 2 INFO nova.virt.libvirt.driver [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Using config drive
Oct 02 12:29:24 compute-0 nova_compute[192079]: 2025-10-02 12:29:24.294 2 DEBUG nova.objects.instance [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lazy-loading 'ec2_ids' on Instance uuid f0ae2d2a-f52d-4b63-a236-8886d694c795 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:29:24 compute-0 nova_compute[192079]: 2025-10-02 12:29:24.358 2 DEBUG nova.objects.instance [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lazy-loading 'keypairs' on Instance uuid f0ae2d2a-f52d-4b63-a236-8886d694c795 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:29:24 compute-0 nova_compute[192079]: 2025-10-02 12:29:24.673 2 INFO nova.virt.libvirt.driver [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Creating config drive at /var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.config
Oct 02 12:29:24 compute-0 nova_compute[192079]: 2025-10-02 12:29:24.687 2 DEBUG oslo_concurrency.processutils [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpg2jq7s0x execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:29:24 compute-0 nova_compute[192079]: 2025-10-02 12:29:24.836 2 DEBUG oslo_concurrency.processutils [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpg2jq7s0x" returned: 0 in 0.149s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:29:24 compute-0 systemd-machined[152150]: New machine qemu-65-instance-00000082.
Oct 02 12:29:24 compute-0 systemd[1]: Started Virtual Machine qemu-65-instance-00000082.
Oct 02 12:29:25 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:29:25.260 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '33'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:29:25 compute-0 nova_compute[192079]: 2025-10-02 12:29:25.660 2 DEBUG nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Removed pending event for f0ae2d2a-f52d-4b63-a236-8886d694c795 due to event _event_emit_delayed /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:438
Oct 02 12:29:25 compute-0 nova_compute[192079]: 2025-10-02 12:29:25.661 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408165.6599274, f0ae2d2a-f52d-4b63-a236-8886d694c795 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:29:25 compute-0 nova_compute[192079]: 2025-10-02 12:29:25.661 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] VM Resumed (Lifecycle Event)
Oct 02 12:29:25 compute-0 nova_compute[192079]: 2025-10-02 12:29:25.663 2 DEBUG nova.compute.manager [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:29:25 compute-0 nova_compute[192079]: 2025-10-02 12:29:25.664 2 DEBUG nova.virt.libvirt.driver [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:29:25 compute-0 nova_compute[192079]: 2025-10-02 12:29:25.667 2 INFO nova.virt.libvirt.driver [-] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Instance spawned successfully.
Oct 02 12:29:25 compute-0 nova_compute[192079]: 2025-10-02 12:29:25.667 2 DEBUG nova.virt.libvirt.driver [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:29:25 compute-0 nova_compute[192079]: 2025-10-02 12:29:25.796 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:29:25 compute-0 nova_compute[192079]: 2025-10-02 12:29:25.800 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: active, current task_state: rebuild_spawning, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:29:25 compute-0 nova_compute[192079]: 2025-10-02 12:29:25.830 2 DEBUG nova.virt.libvirt.driver [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:29:25 compute-0 nova_compute[192079]: 2025-10-02 12:29:25.831 2 DEBUG nova.virt.libvirt.driver [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:29:25 compute-0 nova_compute[192079]: 2025-10-02 12:29:25.831 2 DEBUG nova.virt.libvirt.driver [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:29:25 compute-0 nova_compute[192079]: 2025-10-02 12:29:25.832 2 DEBUG nova.virt.libvirt.driver [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:29:25 compute-0 nova_compute[192079]: 2025-10-02 12:29:25.832 2 DEBUG nova.virt.libvirt.driver [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:29:25 compute-0 nova_compute[192079]: 2025-10-02 12:29:25.832 2 DEBUG nova.virt.libvirt.driver [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:29:25 compute-0 nova_compute[192079]: 2025-10-02 12:29:25.844 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] During sync_power_state the instance has a pending task (rebuild_spawning). Skip.
Oct 02 12:29:25 compute-0 nova_compute[192079]: 2025-10-02 12:29:25.845 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408165.6639647, f0ae2d2a-f52d-4b63-a236-8886d694c795 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:29:25 compute-0 nova_compute[192079]: 2025-10-02 12:29:25.845 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] VM Started (Lifecycle Event)
Oct 02 12:29:25 compute-0 nova_compute[192079]: 2025-10-02 12:29:25.922 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:29:25 compute-0 nova_compute[192079]: 2025-10-02 12:29:25.927 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Synchronizing instance power state after lifecycle event "Started"; current vm_state: active, current task_state: rebuild_spawning, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:29:25 compute-0 nova_compute[192079]: 2025-10-02 12:29:25.961 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] During sync_power_state the instance has a pending task (rebuild_spawning). Skip.
Oct 02 12:29:26 compute-0 nova_compute[192079]: 2025-10-02 12:29:26.117 2 DEBUG nova.compute.manager [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:29:26 compute-0 nova_compute[192079]: 2025-10-02 12:29:26.423 2 DEBUG oslo_concurrency.lockutils [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:29:26 compute-0 nova_compute[192079]: 2025-10-02 12:29:26.424 2 DEBUG oslo_concurrency.lockutils [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:29:26 compute-0 nova_compute[192079]: 2025-10-02 12:29:26.424 2 DEBUG nova.objects.instance [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Trying to apply a migration context that does not seem to be set for this instance apply_migration_context /usr/lib/python3.9/site-packages/nova/objects/instance.py:1032
Oct 02 12:29:26 compute-0 nova_compute[192079]: 2025-10-02 12:29:26.616 2 DEBUG oslo_concurrency.lockutils [None req-3c982ed6-8310-4df9-b772-995b1348cdb6 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 0.192s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:29:27 compute-0 nova_compute[192079]: 2025-10-02 12:29:27.215 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:29:27 compute-0 nova_compute[192079]: 2025-10-02 12:29:27.776 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:29:28 compute-0 nova_compute[192079]: 2025-10-02 12:29:28.301 2 DEBUG oslo_concurrency.lockutils [None req-a695b0b9-29a0-4b51-b861-d40f8e9c0474 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Acquiring lock "f0ae2d2a-f52d-4b63-a236-8886d694c795" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:29:28 compute-0 nova_compute[192079]: 2025-10-02 12:29:28.302 2 DEBUG oslo_concurrency.lockutils [None req-a695b0b9-29a0-4b51-b861-d40f8e9c0474 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lock "f0ae2d2a-f52d-4b63-a236-8886d694c795" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:29:28 compute-0 nova_compute[192079]: 2025-10-02 12:29:28.302 2 DEBUG oslo_concurrency.lockutils [None req-a695b0b9-29a0-4b51-b861-d40f8e9c0474 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Acquiring lock "f0ae2d2a-f52d-4b63-a236-8886d694c795-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:29:28 compute-0 nova_compute[192079]: 2025-10-02 12:29:28.302 2 DEBUG oslo_concurrency.lockutils [None req-a695b0b9-29a0-4b51-b861-d40f8e9c0474 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lock "f0ae2d2a-f52d-4b63-a236-8886d694c795-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:29:28 compute-0 nova_compute[192079]: 2025-10-02 12:29:28.303 2 DEBUG oslo_concurrency.lockutils [None req-a695b0b9-29a0-4b51-b861-d40f8e9c0474 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lock "f0ae2d2a-f52d-4b63-a236-8886d694c795-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:29:28 compute-0 nova_compute[192079]: 2025-10-02 12:29:28.391 2 INFO nova.compute.manager [None req-a695b0b9-29a0-4b51-b861-d40f8e9c0474 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Terminating instance
Oct 02 12:29:28 compute-0 nova_compute[192079]: 2025-10-02 12:29:28.432 2 DEBUG oslo_concurrency.lockutils [None req-a695b0b9-29a0-4b51-b861-d40f8e9c0474 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Acquiring lock "refresh_cache-f0ae2d2a-f52d-4b63-a236-8886d694c795" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:29:28 compute-0 nova_compute[192079]: 2025-10-02 12:29:28.432 2 DEBUG oslo_concurrency.lockutils [None req-a695b0b9-29a0-4b51-b861-d40f8e9c0474 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Acquired lock "refresh_cache-f0ae2d2a-f52d-4b63-a236-8886d694c795" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:29:28 compute-0 nova_compute[192079]: 2025-10-02 12:29:28.433 2 DEBUG nova.network.neutron [None req-a695b0b9-29a0-4b51-b861-d40f8e9c0474 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:29:28 compute-0 nova_compute[192079]: 2025-10-02 12:29:28.641 2 DEBUG nova.network.neutron [None req-a695b0b9-29a0-4b51-b861-d40f8e9c0474 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:29:28 compute-0 nova_compute[192079]: 2025-10-02 12:29:28.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:29:28 compute-0 nova_compute[192079]: 2025-10-02 12:29:28.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_incomplete_migrations run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:29:28 compute-0 nova_compute[192079]: 2025-10-02 12:29:28.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Cleaning up deleted instances with incomplete migration  _cleanup_incomplete_migrations /usr/lib/python3.9/site-packages/nova/compute/manager.py:11183
Oct 02 12:29:28 compute-0 nova_compute[192079]: 2025-10-02 12:29:28.963 2 DEBUG nova.network.neutron [None req-a695b0b9-29a0-4b51-b861-d40f8e9c0474 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:29:28 compute-0 nova_compute[192079]: 2025-10-02 12:29:28.982 2 DEBUG oslo_concurrency.lockutils [None req-a695b0b9-29a0-4b51-b861-d40f8e9c0474 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Releasing lock "refresh_cache-f0ae2d2a-f52d-4b63-a236-8886d694c795" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:29:28 compute-0 nova_compute[192079]: 2025-10-02 12:29:28.983 2 DEBUG nova.compute.manager [None req-a695b0b9-29a0-4b51-b861-d40f8e9c0474 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:29:29 compute-0 systemd[1]: machine-qemu\x2d65\x2dinstance\x2d00000082.scope: Deactivated successfully.
Oct 02 12:29:29 compute-0 systemd[1]: machine-qemu\x2d65\x2dinstance\x2d00000082.scope: Consumed 4.003s CPU time.
Oct 02 12:29:29 compute-0 systemd-machined[152150]: Machine qemu-65-instance-00000082 terminated.
Oct 02 12:29:29 compute-0 nova_compute[192079]: 2025-10-02 12:29:29.244 2 INFO nova.virt.libvirt.driver [-] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Instance destroyed successfully.
Oct 02 12:29:29 compute-0 nova_compute[192079]: 2025-10-02 12:29:29.245 2 DEBUG nova.objects.instance [None req-a695b0b9-29a0-4b51-b861-d40f8e9c0474 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lazy-loading 'resources' on Instance uuid f0ae2d2a-f52d-4b63-a236-8886d694c795 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:29:29 compute-0 nova_compute[192079]: 2025-10-02 12:29:29.266 2 INFO nova.virt.libvirt.driver [None req-a695b0b9-29a0-4b51-b861-d40f8e9c0474 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Deleting instance files /var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795_del
Oct 02 12:29:29 compute-0 nova_compute[192079]: 2025-10-02 12:29:29.267 2 INFO nova.virt.libvirt.driver [None req-a695b0b9-29a0-4b51-b861-d40f8e9c0474 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Deletion of /var/lib/nova/instances/f0ae2d2a-f52d-4b63-a236-8886d694c795_del complete
Oct 02 12:29:29 compute-0 nova_compute[192079]: 2025-10-02 12:29:29.441 2 INFO nova.compute.manager [None req-a695b0b9-29a0-4b51-b861-d40f8e9c0474 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Took 0.46 seconds to destroy the instance on the hypervisor.
Oct 02 12:29:29 compute-0 nova_compute[192079]: 2025-10-02 12:29:29.442 2 DEBUG oslo.service.loopingcall [None req-a695b0b9-29a0-4b51-b861-d40f8e9c0474 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:29:29 compute-0 nova_compute[192079]: 2025-10-02 12:29:29.442 2 DEBUG nova.compute.manager [-] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:29:29 compute-0 nova_compute[192079]: 2025-10-02 12:29:29.442 2 DEBUG nova.network.neutron [-] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:29:29 compute-0 nova_compute[192079]: 2025-10-02 12:29:29.706 2 DEBUG nova.network.neutron [-] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:29:29 compute-0 nova_compute[192079]: 2025-10-02 12:29:29.761 2 DEBUG nova.network.neutron [-] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:29:29 compute-0 nova_compute[192079]: 2025-10-02 12:29:29.879 2 INFO nova.compute.manager [-] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Took 0.44 seconds to deallocate network for instance.
Oct 02 12:29:30 compute-0 nova_compute[192079]: 2025-10-02 12:29:30.463 2 DEBUG oslo_concurrency.lockutils [None req-a695b0b9-29a0-4b51-b861-d40f8e9c0474 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:29:30 compute-0 nova_compute[192079]: 2025-10-02 12:29:30.464 2 DEBUG oslo_concurrency.lockutils [None req-a695b0b9-29a0-4b51-b861-d40f8e9c0474 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:29:30 compute-0 nova_compute[192079]: 2025-10-02 12:29:30.545 2 DEBUG nova.compute.provider_tree [None req-a695b0b9-29a0-4b51-b861-d40f8e9c0474 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:29:30 compute-0 nova_compute[192079]: 2025-10-02 12:29:30.584 2 DEBUG nova.scheduler.client.report [None req-a695b0b9-29a0-4b51-b861-d40f8e9c0474 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:29:30 compute-0 nova_compute[192079]: 2025-10-02 12:29:30.770 2 DEBUG oslo_concurrency.lockutils [None req-a695b0b9-29a0-4b51-b861-d40f8e9c0474 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.306s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:29:30 compute-0 nova_compute[192079]: 2025-10-02 12:29:30.853 2 INFO nova.scheduler.client.report [None req-a695b0b9-29a0-4b51-b861-d40f8e9c0474 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Deleted allocations for instance f0ae2d2a-f52d-4b63-a236-8886d694c795
Oct 02 12:29:31 compute-0 nova_compute[192079]: 2025-10-02 12:29:31.707 2 DEBUG oslo_concurrency.lockutils [None req-a695b0b9-29a0-4b51-b861-d40f8e9c0474 49bbeba56f4d4f45ab5d7c1bec480a84 97ac0fd972e743f1aa6700bb6734c60a - - default default] Lock "f0ae2d2a-f52d-4b63-a236-8886d694c795" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 3.405s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:29:32 compute-0 podman[242760]: 2025-10-02 12:29:32.146854994 +0000 UTC m=+0.061183831 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, container_name=ovn_metadata_agent, 
org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, tcib_managed=true, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_metadata_agent, io.buildah.version=1.41.3)
Oct 02 12:29:32 compute-0 podman[242762]: 2025-10-02 12:29:32.182557958 +0000 UTC m=+0.091483397 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 12:29:32 compute-0 podman[242761]: 2025-10-02 12:29:32.187315388 +0000 UTC m=+0.098202241 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=ovn_controller, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=ovn_controller)
Oct 02 12:29:32 compute-0 nova_compute[192079]: 2025-10-02 12:29:32.217 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:29:32 compute-0 nova_compute[192079]: 2025-10-02 12:29:32.679 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:29:32 compute-0 nova_compute[192079]: 2025-10-02 12:29:32.777 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:29:32 compute-0 nova_compute[192079]: 2025-10-02 12:29:32.966 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:29:32 compute-0 nova_compute[192079]: 2025-10-02 12:29:32.967 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:29:32 compute-0 nova_compute[192079]: 2025-10-02 12:29:32.967 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:29:32 compute-0 nova_compute[192079]: 2025-10-02 12:29:32.967 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:29:33 compute-0 nova_compute[192079]: 2025-10-02 12:29:33.109 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:29:33 compute-0 nova_compute[192079]: 2025-10-02 12:29:33.110 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5732MB free_disk=73.34813690185547GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:29:33 compute-0 nova_compute[192079]: 2025-10-02 12:29:33.110 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:29:33 compute-0 nova_compute[192079]: 2025-10-02 12:29:33.110 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:29:33 compute-0 nova_compute[192079]: 2025-10-02 12:29:33.675 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:29:33 compute-0 nova_compute[192079]: 2025-10-02 12:29:33.676 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:29:33 compute-0 nova_compute[192079]: 2025-10-02 12:29:33.705 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:29:34 compute-0 nova_compute[192079]: 2025-10-02 12:29:34.087 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:29:34 compute-0 nova_compute[192079]: 2025-10-02 12:29:34.453 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:29:34 compute-0 nova_compute[192079]: 2025-10-02 12:29:34.454 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.344s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:29:35 compute-0 nova_compute[192079]: 2025-10-02 12:29:35.440 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:29:35 compute-0 nova_compute[192079]: 2025-10-02 12:29:35.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:29:35 compute-0 nova_compute[192079]: 2025-10-02 12:29:35.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:29:37 compute-0 nova_compute[192079]: 2025-10-02 12:29:37.218 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:29:37 compute-0 nova_compute[192079]: 2025-10-02 12:29:37.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:29:37 compute-0 nova_compute[192079]: 2025-10-02 12:29:37.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:29:37 compute-0 nova_compute[192079]: 2025-10-02 12:29:37.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:29:37 compute-0 nova_compute[192079]: 2025-10-02 12:29:37.778 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:29:39 compute-0 nova_compute[192079]: 2025-10-02 12:29:39.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:29:39 compute-0 nova_compute[192079]: 2025-10-02 12:29:39.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:29:39 compute-0 nova_compute[192079]: 2025-10-02 12:29:39.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:29:39 compute-0 nova_compute[192079]: 2025-10-02 12:29:39.750 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:29:40 compute-0 nova_compute[192079]: 2025-10-02 12:29:40.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:29:42 compute-0 podman[242826]: 2025-10-02 12:29:42.156754155 +0000 UTC m=+0.071894453 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, tcib_managed=true, managed_by=edpm_ansible, config_id=edpm, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base 
Image, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:29:42 compute-0 nova_compute[192079]: 2025-10-02 12:29:42.220 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:29:42 compute-0 nova_compute[192079]: 2025-10-02 12:29:42.839 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:29:44 compute-0 nova_compute[192079]: 2025-10-02 12:29:44.242 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759408169.240391, f0ae2d2a-f52d-4b63-a236-8886d694c795 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:29:44 compute-0 nova_compute[192079]: 2025-10-02 12:29:44.243 2 INFO nova.compute.manager [-] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] VM Stopped (Lifecycle Event)
Oct 02 12:29:44 compute-0 nova_compute[192079]: 2025-10-02 12:29:44.362 2 DEBUG nova.compute.manager [None req-c6c137f8-5f28-4310-a8ff-f6f20203e509 - - - - - -] [instance: f0ae2d2a-f52d-4b63-a236-8886d694c795] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:29:47 compute-0 nova_compute[192079]: 2025-10-02 12:29:47.222 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:29:47 compute-0 nova_compute[192079]: 2025-10-02 12:29:47.841 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:29:50 compute-0 podman[242847]: 2025-10-02 12:29:50.170401589 +0000 UTC m=+0.076419356 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, config_id=multipathd, container_name=multipathd, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']})
Oct 02 12:29:50 compute-0 podman[242846]: 2025-10-02 12:29:50.179493738 +0000 UTC m=+0.079999355 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, com.redhat.component=ubi9-minimal-container, container_name=openstack_network_exporter, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, version=9.6, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., io.openshift.tags=minimal rhel9, build-date=2025-08-20T13:12:41, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, managed_by=edpm_ansible, release=1755695350, vcs-type=git, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, maintainer=Red Hat, Inc., architecture=x86_64, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.openshift.expose-services=, name=ubi9-minimal, url=https://catalog.redhat.com/en/search?searchType=containers, distribution-scope=public, config_id=edpm, io.buildah.version=1.33.7, vendor=Red Hat, Inc.)
Oct 02 12:29:52 compute-0 nova_compute[192079]: 2025-10-02 12:29:52.223 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:29:52 compute-0 nova_compute[192079]: 2025-10-02 12:29:52.876 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:29:54 compute-0 podman[242889]: 2025-10-02 12:29:54.179372266 +0000 UTC m=+0.081585337 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:29:54 compute-0 podman[242890]: 2025-10-02 12:29:54.181652119 +0000 UTC m=+0.081847175 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, org.label-schema.build-date=20251001, config_id=iscsid, container_name=iscsid, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible)
Oct 02 12:29:57 compute-0 nova_compute[192079]: 2025-10-02 12:29:57.225 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:29:57 compute-0 nova_compute[192079]: 2025-10-02 12:29:57.878 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:29:59 compute-0 nova_compute[192079]: 2025-10-02 12:29:59.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:30:02 compute-0 nova_compute[192079]: 2025-10-02 12:30:02.227 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:02.230 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:30:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:02.230 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:30:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:02.230 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:30:02 compute-0 nova_compute[192079]: 2025-10-02 12:30:02.879 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:03 compute-0 podman[242933]: 2025-10-02 12:30:03.167639047 +0000 UTC m=+0.076571390 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, maintainer=OpenStack Kubernetes 
Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, tcib_managed=true, config_id=ovn_metadata_agent, container_name=ovn_metadata_agent, org.label-schema.license=GPLv2)
Oct 02 12:30:03 compute-0 podman[242935]: 2025-10-02 12:30:03.181154486 +0000 UTC m=+0.084783685 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:30:03 compute-0 podman[242934]: 2025-10-02 12:30:03.210770704 +0000 UTC m=+0.113532339 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_controller, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, config_id=ovn_controller, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.license=GPLv2, tcib_managed=true)
Oct 02 12:30:07 compute-0 nova_compute[192079]: 2025-10-02 12:30:07.229 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:07 compute-0 nova_compute[192079]: 2025-10-02 12:30:07.881 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:10 compute-0 nova_compute[192079]: 2025-10-02 12:30:10.688 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._run_pending_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:30:10 compute-0 nova_compute[192079]: 2025-10-02 12:30:10.689 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Cleaning up deleted instances _run_pending_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:11145
Oct 02 12:30:10 compute-0 nova_compute[192079]: 2025-10-02 12:30:10.707 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] There are 0 instances to clean _run_pending_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:11154
Oct 02 12:30:12 compute-0 nova_compute[192079]: 2025-10-02 12:30:12.231 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:12 compute-0 nova_compute[192079]: 2025-10-02 12:30:12.890 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:13 compute-0 podman[242998]: 2025-10-02 12:30:13.146595432 +0000 UTC m=+0.062013993 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.license=GPLv2, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', 
'/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:30:17 compute-0 nova_compute[192079]: 2025-10-02 12:30:17.233 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:17 compute-0 nova_compute[192079]: 2025-10-02 12:30:17.893 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:20.396 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=34, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=33) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:30:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:20.396 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 5 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:30:20 compute-0 nova_compute[192079]: 2025-10-02 12:30:20.442 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:20 compute-0 podman[243018]: 2025-10-02 12:30:20.571283205 +0000 UTC m=+0.097946814 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, name=ubi9-minimal, distribution-scope=public, com.redhat.component=ubi9-minimal-container, io.openshift.tags=minimal rhel9, vendor=Red Hat, Inc., version=9.6, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, architecture=x86_64, managed_by=edpm_ansible, release=1755695350, config_id=edpm, io.openshift.expose-services=, io.buildah.version=1.33.7, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, maintainer=Red Hat, Inc., summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vcs-type=git, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. 
This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., container_name=openstack_network_exporter, build-date=2025-08-20T13:12:41, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, url=https://catalog.redhat.com/en/search?searchType=containers)
Oct 02 12:30:20 compute-0 podman[243019]: 2025-10-02 12:30:20.588277579 +0000 UTC m=+0.114188207 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, tcib_managed=true, container_name=multipathd, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:30:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:20.979 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:2e:31:73 2001:db8:0:1:f816:3eff:fe2e:3173 2001:db8::f816:3eff:fe2e:3173'], port_security=[], type=localport, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': ''}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '2001:db8:0:1:f816:3eff:fe2e:3173/64 2001:db8::f816:3eff:fe2e:3173/64', 'neutron:device_id': 'ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9', 'neutron:device_owner': 'network:distributed', 'neutron:mtu': '', 'neutron:network_name': 'neutron-e2520108-9d67-4d82-a7a0-ba429a88c3c9', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'neutron:revision_number': '3', 'neutron:security_group_ids': '', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=876a7f58-2645-4e1a-8a60-dbbe16fdfb2e, chassis=[], tunnel_key=1, gateway_chassis=[], requested_chassis=[], logical_port=3eb0ed9e-d99b-4ee6-af64-ada9c8369b17) old=Port_Binding(mac=['fa:16:3e:2e:31:73 2001:db8::f816:3eff:fe2e:3173'], external_ids={'neutron:cidrs': '2001:db8::f816:3eff:fe2e:3173/64', 'neutron:device_id': 'ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9', 'neutron:device_owner': 'network:distributed', 'neutron:mtu': '', 'neutron:network_name': 'neutron-e2520108-9d67-4d82-a7a0-ba429a88c3c9', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'neutron:revision_number': '2', 'neutron:security_group_ids': '', 'neutron:subnet_pool_addr_scope4': '', 
'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:30:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:20.981 103294 INFO neutron.agent.ovn.metadata.agent [-] Metadata Port 3eb0ed9e-d99b-4ee6-af64-ada9c8369b17 in datapath e2520108-9d67-4d82-a7a0-ba429a88c3c9 updated
Oct 02 12:30:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:20.984 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network e2520108-9d67-4d82-a7a0-ba429a88c3c9, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:30:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:20.986 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3392cb48-5c71-41d0-8b23-bcc100271620]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:22 compute-0 nova_compute[192079]: 2025-10-02 12:30:22.236 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:22 compute-0 nova_compute[192079]: 2025-10-02 12:30:22.895 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:24 compute-0 ovn_controller[94336]: 2025-10-02T12:30:24Z|00493|memory_trim|INFO|Detected inactivity (last active 30002 ms ago): trimming memory
Oct 02 12:30:25 compute-0 podman[243056]: 2025-10-02 12:30:25.131132504 +0000 UTC m=+0.047350593 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>)
Oct 02 12:30:25 compute-0 podman[243057]: 2025-10-02 12:30:25.143689897 +0000 UTC m=+0.055003932 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, config_id=iscsid, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid)
Oct 02 12:30:25 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:25.398 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '34'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:30:26 compute-0 nova_compute[192079]: 2025-10-02 12:30:26.495 2 DEBUG oslo_concurrency.lockutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:30:26 compute-0 nova_compute[192079]: 2025-10-02 12:30:26.495 2 DEBUG oslo_concurrency.lockutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:30:26 compute-0 nova_compute[192079]: 2025-10-02 12:30:26.521 2 DEBUG nova.compute.manager [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:30:26 compute-0 nova_compute[192079]: 2025-10-02 12:30:26.617 2 DEBUG oslo_concurrency.lockutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:30:26 compute-0 nova_compute[192079]: 2025-10-02 12:30:26.618 2 DEBUG oslo_concurrency.lockutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:30:26 compute-0 nova_compute[192079]: 2025-10-02 12:30:26.625 2 DEBUG nova.virt.hardware [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:30:26 compute-0 nova_compute[192079]: 2025-10-02 12:30:26.625 2 INFO nova.compute.claims [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:30:26 compute-0 nova_compute[192079]: 2025-10-02 12:30:26.753 2 DEBUG nova.compute.provider_tree [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:30:26 compute-0 nova_compute[192079]: 2025-10-02 12:30:26.766 2 DEBUG nova.scheduler.client.report [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:30:26 compute-0 nova_compute[192079]: 2025-10-02 12:30:26.784 2 DEBUG oslo_concurrency.lockutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.166s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:30:26 compute-0 nova_compute[192079]: 2025-10-02 12:30:26.785 2 DEBUG nova.compute.manager [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.041 2 DEBUG nova.compute.manager [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.042 2 DEBUG nova.network.neutron [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.073 2 INFO nova.virt.libvirt.driver [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.100 2 DEBUG nova.compute.manager [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.238 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.247 2 DEBUG nova.compute.manager [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.248 2 DEBUG nova.virt.libvirt.driver [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.248 2 INFO nova.virt.libvirt.driver [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Creating image(s)
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.248 2 DEBUG oslo_concurrency.lockutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "/var/lib/nova/instances/d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.249 2 DEBUG oslo_concurrency.lockutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "/var/lib/nova/instances/d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.249 2 DEBUG oslo_concurrency.lockutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "/var/lib/nova/instances/d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.260 2 DEBUG oslo_concurrency.processutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.346 2 DEBUG nova.policy [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.353 2 DEBUG oslo_concurrency.processutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.092s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.354 2 DEBUG oslo_concurrency.lockutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.355 2 DEBUG oslo_concurrency.lockutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.379 2 DEBUG oslo_concurrency.processutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.447 2 DEBUG oslo_concurrency.processutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.068s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.449 2 DEBUG oslo_concurrency.processutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.487 2 DEBUG oslo_concurrency.processutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk 1073741824" returned: 0 in 0.038s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.488 2 DEBUG oslo_concurrency.lockutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.133s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.489 2 DEBUG oslo_concurrency.processutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.542 2 DEBUG oslo_concurrency.processutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.053s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.544 2 DEBUG nova.virt.disk.api [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Checking if we can resize image /var/lib/nova/instances/d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.544 2 DEBUG oslo_concurrency.processutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.600 2 DEBUG oslo_concurrency.processutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.602 2 DEBUG nova.virt.disk.api [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Cannot resize image /var/lib/nova/instances/d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.603 2 DEBUG nova.objects.instance [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lazy-loading 'migration_context' on Instance uuid d59f518a-8b98-4c8c-b8f7-19f6b6809c6d obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.622 2 DEBUG nova.virt.libvirt.driver [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.623 2 DEBUG nova.virt.libvirt.driver [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Ensure instance console log exists: /var/lib/nova/instances/d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.624 2 DEBUG oslo_concurrency.lockutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.624 2 DEBUG oslo_concurrency.lockutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.625 2 DEBUG oslo_concurrency.lockutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:30:27 compute-0 nova_compute[192079]: 2025-10-02 12:30:27.897 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:29 compute-0 nova_compute[192079]: 2025-10-02 12:30:29.321 2 DEBUG nova.network.neutron [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Successfully created port: 3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:30:29 compute-0 nova_compute[192079]: 2025-10-02 12:30:29.680 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:30:30 compute-0 nova_compute[192079]: 2025-10-02 12:30:30.978 2 DEBUG nova.network.neutron [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Successfully created port: 2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:30:32 compute-0 nova_compute[192079]: 2025-10-02 12:30:32.241 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:32 compute-0 nova_compute[192079]: 2025-10-02 12:30:32.899 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:33 compute-0 nova_compute[192079]: 2025-10-02 12:30:33.611 2 DEBUG nova.network.neutron [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Successfully updated port: 3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:30:33 compute-0 nova_compute[192079]: 2025-10-02 12:30:33.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:30:33 compute-0 nova_compute[192079]: 2025-10-02 12:30:33.966 2 DEBUG nova.compute.manager [req-51cac780-1076-453d-8372-48e740eba0e0 req-30b6d95c-d386-4a8e-a698-1ee4e07341a4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Received event network-changed-3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:30:33 compute-0 nova_compute[192079]: 2025-10-02 12:30:33.967 2 DEBUG nova.compute.manager [req-51cac780-1076-453d-8372-48e740eba0e0 req-30b6d95c-d386-4a8e-a698-1ee4e07341a4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Refreshing instance network info cache due to event network-changed-3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:30:33 compute-0 nova_compute[192079]: 2025-10-02 12:30:33.968 2 DEBUG oslo_concurrency.lockutils [req-51cac780-1076-453d-8372-48e740eba0e0 req-30b6d95c-d386-4a8e-a698-1ee4e07341a4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:30:33 compute-0 nova_compute[192079]: 2025-10-02 12:30:33.968 2 DEBUG oslo_concurrency.lockutils [req-51cac780-1076-453d-8372-48e740eba0e0 req-30b6d95c-d386-4a8e-a698-1ee4e07341a4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:30:33 compute-0 nova_compute[192079]: 2025-10-02 12:30:33.969 2 DEBUG nova.network.neutron [req-51cac780-1076-453d-8372-48e740eba0e0 req-30b6d95c-d386-4a8e-a698-1ee4e07341a4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Refreshing network info cache for port 3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:30:34 compute-0 podman[243113]: 2025-10-02 12:30:34.168882356 +0000 UTC m=+0.070752131 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, 
container_name=ovn_metadata_agent, managed_by=edpm_ansible, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_id=ovn_metadata_agent)
Oct 02 12:30:34 compute-0 podman[243115]: 2025-10-02 12:30:34.171567009 +0000 UTC m=+0.070748061 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:30:34 compute-0 podman[243114]: 2025-10-02 12:30:34.202656397 +0000 UTC m=+0.099854895 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, container_name=ovn_controller)
Oct 02 12:30:34 compute-0 nova_compute[192079]: 2025-10-02 12:30:34.583 2 DEBUG nova.network.neutron [req-51cac780-1076-453d-8372-48e740eba0e0 req-30b6d95c-d386-4a8e-a698-1ee4e07341a4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:30:34 compute-0 nova_compute[192079]: 2025-10-02 12:30:34.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:30:34 compute-0 nova_compute[192079]: 2025-10-02 12:30:34.721 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:30:34 compute-0 nova_compute[192079]: 2025-10-02 12:30:34.722 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:30:34 compute-0 nova_compute[192079]: 2025-10-02 12:30:34.722 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:30:34 compute-0 nova_compute[192079]: 2025-10-02 12:30:34.722 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:30:34 compute-0 nova_compute[192079]: 2025-10-02 12:30:34.870 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:30:34 compute-0 nova_compute[192079]: 2025-10-02 12:30:34.871 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5729MB free_disk=73.34806823730469GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:30:34 compute-0 nova_compute[192079]: 2025-10-02 12:30:34.871 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:30:34 compute-0 nova_compute[192079]: 2025-10-02 12:30:34.871 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:30:35 compute-0 nova_compute[192079]: 2025-10-02 12:30:35.063 2 DEBUG nova.network.neutron [req-51cac780-1076-453d-8372-48e740eba0e0 req-30b6d95c-d386-4a8e-a698-1ee4e07341a4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:30:35 compute-0 nova_compute[192079]: 2025-10-02 12:30:35.096 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance d59f518a-8b98-4c8c-b8f7-19f6b6809c6d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:30:35 compute-0 nova_compute[192079]: 2025-10-02 12:30:35.097 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 1 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:30:35 compute-0 nova_compute[192079]: 2025-10-02 12:30:35.097 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=640MB phys_disk=79GB used_disk=1GB total_vcpus=8 used_vcpus=1 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:30:35 compute-0 nova_compute[192079]: 2025-10-02 12:30:35.125 2 DEBUG oslo_concurrency.lockutils [req-51cac780-1076-453d-8372-48e740eba0e0 req-30b6d95c-d386-4a8e-a698-1ee4e07341a4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:30:35 compute-0 nova_compute[192079]: 2025-10-02 12:30:35.205 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:30:35 compute-0 nova_compute[192079]: 2025-10-02 12:30:35.386 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:30:35 compute-0 nova_compute[192079]: 2025-10-02 12:30:35.691 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:30:35 compute-0 nova_compute[192079]: 2025-10-02 12:30:35.691 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.820s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:30:35 compute-0 nova_compute[192079]: 2025-10-02 12:30:35.720 2 DEBUG nova.network.neutron [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Successfully updated port: 2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:30:35 compute-0 nova_compute[192079]: 2025-10-02 12:30:35.761 2 DEBUG oslo_concurrency.lockutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "refresh_cache-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:30:35 compute-0 nova_compute[192079]: 2025-10-02 12:30:35.762 2 DEBUG oslo_concurrency.lockutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquired lock "refresh_cache-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:30:35 compute-0 nova_compute[192079]: 2025-10-02 12:30:35.762 2 DEBUG nova.network.neutron [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:30:36 compute-0 nova_compute[192079]: 2025-10-02 12:30:36.077 2 DEBUG nova.compute.manager [req-2c5ede90-e788-48ba-868c-1a383617ffdf req-751543e5-2300-404e-84b6-511f3ae73dff 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Received event network-changed-2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:30:36 compute-0 nova_compute[192079]: 2025-10-02 12:30:36.078 2 DEBUG nova.compute.manager [req-2c5ede90-e788-48ba-868c-1a383617ffdf req-751543e5-2300-404e-84b6-511f3ae73dff 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Refreshing instance network info cache due to event network-changed-2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:30:36 compute-0 nova_compute[192079]: 2025-10-02 12:30:36.078 2 DEBUG oslo_concurrency.lockutils [req-2c5ede90-e788-48ba-868c-1a383617ffdf req-751543e5-2300-404e-84b6-511f3ae73dff 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:30:36 compute-0 nova_compute[192079]: 2025-10-02 12:30:36.691 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:30:36 compute-0 nova_compute[192079]: 2025-10-02 12:30:36.692 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:30:36 compute-0 nova_compute[192079]: 2025-10-02 12:30:36.779 2 DEBUG nova.network.neutron [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:30:37 compute-0 nova_compute[192079]: 2025-10-02 12:30:37.243 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:37 compute-0 nova_compute[192079]: 2025-10-02 12:30:37.902 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.759 2 DEBUG nova.network.neutron [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Updating instance_info_cache with network_info: [{"id": "3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa", "address": "fa:16:3e:88:b5:22", "network": {"id": "299c5e6b-f8b7-4cca-810b-a9b2539f4246", "bridge": "br-int", "label": "tempest-network-smoke--136367555", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap3aa2fa6d-ae", "ovs_interfaceid": "3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2", "address": "fa:16:3e:7a:3b:f1", "network": {"id": "e2520108-9d67-4d82-a7a0-ba429a88c3c9", "bridge": "br-int", "label": "tempest-network-smoke--1271498361", "subnets": [{"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe7a:3bf1", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}, {"cidr": "2001:db8:0:1::/64", "dns": [], "gateway": {"address": "2001:db8:0:1::", "type": 
"gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8:0:1:f816:3eff:fe7a:3bf1", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap2cddfcab-eb", "ovs_interfaceid": "2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.938 2 DEBUG oslo_concurrency.lockutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Releasing lock "refresh_cache-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.939 2 DEBUG nova.compute.manager [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Instance network_info: |[{"id": "3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa", "address": "fa:16:3e:88:b5:22", "network": {"id": "299c5e6b-f8b7-4cca-810b-a9b2539f4246", "bridge": "br-int", "label": "tempest-network-smoke--136367555", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap3aa2fa6d-ae", "ovs_interfaceid": "3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2", "address": "fa:16:3e:7a:3b:f1", "network": {"id": "e2520108-9d67-4d82-a7a0-ba429a88c3c9", "bridge": "br-int", "label": "tempest-network-smoke--1271498361", "subnets": [{"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe7a:3bf1", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}, {"cidr": "2001:db8:0:1::/64", "dns": [], "gateway": {"address": "2001:db8:0:1::", "type": "gateway", "version": 
6, "meta": {}}, "ips": [{"address": "2001:db8:0:1:f816:3eff:fe7a:3bf1", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap2cddfcab-eb", "ovs_interfaceid": "2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.939 2 DEBUG oslo_concurrency.lockutils [req-2c5ede90-e788-48ba-868c-1a383617ffdf req-751543e5-2300-404e-84b6-511f3ae73dff 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.940 2 DEBUG nova.network.neutron [req-2c5ede90-e788-48ba-868c-1a383617ffdf req-751543e5-2300-404e-84b6-511f3ae73dff 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Refreshing network info cache for port 2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.943 2 DEBUG nova.virt.libvirt.driver [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Start _get_guest_xml network_info=[{"id": "3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa", "address": "fa:16:3e:88:b5:22", "network": {"id": "299c5e6b-f8b7-4cca-810b-a9b2539f4246", "bridge": "br-int", "label": "tempest-network-smoke--136367555", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap3aa2fa6d-ae", "ovs_interfaceid": "3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2", "address": "fa:16:3e:7a:3b:f1", "network": {"id": "e2520108-9d67-4d82-a7a0-ba429a88c3c9", "bridge": "br-int", "label": "tempest-network-smoke--1271498361", "subnets": [{"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe7a:3bf1", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}, {"cidr": "2001:db8:0:1::/64", "dns": [], "gateway": {"address": "2001:db8:0:1::", "type": 
"gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8:0:1:f816:3eff:fe7a:3bf1", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap2cddfcab-eb", "ovs_interfaceid": "2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml 
/usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.947 2 WARNING nova.virt.libvirt.driver [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.950 2 DEBUG nova.virt.libvirt.host [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.950 2 DEBUG nova.virt.libvirt.host [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.954 2 DEBUG nova.virt.libvirt.host [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.954 2 DEBUG nova.virt.libvirt.host [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.955 2 DEBUG nova.virt.libvirt.driver [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.956 2 DEBUG nova.virt.hardware [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.956 2 DEBUG nova.virt.hardware [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.956 2 DEBUG nova.virt.hardware [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.957 2 DEBUG nova.virt.hardware [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.957 2 DEBUG nova.virt.hardware [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.957 2 DEBUG nova.virt.hardware [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.957 2 DEBUG nova.virt.hardware [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.958 2 DEBUG nova.virt.hardware [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.958 2 DEBUG nova.virt.hardware [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.958 2 DEBUG nova.virt.hardware [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.958 2 DEBUG nova.virt.hardware [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.961 2 DEBUG nova.virt.libvirt.vif [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:30:25Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestGettingAddress-server-597864105',display_name='tempest-TestGettingAddress-server-597864105',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testgettingaddress-server-597864105',id=134,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBB1eGJz2x2NclizHY0y1KagfJt0/XSi4q477vmnTxhDjfgu4TS7ARmj4iaatPUQRUeuKdnCSa7aN8Y00iK3sldRns4TIy1xYmuZAKRi07Qnv9+MtEFMWHsOHXiIH+9Mk5Q==',key_name='tempest-TestGettingAddress-63209992',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='fd801958556f4c8aab047ecdef6b5ee8',ramdisk_id='',reservation_id='r-b82006hp',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestGettingAddress-1355720650',owner_user_name='tempest-TestGettingAddress-1355720650-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:30:27Z,user_data=None,user_id='97ce9f1898484e0e9a1f7c84a9f0dfe3',uuid=d59f518a-8b98-4c8c-b8f7-19f6b6809c6d,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa", "address": "fa:16:3e:88:b5:22", "network": {"id": "299c5e6b-f8b7-4cca-810b-a9b2539f4246", "bridge": "br-int", "label": "tempest-network-smoke--136367555", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": 
false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap3aa2fa6d-ae", "ovs_interfaceid": "3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.962 2 DEBUG nova.network.os_vif_util [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converting VIF {"id": "3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa", "address": "fa:16:3e:88:b5:22", "network": {"id": "299c5e6b-f8b7-4cca-810b-a9b2539f4246", "bridge": "br-int", "label": "tempest-network-smoke--136367555", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap3aa2fa6d-ae", "ovs_interfaceid": "3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.963 2 DEBUG nova.network.os_vif_util [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:88:b5:22,bridge_name='br-int',has_traffic_filtering=True,id=3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa,network=Network(299c5e6b-f8b7-4cca-810b-a9b2539f4246),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap3aa2fa6d-ae') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.963 2 DEBUG nova.virt.libvirt.vif [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:30:25Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestGettingAddress-server-597864105',display_name='tempest-TestGettingAddress-server-597864105',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testgettingaddress-server-597864105',id=134,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBB1eGJz2x2NclizHY0y1KagfJt0/XSi4q477vmnTxhDjfgu4TS7ARmj4iaatPUQRUeuKdnCSa7aN8Y00iK3sldRns4TIy1xYmuZAKRi07Qnv9+MtEFMWHsOHXiIH+9Mk5Q==',key_name='tempest-TestGettingAddress-63209992',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='fd801958556f4c8aab047ecdef6b5ee8',ramdisk_id='',reservation_id='r-b82006hp',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestGettingAddress-1355720650',owner_user_name='tempest-TestGettingAddress-1355720650-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:30:27Z,user_data=None,user_id='97ce9f1898484e0e9a1f7c84a9f0dfe3',uuid=d59f518a-8b98-4c8c-b8f7-19f6b6809c6d,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2", "address": "fa:16:3e:7a:3b:f1", "network": {"id": "e2520108-9d67-4d82-a7a0-ba429a88c3c9", "bridge": "br-int", "label": "tempest-network-smoke--1271498361", "subnets": [{"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe7a:3bf1", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, 
"ipv6_address_mode": "slaac"}}, {"cidr": "2001:db8:0:1::/64", "dns": [], "gateway": {"address": "2001:db8:0:1::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8:0:1:f816:3eff:fe7a:3bf1", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap2cddfcab-eb", "ovs_interfaceid": "2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.964 2 DEBUG nova.network.os_vif_util [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converting VIF {"id": "2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2", "address": "fa:16:3e:7a:3b:f1", "network": {"id": "e2520108-9d67-4d82-a7a0-ba429a88c3c9", "bridge": "br-int", "label": "tempest-network-smoke--1271498361", "subnets": [{"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe7a:3bf1", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}, {"cidr": "2001:db8:0:1::/64", "dns": [], "gateway": {"address": "2001:db8:0:1::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8:0:1:f816:3eff:fe7a:3bf1", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap2cddfcab-eb", "ovs_interfaceid": "2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.964 2 DEBUG nova.network.os_vif_util [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:7a:3b:f1,bridge_name='br-int',has_traffic_filtering=True,id=2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2,network=Network(e2520108-9d67-4d82-a7a0-ba429a88c3c9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap2cddfcab-eb') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:30:38 compute-0 nova_compute[192079]: 2025-10-02 12:30:38.965 2 DEBUG nova.objects.instance [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lazy-loading 'pci_devices' on Instance uuid d59f518a-8b98-4c8c-b8f7-19f6b6809c6d obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.001 2 DEBUG nova.virt.libvirt.driver [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:30:39 compute-0 nova_compute[192079]:   <uuid>d59f518a-8b98-4c8c-b8f7-19f6b6809c6d</uuid>
Oct 02 12:30:39 compute-0 nova_compute[192079]:   <name>instance-00000086</name>
Oct 02 12:30:39 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:30:39 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:30:39 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <nova:name>tempest-TestGettingAddress-server-597864105</nova:name>
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:30:38</nova:creationTime>
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:30:39 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:30:39 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:30:39 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:30:39 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:30:39 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:30:39 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:30:39 compute-0 nova_compute[192079]:         <nova:user uuid="97ce9f1898484e0e9a1f7c84a9f0dfe3">tempest-TestGettingAddress-1355720650-project-member</nova:user>
Oct 02 12:30:39 compute-0 nova_compute[192079]:         <nova:project uuid="fd801958556f4c8aab047ecdef6b5ee8">tempest-TestGettingAddress-1355720650</nova:project>
Oct 02 12:30:39 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:30:39 compute-0 nova_compute[192079]:         <nova:port uuid="3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa">
Oct 02 12:30:39 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.8" ipVersion="4"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:30:39 compute-0 nova_compute[192079]:         <nova:port uuid="2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2">
Oct 02 12:30:39 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="2001:db8::f816:3eff:fe7a:3bf1" ipVersion="6"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="2001:db8:0:1:f816:3eff:fe7a:3bf1" ipVersion="6"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:30:39 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:30:39 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:30:39 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <system>
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <entry name="serial">d59f518a-8b98-4c8c-b8f7-19f6b6809c6d</entry>
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <entry name="uuid">d59f518a-8b98-4c8c-b8f7-19f6b6809c6d</entry>
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     </system>
Oct 02 12:30:39 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:30:39 compute-0 nova_compute[192079]:   <os>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:   </os>
Oct 02 12:30:39 compute-0 nova_compute[192079]:   <features>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:   </features>
Oct 02 12:30:39 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:30:39 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:30:39 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk.config"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:88:b5:22"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <target dev="tap3aa2fa6d-ae"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:7a:3b:f1"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <target dev="tap2cddfcab-eb"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/console.log" append="off"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <video>
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     </video>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:30:39 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:30:39 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:30:39 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:30:39 compute-0 nova_compute[192079]: </domain>
Oct 02 12:30:39 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.002 2 DEBUG nova.compute.manager [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Preparing to wait for external event network-vif-plugged-3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.002 2 DEBUG oslo_concurrency.lockutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.003 2 DEBUG oslo_concurrency.lockutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.003 2 DEBUG oslo_concurrency.lockutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.003 2 DEBUG nova.compute.manager [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Preparing to wait for external event network-vif-plugged-2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.003 2 DEBUG oslo_concurrency.lockutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.004 2 DEBUG oslo_concurrency.lockutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.004 2 DEBUG oslo_concurrency.lockutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.004 2 DEBUG nova.virt.libvirt.vif [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:30:25Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestGettingAddress-server-597864105',display_name='tempest-TestGettingAddress-server-597864105',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testgettingaddress-server-597864105',id=134,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBB1eGJz2x2NclizHY0y1KagfJt0/XSi4q477vmnTxhDjfgu4TS7ARmj4iaatPUQRUeuKdnCSa7aN8Y00iK3sldRns4TIy1xYmuZAKRi07Qnv9+MtEFMWHsOHXiIH+9Mk5Q==',key_name='tempest-TestGettingAddress-63209992',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='fd801958556f4c8aab047ecdef6b5ee8',ramdisk_id='',reservation_id='r-b82006hp',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestGettingAddress-1355720650',owner_user_name='tempest-TestGettingAddress-1355720650-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:30:27Z,user_data=None,user_id='97ce9f1898484e0e9a1f7c84a9f0dfe3',uuid=d59f518a-8b98-4c8c-b8f7-19f6b6809c6d,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa", "address": "fa:16:3e:88:b5:22", "network": {"id": "299c5e6b-f8b7-4cca-810b-a9b2539f4246", "bridge": "br-int", "label": "tempest-network-smoke--136367555", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": 
{"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap3aa2fa6d-ae", "ovs_interfaceid": "3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.005 2 DEBUG nova.network.os_vif_util [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converting VIF {"id": "3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa", "address": "fa:16:3e:88:b5:22", "network": {"id": "299c5e6b-f8b7-4cca-810b-a9b2539f4246", "bridge": "br-int", "label": "tempest-network-smoke--136367555", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap3aa2fa6d-ae", "ovs_interfaceid": "3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.005 2 DEBUG nova.network.os_vif_util [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:88:b5:22,bridge_name='br-int',has_traffic_filtering=True,id=3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa,network=Network(299c5e6b-f8b7-4cca-810b-a9b2539f4246),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap3aa2fa6d-ae') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.006 2 DEBUG os_vif [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:88:b5:22,bridge_name='br-int',has_traffic_filtering=True,id=3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa,network=Network(299c5e6b-f8b7-4cca-810b-a9b2539f4246),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap3aa2fa6d-ae') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.006 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.006 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.007 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.009 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.009 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap3aa2fa6d-ae, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.010 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap3aa2fa6d-ae, col_values=(('external_ids', {'iface-id': '3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:88:b5:22', 'vm-uuid': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:30:39 compute-0 NetworkManager[51160]: <info>  [1759408239.0118] manager: (tap3aa2fa6d-ae): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/247)
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.012 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.019 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.019 2 INFO os_vif [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:88:b5:22,bridge_name='br-int',has_traffic_filtering=True,id=3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa,network=Network(299c5e6b-f8b7-4cca-810b-a9b2539f4246),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap3aa2fa6d-ae')
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.020 2 DEBUG nova.virt.libvirt.vif [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:30:25Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestGettingAddress-server-597864105',display_name='tempest-TestGettingAddress-server-597864105',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testgettingaddress-server-597864105',id=134,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBB1eGJz2x2NclizHY0y1KagfJt0/XSi4q477vmnTxhDjfgu4TS7ARmj4iaatPUQRUeuKdnCSa7aN8Y00iK3sldRns4TIy1xYmuZAKRi07Qnv9+MtEFMWHsOHXiIH+9Mk5Q==',key_name='tempest-TestGettingAddress-63209992',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='fd801958556f4c8aab047ecdef6b5ee8',ramdisk_id='',reservation_id='r-b82006hp',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestGettingAddress-1355720650',owner_user_name='tempest-TestGettingAddress-1355720650-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:30:27Z,user_data=None,user_id='97ce9f1898484e0e9a1f7c84a9f0dfe3',uuid=d59f518a-8b98-4c8c-b8f7-19f6b6809c6d,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2", "address": "fa:16:3e:7a:3b:f1", "network": {"id": "e2520108-9d67-4d82-a7a0-ba429a88c3c9", "bridge": "br-int", "label": "tempest-network-smoke--1271498361", "subnets": [{"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe7a:3bf1", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": 
true, "ipv6_address_mode": "slaac"}}, {"cidr": "2001:db8:0:1::/64", "dns": [], "gateway": {"address": "2001:db8:0:1::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8:0:1:f816:3eff:fe7a:3bf1", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap2cddfcab-eb", "ovs_interfaceid": "2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.021 2 DEBUG nova.network.os_vif_util [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converting VIF {"id": "2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2", "address": "fa:16:3e:7a:3b:f1", "network": {"id": "e2520108-9d67-4d82-a7a0-ba429a88c3c9", "bridge": "br-int", "label": "tempest-network-smoke--1271498361", "subnets": [{"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe7a:3bf1", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}, {"cidr": "2001:db8:0:1::/64", "dns": [], "gateway": {"address": "2001:db8:0:1::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8:0:1:f816:3eff:fe7a:3bf1", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap2cddfcab-eb", "ovs_interfaceid": "2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.021 2 DEBUG nova.network.os_vif_util [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:7a:3b:f1,bridge_name='br-int',has_traffic_filtering=True,id=2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2,network=Network(e2520108-9d67-4d82-a7a0-ba429a88c3c9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap2cddfcab-eb') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.022 2 DEBUG os_vif [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:7a:3b:f1,bridge_name='br-int',has_traffic_filtering=True,id=2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2,network=Network(e2520108-9d67-4d82-a7a0-ba429a88c3c9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap2cddfcab-eb') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.022 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.022 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.022 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.024 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.024 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap2cddfcab-eb, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.025 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap2cddfcab-eb, col_values=(('external_ids', {'iface-id': '2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:7a:3b:f1', 'vm-uuid': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:30:39 compute-0 NetworkManager[51160]: <info>  [1759408239.0268] manager: (tap2cddfcab-eb): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/248)
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.028 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.032 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.033 2 INFO os_vif [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:7a:3b:f1,bridge_name='br-int',has_traffic_filtering=True,id=2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2,network=Network(e2520108-9d67-4d82-a7a0-ba429a88c3c9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap2cddfcab-eb')
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.233 2 DEBUG nova.virt.libvirt.driver [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.234 2 DEBUG nova.virt.libvirt.driver [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.234 2 DEBUG nova.virt.libvirt.driver [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] No VIF found with MAC fa:16:3e:88:b5:22, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.234 2 DEBUG nova.virt.libvirt.driver [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] No VIF found with MAC fa:16:3e:7a:3b:f1, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:30:39 compute-0 nova_compute[192079]: 2025-10-02 12:30:39.235 2 INFO nova.virt.libvirt.driver [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Using config drive
Oct 02 12:30:40 compute-0 nova_compute[192079]: 2025-10-02 12:30:40.090 2 INFO nova.virt.libvirt.driver [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Creating config drive at /var/lib/nova/instances/d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk.config
Oct 02 12:30:40 compute-0 nova_compute[192079]: 2025-10-02 12:30:40.097 2 DEBUG oslo_concurrency.processutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp403sjmc1 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:30:40 compute-0 nova_compute[192079]: 2025-10-02 12:30:40.230 2 DEBUG oslo_concurrency.processutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp403sjmc1" returned: 0 in 0.133s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:30:40 compute-0 NetworkManager[51160]: <info>  [1759408240.2965] manager: (tap3aa2fa6d-ae): new Tun device (/org/freedesktop/NetworkManager/Devices/249)
Oct 02 12:30:40 compute-0 kernel: tap3aa2fa6d-ae: entered promiscuous mode
Oct 02 12:30:40 compute-0 ovn_controller[94336]: 2025-10-02T12:30:40Z|00494|binding|INFO|Claiming lport 3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa for this chassis.
Oct 02 12:30:40 compute-0 ovn_controller[94336]: 2025-10-02T12:30:40Z|00495|binding|INFO|3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa: Claiming fa:16:3e:88:b5:22 10.100.0.8
Oct 02 12:30:40 compute-0 nova_compute[192079]: 2025-10-02 12:30:40.303 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:40 compute-0 NetworkManager[51160]: <info>  [1759408240.3212] manager: (tap2cddfcab-eb): new Tun device (/org/freedesktop/NetworkManager/Devices/250)
Oct 02 12:30:40 compute-0 systemd-udevd[243202]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:30:40 compute-0 systemd-udevd[243203]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:30:40 compute-0 NetworkManager[51160]: <info>  [1759408240.3523] device (tap3aa2fa6d-ae): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:30:40 compute-0 NetworkManager[51160]: <info>  [1759408240.3535] device (tap3aa2fa6d-ae): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:30:40 compute-0 systemd-machined[152150]: New machine qemu-66-instance-00000086.
Oct 02 12:30:40 compute-0 kernel: tap2cddfcab-eb: entered promiscuous mode
Oct 02 12:30:40 compute-0 nova_compute[192079]: 2025-10-02 12:30:40.385 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:40 compute-0 NetworkManager[51160]: <info>  [1759408240.3884] device (tap2cddfcab-eb): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:30:40 compute-0 ovn_controller[94336]: 2025-10-02T12:30:40Z|00496|if_status|INFO|Not updating pb chassis for 2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2 now as sb is readonly
Oct 02 12:30:40 compute-0 systemd[1]: Started Virtual Machine qemu-66-instance-00000086.
Oct 02 12:30:40 compute-0 nova_compute[192079]: 2025-10-02 12:30:40.388 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:40 compute-0 ovn_controller[94336]: 2025-10-02T12:30:40Z|00497|binding|INFO|Claiming lport 2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2 for this chassis.
Oct 02 12:30:40 compute-0 ovn_controller[94336]: 2025-10-02T12:30:40Z|00498|binding|INFO|2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2: Claiming fa:16:3e:7a:3b:f1 2001:db8:0:1:f816:3eff:fe7a:3bf1 2001:db8::f816:3eff:fe7a:3bf1
Oct 02 12:30:40 compute-0 NetworkManager[51160]: <info>  [1759408240.4050] device (tap2cddfcab-eb): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:40.403 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:88:b5:22 10.100.0.8'], port_security=['fa:16:3e:88:b5:22 10.100.0.8'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.8/28', 'neutron:device_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-299c5e6b-f8b7-4cca-810b-a9b2539f4246', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'neutron:revision_number': '2', 'neutron:security_group_ids': 'f706c8b6-b68a-48d8-b578-b0c81b519c8e', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=4a9348f4-eede-4266-8396-8c521ea59fc0, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:40.404 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa in datapath 299c5e6b-f8b7-4cca-810b-a9b2539f4246 bound to our chassis
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:40.406 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 299c5e6b-f8b7-4cca-810b-a9b2539f4246
Oct 02 12:30:40 compute-0 nova_compute[192079]: 2025-10-02 12:30:40.406 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:40 compute-0 ovn_controller[94336]: 2025-10-02T12:30:40Z|00499|binding|INFO|Setting lport 3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa ovn-installed in OVS
Oct 02 12:30:40 compute-0 ovn_controller[94336]: 2025-10-02T12:30:40Z|00500|binding|INFO|Setting lport 2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2 ovn-installed in OVS
Oct 02 12:30:40 compute-0 nova_compute[192079]: 2025-10-02 12:30:40.417 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:40.417 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[992c4d26-298e-454f-b908-d205769c9693]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:40.418 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap299c5e6b-f1 in ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:40.419 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap299c5e6b-f0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:40.419 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a08b7c4a-44fd-4123-ae4f-bf2207c5bbd4]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:40.420 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[df37138b-a653-432b-9c5e-9e7c119b3901]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:40.431 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[56c5935a-8733-4eec-87f8-5c94f3fa910b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:40.455 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[134619b9-b305-4256-acff-d480eb3fad0e]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:40.482 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[455e7b8a-67b2-4363-8fcf-4c78bbcad462]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:40.488 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1b7eb5a6-dc89-483e-9fdb-c0b3eae52cd2]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:40 compute-0 NetworkManager[51160]: <info>  [1759408240.4899] manager: (tap299c5e6b-f0): new Veth device (/org/freedesktop/NetworkManager/Devices/251)
Oct 02 12:30:40 compute-0 ovn_controller[94336]: 2025-10-02T12:30:40Z|00501|binding|INFO|Setting lport 3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa up in Southbound
Oct 02 12:30:40 compute-0 ovn_controller[94336]: 2025-10-02T12:30:40Z|00502|binding|INFO|Setting lport 2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2 up in Southbound
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:40.511 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:7a:3b:f1 2001:db8:0:1:f816:3eff:fe7a:3bf1 2001:db8::f816:3eff:fe7a:3bf1'], port_security=['fa:16:3e:7a:3b:f1 2001:db8:0:1:f816:3eff:fe7a:3bf1 2001:db8::f816:3eff:fe7a:3bf1'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '2001:db8:0:1:f816:3eff:fe7a:3bf1/64 2001:db8::f816:3eff:fe7a:3bf1/64', 'neutron:device_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-e2520108-9d67-4d82-a7a0-ba429a88c3c9', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'neutron:revision_number': '2', 'neutron:security_group_ids': 'f706c8b6-b68a-48d8-b578-b0c81b519c8e', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=876a7f58-2645-4e1a-8a60-dbbe16fdfb2e, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:40.522 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[0912d0e6-29a2-496b-8055-9f0b75f86bf2]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:40.525 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[49ec3ff0-ba91-422e-b14d-4ef7ac1fdd72]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:40 compute-0 NetworkManager[51160]: <info>  [1759408240.5435] device (tap299c5e6b-f0): carrier: link connected
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:40.550 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[27246d19-dfaf-434a-ab5d-defc7296229c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:40.565 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[54974259-8866-4bcb-9d51-9488e2e4ad71]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap299c5e6b-f1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:da:24:47'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 162], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 623817, 'reachable_time': 16588, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 243239, 'error': None, 'target': 'ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:40.576 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[892f8dc7-7bd7-42fe-9cd2-1af0bfe0b44d]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:feda:2447'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 623817, 'tstamp': 623817}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 243240, 'error': None, 'target': 'ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:40.589 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ade9d91e-dc97-4418-9876-c12d5afded45]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap299c5e6b-f1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:da:24:47'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 162], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 623817, 'reachable_time': 16588, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 243241, 'error': None, 'target': 'ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:40.617 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ee63bb62-b1b8-4f76-9002-bec9953dae3e]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:40.669 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f36a6cb5-7335-459c-9e56-5f3e1b0c6e66]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:40.671 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap299c5e6b-f0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:40.671 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:40.671 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap299c5e6b-f0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:30:40 compute-0 nova_compute[192079]: 2025-10-02 12:30:40.673 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:40 compute-0 NetworkManager[51160]: <info>  [1759408240.6739] manager: (tap299c5e6b-f0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/252)
Oct 02 12:30:40 compute-0 kernel: tap299c5e6b-f0: entered promiscuous mode
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:40.676 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap299c5e6b-f0, col_values=(('external_ids', {'iface-id': '6e8b6bdc-2d9f-47a8-8b24-8ce9ea993d7a'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:30:40 compute-0 nova_compute[192079]: 2025-10-02 12:30:40.677 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:40 compute-0 ovn_controller[94336]: 2025-10-02T12:30:40Z|00503|binding|INFO|Releasing lport 6e8b6bdc-2d9f-47a8-8b24-8ce9ea993d7a from this chassis (sb_readonly=0)
Oct 02 12:30:40 compute-0 nova_compute[192079]: 2025-10-02 12:30:40.688 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:40.689 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/299c5e6b-f8b7-4cca-810b-a9b2539f4246.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/299c5e6b-f8b7-4cca-810b-a9b2539f4246.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:40.689 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[39305542-717d-4dd1-830c-2fe6ba3505b1]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:40.690 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-299c5e6b-f8b7-4cca-810b-a9b2539f4246
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/299c5e6b-f8b7-4cca-810b-a9b2539f4246.pid.haproxy
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 299c5e6b-f8b7-4cca-810b-a9b2539f4246
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:30:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:40.691 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246', 'env', 'PROCESS_TAG=haproxy-299c5e6b-f8b7-4cca-810b-a9b2539f4246', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/299c5e6b-f8b7-4cca-810b-a9b2539f4246.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:30:41 compute-0 podman[243272]: 2025-10-02 12:30:41.040547519 +0000 UTC m=+0.051032793 container create b4953488dbf5e132aabf422a3d1f3a7340a7d7536e09fd97da438042b2503274 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0)
Oct 02 12:30:41 compute-0 nova_compute[192079]: 2025-10-02 12:30:41.054 2 DEBUG nova.compute.manager [req-7b07d9fd-6b95-4e52-9e97-ab2f4d8691da req-cbd2490d-c15b-40a0-97d2-9d8e05fcae99 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Received event network-vif-plugged-2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:30:41 compute-0 nova_compute[192079]: 2025-10-02 12:30:41.055 2 DEBUG oslo_concurrency.lockutils [req-7b07d9fd-6b95-4e52-9e97-ab2f4d8691da req-cbd2490d-c15b-40a0-97d2-9d8e05fcae99 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:30:41 compute-0 nova_compute[192079]: 2025-10-02 12:30:41.055 2 DEBUG oslo_concurrency.lockutils [req-7b07d9fd-6b95-4e52-9e97-ab2f4d8691da req-cbd2490d-c15b-40a0-97d2-9d8e05fcae99 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:30:41 compute-0 nova_compute[192079]: 2025-10-02 12:30:41.056 2 DEBUG oslo_concurrency.lockutils [req-7b07d9fd-6b95-4e52-9e97-ab2f4d8691da req-cbd2490d-c15b-40a0-97d2-9d8e05fcae99 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:30:41 compute-0 nova_compute[192079]: 2025-10-02 12:30:41.056 2 DEBUG nova.compute.manager [req-7b07d9fd-6b95-4e52-9e97-ab2f4d8691da req-cbd2490d-c15b-40a0-97d2-9d8e05fcae99 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Processing event network-vif-plugged-2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:30:41 compute-0 systemd[1]: Started libpod-conmon-b4953488dbf5e132aabf422a3d1f3a7340a7d7536e09fd97da438042b2503274.scope.
Oct 02 12:30:41 compute-0 podman[243272]: 2025-10-02 12:30:41.010494179 +0000 UTC m=+0.020979483 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:30:41 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:30:41 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/eaa1606c1a8f0f63ae5273e2ba4072c659aef5bbf6890937417653c8870cee25/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:30:41 compute-0 podman[243272]: 2025-10-02 12:30:41.137243109 +0000 UTC m=+0.147728473 container init b4953488dbf5e132aabf422a3d1f3a7340a7d7536e09fd97da438042b2503274 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246, org.label-schema.vendor=CentOS, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:30:41 compute-0 podman[243272]: 2025-10-02 12:30:41.144495186 +0000 UTC m=+0.154980460 container start b4953488dbf5e132aabf422a3d1f3a7340a7d7536e09fd97da438042b2503274 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2)
Oct 02 12:30:41 compute-0 neutron-haproxy-ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246[243287]: [NOTICE]   (243291) : New worker (243293) forked
Oct 02 12:30:41 compute-0 neutron-haproxy-ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246[243287]: [NOTICE]   (243291) : Loading success.
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:41.201 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2 in datapath e2520108-9d67-4d82-a7a0-ba429a88c3c9 unbound from our chassis
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:41.203 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network e2520108-9d67-4d82-a7a0-ba429a88c3c9
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:41.213 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a3ac963a-6305-4b33-959e-008717d3bf54]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:41.213 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tape2520108-91 in ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:41.216 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tape2520108-90 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:41.216 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a39013e6-07c8-47ab-8c7e-3fec23832906]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:41.217 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f7864a28-c076-4ca6-8d09-cf95808b0d79]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:41.227 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[7affe9f5-2892-4681-a614-7a0aae715b8d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:41.239 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a8092e5c-d311-4311-958c-4d392a448584]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:41.266 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[cc8c7548-a454-4a16-9da4-53308fd5652c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:41 compute-0 NetworkManager[51160]: <info>  [1759408241.2738] manager: (tape2520108-90): new Veth device (/org/freedesktop/NetworkManager/Devices/253)
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:41.273 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c291897d-3d2a-40eb-a2b9-83dec1a64fa4]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:41.307 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[a07e3798-db4d-4707-8c75-2a3d5284d22d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:41.313 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[b434be3e-d4f9-4386-aba2-aba456ebf0a6]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:41 compute-0 NetworkManager[51160]: <info>  [1759408241.3354] device (tape2520108-90): carrier: link connected
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:41.341 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[24d975f8-6d92-4f49-aa4e-2f62fa2873dc]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:41 compute-0 nova_compute[192079]: 2025-10-02 12:30:41.342 2 DEBUG nova.network.neutron [req-2c5ede90-e788-48ba-868c-1a383617ffdf req-751543e5-2300-404e-84b6-511f3ae73dff 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Updated VIF entry in instance network info cache for port 2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:30:41 compute-0 nova_compute[192079]: 2025-10-02 12:30:41.342 2 DEBUG nova.network.neutron [req-2c5ede90-e788-48ba-868c-1a383617ffdf req-751543e5-2300-404e-84b6-511f3ae73dff 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Updating instance_info_cache with network_info: [{"id": "3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa", "address": "fa:16:3e:88:b5:22", "network": {"id": "299c5e6b-f8b7-4cca-810b-a9b2539f4246", "bridge": "br-int", "label": "tempest-network-smoke--136367555", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap3aa2fa6d-ae", "ovs_interfaceid": "3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2", "address": "fa:16:3e:7a:3b:f1", "network": {"id": "e2520108-9d67-4d82-a7a0-ba429a88c3c9", "bridge": "br-int", "label": "tempest-network-smoke--1271498361", "subnets": [{"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe7a:3bf1", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}, {"cidr": "2001:db8:0:1::/64", "dns": [], "gateway": 
{"address": "2001:db8:0:1::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8:0:1:f816:3eff:fe7a:3bf1", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap2cddfcab-eb", "ovs_interfaceid": "2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:41.372 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9db3a35a-f080-447c-b484-98d11afe00a9]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tape2520108-91'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:2e:31:73'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 163], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 623896, 'reachable_time': 15377, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 243320, 'error': None, 'target': 'ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:41.390 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c05fd563-cd1d-49e7-900c-ea6895819a3d]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe2e:3173'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 623896, 'tstamp': 623896}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 243321, 'error': None, 'target': 'ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:41 compute-0 nova_compute[192079]: 2025-10-02 12:30:41.405 2 DEBUG oslo_concurrency.lockutils [req-2c5ede90-e788-48ba-868c-1a383617ffdf req-751543e5-2300-404e-84b6-511f3ae73dff 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:41.411 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[32141ff6-fe9e-49ac-ad6e-df72a24b5705]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tape2520108-91'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:2e:31:73'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 163], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 623896, 'reachable_time': 15377, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 243322, 'error': None, 'target': 'ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:41.446 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0a03851a-b572-4f2f-88cc-4a73798a3859]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:41.482 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e2421a33-c913-48f3-9c42-ee8118a171be]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:41.483 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tape2520108-90, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:41.484 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:41.484 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tape2520108-90, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:30:41 compute-0 kernel: tape2520108-90: entered promiscuous mode
Oct 02 12:30:41 compute-0 NetworkManager[51160]: <info>  [1759408241.5208] manager: (tape2520108-90): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/254)
Oct 02 12:30:41 compute-0 nova_compute[192079]: 2025-10-02 12:30:41.519 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:41.521 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tape2520108-90, col_values=(('external_ids', {'iface-id': '3eb0ed9e-d99b-4ee6-af64-ada9c8369b17'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:30:41 compute-0 ovn_controller[94336]: 2025-10-02T12:30:41Z|00504|binding|INFO|Releasing lport 3eb0ed9e-d99b-4ee6-af64-ada9c8369b17 from this chassis (sb_readonly=0)
Oct 02 12:30:41 compute-0 nova_compute[192079]: 2025-10-02 12:30:41.546 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:41.546 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/e2520108-9d67-4d82-a7a0-ba429a88c3c9.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/e2520108-9d67-4d82-a7a0-ba429a88c3c9.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:41.547 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[df3e2e03-f493-4634-a39c-a9442bf627b5]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:41.548 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-e2520108-9d67-4d82-a7a0-ba429a88c3c9
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/e2520108-9d67-4d82-a7a0-ba429a88c3c9.pid.haproxy
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID e2520108-9d67-4d82-a7a0-ba429a88c3c9
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:30:41 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:30:41.549 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9', 'env', 'PROCESS_TAG=haproxy-e2520108-9d67-4d82-a7a0-ba429a88c3c9', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/e2520108-9d67-4d82-a7a0-ba429a88c3c9.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:30:41 compute-0 nova_compute[192079]: 2025-10-02 12:30:41.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:30:41 compute-0 nova_compute[192079]: 2025-10-02 12:30:41.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:30:41 compute-0 nova_compute[192079]: 2025-10-02 12:30:41.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:30:41 compute-0 nova_compute[192079]: 2025-10-02 12:30:41.687 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Skipping network cache update for instance because it is Building. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9871
Oct 02 12:30:41 compute-0 nova_compute[192079]: 2025-10-02 12:30:41.688 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:30:41 compute-0 nova_compute[192079]: 2025-10-02 12:30:41.799 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408241.7985756, d59f518a-8b98-4c8c-b8f7-19f6b6809c6d => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:30:41 compute-0 nova_compute[192079]: 2025-10-02 12:30:41.799 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] VM Started (Lifecycle Event)
Oct 02 12:30:41 compute-0 nova_compute[192079]: 2025-10-02 12:30:41.822 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:30:41 compute-0 nova_compute[192079]: 2025-10-02 12:30:41.826 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408241.8011272, d59f518a-8b98-4c8c-b8f7-19f6b6809c6d => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:30:41 compute-0 nova_compute[192079]: 2025-10-02 12:30:41.826 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] VM Paused (Lifecycle Event)
Oct 02 12:30:41 compute-0 nova_compute[192079]: 2025-10-02 12:30:41.866 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:30:41 compute-0 nova_compute[192079]: 2025-10-02 12:30:41.869 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:30:41 compute-0 nova_compute[192079]: 2025-10-02 12:30:41.901 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:30:41 compute-0 podman[243352]: 2025-10-02 12:30:41.935387358 +0000 UTC m=+0.045338549 container create eb2de2288d02d59280fecc1a1e9072a9d6d42b7966c9cf78e670077fed333e10 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001)
Oct 02 12:30:41 compute-0 systemd[1]: Started libpod-conmon-eb2de2288d02d59280fecc1a1e9072a9d6d42b7966c9cf78e670077fed333e10.scope.
Oct 02 12:30:41 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:30:41 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/30eba00810e4e957780d6f714d17e0d17b29c9aab4905d0f5a3e6f907cc35099/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:30:42 compute-0 podman[243352]: 2025-10-02 12:30:42.00250137 +0000 UTC m=+0.112452571 container init eb2de2288d02d59280fecc1a1e9072a9d6d42b7966c9cf78e670077fed333e10 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0)
Oct 02 12:30:42 compute-0 podman[243352]: 2025-10-02 12:30:41.9116316 +0000 UTC m=+0.021582821 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:30:42 compute-0 podman[243352]: 2025-10-02 12:30:42.008218246 +0000 UTC m=+0.118169437 container start eb2de2288d02d59280fecc1a1e9072a9d6d42b7966c9cf78e670077fed333e10 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, tcib_managed=true, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:30:42 compute-0 neutron-haproxy-ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9[243367]: [NOTICE]   (243371) : New worker (243373) forked
Oct 02 12:30:42 compute-0 neutron-haproxy-ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9[243367]: [NOTICE]   (243371) : Loading success.
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.246 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.451 2 DEBUG nova.compute.manager [req-5d0c44a4-77f9-4251-b939-3787f197b5a8 req-3f8ffe3d-885f-4a6d-bc22-bb7a591b3041 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Received event network-vif-plugged-3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.451 2 DEBUG oslo_concurrency.lockutils [req-5d0c44a4-77f9-4251-b939-3787f197b5a8 req-3f8ffe3d-885f-4a6d-bc22-bb7a591b3041 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.452 2 DEBUG oslo_concurrency.lockutils [req-5d0c44a4-77f9-4251-b939-3787f197b5a8 req-3f8ffe3d-885f-4a6d-bc22-bb7a591b3041 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.452 2 DEBUG oslo_concurrency.lockutils [req-5d0c44a4-77f9-4251-b939-3787f197b5a8 req-3f8ffe3d-885f-4a6d-bc22-bb7a591b3041 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.452 2 DEBUG nova.compute.manager [req-5d0c44a4-77f9-4251-b939-3787f197b5a8 req-3f8ffe3d-885f-4a6d-bc22-bb7a591b3041 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Processing event network-vif-plugged-3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.452 2 DEBUG nova.compute.manager [req-5d0c44a4-77f9-4251-b939-3787f197b5a8 req-3f8ffe3d-885f-4a6d-bc22-bb7a591b3041 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Received event network-vif-plugged-3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.452 2 DEBUG oslo_concurrency.lockutils [req-5d0c44a4-77f9-4251-b939-3787f197b5a8 req-3f8ffe3d-885f-4a6d-bc22-bb7a591b3041 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.453 2 DEBUG oslo_concurrency.lockutils [req-5d0c44a4-77f9-4251-b939-3787f197b5a8 req-3f8ffe3d-885f-4a6d-bc22-bb7a591b3041 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.453 2 DEBUG oslo_concurrency.lockutils [req-5d0c44a4-77f9-4251-b939-3787f197b5a8 req-3f8ffe3d-885f-4a6d-bc22-bb7a591b3041 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.453 2 DEBUG nova.compute.manager [req-5d0c44a4-77f9-4251-b939-3787f197b5a8 req-3f8ffe3d-885f-4a6d-bc22-bb7a591b3041 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] No waiting events found dispatching network-vif-plugged-3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.453 2 WARNING nova.compute.manager [req-5d0c44a4-77f9-4251-b939-3787f197b5a8 req-3f8ffe3d-885f-4a6d-bc22-bb7a591b3041 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Received unexpected event network-vif-plugged-3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa for instance with vm_state building and task_state spawning.
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.454 2 DEBUG nova.compute.manager [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Instance event wait completed in 0 seconds for network-vif-plugged,network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.458 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408242.457207, d59f518a-8b98-4c8c-b8f7-19f6b6809c6d => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.458 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] VM Resumed (Lifecycle Event)
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.460 2 DEBUG nova.virt.libvirt.driver [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.463 2 INFO nova.virt.libvirt.driver [-] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Instance spawned successfully.
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.463 2 DEBUG nova.virt.libvirt.driver [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.527 2 DEBUG nova.virt.libvirt.driver [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.531 2 DEBUG nova.virt.libvirt.driver [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.532 2 DEBUG nova.virt.libvirt.driver [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.532 2 DEBUG nova.virt.libvirt.driver [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.533 2 DEBUG nova.virt.libvirt.driver [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.533 2 DEBUG nova.virt.libvirt.driver [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.538 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.541 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.605 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.631 2 INFO nova.compute.manager [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Took 15.38 seconds to spawn the instance on the hypervisor.
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.631 2 DEBUG nova.compute.manager [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.751 2 INFO nova.compute.manager [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Took 16.18 seconds to build instance.
Oct 02 12:30:42 compute-0 nova_compute[192079]: 2025-10-02 12:30:42.773 2 DEBUG oslo_concurrency.lockutils [None req-d7b8f424-4fc4-4c41-b707-3ad87ebb38ac 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 16.278s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:30:43 compute-0 nova_compute[192079]: 2025-10-02 12:30:43.291 2 DEBUG nova.compute.manager [req-dd3d01cf-cf62-4e79-8595-731d8549d456 req-b444f7c8-7d09-48b3-b2ac-7752f664d1f1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Received event network-vif-plugged-2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:30:43 compute-0 nova_compute[192079]: 2025-10-02 12:30:43.292 2 DEBUG oslo_concurrency.lockutils [req-dd3d01cf-cf62-4e79-8595-731d8549d456 req-b444f7c8-7d09-48b3-b2ac-7752f664d1f1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:30:43 compute-0 nova_compute[192079]: 2025-10-02 12:30:43.292 2 DEBUG oslo_concurrency.lockutils [req-dd3d01cf-cf62-4e79-8595-731d8549d456 req-b444f7c8-7d09-48b3-b2ac-7752f664d1f1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:30:43 compute-0 nova_compute[192079]: 2025-10-02 12:30:43.292 2 DEBUG oslo_concurrency.lockutils [req-dd3d01cf-cf62-4e79-8595-731d8549d456 req-b444f7c8-7d09-48b3-b2ac-7752f664d1f1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:30:43 compute-0 nova_compute[192079]: 2025-10-02 12:30:43.292 2 DEBUG nova.compute.manager [req-dd3d01cf-cf62-4e79-8595-731d8549d456 req-b444f7c8-7d09-48b3-b2ac-7752f664d1f1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] No waiting events found dispatching network-vif-plugged-2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:30:43 compute-0 nova_compute[192079]: 2025-10-02 12:30:43.293 2 WARNING nova.compute.manager [req-dd3d01cf-cf62-4e79-8595-731d8549d456 req-b444f7c8-7d09-48b3-b2ac-7752f664d1f1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Received unexpected event network-vif-plugged-2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2 for instance with vm_state active and task_state None.
Oct 02 12:30:44 compute-0 nova_compute[192079]: 2025-10-02 12:30:44.031 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:44 compute-0 podman[243382]: 2025-10-02 12:30:44.148756946 +0000 UTC m=+0.061000995 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, container_name=ceilometer_agent_compute, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, 
tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=edpm, tcib_managed=true)
Oct 02 12:30:46 compute-0 nova_compute[192079]: 2025-10-02 12:30:46.659 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:30:47 compute-0 nova_compute[192079]: 2025-10-02 12:30:47.248 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:47 compute-0 NetworkManager[51160]: <info>  [1759408247.6601] manager: (patch-br-int-to-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/255)
Oct 02 12:30:47 compute-0 NetworkManager[51160]: <info>  [1759408247.6611] manager: (patch-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d-to-br-int): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/256)
Oct 02 12:30:47 compute-0 nova_compute[192079]: 2025-10-02 12:30:47.677 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:47 compute-0 nova_compute[192079]: 2025-10-02 12:30:47.798 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:47 compute-0 ovn_controller[94336]: 2025-10-02T12:30:47Z|00505|binding|INFO|Releasing lport 3eb0ed9e-d99b-4ee6-af64-ada9c8369b17 from this chassis (sb_readonly=0)
Oct 02 12:30:47 compute-0 ovn_controller[94336]: 2025-10-02T12:30:47Z|00506|binding|INFO|Releasing lport 6e8b6bdc-2d9f-47a8-8b24-8ce9ea993d7a from this chassis (sb_readonly=0)
Oct 02 12:30:47 compute-0 nova_compute[192079]: 2025-10-02 12:30:47.820 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:48 compute-0 nova_compute[192079]: 2025-10-02 12:30:48.890 2 DEBUG nova.compute.manager [req-ceb10078-ec7d-431e-aa65-b9babcdb4d18 req-d563e2bc-3100-4023-a7a9-fc2daee92410 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Received event network-changed-3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:30:48 compute-0 nova_compute[192079]: 2025-10-02 12:30:48.891 2 DEBUG nova.compute.manager [req-ceb10078-ec7d-431e-aa65-b9babcdb4d18 req-d563e2bc-3100-4023-a7a9-fc2daee92410 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Refreshing instance network info cache due to event network-changed-3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:30:48 compute-0 nova_compute[192079]: 2025-10-02 12:30:48.891 2 DEBUG oslo_concurrency.lockutils [req-ceb10078-ec7d-431e-aa65-b9babcdb4d18 req-d563e2bc-3100-4023-a7a9-fc2daee92410 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:30:48 compute-0 nova_compute[192079]: 2025-10-02 12:30:48.891 2 DEBUG oslo_concurrency.lockutils [req-ceb10078-ec7d-431e-aa65-b9babcdb4d18 req-d563e2bc-3100-4023-a7a9-fc2daee92410 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:30:48 compute-0 nova_compute[192079]: 2025-10-02 12:30:48.892 2 DEBUG nova.network.neutron [req-ceb10078-ec7d-431e-aa65-b9babcdb4d18 req-d563e2bc-3100-4023-a7a9-fc2daee92410 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Refreshing network info cache for port 3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:30:49 compute-0 nova_compute[192079]: 2025-10-02 12:30:49.033 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:51 compute-0 podman[243406]: 2025-10-02 12:30:51.170885455 +0000 UTC m=+0.082032949 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, architecture=x86_64, distribution-scope=public, url=https://catalog.redhat.com/en/search?searchType=containers, container_name=openstack_network_exporter, vendor=Red Hat, Inc., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, managed_by=edpm_ansible, name=ubi9-minimal, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, config_id=edpm, io.openshift.expose-services=, io.buildah.version=1.33.7, version=9.6, build-date=2025-08-20T13:12:41, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, maintainer=Red Hat, Inc., release=1755695350, 
com.redhat.component=ubi9-minimal-container, io.openshift.tags=minimal rhel9, vcs-type=git, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly.)
Oct 02 12:30:51 compute-0 podman[243407]: 2025-10-02 12:30:51.170975537 +0000 UTC m=+0.068457068 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, config_id=multipathd, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, tcib_managed=true, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']})
Oct 02 12:30:52 compute-0 nova_compute[192079]: 2025-10-02 12:30:52.249 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:52 compute-0 nova_compute[192079]: 2025-10-02 12:30:52.796 2 DEBUG nova.network.neutron [req-ceb10078-ec7d-431e-aa65-b9babcdb4d18 req-d563e2bc-3100-4023-a7a9-fc2daee92410 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Updated VIF entry in instance network info cache for port 3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:30:52 compute-0 nova_compute[192079]: 2025-10-02 12:30:52.796 2 DEBUG nova.network.neutron [req-ceb10078-ec7d-431e-aa65-b9babcdb4d18 req-d563e2bc-3100-4023-a7a9-fc2daee92410 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Updating instance_info_cache with network_info: [{"id": "3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa", "address": "fa:16:3e:88:b5:22", "network": {"id": "299c5e6b-f8b7-4cca-810b-a9b2539f4246", "bridge": "br-int", "label": "tempest-network-smoke--136367555", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.247", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap3aa2fa6d-ae", "ovs_interfaceid": "3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2", "address": "fa:16:3e:7a:3b:f1", "network": {"id": "e2520108-9d67-4d82-a7a0-ba429a88c3c9", "bridge": "br-int", "label": "tempest-network-smoke--1271498361", "subnets": [{"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe7a:3bf1", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, 
"ipv6_address_mode": "slaac"}}, {"cidr": "2001:db8:0:1::/64", "dns": [], "gateway": {"address": "2001:db8:0:1::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8:0:1:f816:3eff:fe7a:3bf1", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap2cddfcab-eb", "ovs_interfaceid": "2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:30:52 compute-0 nova_compute[192079]: 2025-10-02 12:30:52.846 2 DEBUG oslo_concurrency.lockutils [req-ceb10078-ec7d-431e-aa65-b9babcdb4d18 req-d563e2bc-3100-4023-a7a9-fc2daee92410 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:30:54 compute-0 nova_compute[192079]: 2025-10-02 12:30:54.036 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:55 compute-0 ovn_controller[94336]: 2025-10-02T12:30:55Z|00507|binding|INFO|Releasing lport 3eb0ed9e-d99b-4ee6-af64-ada9c8369b17 from this chassis (sb_readonly=0)
Oct 02 12:30:55 compute-0 ovn_controller[94336]: 2025-10-02T12:30:55Z|00508|binding|INFO|Releasing lport 6e8b6bdc-2d9f-47a8-8b24-8ce9ea993d7a from this chassis (sb_readonly=0)
Oct 02 12:30:55 compute-0 nova_compute[192079]: 2025-10-02 12:30:55.513 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:56 compute-0 ovn_controller[94336]: 2025-10-02T12:30:56Z|00051|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:88:b5:22 10.100.0.8
Oct 02 12:30:56 compute-0 ovn_controller[94336]: 2025-10-02T12:30:56Z|00052|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:88:b5:22 10.100.0.8
Oct 02 12:30:56 compute-0 podman[243463]: 2025-10-02 12:30:56.143895518 +0000 UTC m=+0.060311327 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter)
Oct 02 12:30:56 compute-0 podman[243464]: 2025-10-02 12:30:56.144647128 +0000 UTC m=+0.052852563 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=iscsid, io.buildah.version=1.41.3, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, managed_by=edpm_ansible, config_id=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2)
Oct 02 12:30:57 compute-0 nova_compute[192079]: 2025-10-02 12:30:57.251 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:30:59 compute-0 nova_compute[192079]: 2025-10-02 12:30:59.041 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:02.231 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:31:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:02.233 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.002s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:31:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:02.233 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:31:02 compute-0 nova_compute[192079]: 2025-10-02 12:31:02.254 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:03 compute-0 nova_compute[192079]: 2025-10-02 12:31:03.655 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:04 compute-0 nova_compute[192079]: 2025-10-02 12:31:04.042 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:04.076 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=35, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=34) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:31:04 compute-0 nova_compute[192079]: 2025-10-02 12:31:04.077 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:04.078 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 1 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:31:05 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:05.079 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '35'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:31:05 compute-0 podman[243509]: 2025-10-02 12:31:05.151919417 +0000 UTC m=+0.047133327 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 12:31:05 compute-0 podman[243507]: 2025-10-02 12:31:05.170373551 +0000 UTC m=+0.071784880 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, io.buildah.version=1.41.3, managed_by=edpm_ansible, 
tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_metadata_agent, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_managed=true, container_name=ovn_metadata_agent, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:31:05 compute-0 podman[243508]: 2025-10-02 12:31:05.172494508 +0000 UTC m=+0.071547593 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, tcib_managed=true, config_id=ovn_controller, org.label-schema.schema-version=1.0, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_controller, io.buildah.version=1.41.3)
Oct 02 12:31:06 compute-0 nova_compute[192079]: 2025-10-02 12:31:06.985 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:07 compute-0 nova_compute[192079]: 2025-10-02 12:31:07.256 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:09 compute-0 nova_compute[192079]: 2025-10-02 12:31:09.045 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:09 compute-0 nova_compute[192079]: 2025-10-02 12:31:09.595 2 DEBUG oslo_concurrency.lockutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "607e9c3a-4079-4261-b2c6-3cc47ae67173" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:31:09 compute-0 nova_compute[192079]: 2025-10-02 12:31:09.595 2 DEBUG oslo_concurrency.lockutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "607e9c3a-4079-4261-b2c6-3cc47ae67173" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:31:09 compute-0 nova_compute[192079]: 2025-10-02 12:31:09.613 2 DEBUG nova.compute.manager [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:31:09 compute-0 nova_compute[192079]: 2025-10-02 12:31:09.728 2 DEBUG oslo_concurrency.lockutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:31:09 compute-0 nova_compute[192079]: 2025-10-02 12:31:09.728 2 DEBUG oslo_concurrency.lockutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:31:09 compute-0 nova_compute[192079]: 2025-10-02 12:31:09.736 2 DEBUG nova.virt.hardware [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:31:09 compute-0 nova_compute[192079]: 2025-10-02 12:31:09.737 2 INFO nova.compute.claims [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:31:09 compute-0 nova_compute[192079]: 2025-10-02 12:31:09.921 2 DEBUG nova.compute.provider_tree [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:31:09 compute-0 nova_compute[192079]: 2025-10-02 12:31:09.941 2 DEBUG nova.scheduler.client.report [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:31:09 compute-0 nova_compute[192079]: 2025-10-02 12:31:09.976 2 DEBUG oslo_concurrency.lockutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.248s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:31:09 compute-0 nova_compute[192079]: 2025-10-02 12:31:09.977 2 DEBUG nova.compute.manager [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.061 2 DEBUG nova.compute.manager [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.062 2 DEBUG nova.network.neutron [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.081 2 INFO nova.virt.libvirt.driver [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.115 2 DEBUG nova.compute.manager [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.297 2 DEBUG nova.compute.manager [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.299 2 DEBUG nova.virt.libvirt.driver [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.299 2 INFO nova.virt.libvirt.driver [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Creating image(s)
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.300 2 DEBUG oslo_concurrency.lockutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "/var/lib/nova/instances/607e9c3a-4079-4261-b2c6-3cc47ae67173/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.300 2 DEBUG oslo_concurrency.lockutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "/var/lib/nova/instances/607e9c3a-4079-4261-b2c6-3cc47ae67173/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.301 2 DEBUG oslo_concurrency.lockutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "/var/lib/nova/instances/607e9c3a-4079-4261-b2c6-3cc47ae67173/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.312 2 DEBUG oslo_concurrency.processutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.390 2 DEBUG oslo_concurrency.processutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.077s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.391 2 DEBUG oslo_concurrency.lockutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.391 2 DEBUG oslo_concurrency.lockutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.402 2 DEBUG oslo_concurrency.processutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.454 2 DEBUG oslo_concurrency.processutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.052s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.455 2 DEBUG oslo_concurrency.processutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/607e9c3a-4079-4261-b2c6-3cc47ae67173/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.537 2 DEBUG oslo_concurrency.processutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/607e9c3a-4079-4261-b2c6-3cc47ae67173/disk 1073741824" returned: 0 in 0.082s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.539 2 DEBUG oslo_concurrency.lockutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.147s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.539 2 DEBUG oslo_concurrency.processutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.596 2 DEBUG oslo_concurrency.processutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.057s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.599 2 DEBUG nova.virt.disk.api [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Checking if we can resize image /var/lib/nova/instances/607e9c3a-4079-4261-b2c6-3cc47ae67173/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.599 2 DEBUG oslo_concurrency.processutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/607e9c3a-4079-4261-b2c6-3cc47ae67173/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.657 2 DEBUG oslo_concurrency.processutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/607e9c3a-4079-4261-b2c6-3cc47ae67173/disk --force-share --output=json" returned: 0 in 0.058s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.659 2 DEBUG nova.virt.disk.api [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Cannot resize image /var/lib/nova/instances/607e9c3a-4079-4261-b2c6-3cc47ae67173/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.659 2 DEBUG nova.objects.instance [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lazy-loading 'migration_context' on Instance uuid 607e9c3a-4079-4261-b2c6-3cc47ae67173 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.671 2 DEBUG nova.virt.libvirt.driver [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.672 2 DEBUG nova.virt.libvirt.driver [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Ensure instance console log exists: /var/lib/nova/instances/607e9c3a-4079-4261-b2c6-3cc47ae67173/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.672 2 DEBUG oslo_concurrency.lockutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.673 2 DEBUG oslo_concurrency.lockutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.673 2 DEBUG oslo_concurrency.lockutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:31:10 compute-0 nova_compute[192079]: 2025-10-02 12:31:10.830 2 DEBUG nova.policy [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:31:11 compute-0 nova_compute[192079]: 2025-10-02 12:31:11.800 2 DEBUG nova.network.neutron [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Successfully created port: 7971997a-1f55-41fa-b77a-9c6fdaf497f7 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:31:12 compute-0 nova_compute[192079]: 2025-10-02 12:31:12.259 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:12 compute-0 nova_compute[192079]: 2025-10-02 12:31:12.642 2 DEBUG nova.network.neutron [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Successfully created port: 482c766c-1462-47af-a801-a64e61f66109 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:31:14 compute-0 nova_compute[192079]: 2025-10-02 12:31:14.013 2 DEBUG nova.network.neutron [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Successfully updated port: 7971997a-1f55-41fa-b77a-9c6fdaf497f7 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:31:14 compute-0 nova_compute[192079]: 2025-10-02 12:31:14.048 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:14 compute-0 nova_compute[192079]: 2025-10-02 12:31:14.892 2 DEBUG nova.network.neutron [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Successfully updated port: 482c766c-1462-47af-a801-a64e61f66109 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:31:14 compute-0 nova_compute[192079]: 2025-10-02 12:31:14.927 2 DEBUG oslo_concurrency.lockutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "refresh_cache-607e9c3a-4079-4261-b2c6-3cc47ae67173" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:31:14 compute-0 nova_compute[192079]: 2025-10-02 12:31:14.928 2 DEBUG oslo_concurrency.lockutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquired lock "refresh_cache-607e9c3a-4079-4261-b2c6-3cc47ae67173" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:31:14 compute-0 nova_compute[192079]: 2025-10-02 12:31:14.928 2 DEBUG nova.network.neutron [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:31:15 compute-0 nova_compute[192079]: 2025-10-02 12:31:15.126 2 DEBUG nova.network.neutron [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:31:15 compute-0 podman[243591]: 2025-10-02 12:31:15.138703725 +0000 UTC m=+0.057208402 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, tcib_managed=true, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack 
Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.schema-version=1.0)
Oct 02 12:31:15 compute-0 nova_compute[192079]: 2025-10-02 12:31:15.277 2 DEBUG nova.compute.manager [req-ad61ab29-bd6c-4f88-b512-3ffb60cef3cb req-1d6874e0-8ee2-4c3a-8fb4-a21692c2ddcf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Received event network-changed-7971997a-1f55-41fa-b77a-9c6fdaf497f7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:31:15 compute-0 nova_compute[192079]: 2025-10-02 12:31:15.277 2 DEBUG nova.compute.manager [req-ad61ab29-bd6c-4f88-b512-3ffb60cef3cb req-1d6874e0-8ee2-4c3a-8fb4-a21692c2ddcf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Refreshing instance network info cache due to event network-changed-7971997a-1f55-41fa-b77a-9c6fdaf497f7. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:31:15 compute-0 nova_compute[192079]: 2025-10-02 12:31:15.278 2 DEBUG oslo_concurrency.lockutils [req-ad61ab29-bd6c-4f88-b512-3ffb60cef3cb req-1d6874e0-8ee2-4c3a-8fb4-a21692c2ddcf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-607e9c3a-4079-4261-b2c6-3cc47ae67173" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.112 12 DEBUG ceilometer.compute.discovery [-] instance data: {'id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'name': 'tempest-TestGettingAddress-server-597864105', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'os_type': 'hvm', 'architecture': 'x86_64', 'OS-EXT-SRV-ATTR:instance_name': 'instance-00000086', 'OS-EXT-SRV-ATTR:host': 'compute-0.ctlplane.example.com', 'OS-EXT-STS:vm_state': 'running', 'tenant_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'hostId': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'status': 'active', 'metadata': {}} discover_libvirt_polling /usr/lib/python3.9/site-packages/ceilometer/compute/discovery.py:228
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.113 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.latency in the context of pollsters
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.146 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk.device.read.latency volume: 1590207399 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.147 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk.device.read.latency volume: 45578788 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'b2e74201-de44-4c91-accb-ae4888e39119', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 1590207399, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d-vda', 'timestamp': '2025-10-02T12:31:17.113963', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'instance-00000086', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'b13f525e-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.801130955, 'message_signature': '5d22b5c2c150e04db5ba1a77b87dda56944876a1c94902518bee182e063b67a4'}, {'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 45578788, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 
'resource_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d-sda', 'timestamp': '2025-10-02T12:31:17.113963', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'instance-00000086', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'b13f6b40-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.801130955, 'message_signature': '06d82b0133fac229da1ad72a77f182ba0e60e89871cb673811de024d69a39318'}]}, 'timestamp': '2025-10-02 12:31:17.148344', '_unique_id': '5272fb2e343045f8a5b5d789faca9e18'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.150 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.151 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.requests in the context of pollsters
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.152 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk.device.read.requests volume: 1133 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.152 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk.device.read.requests volume: 108 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'ac0b6917-f482-4e75-bd4c-e158bbb1ea6c', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 1133, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d-vda', 'timestamp': '2025-10-02T12:31:17.151977', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'instance-00000086', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'b1401a54-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.801130955, 'message_signature': '0e6a24e4171ada8757894cbf757d3c72f90b6be3e62bef77a2598fa37e715843'}, {'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 108, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 
'resource_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d-sda', 'timestamp': '2025-10-02T12:31:17.151977', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'instance-00000086', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'b14038d6-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.801130955, 'message_signature': 'cc856518970dd4750536b82fa50e7397a212288a87ee9c3c2f2dea9f8f2dd8e7'}]}, 'timestamp': '2025-10-02 12:31:17.153543', '_unique_id': '5e03f719240342fcbf384e3eec24fb32'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.155 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.157 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets.drop in the context of pollsters
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.161 12 DEBUG ceilometer.compute.virt.libvirt.inspector [-] No delta meter predecessor for d59f518a-8b98-4c8c-b8f7-19f6b6809c6d / tap3aa2fa6d-ae inspect_vnics /usr/lib/python3.9/site-packages/ceilometer/compute/virt/libvirt/inspector.py:136
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.162 12 DEBUG ceilometer.compute.virt.libvirt.inspector [-] No delta meter predecessor for d59f518a-8b98-4c8c-b8f7-19f6b6809c6d / tap2cddfcab-eb inspect_vnics /usr/lib/python3.9/site-packages/ceilometer/compute/virt/libvirt/inspector.py:136
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.162 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/network.outgoing.packets.drop volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.163 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/network.outgoing.packets.drop volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '5ad9ee3b-949f-4b45-811b-6c648a1d614b', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets.drop', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'instance-00000086-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-tap3aa2fa6d-ae', 'timestamp': '2025-10-02T12:31:17.157329', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'tap3aa2fa6d-ae', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:88:b5:22', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap3aa2fa6d-ae'}, 'message_id': 'b141c552-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.844470958, 'message_signature': 'e55f57999fb07904faee96510b22ccbad5b6379e79b86aafc066dfb6800d2d33'}, {'source': 'openstack', 'counter_name': 'network.outgoing.packets.drop', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 
'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'instance-00000086-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-tap2cddfcab-eb', 'timestamp': '2025-10-02T12:31:17.157329', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'tap2cddfcab-eb', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:7a:3b:f1', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap2cddfcab-eb'}, 'message_id': 'b141e3fc-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.844470958, 'message_signature': '0eae7739372ebdc496d9f1c35c0b49b6d7840e2f9ab02d368f1cf220347c6844'}]}, 'timestamp': '2025-10-02 12:31:17.164503', '_unique_id': '218c6a485eb8461eb981ca5011645eb5'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.166 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.167 12 INFO ceilometer.polling.manager [-] Polling pollster memory.usage in the context of pollsters
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.190 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/memory.usage volume: 43.81640625 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'afe4626d-ecb4-4442-b413-b1a1d64877ea', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'memory.usage', 'counter_type': 'gauge', 'counter_unit': 'MB', 'counter_volume': 43.81640625, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'timestamp': '2025-10-02T12:31:17.168054', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'instance-00000086', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1}, 'message_id': 'b145eb96-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.877272613, 'message_signature': '4b5746cdf82797c9b11386a9214261371b2d9f94e3be56a6fdc43211947e5806'}]}, 'timestamp': '2025-10-02 12:31:17.190709', '_unique_id': '8553f6367e044e5c8c2fe5b0300177c0'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.191 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.192 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets.error in the context of pollsters
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.192 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/network.outgoing.packets.error volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.192 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/network.outgoing.packets.error volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'e7dfb8af-6a1e-4b2c-af16-df7f27bdb8cc', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets.error', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'instance-00000086-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-tap3aa2fa6d-ae', 'timestamp': '2025-10-02T12:31:17.192491', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'tap3aa2fa6d-ae', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:88:b5:22', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap3aa2fa6d-ae'}, 'message_id': 'b1463ed4-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.844470958, 'message_signature': '802e6d53af07006b427dd3f5178208c619f33604fedc941ee259efd14c39482f'}, {'source': 'openstack', 'counter_name': 'network.outgoing.packets.error', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 
'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'instance-00000086-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-tap2cddfcab-eb', 'timestamp': '2025-10-02T12:31:17.192491', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'tap2cddfcab-eb', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:7a:3b:f1', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap2cddfcab-eb'}, 'message_id': 'b1464ae6-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.844470958, 'message_signature': '403be9a8c67a686ccb218bec4e7e285b26c66a3f8ae7a7b38b876e3f871f1725'}]}, 'timestamp': '2025-10-02 12:31:17.193172', '_unique_id': 'dd5283c7702c4d58a9043ce812cfb66d'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.193 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.194 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.latency in the context of pollsters
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.194 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk.device.write.latency volume: 2286319151 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.195 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk.device.write.latency volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '257a9cc5-7fcc-4c41-a5f3-9a0460e78599', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 2286319151, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d-vda', 'timestamp': '2025-10-02T12:31:17.194895', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'instance-00000086', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'b1469da2-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.801130955, 'message_signature': 'c89053699109802ab961641a7e3b1a3eff897955b7bb09122974a0d286968133'}, {'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 0, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 
'resource_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d-sda', 'timestamp': '2025-10-02T12:31:17.194895', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'instance-00000086', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'b146a950-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.801130955, 'message_signature': '77b4138f85ea0e6d5ac91f9a5834ed8f40d63a2eaffba9f91c2cfe067456a5b7'}]}, 'timestamp': '2025-10-02 12:31:17.195535', '_unique_id': 'ea765d49cdf54f7a934c3a4530c78f11'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.196 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.197 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets in the context of pollsters
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.197 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/network.outgoing.packets volume: 28 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.197 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/network.outgoing.packets volume: 24 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '1fe712da-246c-40b4-bd07-af1af0d654cd', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 28, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'instance-00000086-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-tap3aa2fa6d-ae', 'timestamp': '2025-10-02T12:31:17.197158', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'tap3aa2fa6d-ae', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:88:b5:22', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap3aa2fa6d-ae'}, 'message_id': 'b146f496-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.844470958, 'message_signature': 'ce65dba54a7437a7687e5a44f2aa9eeeaa62515a2c99a628b04cb61981f3caa6'}, {'source': 'openstack', 'counter_name': 'network.outgoing.packets', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 24, 'user_id': 
'97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'instance-00000086-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-tap2cddfcab-eb', 'timestamp': '2025-10-02T12:31:17.197158', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'tap2cddfcab-eb', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:7a:3b:f1', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap2cddfcab-eb'}, 'message_id': 'b1470288-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.844470958, 'message_signature': '65b3d3fe60bc3059b107a30005499165efb668370438485e5f4bb2962270c644'}]}, 'timestamp': '2025-10-02 12:31:17.197847', '_unique_id': '892f537b852b410ca420b43d8cd14f14'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.198 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.199 12 INFO ceilometer.polling.manager [-] Polling pollster cpu in the context of pollsters
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.199 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/cpu volume: 12210000000 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'ab7de395-ad0d-491d-8481-8c2f0ef38aac', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'cpu', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 12210000000, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'timestamp': '2025-10-02T12:31:17.199437', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'instance-00000086', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'cpu_number': 1}, 'message_id': 'b1474d60-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.877272613, 'message_signature': 'de5d1e92061cbaea29db0b3ec1506911c5eca84addb4aecd3d223f7b907b11cf'}]}, 'timestamp': '2025-10-02 12:31:17.199743', '_unique_id': '834ba9cbf4124e22af16e001fde55c13'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.200 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.201 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.bytes in the context of pollsters
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.201 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk.device.read.bytes volume: 31009280 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.201 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk.device.read.bytes volume: 274750 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'ab28535f-b2e0-4aa9-99c4-2a46a1bdff05', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 31009280, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d-vda', 'timestamp': '2025-10-02T12:31:17.201326', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'instance-00000086', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'b1479842-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.801130955, 'message_signature': '9a5ce7798f10affb22e99f76121f582437c89ad23657e2e1313b7d4c27ac91f2'}, {'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 274750, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 
'resource_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d-sda', 'timestamp': '2025-10-02T12:31:17.201326', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'instance-00000086', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'b147a38c-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.801130955, 'message_signature': '784599dff8007e29733ec9a1d8aae31a203d1cb3937f6513522efc0e2210a371'}]}, 'timestamp': '2025-10-02 12:31:17.201952', '_unique_id': '24110553759a407fbdf8bfaf6ab7af5b'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.202 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.203 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes.delta in the context of pollsters
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.203 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/network.incoming.bytes.delta volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.203 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/network.incoming.bytes.delta volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'e50e6127-069d-4771-a2c1-07a701a3ca65', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.bytes.delta', 'counter_type': 'delta', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'instance-00000086-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-tap3aa2fa6d-ae', 'timestamp': '2025-10-02T12:31:17.203522', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'tap3aa2fa6d-ae', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:88:b5:22', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap3aa2fa6d-ae'}, 'message_id': 'b147ed10-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.844470958, 'message_signature': '1caf08dc0d2093ad00006bca8cf9f1549466a762c7a293cff625c14ea6d0c098'}, {'source': 'openstack', 'counter_name': 'network.incoming.bytes.delta', 'counter_type': 'delta', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': 
'97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'instance-00000086-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-tap2cddfcab-eb', 'timestamp': '2025-10-02T12:31:17.203522', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'tap2cddfcab-eb', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:7a:3b:f1', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap2cddfcab-eb'}, 'message_id': 'b147fa12-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.844470958, 'message_signature': 'd37162f94dd59ec98d78807ea034e6ef443122d91a6c75d1cf9230c08ae6e426'}]}, 'timestamp': '2025-10-02 12:31:17.204203', '_unique_id': 'cb3c03d15a394bbc8bff60a5a0a2e122'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.204 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.205 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets.drop in the context of pollsters
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.205 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/network.incoming.packets.drop volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.206 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/network.incoming.packets.drop volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'f27d04d0-dfdb-4af8-ba8f-0156100b9a08', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets.drop', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'instance-00000086-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-tap3aa2fa6d-ae', 'timestamp': '2025-10-02T12:31:17.205797', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'tap3aa2fa6d-ae', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:88:b5:22', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap3aa2fa6d-ae'}, 'message_id': 'b14845e4-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.844470958, 'message_signature': 'd3eaf30216a093424624ff8ce6109006a3561e00bd24a3ae2ca6429b84f35cc9'}, {'source': 'openstack', 'counter_name': 'network.incoming.packets.drop', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 
'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'instance-00000086-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-tap2cddfcab-eb', 'timestamp': '2025-10-02T12:31:17.205797', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'tap2cddfcab-eb', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:7a:3b:f1', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap2cddfcab-eb'}, 'message_id': 'b148550c-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.844470958, 'message_signature': '659c07d74bb5cbac558ed97c714676ace430161e6de5d908d63a984eb970dfc8'}]}, 'timestamp': '2025-10-02 12:31:17.206498', '_unique_id': '887a2f8f482047818e376088cbe804cc'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.207 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes.delta in the context of pollsters
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.208 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/network.outgoing.bytes.delta volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.208 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/network.outgoing.bytes.delta volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '48e2f0e0-a657-4283-86e6-1c3d30879755', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.bytes.delta', 'counter_type': 'delta', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'instance-00000086-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-tap3aa2fa6d-ae', 'timestamp': '2025-10-02T12:31:17.208062', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'tap3aa2fa6d-ae', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:88:b5:22', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap3aa2fa6d-ae'}, 'message_id': 'b1489eae-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.844470958, 'message_signature': '0becc6dd3ede8d49cf33559e41cba1663fab4dbc4062450627966f57dccc7d3f'}, {'source': 'openstack', 'counter_name': 'network.outgoing.bytes.delta', 'counter_type': 'delta', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': 
'97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'instance-00000086-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-tap2cddfcab-eb', 'timestamp': '2025-10-02T12:31:17.208062', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'tap2cddfcab-eb', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:7a:3b:f1', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap2cddfcab-eb'}, 'message_id': 'b148ac0a-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.844470958, 'message_signature': 'ae6496665ab5b48b22d6a008c3cd73bf5f6faed87a01bb2b25189d17b5989c81'}]}, 'timestamp': '2025-10-02 12:31:17.208720', '_unique_id': 'f754934d45084a668cad3deecc210fcf'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.209 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.210 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.latency in the context of pollsters
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.210 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for PerDeviceDiskLatencyPollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.210 12 ERROR ceilometer.polling.manager [-] Prevent pollster disk.device.latency from polling [<NovaLikeServer: tempest-TestGettingAddress-server-597864105>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-TestGettingAddress-server-597864105>]
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.210 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes.rate in the context of pollsters
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.210 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for OutgoingBytesRatePollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.210 12 ERROR ceilometer.polling.manager [-] Prevent pollster network.outgoing.bytes.rate from polling [<NovaLikeServer: tempest-TestGettingAddress-server-597864105>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-TestGettingAddress-server-597864105>]
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.211 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.usage in the context of pollsters
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.225 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk.device.usage volume: 30015488 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.226 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk.device.usage volume: 485376 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '8fd0445c-dbad-4533-9fd0-fed1f91b0ecb', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 30015488, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d-vda', 'timestamp': '2025-10-02T12:31:17.211195', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'instance-00000086', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'b14b5004-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.898262366, 'message_signature': '888c1ee88419dab89784fcef9ce97b16662c40000dabff53be8f1f78bb2034bd'}, {'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 485376, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 
'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d-sda', 'timestamp': '2025-10-02T12:31:17.211195', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'instance-00000086', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'b14b62c4-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.898262366, 'message_signature': 'b16febeecc1924ad797004fe83bf8e15eb31779bfb94cfacbdf0db9e62b67bb7'}]}, 'timestamp': '2025-10-02 12:31:17.226510', '_unique_id': '457dc5839efb4ea78a6a6339fb7948ea'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.227 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.228 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.allocation in the context of pollsters
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.228 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk.device.allocation volume: 30351360 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.228 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk.device.allocation volume: 487424 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '474c93dc-52a1-4c76-a859-9669d9ae196f', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 30351360, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d-vda', 'timestamp': '2025-10-02T12:31:17.228601', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'instance-00000086', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'b14bc0e8-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.898262366, 'message_signature': '593ac683447679bcfc50541338ba322df21577f9fe3c321ba5661bffeb1a9d1b'}, {'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 487424, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 
'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d-sda', 'timestamp': '2025-10-02T12:31:17.228601', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'instance-00000086', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'b14bce44-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.898262366, 'message_signature': '7f1ead408b49a373c46bc6f3f2c00a3c5d179e56020093d71c60e3f58c334347'}]}, 'timestamp': '2025-10-02 12:31:17.229275', '_unique_id': 'fdafcfd1ff4e477b9857a5e83af7baa6'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.229 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.230 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets.error in the context of pollsters
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.230 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/network.incoming.packets.error volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.231 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/network.incoming.packets.error volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '57291cb4-0b77-4b90-aa8d-c346144b21ba', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets.error', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'instance-00000086-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-tap3aa2fa6d-ae', 'timestamp': '2025-10-02T12:31:17.230865', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'tap3aa2fa6d-ae', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:88:b5:22', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap3aa2fa6d-ae'}, 'message_id': 'b14c1b1a-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.844470958, 'message_signature': 'cb3e9469bc2b8e9b5e1b7bed55171ecd4bd5e7106cbf56b7633ae54d835b8c5a'}, {'source': 'openstack', 'counter_name': 'network.incoming.packets.error', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 
'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'instance-00000086-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-tap2cddfcab-eb', 'timestamp': '2025-10-02T12:31:17.230865', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'tap2cddfcab-eb', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:7a:3b:f1', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap2cddfcab-eb'}, 'message_id': 'b14c2894-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.844470958, 'message_signature': 'd255353259bdca5d1dafbe87e0a3eee64d7f524e53e0c55b062e398c6e09e417'}]}, 'timestamp': '2025-10-02 12:31:17.231570', '_unique_id': '2d43d398d3ec4df7b1c0b665eda9c7f0'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.232 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.233 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets in the context of pollsters
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.233 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/network.incoming.packets volume: 28 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.233 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/network.incoming.packets volume: 10 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'd81ee008-3efe-4248-a6d3-661703b65f68', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 28, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'instance-00000086-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-tap3aa2fa6d-ae', 'timestamp': '2025-10-02T12:31:17.233199', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'tap3aa2fa6d-ae', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:88:b5:22', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap3aa2fa6d-ae'}, 'message_id': 'b14c74a2-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.844470958, 'message_signature': '229aba10d196c80aa2894d65d2f58dfd95487925fc602b3af93755fef544983a'}, {'source': 'openstack', 'counter_name': 'network.incoming.packets', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 10, 'user_id': 
'97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'instance-00000086-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-tap2cddfcab-eb', 'timestamp': '2025-10-02T12:31:17.233199', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'tap2cddfcab-eb', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:7a:3b:f1', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap2cddfcab-eb'}, 'message_id': 'b14c8014-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.844470958, 'message_signature': 'a7fa050cf9e0e48f90d40269b8248e9ede5f56dd9b7d4b10c9823482a1669015'}]}, 'timestamp': '2025-10-02 12:31:17.233805', '_unique_id': 'd4f19996467e4197a21a0cf917217bdc'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.234 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.235 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes.rate in the context of pollsters
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.235 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for IncomingBytesRatePollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.235 12 ERROR ceilometer.polling.manager [-] Prevent pollster network.incoming.bytes.rate from polling [<NovaLikeServer: tempest-TestGettingAddress-server-597864105>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-TestGettingAddress-server-597864105>]
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.235 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes in the context of pollsters
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.235 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/network.outgoing.bytes volume: 3390 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.236 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/network.outgoing.bytes volume: 2620 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '49f4a034-b5ff-46ca-a75e-a0d3e1a7732a', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 3390, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'instance-00000086-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-tap3aa2fa6d-ae', 'timestamp': '2025-10-02T12:31:17.235786', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'tap3aa2fa6d-ae', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:88:b5:22', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap3aa2fa6d-ae'}, 'message_id': 'b14cd956-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.844470958, 'message_signature': '6f4a1a2f569523f759f02bf270f53b7fe5b078e07050c6408378c93bee501422'}, {'source': 'openstack', 'counter_name': 'network.outgoing.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 2620, 'user_id': 
'97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'instance-00000086-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-tap2cddfcab-eb', 'timestamp': '2025-10-02T12:31:17.235786', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'tap2cddfcab-eb', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:7a:3b:f1', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap2cddfcab-eb'}, 'message_id': 'b14ce6ee-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.844470958, 'message_signature': '50a7a66bd0b893fd6e16abd032676e2abf18a06895972a59d9c7a2073e82f099'}]}, 'timestamp': '2025-10-02 12:31:17.236442', '_unique_id': '3ba0c02401304722bc1f4dfd4e50caa9'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.237 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.238 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes in the context of pollsters
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.238 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/network.incoming.bytes volume: 4495 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.238 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/network.incoming.bytes volume: 1072 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '2d54f64f-9c04-474f-bdf8-6ceba3aacc2e', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 4495, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'instance-00000086-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-tap3aa2fa6d-ae', 'timestamp': '2025-10-02T12:31:17.238326', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'tap3aa2fa6d-ae', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:88:b5:22', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap3aa2fa6d-ae'}, 'message_id': 'b14d3cac-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.844470958, 'message_signature': '79bf42b4784be80b414d437e8f766684e4648f56bd18fb1da11cf7a92ef9525c'}, {'source': 'openstack', 'counter_name': 'network.incoming.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 1072, 'user_id': 
'97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'instance-00000086-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-tap2cddfcab-eb', 'timestamp': '2025-10-02T12:31:17.238326', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'tap2cddfcab-eb', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:7a:3b:f1', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap2cddfcab-eb'}, 'message_id': 'b14d47ec-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.844470958, 'message_signature': '898fd51ea6a4c7cfd8b123145673e1bdd6e440f36527438d939a146acd1b0c0d'}]}, 'timestamp': '2025-10-02 12:31:17.238925', '_unique_id': '749d623aca974bd0824fec36b8d5db8c'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.239 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.240 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.requests in the context of pollsters
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.240 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk.device.write.requests volume: 315 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.240 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk.device.write.requests volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '9d27487a-287f-4937-9002-3184cb77120c', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 315, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d-vda', 'timestamp': '2025-10-02T12:31:17.240477', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'instance-00000086', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'b14d9062-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.801130955, 'message_signature': '60edd1e87d0ebbd80d24272aaabcb9f2ca8be2038b9c73a72fead446eafb2b1f'}, {'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 0, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 
'resource_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d-sda', 'timestamp': '2025-10-02T12:31:17.240477', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'instance-00000086', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'b14d9b02-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.801130955, 'message_signature': 'd72c577d9b77bcbacdeab70a91d2e8085594d9c96d7a69ecda19d616ba66461f'}]}, 'timestamp': '2025-10-02 12:31:17.241071', '_unique_id': '0181e0bf614e407fb0fcfd2a280518e0'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.241 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.242 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.capacity in the context of pollsters
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.242 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk.device.capacity volume: 1073741824 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.243 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk.device.capacity volume: 485376 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'b80891b9-f387-470b-92fa-5e6303c48d11', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 1073741824, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d-vda', 'timestamp': '2025-10-02T12:31:17.242867', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'instance-00000086', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'b14def1c-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.898262366, 'message_signature': '517e2b3ce6dd97db44c7a838ac69b3ada9327a11a8dd2c7837ba1f20b0c32efb'}, {'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 485376, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 
'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d-sda', 'timestamp': '2025-10-02T12:31:17.242867', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'instance-00000086', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'b14dfaa2-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.898262366, 'message_signature': '64f1c0278da691b88a28e97e3ebedfeb6a5a1e3ba621426a613723eda61a00b2'}]}, 'timestamp': '2025-10-02 12:31:17.243488', '_unique_id': 'a797a5737ae141b3965b8e40ca93bcc9'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.244 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.iops in the context of pollsters
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.245 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for PerDeviceDiskIOPSPollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.245 12 ERROR ceilometer.polling.manager [-] Prevent pollster disk.device.iops from polling [<NovaLikeServer: tempest-TestGettingAddress-server-597864105>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-TestGettingAddress-server-597864105>]
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.245 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.bytes in the context of pollsters
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.245 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk.device.write.bytes volume: 72974336 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.245 12 DEBUG ceilometer.compute.pollsters [-] d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk.device.write.bytes volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '670ac506-995a-42df-8532-884f9bb4c52a', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 72974336, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 'resource_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d-vda', 'timestamp': '2025-10-02T12:31:17.245583', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'instance-00000086', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': 'b14e5844-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.801130955, 'message_signature': '7729d6857a80377550919e5cfb7b54392e1c888595996685bdec2260a4929fb5'}, {'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '97ce9f1898484e0e9a1f7c84a9f0dfe3', 'user_name': None, 'project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'project_name': None, 
'resource_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d-sda', 'timestamp': '2025-10-02T12:31:17.245583', 'resource_metadata': {'display_name': 'tempest-TestGettingAddress-server-597864105', 'name': 'instance-00000086', 'instance_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'instance_type': 'm1.nano', 'host': '9c070ceddab253636965ad8325778035bf6bc157d278063a2ee37a8e', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': 'b14e656e-9f8b-11f0-af18-fa163efc5e78', 'monotonic_time': 6274.801130955, 'message_signature': '4aec852cc60cacecbb0c8655d9d0ac330bd34ddb95a01dfa64b0a80767d35238'}]}, 'timestamp': '2025-10-02 12:31:17.246225', '_unique_id': '3954241106be4c4b8c5bafa41140cc31'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:31:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:31:17.246 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:31:17 compute-0 nova_compute[192079]: 2025-10-02 12:31:17.284 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.008 2 DEBUG nova.network.neutron [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Updating instance_info_cache with network_info: [{"id": "7971997a-1f55-41fa-b77a-9c6fdaf497f7", "address": "fa:16:3e:6d:0d:5c", "network": {"id": "299c5e6b-f8b7-4cca-810b-a9b2539f4246", "bridge": "br-int", "label": "tempest-network-smoke--136367555", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap7971997a-1f", "ovs_interfaceid": "7971997a-1f55-41fa-b77a-9c6fdaf497f7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "482c766c-1462-47af-a801-a64e61f66109", "address": "fa:16:3e:06:76:22", "network": {"id": "e2520108-9d67-4d82-a7a0-ba429a88c3c9", "bridge": "br-int", "label": "tempest-network-smoke--1271498361", "subnets": [{"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe06:7622", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}, {"cidr": "2001:db8:0:1::/64", "dns": [], "gateway": {"address": "2001:db8:0:1::", 
"type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8:0:1:f816:3eff:fe06:7622", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap482c766c-14", "ovs_interfaceid": "482c766c-1462-47af-a801-a64e61f66109", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.044 2 DEBUG oslo_concurrency.lockutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Releasing lock "refresh_cache-607e9c3a-4079-4261-b2c6-3cc47ae67173" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.045 2 DEBUG nova.compute.manager [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Instance network_info: |[{"id": "7971997a-1f55-41fa-b77a-9c6fdaf497f7", "address": "fa:16:3e:6d:0d:5c", "network": {"id": "299c5e6b-f8b7-4cca-810b-a9b2539f4246", "bridge": "br-int", "label": "tempest-network-smoke--136367555", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap7971997a-1f", "ovs_interfaceid": "7971997a-1f55-41fa-b77a-9c6fdaf497f7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "482c766c-1462-47af-a801-a64e61f66109", "address": "fa:16:3e:06:76:22", "network": {"id": "e2520108-9d67-4d82-a7a0-ba429a88c3c9", "bridge": "br-int", "label": "tempest-network-smoke--1271498361", "subnets": [{"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe06:7622", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}, {"cidr": "2001:db8:0:1::/64", "dns": [], "gateway": {"address": "2001:db8:0:1::", "type": "gateway", "version": 
6, "meta": {}}, "ips": [{"address": "2001:db8:0:1:f816:3eff:fe06:7622", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap482c766c-14", "ovs_interfaceid": "482c766c-1462-47af-a801-a64e61f66109", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.046 2 DEBUG oslo_concurrency.lockutils [req-ad61ab29-bd6c-4f88-b512-3ffb60cef3cb req-1d6874e0-8ee2-4c3a-8fb4-a21692c2ddcf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-607e9c3a-4079-4261-b2c6-3cc47ae67173" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.046 2 DEBUG nova.network.neutron [req-ad61ab29-bd6c-4f88-b512-3ffb60cef3cb req-1d6874e0-8ee2-4c3a-8fb4-a21692c2ddcf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Refreshing network info cache for port 7971997a-1f55-41fa-b77a-9c6fdaf497f7 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.054 2 DEBUG nova.virt.libvirt.driver [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Start _get_guest_xml network_info=[{"id": "7971997a-1f55-41fa-b77a-9c6fdaf497f7", "address": "fa:16:3e:6d:0d:5c", "network": {"id": "299c5e6b-f8b7-4cca-810b-a9b2539f4246", "bridge": "br-int", "label": "tempest-network-smoke--136367555", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap7971997a-1f", "ovs_interfaceid": "7971997a-1f55-41fa-b77a-9c6fdaf497f7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "482c766c-1462-47af-a801-a64e61f66109", "address": "fa:16:3e:06:76:22", "network": {"id": "e2520108-9d67-4d82-a7a0-ba429a88c3c9", "bridge": "br-int", "label": "tempest-network-smoke--1271498361", "subnets": [{"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe06:7622", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}, {"cidr": "2001:db8:0:1::/64", "dns": [], "gateway": {"address": "2001:db8:0:1::", "type": 
"gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8:0:1:f816:3eff:fe06:7622", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap482c766c-14", "ovs_interfaceid": "482c766c-1462-47af-a801-a64e61f66109", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml 
/usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.055 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.062 2 WARNING nova.virt.libvirt.driver [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.067 2 DEBUG nova.virt.libvirt.host [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.069 2 DEBUG nova.virt.libvirt.host [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.081 2 DEBUG nova.virt.libvirt.host [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.082 2 DEBUG nova.virt.libvirt.host [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.085 2 DEBUG nova.virt.libvirt.driver [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.085 2 DEBUG nova.virt.hardware [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.086 2 DEBUG nova.virt.hardware [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.087 2 DEBUG nova.virt.hardware [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.087 2 DEBUG nova.virt.hardware [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.088 2 DEBUG nova.virt.hardware [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.089 2 DEBUG nova.virt.hardware [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.089 2 DEBUG nova.virt.hardware [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.090 2 DEBUG nova.virt.hardware [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.090 2 DEBUG nova.virt.hardware [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.091 2 DEBUG nova.virt.hardware [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.091 2 DEBUG nova.virt.hardware [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.097 2 DEBUG nova.virt.libvirt.vif [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:31:07Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestGettingAddress-server-2092283481',display_name='tempest-TestGettingAddress-server-2092283481',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testgettingaddress-server-2092283481',id=137,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBB1eGJz2x2NclizHY0y1KagfJt0/XSi4q477vmnTxhDjfgu4TS7ARmj4iaatPUQRUeuKdnCSa7aN8Y00iK3sldRns4TIy1xYmuZAKRi07Qnv9+MtEFMWHsOHXiIH+9Mk5Q==',key_name='tempest-TestGettingAddress-63209992',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='fd801958556f4c8aab047ecdef6b5ee8',ramdisk_id='',reservation_id='r-i0zbgkoj',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestGettingAddress-1355720650',owner_user_name='tempest-TestGettingAddress-1355720650-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:31:10Z,user_data=None,user_id='97ce9f1898484e0e9a1f7c84a9f0dfe3',uuid=607e9c3a-4079-4261-b2c6-3cc47ae67173,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "7971997a-1f55-41fa-b77a-9c6fdaf497f7", "address": "fa:16:3e:6d:0d:5c", "network": {"id": "299c5e6b-f8b7-4cca-810b-a9b2539f4246", "bridge": "br-int", "label": "tempest-network-smoke--136367555", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": 
false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap7971997a-1f", "ovs_interfaceid": "7971997a-1f55-41fa-b77a-9c6fdaf497f7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.098 2 DEBUG nova.network.os_vif_util [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converting VIF {"id": "7971997a-1f55-41fa-b77a-9c6fdaf497f7", "address": "fa:16:3e:6d:0d:5c", "network": {"id": "299c5e6b-f8b7-4cca-810b-a9b2539f4246", "bridge": "br-int", "label": "tempest-network-smoke--136367555", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap7971997a-1f", "ovs_interfaceid": "7971997a-1f55-41fa-b77a-9c6fdaf497f7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.099 2 DEBUG nova.network.os_vif_util [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:6d:0d:5c,bridge_name='br-int',has_traffic_filtering=True,id=7971997a-1f55-41fa-b77a-9c6fdaf497f7,network=Network(299c5e6b-f8b7-4cca-810b-a9b2539f4246),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap7971997a-1f') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.101 2 DEBUG nova.virt.libvirt.vif [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:31:07Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestGettingAddress-server-2092283481',display_name='tempest-TestGettingAddress-server-2092283481',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testgettingaddress-server-2092283481',id=137,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBB1eGJz2x2NclizHY0y1KagfJt0/XSi4q477vmnTxhDjfgu4TS7ARmj4iaatPUQRUeuKdnCSa7aN8Y00iK3sldRns4TIy1xYmuZAKRi07Qnv9+MtEFMWHsOHXiIH+9Mk5Q==',key_name='tempest-TestGettingAddress-63209992',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='fd801958556f4c8aab047ecdef6b5ee8',ramdisk_id='',reservation_id='r-i0zbgkoj',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestGettingAddress-1355720650',owner_user_name='tempest-TestGettingAddress-1355720650-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:31:10Z,user_data=None,user_id='97ce9f1898484e0e9a1f7c84a9f0dfe3',uuid=607e9c3a-4079-4261-b2c6-3cc47ae67173,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "482c766c-1462-47af-a801-a64e61f66109", "address": "fa:16:3e:06:76:22", "network": {"id": "e2520108-9d67-4d82-a7a0-ba429a88c3c9", "bridge": "br-int", "label": "tempest-network-smoke--1271498361", "subnets": [{"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe06:7622", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, 
"ipv6_address_mode": "slaac"}}, {"cidr": "2001:db8:0:1::/64", "dns": [], "gateway": {"address": "2001:db8:0:1::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8:0:1:f816:3eff:fe06:7622", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap482c766c-14", "ovs_interfaceid": "482c766c-1462-47af-a801-a64e61f66109", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.101 2 DEBUG nova.network.os_vif_util [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converting VIF {"id": "482c766c-1462-47af-a801-a64e61f66109", "address": "fa:16:3e:06:76:22", "network": {"id": "e2520108-9d67-4d82-a7a0-ba429a88c3c9", "bridge": "br-int", "label": "tempest-network-smoke--1271498361", "subnets": [{"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe06:7622", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}, {"cidr": "2001:db8:0:1::/64", "dns": [], "gateway": {"address": "2001:db8:0:1::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8:0:1:f816:3eff:fe06:7622", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap482c766c-14", "ovs_interfaceid": "482c766c-1462-47af-a801-a64e61f66109", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.103 2 DEBUG nova.network.os_vif_util [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:06:76:22,bridge_name='br-int',has_traffic_filtering=True,id=482c766c-1462-47af-a801-a64e61f66109,network=Network(e2520108-9d67-4d82-a7a0-ba429a88c3c9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap482c766c-14') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.104 2 DEBUG nova.objects.instance [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lazy-loading 'pci_devices' on Instance uuid 607e9c3a-4079-4261-b2c6-3cc47ae67173 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.124 2 DEBUG nova.virt.libvirt.driver [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:31:19 compute-0 nova_compute[192079]:   <uuid>607e9c3a-4079-4261-b2c6-3cc47ae67173</uuid>
Oct 02 12:31:19 compute-0 nova_compute[192079]:   <name>instance-00000089</name>
Oct 02 12:31:19 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:31:19 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:31:19 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <nova:name>tempest-TestGettingAddress-server-2092283481</nova:name>
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:31:19</nova:creationTime>
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:31:19 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:31:19 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:31:19 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:31:19 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:31:19 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:31:19 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:31:19 compute-0 nova_compute[192079]:         <nova:user uuid="97ce9f1898484e0e9a1f7c84a9f0dfe3">tempest-TestGettingAddress-1355720650-project-member</nova:user>
Oct 02 12:31:19 compute-0 nova_compute[192079]:         <nova:project uuid="fd801958556f4c8aab047ecdef6b5ee8">tempest-TestGettingAddress-1355720650</nova:project>
Oct 02 12:31:19 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:31:19 compute-0 nova_compute[192079]:         <nova:port uuid="7971997a-1f55-41fa-b77a-9c6fdaf497f7">
Oct 02 12:31:19 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.11" ipVersion="4"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:31:19 compute-0 nova_compute[192079]:         <nova:port uuid="482c766c-1462-47af-a801-a64e61f66109">
Oct 02 12:31:19 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="2001:db8::f816:3eff:fe06:7622" ipVersion="6"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="2001:db8:0:1:f816:3eff:fe06:7622" ipVersion="6"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:31:19 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:31:19 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:31:19 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <system>
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <entry name="serial">607e9c3a-4079-4261-b2c6-3cc47ae67173</entry>
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <entry name="uuid">607e9c3a-4079-4261-b2c6-3cc47ae67173</entry>
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     </system>
Oct 02 12:31:19 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:31:19 compute-0 nova_compute[192079]:   <os>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:   </os>
Oct 02 12:31:19 compute-0 nova_compute[192079]:   <features>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:   </features>
Oct 02 12:31:19 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:31:19 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:31:19 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/607e9c3a-4079-4261-b2c6-3cc47ae67173/disk"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/607e9c3a-4079-4261-b2c6-3cc47ae67173/disk.config"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:6d:0d:5c"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <target dev="tap7971997a-1f"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:06:76:22"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <target dev="tap482c766c-14"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/607e9c3a-4079-4261-b2c6-3cc47ae67173/console.log" append="off"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <video>
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     </video>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:31:19 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:31:19 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:31:19 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:31:19 compute-0 nova_compute[192079]: </domain>
Oct 02 12:31:19 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.126 2 DEBUG nova.compute.manager [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Preparing to wait for external event network-vif-plugged-7971997a-1f55-41fa-b77a-9c6fdaf497f7 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.126 2 DEBUG oslo_concurrency.lockutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.127 2 DEBUG oslo_concurrency.lockutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.127 2 DEBUG oslo_concurrency.lockutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.128 2 DEBUG nova.compute.manager [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Preparing to wait for external event network-vif-plugged-482c766c-1462-47af-a801-a64e61f66109 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.128 2 DEBUG oslo_concurrency.lockutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.128 2 DEBUG oslo_concurrency.lockutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.129 2 DEBUG oslo_concurrency.lockutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.130 2 DEBUG nova.virt.libvirt.vif [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:31:07Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestGettingAddress-server-2092283481',display_name='tempest-TestGettingAddress-server-2092283481',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testgettingaddress-server-2092283481',id=137,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBB1eGJz2x2NclizHY0y1KagfJt0/XSi4q477vmnTxhDjfgu4TS7ARmj4iaatPUQRUeuKdnCSa7aN8Y00iK3sldRns4TIy1xYmuZAKRi07Qnv9+MtEFMWHsOHXiIH+9Mk5Q==',key_name='tempest-TestGettingAddress-63209992',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='fd801958556f4c8aab047ecdef6b5ee8',ramdisk_id='',reservation_id='r-i0zbgkoj',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestGettingAddress-1355720650',owner_user_name='tempest-TestGettingAddress-1355720650-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:31:10Z,user_data=None,user_id='97ce9f1898484e0e9a1f7c84a9f0dfe3',uuid=607e9c3a-4079-4261-b2c6-3cc47ae67173,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "7971997a-1f55-41fa-b77a-9c6fdaf497f7", "address": "fa:16:3e:6d:0d:5c", "network": {"id": "299c5e6b-f8b7-4cca-810b-a9b2539f4246", "bridge": "br-int", "label": "tempest-network-smoke--136367555", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": 
{"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap7971997a-1f", "ovs_interfaceid": "7971997a-1f55-41fa-b77a-9c6fdaf497f7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.131 2 DEBUG nova.network.os_vif_util [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converting VIF {"id": "7971997a-1f55-41fa-b77a-9c6fdaf497f7", "address": "fa:16:3e:6d:0d:5c", "network": {"id": "299c5e6b-f8b7-4cca-810b-a9b2539f4246", "bridge": "br-int", "label": "tempest-network-smoke--136367555", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap7971997a-1f", "ovs_interfaceid": "7971997a-1f55-41fa-b77a-9c6fdaf497f7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.132 2 DEBUG nova.network.os_vif_util [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:6d:0d:5c,bridge_name='br-int',has_traffic_filtering=True,id=7971997a-1f55-41fa-b77a-9c6fdaf497f7,network=Network(299c5e6b-f8b7-4cca-810b-a9b2539f4246),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap7971997a-1f') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.132 2 DEBUG os_vif [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:6d:0d:5c,bridge_name='br-int',has_traffic_filtering=True,id=7971997a-1f55-41fa-b77a-9c6fdaf497f7,network=Network(299c5e6b-f8b7-4cca-810b-a9b2539f4246),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap7971997a-1f') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.133 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.134 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.134 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.138 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.139 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap7971997a-1f, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.139 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap7971997a-1f, col_values=(('external_ids', {'iface-id': '7971997a-1f55-41fa-b77a-9c6fdaf497f7', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:6d:0d:5c', 'vm-uuid': '607e9c3a-4079-4261-b2c6-3cc47ae67173'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.141 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:19 compute-0 NetworkManager[51160]: <info>  [1759408279.1434] manager: (tap7971997a-1f): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/257)
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.144 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.152 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.154 2 INFO os_vif [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:6d:0d:5c,bridge_name='br-int',has_traffic_filtering=True,id=7971997a-1f55-41fa-b77a-9c6fdaf497f7,network=Network(299c5e6b-f8b7-4cca-810b-a9b2539f4246),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap7971997a-1f')
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.155 2 DEBUG nova.virt.libvirt.vif [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:31:07Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestGettingAddress-server-2092283481',display_name='tempest-TestGettingAddress-server-2092283481',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testgettingaddress-server-2092283481',id=137,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBB1eGJz2x2NclizHY0y1KagfJt0/XSi4q477vmnTxhDjfgu4TS7ARmj4iaatPUQRUeuKdnCSa7aN8Y00iK3sldRns4TIy1xYmuZAKRi07Qnv9+MtEFMWHsOHXiIH+9Mk5Q==',key_name='tempest-TestGettingAddress-63209992',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='fd801958556f4c8aab047ecdef6b5ee8',ramdisk_id='',reservation_id='r-i0zbgkoj',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestGettingAddress-1355720650',owner_user_name='tempest-TestGettingAddress-1355720650-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:31:10Z,user_data=None,user_id='97ce9f1898484e0e9a1f7c84a9f0dfe3',uuid=607e9c3a-4079-4261-b2c6-3cc47ae67173,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "482c766c-1462-47af-a801-a64e61f66109", "address": "fa:16:3e:06:76:22", "network": {"id": "e2520108-9d67-4d82-a7a0-ba429a88c3c9", "bridge": "br-int", "label": "tempest-network-smoke--1271498361", "subnets": [{"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe06:7622", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": 
true, "ipv6_address_mode": "slaac"}}, {"cidr": "2001:db8:0:1::/64", "dns": [], "gateway": {"address": "2001:db8:0:1::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8:0:1:f816:3eff:fe06:7622", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap482c766c-14", "ovs_interfaceid": "482c766c-1462-47af-a801-a64e61f66109", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.156 2 DEBUG nova.network.os_vif_util [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converting VIF {"id": "482c766c-1462-47af-a801-a64e61f66109", "address": "fa:16:3e:06:76:22", "network": {"id": "e2520108-9d67-4d82-a7a0-ba429a88c3c9", "bridge": "br-int", "label": "tempest-network-smoke--1271498361", "subnets": [{"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe06:7622", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}, {"cidr": "2001:db8:0:1::/64", "dns": [], "gateway": {"address": "2001:db8:0:1::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8:0:1:f816:3eff:fe06:7622", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap482c766c-14", "ovs_interfaceid": "482c766c-1462-47af-a801-a64e61f66109", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.157 2 DEBUG nova.network.os_vif_util [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:06:76:22,bridge_name='br-int',has_traffic_filtering=True,id=482c766c-1462-47af-a801-a64e61f66109,network=Network(e2520108-9d67-4d82-a7a0-ba429a88c3c9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap482c766c-14') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.158 2 DEBUG os_vif [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:06:76:22,bridge_name='br-int',has_traffic_filtering=True,id=482c766c-1462-47af-a801-a64e61f66109,network=Network(e2520108-9d67-4d82-a7a0-ba429a88c3c9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap482c766c-14') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.159 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.160 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.160 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.163 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.164 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap482c766c-14, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.165 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap482c766c-14, col_values=(('external_ids', {'iface-id': '482c766c-1462-47af-a801-a64e61f66109', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:06:76:22', 'vm-uuid': '607e9c3a-4079-4261-b2c6-3cc47ae67173'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.167 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:19 compute-0 NetworkManager[51160]: <info>  [1759408279.1685] manager: (tap482c766c-14): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/258)
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.171 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.175 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.176 2 INFO os_vif [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:06:76:22,bridge_name='br-int',has_traffic_filtering=True,id=482c766c-1462-47af-a801-a64e61f66109,network=Network(e2520108-9d67-4d82-a7a0-ba429a88c3c9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap482c766c-14')
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.318 2 DEBUG nova.virt.libvirt.driver [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.319 2 DEBUG nova.virt.libvirt.driver [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.319 2 DEBUG nova.virt.libvirt.driver [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] No VIF found with MAC fa:16:3e:6d:0d:5c, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.319 2 DEBUG nova.virt.libvirt.driver [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] No VIF found with MAC fa:16:3e:06:76:22, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.320 2 INFO nova.virt.libvirt.driver [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Using config drive
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.968 2 INFO nova.virt.libvirt.driver [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Creating config drive at /var/lib/nova/instances/607e9c3a-4079-4261-b2c6-3cc47ae67173/disk.config
Oct 02 12:31:19 compute-0 nova_compute[192079]: 2025-10-02 12:31:19.975 2 DEBUG oslo_concurrency.processutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/607e9c3a-4079-4261-b2c6-3cc47ae67173/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpz1xte1rh execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:31:20 compute-0 nova_compute[192079]: 2025-10-02 12:31:20.110 2 DEBUG oslo_concurrency.processutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/607e9c3a-4079-4261-b2c6-3cc47ae67173/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpz1xte1rh" returned: 0 in 0.135s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:31:20 compute-0 NetworkManager[51160]: <info>  [1759408280.1933] manager: (tap7971997a-1f): new Tun device (/org/freedesktop/NetworkManager/Devices/259)
Oct 02 12:31:20 compute-0 kernel: tap7971997a-1f: entered promiscuous mode
Oct 02 12:31:20 compute-0 ovn_controller[94336]: 2025-10-02T12:31:20Z|00509|binding|INFO|Claiming lport 7971997a-1f55-41fa-b77a-9c6fdaf497f7 for this chassis.
Oct 02 12:31:20 compute-0 ovn_controller[94336]: 2025-10-02T12:31:20Z|00510|binding|INFO|7971997a-1f55-41fa-b77a-9c6fdaf497f7: Claiming fa:16:3e:6d:0d:5c 10.100.0.11
Oct 02 12:31:20 compute-0 nova_compute[192079]: 2025-10-02 12:31:20.198 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:20.204 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:6d:0d:5c 10.100.0.11'], port_security=['fa:16:3e:6d:0d:5c 10.100.0.11'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.11/28', 'neutron:device_id': '607e9c3a-4079-4261-b2c6-3cc47ae67173', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-299c5e6b-f8b7-4cca-810b-a9b2539f4246', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'neutron:revision_number': '2', 'neutron:security_group_ids': 'f706c8b6-b68a-48d8-b578-b0c81b519c8e', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=4a9348f4-eede-4266-8396-8c521ea59fc0, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=7971997a-1f55-41fa-b77a-9c6fdaf497f7) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:31:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:20.206 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 7971997a-1f55-41fa-b77a-9c6fdaf497f7 in datapath 299c5e6b-f8b7-4cca-810b-a9b2539f4246 bound to our chassis
Oct 02 12:31:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:20.207 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 299c5e6b-f8b7-4cca-810b-a9b2539f4246
Oct 02 12:31:20 compute-0 NetworkManager[51160]: <info>  [1759408280.2108] manager: (tap482c766c-14): new Tun device (/org/freedesktop/NetworkManager/Devices/260)
Oct 02 12:31:20 compute-0 kernel: tap482c766c-14: entered promiscuous mode
Oct 02 12:31:20 compute-0 systemd-udevd[243637]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:31:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:20.224 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9d0ee896-130c-475d-9318-aa53aa2a77dd]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:20 compute-0 nova_compute[192079]: 2025-10-02 12:31:20.224 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:20 compute-0 systemd-udevd[243638]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:31:20 compute-0 ovn_controller[94336]: 2025-10-02T12:31:20Z|00511|binding|INFO|Claiming lport 482c766c-1462-47af-a801-a64e61f66109 for this chassis.
Oct 02 12:31:20 compute-0 ovn_controller[94336]: 2025-10-02T12:31:20Z|00512|binding|INFO|482c766c-1462-47af-a801-a64e61f66109: Claiming fa:16:3e:06:76:22 2001:db8:0:1:f816:3eff:fe06:7622 2001:db8::f816:3eff:fe06:7622
Oct 02 12:31:20 compute-0 ovn_controller[94336]: 2025-10-02T12:31:20Z|00513|binding|INFO|Setting lport 7971997a-1f55-41fa-b77a-9c6fdaf497f7 ovn-installed in OVS
Oct 02 12:31:20 compute-0 ovn_controller[94336]: 2025-10-02T12:31:20Z|00514|binding|INFO|Setting lport 7971997a-1f55-41fa-b77a-9c6fdaf497f7 up in Southbound
Oct 02 12:31:20 compute-0 nova_compute[192079]: 2025-10-02 12:31:20.228 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:20.235 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:06:76:22 2001:db8:0:1:f816:3eff:fe06:7622 2001:db8::f816:3eff:fe06:7622'], port_security=['fa:16:3e:06:76:22 2001:db8:0:1:f816:3eff:fe06:7622 2001:db8::f816:3eff:fe06:7622'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '2001:db8:0:1:f816:3eff:fe06:7622/64 2001:db8::f816:3eff:fe06:7622/64', 'neutron:device_id': '607e9c3a-4079-4261-b2c6-3cc47ae67173', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-e2520108-9d67-4d82-a7a0-ba429a88c3c9', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'neutron:revision_number': '2', 'neutron:security_group_ids': 'f706c8b6-b68a-48d8-b578-b0c81b519c8e', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=876a7f58-2645-4e1a-8a60-dbbe16fdfb2e, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=482c766c-1462-47af-a801-a64e61f66109) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:31:20 compute-0 NetworkManager[51160]: <info>  [1759408280.2448] device (tap7971997a-1f): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:31:20 compute-0 NetworkManager[51160]: <info>  [1759408280.2459] device (tap7971997a-1f): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:31:20 compute-0 ovn_controller[94336]: 2025-10-02T12:31:20Z|00515|binding|INFO|Setting lport 482c766c-1462-47af-a801-a64e61f66109 ovn-installed in OVS
Oct 02 12:31:20 compute-0 ovn_controller[94336]: 2025-10-02T12:31:20Z|00516|binding|INFO|Setting lport 482c766c-1462-47af-a801-a64e61f66109 up in Southbound
Oct 02 12:31:20 compute-0 NetworkManager[51160]: <info>  [1759408280.2465] device (tap482c766c-14): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:31:20 compute-0 NetworkManager[51160]: <info>  [1759408280.2472] device (tap482c766c-14): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:31:20 compute-0 nova_compute[192079]: 2025-10-02 12:31:20.248 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:20 compute-0 systemd-machined[152150]: New machine qemu-67-instance-00000089.
Oct 02 12:31:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:20.262 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[c1b062b9-3936-42e8-879d-36c846162a44]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:20.266 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[56315251-8f0b-4ce0-8264-fe058b410c39]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:20 compute-0 systemd[1]: Started Virtual Machine qemu-67-instance-00000089.
Oct 02 12:31:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:20.296 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[dca1a765-f8eb-476d-b008-4cffb5144b43]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:20.314 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[dcc28f3f-820d-49d4-a103-b762ca2fba94]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap299c5e6b-f1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:da:24:47'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 10, 'tx_packets': 5, 'rx_bytes': 916, 'tx_bytes': 354, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 10, 'tx_packets': 5, 'rx_bytes': 916, 'tx_bytes': 354, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 162], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 623817, 'reachable_time': 16588, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 8, 'inoctets': 720, 'indelivers': 1, 'outforwdatagrams': 0, 'outpkts': 3, 'outoctets': 228, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 8, 'outmcastpkts': 3, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 720, 'outmcastoctets': 228, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 8, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 1, 'inerrors': 0, 'outmsgs': 3, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 243651, 'error': None, 'target': 'ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:20.331 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9ad924cd-7e0c-4e43-997b-5e68be127ee5]: (4, ({'family': 2, 'prefixlen': 28, 'flags': 128, 'scope': 0, 'index': 2, 'attrs': [['IFA_ADDRESS', '10.100.0.2'], ['IFA_LOCAL', '10.100.0.2'], ['IFA_BROADCAST', '10.100.0.15'], ['IFA_LABEL', 'tap299c5e6b-f1'], ['IFA_FLAGS', 128], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 623826, 'tstamp': 623826}]], 'header': {'length': 96, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 243654, 'error': None, 'target': 'ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'}, {'family': 2, 'prefixlen': 32, 'flags': 128, 'scope': 0, 'index': 2, 'attrs': [['IFA_ADDRESS', '169.254.169.254'], ['IFA_LOCAL', '169.254.169.254'], ['IFA_BROADCAST', '169.254.169.254'], ['IFA_LABEL', 'tap299c5e6b-f1'], ['IFA_FLAGS', 128], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 623829, 'tstamp': 623829}]], 'header': {'length': 96, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 243654, 'error': None, 'target': 'ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'})) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:20.333 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap299c5e6b-f0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:31:20 compute-0 nova_compute[192079]: 2025-10-02 12:31:20.335 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:20 compute-0 nova_compute[192079]: 2025-10-02 12:31:20.336 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:20.336 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap299c5e6b-f0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:31:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:20.337 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:31:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:20.337 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap299c5e6b-f0, col_values=(('external_ids', {'iface-id': '6e8b6bdc-2d9f-47a8-8b24-8ce9ea993d7a'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:31:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:20.338 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:31:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:20.339 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 482c766c-1462-47af-a801-a64e61f66109 in datapath e2520108-9d67-4d82-a7a0-ba429a88c3c9 unbound from our chassis
Oct 02 12:31:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:20.341 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network e2520108-9d67-4d82-a7a0-ba429a88c3c9
Oct 02 12:31:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:20.357 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[12ade355-2a80-4bcc-9c84-27c340f30218]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:20.390 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[8bc8f402-2bdd-4a09-bd7c-b76abb31bb7e]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:20.393 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[f3b47019-7d03-4b0e-8294-fb95a8415e8a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:20.423 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[bbfdcc0a-9314-4ca9-a0ce-177b6cfd58bf]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:20.442 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1de44cb7-624a-484f-a173-2ba4da9ee76a]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tape2520108-91'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:2e:31:73'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 24, 'tx_packets': 4, 'rx_bytes': 2216, 'tx_bytes': 312, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 24, 'tx_packets': 4, 'rx_bytes': 2216, 'tx_bytes': 312, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 
0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 163], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 623896, 'reachable_time': 15377, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 24, 'inoctets': 1880, 'indelivers': 7, 'outforwdatagrams': 0, 'outpkts': 3, 'outoctets': 228, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 24, 'outmcastpkts': 3, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 1880, 'outmcastoctets': 228, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 24, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 7, 'inerrors': 0, 'outmsgs': 3, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 243661, 'error': None, 'target': 'ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:20.460 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[48864a0e-2824-46a1-b3e5-19fbfb34266d]: (4, ({'family': 2, 'prefixlen': 32, 'flags': 128, 'scope': 0, 'index': 2, 'attrs': [['IFA_ADDRESS', '169.254.169.254'], ['IFA_LOCAL', '169.254.169.254'], ['IFA_BROADCAST', '169.254.169.254'], ['IFA_LABEL', 'tape2520108-91'], ['IFA_FLAGS', 128], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 623910, 'tstamp': 623910}]], 'header': {'length': 96, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 243662, 'error': None, 'target': 'ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:20.461 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tape2520108-90, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:31:20 compute-0 nova_compute[192079]: 2025-10-02 12:31:20.462 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:20 compute-0 nova_compute[192079]: 2025-10-02 12:31:20.463 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:20.464 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tape2520108-90, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:31:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:20.464 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:31:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:20.464 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tape2520108-90, col_values=(('external_ids', {'iface-id': '3eb0ed9e-d99b-4ee6-af64-ada9c8369b17'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:31:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:20.464 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:31:21 compute-0 nova_compute[192079]: 2025-10-02 12:31:21.115 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408281.114444, 607e9c3a-4079-4261-b2c6-3cc47ae67173 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:31:21 compute-0 nova_compute[192079]: 2025-10-02 12:31:21.115 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] VM Started (Lifecycle Event)
Oct 02 12:31:21 compute-0 nova_compute[192079]: 2025-10-02 12:31:21.139 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:31:21 compute-0 nova_compute[192079]: 2025-10-02 12:31:21.144 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408281.117833, 607e9c3a-4079-4261-b2c6-3cc47ae67173 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:31:21 compute-0 nova_compute[192079]: 2025-10-02 12:31:21.144 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] VM Paused (Lifecycle Event)
Oct 02 12:31:21 compute-0 nova_compute[192079]: 2025-10-02 12:31:21.163 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:31:21 compute-0 nova_compute[192079]: 2025-10-02 12:31:21.167 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:31:21 compute-0 nova_compute[192079]: 2025-10-02 12:31:21.184 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:31:21 compute-0 nova_compute[192079]: 2025-10-02 12:31:21.205 2 DEBUG nova.network.neutron [req-ad61ab29-bd6c-4f88-b512-3ffb60cef3cb req-1d6874e0-8ee2-4c3a-8fb4-a21692c2ddcf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Updated VIF entry in instance network info cache for port 7971997a-1f55-41fa-b77a-9c6fdaf497f7. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:31:21 compute-0 nova_compute[192079]: 2025-10-02 12:31:21.206 2 DEBUG nova.network.neutron [req-ad61ab29-bd6c-4f88-b512-3ffb60cef3cb req-1d6874e0-8ee2-4c3a-8fb4-a21692c2ddcf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Updating instance_info_cache with network_info: [{"id": "7971997a-1f55-41fa-b77a-9c6fdaf497f7", "address": "fa:16:3e:6d:0d:5c", "network": {"id": "299c5e6b-f8b7-4cca-810b-a9b2539f4246", "bridge": "br-int", "label": "tempest-network-smoke--136367555", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap7971997a-1f", "ovs_interfaceid": "7971997a-1f55-41fa-b77a-9c6fdaf497f7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "482c766c-1462-47af-a801-a64e61f66109", "address": "fa:16:3e:06:76:22", "network": {"id": "e2520108-9d67-4d82-a7a0-ba429a88c3c9", "bridge": "br-int", "label": "tempest-network-smoke--1271498361", "subnets": [{"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe06:7622", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}, {"cidr": "2001:db8:0:1::/64", "dns": [], "gateway": 
{"address": "2001:db8:0:1::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8:0:1:f816:3eff:fe06:7622", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap482c766c-14", "ovs_interfaceid": "482c766c-1462-47af-a801-a64e61f66109", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:31:21 compute-0 nova_compute[192079]: 2025-10-02 12:31:21.224 2 DEBUG oslo_concurrency.lockutils [req-ad61ab29-bd6c-4f88-b512-3ffb60cef3cb req-1d6874e0-8ee2-4c3a-8fb4-a21692c2ddcf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-607e9c3a-4079-4261-b2c6-3cc47ae67173" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:31:21 compute-0 nova_compute[192079]: 2025-10-02 12:31:21.224 2 DEBUG nova.compute.manager [req-ad61ab29-bd6c-4f88-b512-3ffb60cef3cb req-1d6874e0-8ee2-4c3a-8fb4-a21692c2ddcf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Received event network-changed-482c766c-1462-47af-a801-a64e61f66109 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:31:21 compute-0 nova_compute[192079]: 2025-10-02 12:31:21.225 2 DEBUG nova.compute.manager [req-ad61ab29-bd6c-4f88-b512-3ffb60cef3cb req-1d6874e0-8ee2-4c3a-8fb4-a21692c2ddcf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Refreshing instance network info cache due to event network-changed-482c766c-1462-47af-a801-a64e61f66109. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:31:21 compute-0 nova_compute[192079]: 2025-10-02 12:31:21.225 2 DEBUG oslo_concurrency.lockutils [req-ad61ab29-bd6c-4f88-b512-3ffb60cef3cb req-1d6874e0-8ee2-4c3a-8fb4-a21692c2ddcf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-607e9c3a-4079-4261-b2c6-3cc47ae67173" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:31:21 compute-0 nova_compute[192079]: 2025-10-02 12:31:21.225 2 DEBUG oslo_concurrency.lockutils [req-ad61ab29-bd6c-4f88-b512-3ffb60cef3cb req-1d6874e0-8ee2-4c3a-8fb4-a21692c2ddcf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-607e9c3a-4079-4261-b2c6-3cc47ae67173" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:31:21 compute-0 nova_compute[192079]: 2025-10-02 12:31:21.226 2 DEBUG nova.network.neutron [req-ad61ab29-bd6c-4f88-b512-3ffb60cef3cb req-1d6874e0-8ee2-4c3a-8fb4-a21692c2ddcf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Refreshing network info cache for port 482c766c-1462-47af-a801-a64e61f66109 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.126 2 DEBUG nova.compute.manager [req-7efc88bf-bfbc-49f3-9637-766350e07d0b req-4341db8c-8b33-4b6e-aaff-bcef991d65a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Received event network-vif-plugged-7971997a-1f55-41fa-b77a-9c6fdaf497f7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.127 2 DEBUG oslo_concurrency.lockutils [req-7efc88bf-bfbc-49f3-9637-766350e07d0b req-4341db8c-8b33-4b6e-aaff-bcef991d65a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.127 2 DEBUG oslo_concurrency.lockutils [req-7efc88bf-bfbc-49f3-9637-766350e07d0b req-4341db8c-8b33-4b6e-aaff-bcef991d65a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.127 2 DEBUG oslo_concurrency.lockutils [req-7efc88bf-bfbc-49f3-9637-766350e07d0b req-4341db8c-8b33-4b6e-aaff-bcef991d65a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.128 2 DEBUG nova.compute.manager [req-7efc88bf-bfbc-49f3-9637-766350e07d0b req-4341db8c-8b33-4b6e-aaff-bcef991d65a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Processing event network-vif-plugged-7971997a-1f55-41fa-b77a-9c6fdaf497f7 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.128 2 DEBUG nova.compute.manager [req-7efc88bf-bfbc-49f3-9637-766350e07d0b req-4341db8c-8b33-4b6e-aaff-bcef991d65a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Received event network-vif-plugged-7971997a-1f55-41fa-b77a-9c6fdaf497f7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.128 2 DEBUG oslo_concurrency.lockutils [req-7efc88bf-bfbc-49f3-9637-766350e07d0b req-4341db8c-8b33-4b6e-aaff-bcef991d65a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.128 2 DEBUG oslo_concurrency.lockutils [req-7efc88bf-bfbc-49f3-9637-766350e07d0b req-4341db8c-8b33-4b6e-aaff-bcef991d65a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.128 2 DEBUG oslo_concurrency.lockutils [req-7efc88bf-bfbc-49f3-9637-766350e07d0b req-4341db8c-8b33-4b6e-aaff-bcef991d65a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.128 2 DEBUG nova.compute.manager [req-7efc88bf-bfbc-49f3-9637-766350e07d0b req-4341db8c-8b33-4b6e-aaff-bcef991d65a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] No event matching network-vif-plugged-7971997a-1f55-41fa-b77a-9c6fdaf497f7 in dict_keys([('network-vif-plugged', '482c766c-1462-47af-a801-a64e61f66109')]) pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:325
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.129 2 WARNING nova.compute.manager [req-7efc88bf-bfbc-49f3-9637-766350e07d0b req-4341db8c-8b33-4b6e-aaff-bcef991d65a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Received unexpected event network-vif-plugged-7971997a-1f55-41fa-b77a-9c6fdaf497f7 for instance with vm_state building and task_state spawning.
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.129 2 DEBUG nova.compute.manager [req-7efc88bf-bfbc-49f3-9637-766350e07d0b req-4341db8c-8b33-4b6e-aaff-bcef991d65a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Received event network-vif-plugged-482c766c-1462-47af-a801-a64e61f66109 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.129 2 DEBUG oslo_concurrency.lockutils [req-7efc88bf-bfbc-49f3-9637-766350e07d0b req-4341db8c-8b33-4b6e-aaff-bcef991d65a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.129 2 DEBUG oslo_concurrency.lockutils [req-7efc88bf-bfbc-49f3-9637-766350e07d0b req-4341db8c-8b33-4b6e-aaff-bcef991d65a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.129 2 DEBUG oslo_concurrency.lockutils [req-7efc88bf-bfbc-49f3-9637-766350e07d0b req-4341db8c-8b33-4b6e-aaff-bcef991d65a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.129 2 DEBUG nova.compute.manager [req-7efc88bf-bfbc-49f3-9637-766350e07d0b req-4341db8c-8b33-4b6e-aaff-bcef991d65a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Processing event network-vif-plugged-482c766c-1462-47af-a801-a64e61f66109 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.129 2 DEBUG nova.compute.manager [req-7efc88bf-bfbc-49f3-9637-766350e07d0b req-4341db8c-8b33-4b6e-aaff-bcef991d65a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Received event network-vif-plugged-482c766c-1462-47af-a801-a64e61f66109 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.130 2 DEBUG oslo_concurrency.lockutils [req-7efc88bf-bfbc-49f3-9637-766350e07d0b req-4341db8c-8b33-4b6e-aaff-bcef991d65a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.130 2 DEBUG oslo_concurrency.lockutils [req-7efc88bf-bfbc-49f3-9637-766350e07d0b req-4341db8c-8b33-4b6e-aaff-bcef991d65a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.130 2 DEBUG oslo_concurrency.lockutils [req-7efc88bf-bfbc-49f3-9637-766350e07d0b req-4341db8c-8b33-4b6e-aaff-bcef991d65a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.130 2 DEBUG nova.compute.manager [req-7efc88bf-bfbc-49f3-9637-766350e07d0b req-4341db8c-8b33-4b6e-aaff-bcef991d65a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] No waiting events found dispatching network-vif-plugged-482c766c-1462-47af-a801-a64e61f66109 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.130 2 WARNING nova.compute.manager [req-7efc88bf-bfbc-49f3-9637-766350e07d0b req-4341db8c-8b33-4b6e-aaff-bcef991d65a2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Received unexpected event network-vif-plugged-482c766c-1462-47af-a801-a64e61f66109 for instance with vm_state building and task_state spawning.
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.131 2 DEBUG nova.compute.manager [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Instance event wait completed in 1 seconds for network-vif-plugged,network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.134 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408282.1344779, 607e9c3a-4079-4261-b2c6-3cc47ae67173 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.134 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] VM Resumed (Lifecycle Event)
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.136 2 DEBUG nova.virt.libvirt.driver [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.139 2 INFO nova.virt.libvirt.driver [-] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Instance spawned successfully.
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.140 2 DEBUG nova.virt.libvirt.driver [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:31:22 compute-0 podman[243671]: 2025-10-02 12:31:22.171545086 +0000 UTC m=+0.076373336 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, release=1755695350, vendor=Red Hat, Inc., config_id=edpm, io.buildah.version=1.33.7, managed_by=edpm_ansible, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, maintainer=Red Hat, Inc., summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, name=ubi9-minimal, version=9.6, com.redhat.component=ubi9-minimal-container, build-date=2025-08-20T13:12:41, io.openshift.expose-services=, architecture=x86_64, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, url=https://catalog.redhat.com/en/search?searchType=containers, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. 
This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., vcs-type=git, container_name=openstack_network_exporter, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, distribution-scope=public, io.openshift.tags=minimal rhel9)
Oct 02 12:31:22 compute-0 podman[243672]: 2025-10-02 12:31:22.180408268 +0000 UTC m=+0.071105302 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.vendor=CentOS, config_id=multipathd, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, container_name=multipathd, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.191 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.195 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.199 2 DEBUG nova.virt.libvirt.driver [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.199 2 DEBUG nova.virt.libvirt.driver [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.200 2 DEBUG nova.virt.libvirt.driver [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.200 2 DEBUG nova.virt.libvirt.driver [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.200 2 DEBUG nova.virt.libvirt.driver [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.201 2 DEBUG nova.virt.libvirt.driver [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.236 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.287 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.352 2 INFO nova.compute.manager [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Took 12.05 seconds to spawn the instance on the hypervisor.
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.353 2 DEBUG nova.compute.manager [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.512 2 INFO nova.compute.manager [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Took 12.84 seconds to build instance.
Oct 02 12:31:22 compute-0 nova_compute[192079]: 2025-10-02 12:31:22.539 2 DEBUG oslo_concurrency.lockutils [None req-8b3802e2-ccde-44a6-ba46-aba83c4714af 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "607e9c3a-4079-4261-b2c6-3cc47ae67173" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 12.944s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:31:24 compute-0 nova_compute[192079]: 2025-10-02 12:31:24.003 2 DEBUG nova.network.neutron [req-ad61ab29-bd6c-4f88-b512-3ffb60cef3cb req-1d6874e0-8ee2-4c3a-8fb4-a21692c2ddcf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Updated VIF entry in instance network info cache for port 482c766c-1462-47af-a801-a64e61f66109. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:31:24 compute-0 nova_compute[192079]: 2025-10-02 12:31:24.004 2 DEBUG nova.network.neutron [req-ad61ab29-bd6c-4f88-b512-3ffb60cef3cb req-1d6874e0-8ee2-4c3a-8fb4-a21692c2ddcf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Updating instance_info_cache with network_info: [{"id": "7971997a-1f55-41fa-b77a-9c6fdaf497f7", "address": "fa:16:3e:6d:0d:5c", "network": {"id": "299c5e6b-f8b7-4cca-810b-a9b2539f4246", "bridge": "br-int", "label": "tempest-network-smoke--136367555", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap7971997a-1f", "ovs_interfaceid": "7971997a-1f55-41fa-b77a-9c6fdaf497f7", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "482c766c-1462-47af-a801-a64e61f66109", "address": "fa:16:3e:06:76:22", "network": {"id": "e2520108-9d67-4d82-a7a0-ba429a88c3c9", "bridge": "br-int", "label": "tempest-network-smoke--1271498361", "subnets": [{"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe06:7622", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}, {"cidr": "2001:db8:0:1::/64", "dns": [], "gateway": 
{"address": "2001:db8:0:1::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8:0:1:f816:3eff:fe06:7622", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap482c766c-14", "ovs_interfaceid": "482c766c-1462-47af-a801-a64e61f66109", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:31:24 compute-0 nova_compute[192079]: 2025-10-02 12:31:24.023 2 DEBUG oslo_concurrency.lockutils [req-ad61ab29-bd6c-4f88-b512-3ffb60cef3cb req-1d6874e0-8ee2-4c3a-8fb4-a21692c2ddcf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-607e9c3a-4079-4261-b2c6-3cc47ae67173" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:31:24 compute-0 nova_compute[192079]: 2025-10-02 12:31:24.169 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:26 compute-0 nova_compute[192079]: 2025-10-02 12:31:26.069 2 DEBUG nova.compute.manager [req-c39f4eab-6239-417c-ba83-be6c0c6483e7 req-2145208e-0d55-4925-8897-09887f835a5f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Received event network-changed-7971997a-1f55-41fa-b77a-9c6fdaf497f7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:31:26 compute-0 nova_compute[192079]: 2025-10-02 12:31:26.069 2 DEBUG nova.compute.manager [req-c39f4eab-6239-417c-ba83-be6c0c6483e7 req-2145208e-0d55-4925-8897-09887f835a5f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Refreshing instance network info cache due to event network-changed-7971997a-1f55-41fa-b77a-9c6fdaf497f7. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:31:26 compute-0 nova_compute[192079]: 2025-10-02 12:31:26.069 2 DEBUG oslo_concurrency.lockutils [req-c39f4eab-6239-417c-ba83-be6c0c6483e7 req-2145208e-0d55-4925-8897-09887f835a5f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-607e9c3a-4079-4261-b2c6-3cc47ae67173" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:31:26 compute-0 nova_compute[192079]: 2025-10-02 12:31:26.069 2 DEBUG oslo_concurrency.lockutils [req-c39f4eab-6239-417c-ba83-be6c0c6483e7 req-2145208e-0d55-4925-8897-09887f835a5f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-607e9c3a-4079-4261-b2c6-3cc47ae67173" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:31:26 compute-0 nova_compute[192079]: 2025-10-02 12:31:26.070 2 DEBUG nova.network.neutron [req-c39f4eab-6239-417c-ba83-be6c0c6483e7 req-2145208e-0d55-4925-8897-09887f835a5f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Refreshing network info cache for port 7971997a-1f55-41fa-b77a-9c6fdaf497f7 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:31:27 compute-0 podman[243711]: 2025-10-02 12:31:27.15962094 +0000 UTC m=+0.070478734 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:31:27 compute-0 podman[243712]: 2025-10-02 12:31:27.170743763 +0000 UTC m=+0.065402855 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=iscsid, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, tcib_managed=true)
Oct 02 12:31:27 compute-0 nova_compute[192079]: 2025-10-02 12:31:27.289 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:27 compute-0 nova_compute[192079]: 2025-10-02 12:31:27.755 2 DEBUG nova.network.neutron [req-c39f4eab-6239-417c-ba83-be6c0c6483e7 req-2145208e-0d55-4925-8897-09887f835a5f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Updated VIF entry in instance network info cache for port 7971997a-1f55-41fa-b77a-9c6fdaf497f7. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:31:27 compute-0 nova_compute[192079]: 2025-10-02 12:31:27.756 2 DEBUG nova.network.neutron [req-c39f4eab-6239-417c-ba83-be6c0c6483e7 req-2145208e-0d55-4925-8897-09887f835a5f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Updating instance_info_cache with network_info: [{"id": "7971997a-1f55-41fa-b77a-9c6fdaf497f7", "address": "fa:16:3e:6d:0d:5c", "network": {"id": "299c5e6b-f8b7-4cca-810b-a9b2539f4246", "bridge": "br-int", "label": "tempest-network-smoke--136367555", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap7971997a-1f", "ovs_interfaceid": "7971997a-1f55-41fa-b77a-9c6fdaf497f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "482c766c-1462-47af-a801-a64e61f66109", "address": "fa:16:3e:06:76:22", "network": {"id": "e2520108-9d67-4d82-a7a0-ba429a88c3c9", "bridge": "br-int", "label": "tempest-network-smoke--1271498361", "subnets": [{"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe06:7622", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, 
"ipv6_address_mode": "slaac"}}, {"cidr": "2001:db8:0:1::/64", "dns": [], "gateway": {"address": "2001:db8:0:1::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8:0:1:f816:3eff:fe06:7622", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap482c766c-14", "ovs_interfaceid": "482c766c-1462-47af-a801-a64e61f66109", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:31:27 compute-0 nova_compute[192079]: 2025-10-02 12:31:27.774 2 DEBUG oslo_concurrency.lockutils [req-c39f4eab-6239-417c-ba83-be6c0c6483e7 req-2145208e-0d55-4925-8897-09887f835a5f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-607e9c3a-4079-4261-b2c6-3cc47ae67173" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:31:29 compute-0 nova_compute[192079]: 2025-10-02 12:31:29.171 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:29 compute-0 nova_compute[192079]: 2025-10-02 12:31:29.769 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:31:31 compute-0 ovn_controller[94336]: 2025-10-02T12:31:31Z|00517|binding|INFO|Releasing lport 3eb0ed9e-d99b-4ee6-af64-ada9c8369b17 from this chassis (sb_readonly=0)
Oct 02 12:31:31 compute-0 ovn_controller[94336]: 2025-10-02T12:31:31Z|00518|binding|INFO|Releasing lport 6e8b6bdc-2d9f-47a8-8b24-8ce9ea993d7a from this chassis (sb_readonly=0)
Oct 02 12:31:31 compute-0 nova_compute[192079]: 2025-10-02 12:31:31.389 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:32 compute-0 nova_compute[192079]: 2025-10-02 12:31:32.292 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:33 compute-0 nova_compute[192079]: 2025-10-02 12:31:33.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:31:34 compute-0 nova_compute[192079]: 2025-10-02 12:31:34.213 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:35 compute-0 nova_compute[192079]: 2025-10-02 12:31:35.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:31:35 compute-0 nova_compute[192079]: 2025-10-02 12:31:35.688 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:31:35 compute-0 nova_compute[192079]: 2025-10-02 12:31:35.688 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:31:35 compute-0 nova_compute[192079]: 2025-10-02 12:31:35.689 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:31:35 compute-0 nova_compute[192079]: 2025-10-02 12:31:35.689 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:31:35 compute-0 nova_compute[192079]: 2025-10-02 12:31:35.763 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:31:35 compute-0 podman[243768]: 2025-10-02 12:31:35.814901544 +0000 UTC m=+0.082867343 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, 
io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, container_name=ovn_metadata_agent, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:31:35 compute-0 podman[243771]: 2025-10-02 12:31:35.819428156 +0000 UTC m=+0.082810640 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 12:31:35 compute-0 podman[243770]: 2025-10-02 12:31:35.831713772 +0000 UTC m=+0.098075427 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller, container_name=ovn_controller, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, managed_by=edpm_ansible, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_managed=true)
Oct 02 12:31:35 compute-0 nova_compute[192079]: 2025-10-02 12:31:35.844 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk --force-share --output=json" returned: 0 in 0.081s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:31:35 compute-0 nova_compute[192079]: 2025-10-02 12:31:35.845 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:31:35 compute-0 nova_compute[192079]: 2025-10-02 12:31:35.899 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/d59f518a-8b98-4c8c-b8f7-19f6b6809c6d/disk --force-share --output=json" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:31:35 compute-0 nova_compute[192079]: 2025-10-02 12:31:35.905 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/607e9c3a-4079-4261-b2c6-3cc47ae67173/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:31:35 compute-0 nova_compute[192079]: 2025-10-02 12:31:35.957 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/607e9c3a-4079-4261-b2c6-3cc47ae67173/disk --force-share --output=json" returned: 0 in 0.052s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:31:35 compute-0 nova_compute[192079]: 2025-10-02 12:31:35.958 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/607e9c3a-4079-4261-b2c6-3cc47ae67173/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:31:36 compute-0 nova_compute[192079]: 2025-10-02 12:31:36.015 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/607e9c3a-4079-4261-b2c6-3cc47ae67173/disk --force-share --output=json" returned: 0 in 0.058s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:31:36 compute-0 ovn_controller[94336]: 2025-10-02T12:31:36Z|00053|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:6d:0d:5c 10.100.0.11
Oct 02 12:31:36 compute-0 ovn_controller[94336]: 2025-10-02T12:31:36Z|00054|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:6d:0d:5c 10.100.0.11
Oct 02 12:31:36 compute-0 nova_compute[192079]: 2025-10-02 12:31:36.188 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:31:36 compute-0 nova_compute[192079]: 2025-10-02 12:31:36.189 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5380MB free_disk=73.2839469909668GB free_vcpus=6 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:31:36 compute-0 nova_compute[192079]: 2025-10-02 12:31:36.189 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:31:36 compute-0 nova_compute[192079]: 2025-10-02 12:31:36.190 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:31:36 compute-0 nova_compute[192079]: 2025-10-02 12:31:36.322 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance d59f518a-8b98-4c8c-b8f7-19f6b6809c6d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:31:36 compute-0 nova_compute[192079]: 2025-10-02 12:31:36.322 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance 607e9c3a-4079-4261-b2c6-3cc47ae67173 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:31:36 compute-0 nova_compute[192079]: 2025-10-02 12:31:36.322 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 2 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:31:36 compute-0 nova_compute[192079]: 2025-10-02 12:31:36.322 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=768MB phys_disk=79GB used_disk=2GB total_vcpus=8 used_vcpus=2 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:31:36 compute-0 nova_compute[192079]: 2025-10-02 12:31:36.446 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing inventories for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708 _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:804
Oct 02 12:31:36 compute-0 nova_compute[192079]: 2025-10-02 12:31:36.524 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Updating ProviderTree inventory for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 from _refresh_and_get_inventory using data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} _refresh_and_get_inventory /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:768
Oct 02 12:31:36 compute-0 nova_compute[192079]: 2025-10-02 12:31:36.525 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Updating inventory in ProviderTree for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 with inventory: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:176
Oct 02 12:31:36 compute-0 nova_compute[192079]: 2025-10-02 12:31:36.549 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing aggregate associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, aggregates: None _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:813
Oct 02 12:31:36 compute-0 nova_compute[192079]: 2025-10-02 12:31:36.581 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing trait associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, traits: COMPUTE_SECURITY_UEFI_SECURE_BOOT,COMPUTE_VIOMMU_MODEL_VIRTIO,COMPUTE_VIOMMU_MODEL_AUTO,COMPUTE_IMAGE_TYPE_AKI,COMPUTE_GRAPHICS_MODEL_VIRTIO,COMPUTE_NET_VIF_MODEL_PCNET,HW_CPU_X86_SSE42,COMPUTE_RESCUE_BFV,COMPUTE_VOLUME_EXTEND,COMPUTE_IMAGE_TYPE_QCOW2,COMPUTE_TRUSTED_CERTS,COMPUTE_SOCKET_PCI_NUMA_AFFINITY,COMPUTE_GRAPHICS_MODEL_CIRRUS,HW_CPU_X86_MMX,COMPUTE_STORAGE_BUS_VIRTIO,COMPUTE_NET_ATTACH_INTERFACE_WITH_TAG,COMPUTE_STORAGE_BUS_FDC,COMPUTE_STORAGE_BUS_USB,COMPUTE_NODE,HW_CPU_X86_SSSE3,HW_CPU_X86_SSE2,COMPUTE_GRAPHICS_MODEL_BOCHS,COMPUTE_NET_VIF_MODEL_E1000E,COMPUTE_IMAGE_TYPE_RAW,COMPUTE_NET_VIF_MODEL_NE2K_PCI,COMPUTE_IMAGE_TYPE_AMI,COMPUTE_VIOMMU_MODEL_INTEL,COMPUTE_SECURITY_TPM_2_0,COMPUTE_STORAGE_BUS_SCSI,COMPUTE_IMAGE_TYPE_ARI,COMPUTE_NET_VIF_MODEL_VMXNET3,COMPUTE_SECURITY_TPM_1_2,COMPUTE_NET_VIF_MODEL_E1000,HW_CPU_X86_SSE,COMPUTE_VOLUME_MULTI_ATTACH,COMPUTE_STORAGE_BUS_IDE,COMPUTE_GRAPHICS_MODEL_NONE,COMPUTE_VOLUME_ATTACH_WITH_TAG,COMPUTE_NET_VIF_MODEL_VIRTIO,HW_CPU_X86_SSE41,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_DEVICE_TAGGING,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_ACCELERATORS,COMPUTE_NET_VIF_MODEL_RTL8139,COMPUTE_GRAPHICS_MODEL_VGA,COMPUTE_STORAGE_BUS_SATA,COMPUTE_NET_VIF_MODEL_SPAPR_VLAN _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:825
Oct 02 12:31:36 compute-0 nova_compute[192079]: 2025-10-02 12:31:36.666 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:31:36 compute-0 nova_compute[192079]: 2025-10-02 12:31:36.683 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:31:36 compute-0 nova_compute[192079]: 2025-10-02 12:31:36.704 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:31:36 compute-0 nova_compute[192079]: 2025-10-02 12:31:36.704 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.515s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:31:37 compute-0 nova_compute[192079]: 2025-10-02 12:31:37.294 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:38 compute-0 nova_compute[192079]: 2025-10-02 12:31:38.479 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:38 compute-0 nova_compute[192079]: 2025-10-02 12:31:38.704 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:31:38 compute-0 nova_compute[192079]: 2025-10-02 12:31:38.705 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:31:39 compute-0 nova_compute[192079]: 2025-10-02 12:31:39.214 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:39 compute-0 nova_compute[192079]: 2025-10-02 12:31:39.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:31:39 compute-0 nova_compute[192079]: 2025-10-02 12:31:39.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:31:39 compute-0 nova_compute[192079]: 2025-10-02 12:31:39.920 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:40 compute-0 nova_compute[192079]: 2025-10-02 12:31:40.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:31:42 compute-0 nova_compute[192079]: 2025-10-02 12:31:42.298 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:42 compute-0 nova_compute[192079]: 2025-10-02 12:31:42.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:31:42 compute-0 nova_compute[192079]: 2025-10-02 12:31:42.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:31:42 compute-0 nova_compute[192079]: 2025-10-02 12:31:42.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:31:42 compute-0 ovn_controller[94336]: 2025-10-02T12:31:42Z|00519|binding|INFO|Releasing lport 3eb0ed9e-d99b-4ee6-af64-ada9c8369b17 from this chassis (sb_readonly=0)
Oct 02 12:31:42 compute-0 ovn_controller[94336]: 2025-10-02T12:31:42Z|00520|binding|INFO|Releasing lport 6e8b6bdc-2d9f-47a8-8b24-8ce9ea993d7a from this chassis (sb_readonly=0)
Oct 02 12:31:42 compute-0 nova_compute[192079]: 2025-10-02 12:31:42.785 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:42 compute-0 nova_compute[192079]: 2025-10-02 12:31:42.938 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "refresh_cache-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:31:42 compute-0 nova_compute[192079]: 2025-10-02 12:31:42.939 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquired lock "refresh_cache-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:31:42 compute-0 nova_compute[192079]: 2025-10-02 12:31:42.939 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Forcefully refreshing network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2004
Oct 02 12:31:42 compute-0 nova_compute[192079]: 2025-10-02 12:31:42.939 2 DEBUG nova.objects.instance [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lazy-loading 'info_cache' on Instance uuid d59f518a-8b98-4c8c-b8f7-19f6b6809c6d obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:31:43 compute-0 nova_compute[192079]: 2025-10-02 12:31:43.914 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:44 compute-0 nova_compute[192079]: 2025-10-02 12:31:44.217 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:45 compute-0 nova_compute[192079]: 2025-10-02 12:31:45.297 2 DEBUG nova.compute.manager [req-fdf3feb6-89a9-4254-a0f4-8adde064371a req-b53d36e0-a5b6-49de-b05c-31b068849386 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Received event network-changed-7971997a-1f55-41fa-b77a-9c6fdaf497f7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:31:45 compute-0 nova_compute[192079]: 2025-10-02 12:31:45.297 2 DEBUG nova.compute.manager [req-fdf3feb6-89a9-4254-a0f4-8adde064371a req-b53d36e0-a5b6-49de-b05c-31b068849386 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Refreshing instance network info cache due to event network-changed-7971997a-1f55-41fa-b77a-9c6fdaf497f7. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:31:45 compute-0 nova_compute[192079]: 2025-10-02 12:31:45.298 2 DEBUG oslo_concurrency.lockutils [req-fdf3feb6-89a9-4254-a0f4-8adde064371a req-b53d36e0-a5b6-49de-b05c-31b068849386 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-607e9c3a-4079-4261-b2c6-3cc47ae67173" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:31:45 compute-0 nova_compute[192079]: 2025-10-02 12:31:45.298 2 DEBUG oslo_concurrency.lockutils [req-fdf3feb6-89a9-4254-a0f4-8adde064371a req-b53d36e0-a5b6-49de-b05c-31b068849386 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-607e9c3a-4079-4261-b2c6-3cc47ae67173" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:31:45 compute-0 nova_compute[192079]: 2025-10-02 12:31:45.298 2 DEBUG nova.network.neutron [req-fdf3feb6-89a9-4254-a0f4-8adde064371a req-b53d36e0-a5b6-49de-b05c-31b068849386 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Refreshing network info cache for port 7971997a-1f55-41fa-b77a-9c6fdaf497f7 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:31:45 compute-0 nova_compute[192079]: 2025-10-02 12:31:45.862 2 DEBUG oslo_concurrency.lockutils [None req-91e95b9f-17a0-41de-a699-1d796f9240ca 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "607e9c3a-4079-4261-b2c6-3cc47ae67173" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:31:45 compute-0 nova_compute[192079]: 2025-10-02 12:31:45.863 2 DEBUG oslo_concurrency.lockutils [None req-91e95b9f-17a0-41de-a699-1d796f9240ca 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "607e9c3a-4079-4261-b2c6-3cc47ae67173" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:31:45 compute-0 nova_compute[192079]: 2025-10-02 12:31:45.863 2 DEBUG oslo_concurrency.lockutils [None req-91e95b9f-17a0-41de-a699-1d796f9240ca 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:31:45 compute-0 nova_compute[192079]: 2025-10-02 12:31:45.863 2 DEBUG oslo_concurrency.lockutils [None req-91e95b9f-17a0-41de-a699-1d796f9240ca 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:31:45 compute-0 nova_compute[192079]: 2025-10-02 12:31:45.864 2 DEBUG oslo_concurrency.lockutils [None req-91e95b9f-17a0-41de-a699-1d796f9240ca 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:31:45 compute-0 nova_compute[192079]: 2025-10-02 12:31:45.945 2 INFO nova.compute.manager [None req-91e95b9f-17a0-41de-a699-1d796f9240ca 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Terminating instance
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.016 2 DEBUG nova.compute.manager [None req-91e95b9f-17a0-41de-a699-1d796f9240ca 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:31:46 compute-0 kernel: tap7971997a-1f (unregistering): left promiscuous mode
Oct 02 12:31:46 compute-0 NetworkManager[51160]: <info>  [1759408306.0404] device (tap7971997a-1f): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:31:46 compute-0 ovn_controller[94336]: 2025-10-02T12:31:46Z|00521|binding|INFO|Releasing lport 7971997a-1f55-41fa-b77a-9c6fdaf497f7 from this chassis (sb_readonly=0)
Oct 02 12:31:46 compute-0 ovn_controller[94336]: 2025-10-02T12:31:46Z|00522|binding|INFO|Setting lport 7971997a-1f55-41fa-b77a-9c6fdaf497f7 down in Southbound
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.048 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:46 compute-0 ovn_controller[94336]: 2025-10-02T12:31:46Z|00523|binding|INFO|Removing iface tap7971997a-1f ovn-installed in OVS
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.051 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.066 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:46 compute-0 kernel: tap482c766c-14 (unregistering): left promiscuous mode
Oct 02 12:31:46 compute-0 NetworkManager[51160]: <info>  [1759408306.0898] device (tap482c766c-14): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.103 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:46 compute-0 ovn_controller[94336]: 2025-10-02T12:31:46Z|00524|binding|INFO|Releasing lport 482c766c-1462-47af-a801-a64e61f66109 from this chassis (sb_readonly=1)
Oct 02 12:31:46 compute-0 ovn_controller[94336]: 2025-10-02T12:31:46Z|00525|binding|INFO|Removing iface tap482c766c-14 ovn-installed in OVS
Oct 02 12:31:46 compute-0 ovn_controller[94336]: 2025-10-02T12:31:46Z|00526|if_status|INFO|Dropped 2 log messages in last 437 seconds (most recently, 437 seconds ago) due to excessive rate
Oct 02 12:31:46 compute-0 ovn_controller[94336]: 2025-10-02T12:31:46Z|00527|if_status|INFO|Not setting lport 482c766c-1462-47af-a801-a64e61f66109 down as sb is readonly
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.104 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.121 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:46 compute-0 podman[243843]: 2025-10-02 12:31:46.145790051 +0000 UTC m=+0.080744854 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, container_name=ceilometer_agent_compute, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, 
config_id=edpm, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:31:46 compute-0 systemd[1]: machine-qemu\x2d67\x2dinstance\x2d00000089.scope: Deactivated successfully.
Oct 02 12:31:46 compute-0 systemd[1]: machine-qemu\x2d67\x2dinstance\x2d00000089.scope: Consumed 15.441s CPU time.
Oct 02 12:31:46 compute-0 systemd-machined[152150]: Machine qemu-67-instance-00000089 terminated.
Oct 02 12:31:46 compute-0 ovn_controller[94336]: 2025-10-02T12:31:46Z|00528|binding|INFO|Setting lport 482c766c-1462-47af-a801-a64e61f66109 down in Southbound
Oct 02 12:31:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:46.198 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:6d:0d:5c 10.100.0.11'], port_security=['fa:16:3e:6d:0d:5c 10.100.0.11'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.11/28', 'neutron:device_id': '607e9c3a-4079-4261-b2c6-3cc47ae67173', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-299c5e6b-f8b7-4cca-810b-a9b2539f4246', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'neutron:revision_number': '4', 'neutron:security_group_ids': 'f706c8b6-b68a-48d8-b578-b0c81b519c8e', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=4a9348f4-eede-4266-8396-8c521ea59fc0, chassis=[], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=7971997a-1f55-41fa-b77a-9c6fdaf497f7) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:31:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:46.200 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 7971997a-1f55-41fa-b77a-9c6fdaf497f7 in datapath 299c5e6b-f8b7-4cca-810b-a9b2539f4246 unbound from our chassis
Oct 02 12:31:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:46.201 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 299c5e6b-f8b7-4cca-810b-a9b2539f4246
Oct 02 12:31:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:46.216 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8b49e087-c3bc-49aa-9063-364961147e82]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:46.235 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:06:76:22 2001:db8:0:1:f816:3eff:fe06:7622 2001:db8::f816:3eff:fe06:7622'], port_security=['fa:16:3e:06:76:22 2001:db8:0:1:f816:3eff:fe06:7622 2001:db8::f816:3eff:fe06:7622'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '2001:db8:0:1:f816:3eff:fe06:7622/64 2001:db8::f816:3eff:fe06:7622/64', 'neutron:device_id': '607e9c3a-4079-4261-b2c6-3cc47ae67173', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-e2520108-9d67-4d82-a7a0-ba429a88c3c9', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'neutron:revision_number': '4', 'neutron:security_group_ids': 'f706c8b6-b68a-48d8-b578-b0c81b519c8e', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=876a7f58-2645-4e1a-8a60-dbbe16fdfb2e, chassis=[], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=482c766c-1462-47af-a801-a64e61f66109) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:31:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:46.243 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[850fd007-05e3-44c8-8f78-346f3256e503]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:46.248 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[42dcd3c9-1024-4074-9eba-3f187f38e07c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:46.283 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[052497eb-883f-4867-aaed-bb93f1ffe35b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:46.301 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[57fc0584-40de-4c96-b36c-9b467dbe4f48]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap299c5e6b-f1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:da:24:47'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 12, 'tx_packets': 7, 'rx_bytes': 1000, 'tx_bytes': 438, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 12, 'tx_packets': 7, 'rx_bytes': 1000, 'tx_bytes': 438, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 
0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 162], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 623817, 'reachable_time': 16588, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 8, 'inoctets': 720, 'indelivers': 1, 'outforwdatagrams': 0, 'outpkts': 3, 'outoctets': 228, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 8, 'outmcastpkts': 3, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 720, 'outmcastoctets': 228, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 8, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 1, 'inerrors': 0, 'outmsgs': 3, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 243906, 'error': None, 'target': 'ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.302 2 INFO nova.virt.libvirt.driver [-] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Instance destroyed successfully.
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.303 2 DEBUG nova.objects.instance [None req-91e95b9f-17a0-41de-a699-1d796f9240ca 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lazy-loading 'resources' on Instance uuid 607e9c3a-4079-4261-b2c6-3cc47ae67173 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:31:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:46.316 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d0b67b8e-85f3-4ed0-a950-47abbc8614cc]: (4, ({'family': 2, 'prefixlen': 28, 'flags': 128, 'scope': 0, 'index': 2, 'attrs': [['IFA_ADDRESS', '10.100.0.2'], ['IFA_LOCAL', '10.100.0.2'], ['IFA_BROADCAST', '10.100.0.15'], ['IFA_LABEL', 'tap299c5e6b-f1'], ['IFA_FLAGS', 128], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 623826, 'tstamp': 623826}]], 'header': {'length': 96, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 243908, 'error': None, 'target': 'ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'}, {'family': 2, 'prefixlen': 32, 'flags': 128, 'scope': 0, 'index': 2, 'attrs': [['IFA_ADDRESS', '169.254.169.254'], ['IFA_LOCAL', '169.254.169.254'], ['IFA_BROADCAST', '169.254.169.254'], ['IFA_LABEL', 'tap299c5e6b-f1'], ['IFA_FLAGS', 128], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 623829, 'tstamp': 623829}]], 'header': {'length': 96, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 243908, 'error': None, 'target': 'ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'})) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:46.317 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap299c5e6b-f0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.319 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.325 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:46.325 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap299c5e6b-f0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:31:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:46.326 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:31:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:46.326 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap299c5e6b-f0, col_values=(('external_ids', {'iface-id': '6e8b6bdc-2d9f-47a8-8b24-8ce9ea993d7a'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:31:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:46.326 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:31:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:46.328 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 482c766c-1462-47af-a801-a64e61f66109 in datapath e2520108-9d67-4d82-a7a0-ba429a88c3c9 unbound from our chassis
Oct 02 12:31:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:46.330 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network e2520108-9d67-4d82-a7a0-ba429a88c3c9
Oct 02 12:31:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:46.343 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ebf5eead-2b92-4ab1-8f79-0c77c77a3ccf]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:46.370 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[7168fcd5-b188-49d7-910b-93f0f85cf4d0]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:46.373 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[1c7d2e0c-1cb1-43a6-bb09-d5713e40386c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.387 2 DEBUG nova.virt.libvirt.vif [None req-91e95b9f-17a0-41de-a699-1d796f9240ca 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:31:07Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestGettingAddress-server-2092283481',display_name='tempest-TestGettingAddress-server-2092283481',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testgettingaddress-server-2092283481',id=137,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBB1eGJz2x2NclizHY0y1KagfJt0/XSi4q477vmnTxhDjfgu4TS7ARmj4iaatPUQRUeuKdnCSa7aN8Y00iK3sldRns4TIy1xYmuZAKRi07Qnv9+MtEFMWHsOHXiIH+9Mk5Q==',key_name='tempest-TestGettingAddress-63209992',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:31:22Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='fd801958556f4c8aab047ecdef6b5ee8',ramdisk_id='',reservation_id='r-i0zbgkoj',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-TestGettingAddress-1355720650',owner_user_name='tempest-TestGettingAddress-1355720650-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:31:22Z,user_data=None,user_id='97ce9f1898484e0e9a1f7c84a9f0dfe3',uuid=607e9c3a-4079-4261-b2c6-3cc47ae67173,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "7971997a-1f55-41fa-b77a-9c6fdaf497f7", "address": "fa:16:3e:6d:0d:5c", "network": {"id": "299c5e6b-f8b7-4cca-810b-a9b2539f4246", "bridge": "br-int", "label": "tempest-network-smoke--136367555", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 
4, "meta": {}, "floating_ips": [{"address": "192.168.122.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap7971997a-1f", "ovs_interfaceid": "7971997a-1f55-41fa-b77a-9c6fdaf497f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.388 2 DEBUG nova.network.os_vif_util [None req-91e95b9f-17a0-41de-a699-1d796f9240ca 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converting VIF {"id": "7971997a-1f55-41fa-b77a-9c6fdaf497f7", "address": "fa:16:3e:6d:0d:5c", "network": {"id": "299c5e6b-f8b7-4cca-810b-a9b2539f4246", "bridge": "br-int", "label": "tempest-network-smoke--136367555", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.192", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap7971997a-1f", "ovs_interfaceid": "7971997a-1f55-41fa-b77a-9c6fdaf497f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.389 2 DEBUG nova.network.os_vif_util [None req-91e95b9f-17a0-41de-a699-1d796f9240ca 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:6d:0d:5c,bridge_name='br-int',has_traffic_filtering=True,id=7971997a-1f55-41fa-b77a-9c6fdaf497f7,network=Network(299c5e6b-f8b7-4cca-810b-a9b2539f4246),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap7971997a-1f') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.389 2 DEBUG os_vif [None req-91e95b9f-17a0-41de-a699-1d796f9240ca 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:6d:0d:5c,bridge_name='br-int',has_traffic_filtering=True,id=7971997a-1f55-41fa-b77a-9c6fdaf497f7,network=Network(299c5e6b-f8b7-4cca-810b-a9b2539f4246),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap7971997a-1f') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.391 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.391 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap7971997a-1f, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.393 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.395 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.397 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.400 2 INFO os_vif [None req-91e95b9f-17a0-41de-a699-1d796f9240ca 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:6d:0d:5c,bridge_name='br-int',has_traffic_filtering=True,id=7971997a-1f55-41fa-b77a-9c6fdaf497f7,network=Network(299c5e6b-f8b7-4cca-810b-a9b2539f4246),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap7971997a-1f')
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.401 2 DEBUG nova.virt.libvirt.vif [None req-91e95b9f-17a0-41de-a699-1d796f9240ca 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:31:07Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestGettingAddress-server-2092283481',display_name='tempest-TestGettingAddress-server-2092283481',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testgettingaddress-server-2092283481',id=137,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBB1eGJz2x2NclizHY0y1KagfJt0/XSi4q477vmnTxhDjfgu4TS7ARmj4iaatPUQRUeuKdnCSa7aN8Y00iK3sldRns4TIy1xYmuZAKRi07Qnv9+MtEFMWHsOHXiIH+9Mk5Q==',key_name='tempest-TestGettingAddress-63209992',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:31:22Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='fd801958556f4c8aab047ecdef6b5ee8',ramdisk_id='',reservation_id='r-i0zbgkoj',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-TestGettingAddress-1355720650',owner_user_name='tempest-TestGettingAddress-1355720650-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:31:22Z,user_data=None,user_id='97ce9f1898484e0e9a1f7c84a9f0dfe3',uuid=607e9c3a-4079-4261-b2c6-3cc47ae67173,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "482c766c-1462-47af-a801-a64e61f66109", "address": "fa:16:3e:06:76:22", "network": {"id": "e2520108-9d67-4d82-a7a0-ba429a88c3c9", "bridge": "br-int", "label": "tempest-network-smoke--1271498361", "subnets": [{"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe06:7622", "type": 
"fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}, {"cidr": "2001:db8:0:1::/64", "dns": [], "gateway": {"address": "2001:db8:0:1::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8:0:1:f816:3eff:fe06:7622", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap482c766c-14", "ovs_interfaceid": "482c766c-1462-47af-a801-a64e61f66109", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.401 2 DEBUG nova.network.os_vif_util [None req-91e95b9f-17a0-41de-a699-1d796f9240ca 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converting VIF {"id": "482c766c-1462-47af-a801-a64e61f66109", "address": "fa:16:3e:06:76:22", "network": {"id": "e2520108-9d67-4d82-a7a0-ba429a88c3c9", "bridge": "br-int", "label": "tempest-network-smoke--1271498361", "subnets": [{"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe06:7622", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}, {"cidr": "2001:db8:0:1::/64", "dns": [], "gateway": {"address": "2001:db8:0:1::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8:0:1:f816:3eff:fe06:7622", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap482c766c-14", "ovs_interfaceid": "482c766c-1462-47af-a801-a64e61f66109", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:31:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:46.402 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[bbfe1fa6-d407-470f-934b-942c2357544a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.402 2 DEBUG nova.network.os_vif_util [None req-91e95b9f-17a0-41de-a699-1d796f9240ca 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:06:76:22,bridge_name='br-int',has_traffic_filtering=True,id=482c766c-1462-47af-a801-a64e61f66109,network=Network(e2520108-9d67-4d82-a7a0-ba429a88c3c9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap482c766c-14') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.403 2 DEBUG os_vif [None req-91e95b9f-17a0-41de-a699-1d796f9240ca 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:06:76:22,bridge_name='br-int',has_traffic_filtering=True,id=482c766c-1462-47af-a801-a64e61f66109,network=Network(e2520108-9d67-4d82-a7a0-ba429a88c3c9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap482c766c-14') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.404 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.404 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap482c766c-14, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.406 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.407 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.408 2 INFO os_vif [None req-91e95b9f-17a0-41de-a699-1d796f9240ca 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:06:76:22,bridge_name='br-int',has_traffic_filtering=True,id=482c766c-1462-47af-a801-a64e61f66109,network=Network(e2520108-9d67-4d82-a7a0-ba429a88c3c9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap482c766c-14')
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.409 2 INFO nova.virt.libvirt.driver [None req-91e95b9f-17a0-41de-a699-1d796f9240ca 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Deleting instance files /var/lib/nova/instances/607e9c3a-4079-4261-b2c6-3cc47ae67173_del
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.410 2 INFO nova.virt.libvirt.driver [None req-91e95b9f-17a0-41de-a699-1d796f9240ca 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Deletion of /var/lib/nova/instances/607e9c3a-4079-4261-b2c6-3cc47ae67173_del complete
Oct 02 12:31:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:46.420 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[04fcbb5e-40fe-48e3-84c5-4d4d5f9fb26e]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tape2520108-91'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:2e:31:73'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 40, 'tx_packets': 5, 'rx_bytes': 3600, 'tx_bytes': 354, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 40, 'tx_packets': 5, 'rx_bytes': 3600, 'tx_bytes': 354, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 
0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 163], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 623896, 'reachable_time': 15377, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 40, 'inoctets': 3040, 'indelivers': 13, 'outforwdatagrams': 0, 'outpkts': 3, 'outoctets': 228, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 40, 'outmcastpkts': 3, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 3040, 'outmcastoctets': 228, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 40, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 13, 'inerrors': 0, 'outmsgs': 3, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 243917, 'error': None, 'target': 'ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:46.432 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e9bb5303-d00f-4c59-b093-2a2659e01eb0]: (4, ({'family': 2, 'prefixlen': 32, 'flags': 128, 'scope': 0, 'index': 2, 'attrs': [['IFA_ADDRESS', '169.254.169.254'], ['IFA_LOCAL', '169.254.169.254'], ['IFA_BROADCAST', '169.254.169.254'], ['IFA_LABEL', 'tape2520108-91'], ['IFA_FLAGS', 128], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 623910, 'tstamp': 623910}]], 'header': {'length': 96, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 243918, 'error': None, 'target': 'ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:46.434 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tape2520108-90, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.435 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.436 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:46.437 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tape2520108-90, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:31:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:46.437 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:31:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:46.437 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tape2520108-90, col_values=(('external_ids', {'iface-id': '3eb0ed9e-d99b-4ee6-af64-ada9c8369b17'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:31:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:46.438 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.740 2 INFO nova.compute.manager [None req-91e95b9f-17a0-41de-a699-1d796f9240ca 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Took 0.72 seconds to destroy the instance on the hypervisor.
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.740 2 DEBUG oslo.service.loopingcall [None req-91e95b9f-17a0-41de-a699-1d796f9240ca 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.741 2 DEBUG nova.compute.manager [-] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:31:46 compute-0 nova_compute[192079]: 2025-10-02 12:31:46.741 2 DEBUG nova.network.neutron [-] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:31:47 compute-0 nova_compute[192079]: 2025-10-02 12:31:47.299 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:47 compute-0 nova_compute[192079]: 2025-10-02 12:31:47.855 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Updating instance_info_cache with network_info: [{"id": "3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa", "address": "fa:16:3e:88:b5:22", "network": {"id": "299c5e6b-f8b7-4cca-810b-a9b2539f4246", "bridge": "br-int", "label": "tempest-network-smoke--136367555", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.247", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap3aa2fa6d-ae", "ovs_interfaceid": "3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2", "address": "fa:16:3e:7a:3b:f1", "network": {"id": "e2520108-9d67-4d82-a7a0-ba429a88c3c9", "bridge": "br-int", "label": "tempest-network-smoke--1271498361", "subnets": [{"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe7a:3bf1", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}, {"cidr": "2001:db8:0:1::/64", "dns": [], "gateway": {"address": "2001:db8:0:1::", 
"type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8:0:1:f816:3eff:fe7a:3bf1", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap2cddfcab-eb", "ovs_interfaceid": "2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:31:47 compute-0 nova_compute[192079]: 2025-10-02 12:31:47.884 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Releasing lock "refresh_cache-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:31:47 compute-0 nova_compute[192079]: 2025-10-02 12:31:47.884 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Updated the network info_cache for instance _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9929
Oct 02 12:31:47 compute-0 nova_compute[192079]: 2025-10-02 12:31:47.885 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:31:48 compute-0 nova_compute[192079]: 2025-10-02 12:31:48.044 2 DEBUG nova.compute.manager [req-a8a125a5-e016-4d1b-8e64-a4077a2b8e3d req-e378d3c0-98ed-4e79-9974-8d328191cdba 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Received event network-vif-unplugged-7971997a-1f55-41fa-b77a-9c6fdaf497f7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:31:48 compute-0 nova_compute[192079]: 2025-10-02 12:31:48.045 2 DEBUG oslo_concurrency.lockutils [req-a8a125a5-e016-4d1b-8e64-a4077a2b8e3d req-e378d3c0-98ed-4e79-9974-8d328191cdba 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:31:48 compute-0 nova_compute[192079]: 2025-10-02 12:31:48.045 2 DEBUG oslo_concurrency.lockutils [req-a8a125a5-e016-4d1b-8e64-a4077a2b8e3d req-e378d3c0-98ed-4e79-9974-8d328191cdba 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:31:48 compute-0 nova_compute[192079]: 2025-10-02 12:31:48.045 2 DEBUG oslo_concurrency.lockutils [req-a8a125a5-e016-4d1b-8e64-a4077a2b8e3d req-e378d3c0-98ed-4e79-9974-8d328191cdba 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:31:48 compute-0 nova_compute[192079]: 2025-10-02 12:31:48.045 2 DEBUG nova.compute.manager [req-a8a125a5-e016-4d1b-8e64-a4077a2b8e3d req-e378d3c0-98ed-4e79-9974-8d328191cdba 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] No waiting events found dispatching network-vif-unplugged-7971997a-1f55-41fa-b77a-9c6fdaf497f7 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:31:48 compute-0 nova_compute[192079]: 2025-10-02 12:31:48.046 2 DEBUG nova.compute.manager [req-a8a125a5-e016-4d1b-8e64-a4077a2b8e3d req-e378d3c0-98ed-4e79-9974-8d328191cdba 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Received event network-vif-unplugged-7971997a-1f55-41fa-b77a-9c6fdaf497f7 for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:31:48 compute-0 nova_compute[192079]: 2025-10-02 12:31:48.221 2 DEBUG nova.compute.manager [req-573747c2-50a4-4969-813c-48621d330d38 req-723538b3-7497-4e6f-af34-b13d9879937a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Received event network-vif-deleted-7971997a-1f55-41fa-b77a-9c6fdaf497f7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:31:48 compute-0 nova_compute[192079]: 2025-10-02 12:31:48.221 2 INFO nova.compute.manager [req-573747c2-50a4-4969-813c-48621d330d38 req-723538b3-7497-4e6f-af34-b13d9879937a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Neutron deleted interface 7971997a-1f55-41fa-b77a-9c6fdaf497f7; detaching it from the instance and deleting it from the info cache
Oct 02 12:31:48 compute-0 nova_compute[192079]: 2025-10-02 12:31:48.221 2 DEBUG nova.network.neutron [req-573747c2-50a4-4969-813c-48621d330d38 req-723538b3-7497-4e6f-af34-b13d9879937a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Updating instance_info_cache with network_info: [{"id": "482c766c-1462-47af-a801-a64e61f66109", "address": "fa:16:3e:06:76:22", "network": {"id": "e2520108-9d67-4d82-a7a0-ba429a88c3c9", "bridge": "br-int", "label": "tempest-network-smoke--1271498361", "subnets": [{"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe06:7622", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}, {"cidr": "2001:db8:0:1::/64", "dns": [], "gateway": {"address": "2001:db8:0:1::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8:0:1:f816:3eff:fe06:7622", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap482c766c-14", "ovs_interfaceid": "482c766c-1462-47af-a801-a64e61f66109", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:31:48 compute-0 nova_compute[192079]: 2025-10-02 12:31:48.240 2 DEBUG nova.compute.manager [req-573747c2-50a4-4969-813c-48621d330d38 req-723538b3-7497-4e6f-af34-b13d9879937a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Detach interface failed, port_id=7971997a-1f55-41fa-b77a-9c6fdaf497f7, reason: Instance 607e9c3a-4079-4261-b2c6-3cc47ae67173 could not be found. _process_instance_vif_deleted_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10882
Oct 02 12:31:49 compute-0 nova_compute[192079]: 2025-10-02 12:31:49.054 2 DEBUG nova.network.neutron [req-fdf3feb6-89a9-4254-a0f4-8adde064371a req-b53d36e0-a5b6-49de-b05c-31b068849386 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Updated VIF entry in instance network info cache for port 7971997a-1f55-41fa-b77a-9c6fdaf497f7. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:31:49 compute-0 nova_compute[192079]: 2025-10-02 12:31:49.054 2 DEBUG nova.network.neutron [req-fdf3feb6-89a9-4254-a0f4-8adde064371a req-b53d36e0-a5b6-49de-b05c-31b068849386 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Updating instance_info_cache with network_info: [{"id": "7971997a-1f55-41fa-b77a-9c6fdaf497f7", "address": "fa:16:3e:6d:0d:5c", "network": {"id": "299c5e6b-f8b7-4cca-810b-a9b2539f4246", "bridge": "br-int", "label": "tempest-network-smoke--136367555", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap7971997a-1f", "ovs_interfaceid": "7971997a-1f55-41fa-b77a-9c6fdaf497f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "482c766c-1462-47af-a801-a64e61f66109", "address": "fa:16:3e:06:76:22", "network": {"id": "e2520108-9d67-4d82-a7a0-ba429a88c3c9", "bridge": "br-int", "label": "tempest-network-smoke--1271498361", "subnets": [{"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe06:7622", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}, {"cidr": "2001:db8:0:1::/64", "dns": [], "gateway": 
{"address": "2001:db8:0:1::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8:0:1:f816:3eff:fe06:7622", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap482c766c-14", "ovs_interfaceid": "482c766c-1462-47af-a801-a64e61f66109", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:31:49 compute-0 nova_compute[192079]: 2025-10-02 12:31:49.112 2 DEBUG nova.network.neutron [-] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:31:49 compute-0 nova_compute[192079]: 2025-10-02 12:31:49.122 2 DEBUG oslo_concurrency.lockutils [req-fdf3feb6-89a9-4254-a0f4-8adde064371a req-b53d36e0-a5b6-49de-b05c-31b068849386 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-607e9c3a-4079-4261-b2c6-3cc47ae67173" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:31:49 compute-0 nova_compute[192079]: 2025-10-02 12:31:49.162 2 INFO nova.compute.manager [-] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Took 2.42 seconds to deallocate network for instance.
Oct 02 12:31:49 compute-0 nova_compute[192079]: 2025-10-02 12:31:49.397 2 DEBUG oslo_concurrency.lockutils [None req-91e95b9f-17a0-41de-a699-1d796f9240ca 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:31:49 compute-0 nova_compute[192079]: 2025-10-02 12:31:49.398 2 DEBUG oslo_concurrency.lockutils [None req-91e95b9f-17a0-41de-a699-1d796f9240ca 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:31:49 compute-0 nova_compute[192079]: 2025-10-02 12:31:49.490 2 DEBUG nova.compute.provider_tree [None req-91e95b9f-17a0-41de-a699-1d796f9240ca 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:31:49 compute-0 nova_compute[192079]: 2025-10-02 12:31:49.549 2 DEBUG nova.scheduler.client.report [None req-91e95b9f-17a0-41de-a699-1d796f9240ca 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:31:49 compute-0 nova_compute[192079]: 2025-10-02 12:31:49.600 2 DEBUG oslo_concurrency.lockutils [None req-91e95b9f-17a0-41de-a699-1d796f9240ca 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.202s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:31:49 compute-0 nova_compute[192079]: 2025-10-02 12:31:49.664 2 INFO nova.scheduler.client.report [None req-91e95b9f-17a0-41de-a699-1d796f9240ca 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Deleted allocations for instance 607e9c3a-4079-4261-b2c6-3cc47ae67173
Oct 02 12:31:49 compute-0 nova_compute[192079]: 2025-10-02 12:31:49.845 2 DEBUG oslo_concurrency.lockutils [None req-91e95b9f-17a0-41de-a699-1d796f9240ca 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "607e9c3a-4079-4261-b2c6-3cc47ae67173" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 3.982s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:31:50 compute-0 nova_compute[192079]: 2025-10-02 12:31:50.192 2 DEBUG nova.compute.manager [req-8c9751a7-fedd-4f18-9e4b-34533b850393 req-12e5126b-50ff-484b-a65e-2513c559f690 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Received event network-vif-plugged-7971997a-1f55-41fa-b77a-9c6fdaf497f7 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:31:50 compute-0 nova_compute[192079]: 2025-10-02 12:31:50.192 2 DEBUG oslo_concurrency.lockutils [req-8c9751a7-fedd-4f18-9e4b-34533b850393 req-12e5126b-50ff-484b-a65e-2513c559f690 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:31:50 compute-0 nova_compute[192079]: 2025-10-02 12:31:50.193 2 DEBUG oslo_concurrency.lockutils [req-8c9751a7-fedd-4f18-9e4b-34533b850393 req-12e5126b-50ff-484b-a65e-2513c559f690 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:31:50 compute-0 nova_compute[192079]: 2025-10-02 12:31:50.193 2 DEBUG oslo_concurrency.lockutils [req-8c9751a7-fedd-4f18-9e4b-34533b850393 req-12e5126b-50ff-484b-a65e-2513c559f690 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:31:50 compute-0 nova_compute[192079]: 2025-10-02 12:31:50.193 2 DEBUG nova.compute.manager [req-8c9751a7-fedd-4f18-9e4b-34533b850393 req-12e5126b-50ff-484b-a65e-2513c559f690 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] No waiting events found dispatching network-vif-plugged-7971997a-1f55-41fa-b77a-9c6fdaf497f7 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:31:50 compute-0 nova_compute[192079]: 2025-10-02 12:31:50.193 2 WARNING nova.compute.manager [req-8c9751a7-fedd-4f18-9e4b-34533b850393 req-12e5126b-50ff-484b-a65e-2513c559f690 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Received unexpected event network-vif-plugged-7971997a-1f55-41fa-b77a-9c6fdaf497f7 for instance with vm_state deleted and task_state None.
Oct 02 12:31:50 compute-0 nova_compute[192079]: 2025-10-02 12:31:50.194 2 DEBUG nova.compute.manager [req-8c9751a7-fedd-4f18-9e4b-34533b850393 req-12e5126b-50ff-484b-a65e-2513c559f690 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Received event network-vif-unplugged-482c766c-1462-47af-a801-a64e61f66109 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:31:50 compute-0 nova_compute[192079]: 2025-10-02 12:31:50.194 2 DEBUG oslo_concurrency.lockutils [req-8c9751a7-fedd-4f18-9e4b-34533b850393 req-12e5126b-50ff-484b-a65e-2513c559f690 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:31:50 compute-0 nova_compute[192079]: 2025-10-02 12:31:50.194 2 DEBUG oslo_concurrency.lockutils [req-8c9751a7-fedd-4f18-9e4b-34533b850393 req-12e5126b-50ff-484b-a65e-2513c559f690 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:31:50 compute-0 nova_compute[192079]: 2025-10-02 12:31:50.194 2 DEBUG oslo_concurrency.lockutils [req-8c9751a7-fedd-4f18-9e4b-34533b850393 req-12e5126b-50ff-484b-a65e-2513c559f690 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:31:50 compute-0 nova_compute[192079]: 2025-10-02 12:31:50.195 2 DEBUG nova.compute.manager [req-8c9751a7-fedd-4f18-9e4b-34533b850393 req-12e5126b-50ff-484b-a65e-2513c559f690 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] No waiting events found dispatching network-vif-unplugged-482c766c-1462-47af-a801-a64e61f66109 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:31:50 compute-0 nova_compute[192079]: 2025-10-02 12:31:50.195 2 WARNING nova.compute.manager [req-8c9751a7-fedd-4f18-9e4b-34533b850393 req-12e5126b-50ff-484b-a65e-2513c559f690 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Received unexpected event network-vif-unplugged-482c766c-1462-47af-a801-a64e61f66109 for instance with vm_state deleted and task_state None.
Oct 02 12:31:50 compute-0 nova_compute[192079]: 2025-10-02 12:31:50.195 2 DEBUG nova.compute.manager [req-8c9751a7-fedd-4f18-9e4b-34533b850393 req-12e5126b-50ff-484b-a65e-2513c559f690 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Received event network-vif-plugged-482c766c-1462-47af-a801-a64e61f66109 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:31:50 compute-0 nova_compute[192079]: 2025-10-02 12:31:50.195 2 DEBUG oslo_concurrency.lockutils [req-8c9751a7-fedd-4f18-9e4b-34533b850393 req-12e5126b-50ff-484b-a65e-2513c559f690 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:31:50 compute-0 nova_compute[192079]: 2025-10-02 12:31:50.196 2 DEBUG oslo_concurrency.lockutils [req-8c9751a7-fedd-4f18-9e4b-34533b850393 req-12e5126b-50ff-484b-a65e-2513c559f690 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:31:50 compute-0 nova_compute[192079]: 2025-10-02 12:31:50.196 2 DEBUG oslo_concurrency.lockutils [req-8c9751a7-fedd-4f18-9e4b-34533b850393 req-12e5126b-50ff-484b-a65e-2513c559f690 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "607e9c3a-4079-4261-b2c6-3cc47ae67173-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:31:50 compute-0 nova_compute[192079]: 2025-10-02 12:31:50.196 2 DEBUG nova.compute.manager [req-8c9751a7-fedd-4f18-9e4b-34533b850393 req-12e5126b-50ff-484b-a65e-2513c559f690 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] No waiting events found dispatching network-vif-plugged-482c766c-1462-47af-a801-a64e61f66109 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:31:50 compute-0 nova_compute[192079]: 2025-10-02 12:31:50.196 2 WARNING nova.compute.manager [req-8c9751a7-fedd-4f18-9e4b-34533b850393 req-12e5126b-50ff-484b-a65e-2513c559f690 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Received unexpected event network-vif-plugged-482c766c-1462-47af-a801-a64e61f66109 for instance with vm_state deleted and task_state None.
Oct 02 12:31:50 compute-0 nova_compute[192079]: 2025-10-02 12:31:50.373 2 DEBUG nova.compute.manager [req-5bc70c37-c267-40f3-9c4c-933d31d9a9c0 req-bc354cb0-d465-4c50-8568-1f30f400fc11 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Received event network-vif-deleted-482c766c-1462-47af-a801-a64e61f66109 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:31:51 compute-0 nova_compute[192079]: 2025-10-02 12:31:51.407 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:51 compute-0 nova_compute[192079]: 2025-10-02 12:31:51.762 2 DEBUG oslo_concurrency.lockutils [None req-2c36345d-d54c-4b23-b5e3-1188a5191225 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:31:51 compute-0 nova_compute[192079]: 2025-10-02 12:31:51.763 2 DEBUG oslo_concurrency.lockutils [None req-2c36345d-d54c-4b23-b5e3-1188a5191225 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:31:51 compute-0 nova_compute[192079]: 2025-10-02 12:31:51.763 2 DEBUG oslo_concurrency.lockutils [None req-2c36345d-d54c-4b23-b5e3-1188a5191225 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:31:51 compute-0 nova_compute[192079]: 2025-10-02 12:31:51.763 2 DEBUG oslo_concurrency.lockutils [None req-2c36345d-d54c-4b23-b5e3-1188a5191225 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:31:51 compute-0 nova_compute[192079]: 2025-10-02 12:31:51.764 2 DEBUG oslo_concurrency.lockutils [None req-2c36345d-d54c-4b23-b5e3-1188a5191225 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:31:51 compute-0 nova_compute[192079]: 2025-10-02 12:31:51.776 2 INFO nova.compute.manager [None req-2c36345d-d54c-4b23-b5e3-1188a5191225 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Terminating instance
Oct 02 12:31:51 compute-0 nova_compute[192079]: 2025-10-02 12:31:51.789 2 DEBUG nova.compute.manager [None req-2c36345d-d54c-4b23-b5e3-1188a5191225 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:31:51 compute-0 kernel: tap3aa2fa6d-ae (unregistering): left promiscuous mode
Oct 02 12:31:51 compute-0 NetworkManager[51160]: <info>  [1759408311.8243] device (tap3aa2fa6d-ae): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:31:51 compute-0 ovn_controller[94336]: 2025-10-02T12:31:51Z|00529|binding|INFO|Releasing lport 3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa from this chassis (sb_readonly=0)
Oct 02 12:31:51 compute-0 ovn_controller[94336]: 2025-10-02T12:31:51Z|00530|binding|INFO|Setting lport 3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa down in Southbound
Oct 02 12:31:51 compute-0 ovn_controller[94336]: 2025-10-02T12:31:51Z|00531|binding|INFO|Removing iface tap3aa2fa6d-ae ovn-installed in OVS
Oct 02 12:31:51 compute-0 nova_compute[192079]: 2025-10-02 12:31:51.830 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:51 compute-0 nova_compute[192079]: 2025-10-02 12:31:51.832 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:51.840 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:88:b5:22 10.100.0.8'], port_security=['fa:16:3e:88:b5:22 10.100.0.8'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.8/28', 'neutron:device_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-299c5e6b-f8b7-4cca-810b-a9b2539f4246', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'neutron:revision_number': '4', 'neutron:security_group_ids': 'f706c8b6-b68a-48d8-b578-b0c81b519c8e', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=4a9348f4-eede-4266-8396-8c521ea59fc0, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:31:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:51.842 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa in datapath 299c5e6b-f8b7-4cca-810b-a9b2539f4246 unbound from our chassis
Oct 02 12:31:51 compute-0 nova_compute[192079]: 2025-10-02 12:31:51.845 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:51.846 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 299c5e6b-f8b7-4cca-810b-a9b2539f4246, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:31:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:51.847 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d345078f-cea3-47da-bc14-10e32ec0eac5]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:51.849 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246 namespace which is not needed anymore
Oct 02 12:31:51 compute-0 kernel: tap2cddfcab-eb (unregistering): left promiscuous mode
Oct 02 12:31:51 compute-0 NetworkManager[51160]: <info>  [1759408311.8700] device (tap2cddfcab-eb): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:31:51 compute-0 nova_compute[192079]: 2025-10-02 12:31:51.870 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:51 compute-0 ovn_controller[94336]: 2025-10-02T12:31:51Z|00532|binding|INFO|Releasing lport 2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2 from this chassis (sb_readonly=0)
Oct 02 12:31:51 compute-0 ovn_controller[94336]: 2025-10-02T12:31:51Z|00533|binding|INFO|Setting lport 2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2 down in Southbound
Oct 02 12:31:51 compute-0 ovn_controller[94336]: 2025-10-02T12:31:51Z|00534|binding|INFO|Removing iface tap2cddfcab-eb ovn-installed in OVS
Oct 02 12:31:51 compute-0 nova_compute[192079]: 2025-10-02 12:31:51.880 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:51 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:51.887 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:7a:3b:f1 2001:db8:0:1:f816:3eff:fe7a:3bf1 2001:db8::f816:3eff:fe7a:3bf1'], port_security=['fa:16:3e:7a:3b:f1 2001:db8:0:1:f816:3eff:fe7a:3bf1 2001:db8::f816:3eff:fe7a:3bf1'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '2001:db8:0:1:f816:3eff:fe7a:3bf1/64 2001:db8::f816:3eff:fe7a:3bf1/64', 'neutron:device_id': 'd59f518a-8b98-4c8c-b8f7-19f6b6809c6d', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-e2520108-9d67-4d82-a7a0-ba429a88c3c9', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'neutron:revision_number': '4', 'neutron:security_group_ids': 'f706c8b6-b68a-48d8-b578-b0c81b519c8e', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=876a7f58-2645-4e1a-8a60-dbbe16fdfb2e, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:31:51 compute-0 nova_compute[192079]: 2025-10-02 12:31:51.891 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:51 compute-0 nova_compute[192079]: 2025-10-02 12:31:51.894 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:51 compute-0 systemd[1]: machine-qemu\x2d66\x2dinstance\x2d00000086.scope: Deactivated successfully.
Oct 02 12:31:51 compute-0 systemd[1]: machine-qemu\x2d66\x2dinstance\x2d00000086.scope: Consumed 16.755s CPU time.
Oct 02 12:31:51 compute-0 systemd-machined[152150]: Machine qemu-66-instance-00000086 terminated.
Oct 02 12:31:51 compute-0 neutron-haproxy-ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246[243287]: [NOTICE]   (243291) : haproxy version is 2.8.14-c23fe91
Oct 02 12:31:51 compute-0 neutron-haproxy-ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246[243287]: [NOTICE]   (243291) : path to executable is /usr/sbin/haproxy
Oct 02 12:31:51 compute-0 neutron-haproxy-ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246[243287]: [WARNING]  (243291) : Exiting Master process...
Oct 02 12:31:51 compute-0 neutron-haproxy-ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246[243287]: [WARNING]  (243291) : Exiting Master process...
Oct 02 12:31:51 compute-0 neutron-haproxy-ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246[243287]: [ALERT]    (243291) : Current worker (243293) exited with code 143 (Terminated)
Oct 02 12:31:51 compute-0 neutron-haproxy-ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246[243287]: [WARNING]  (243291) : All workers exited. Exiting... (0)
Oct 02 12:31:51 compute-0 systemd[1]: libpod-b4953488dbf5e132aabf422a3d1f3a7340a7d7536e09fd97da438042b2503274.scope: Deactivated successfully.
Oct 02 12:31:52 compute-0 podman[243946]: 2025-10-02 12:31:52.002883877 +0000 UTC m=+0.044666250 container died b4953488dbf5e132aabf422a3d1f3a7340a7d7536e09fd97da438042b2503274 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true)
Oct 02 12:31:52 compute-0 NetworkManager[51160]: <info>  [1759408312.0238] manager: (tap2cddfcab-eb): new Tun device (/org/freedesktop/NetworkManager/Devices/261)
Oct 02 12:31:52 compute-0 systemd[1]: var-lib-containers-storage-overlay-eaa1606c1a8f0f63ae5273e2ba4072c659aef5bbf6890937417653c8870cee25-merged.mount: Deactivated successfully.
Oct 02 12:31:52 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-b4953488dbf5e132aabf422a3d1f3a7340a7d7536e09fd97da438042b2503274-userdata-shm.mount: Deactivated successfully.
Oct 02 12:31:52 compute-0 podman[243946]: 2025-10-02 12:31:52.062105183 +0000 UTC m=+0.103887566 container cleanup b4953488dbf5e132aabf422a3d1f3a7340a7d7536e09fd97da438042b2503274 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:31:52 compute-0 systemd[1]: libpod-conmon-b4953488dbf5e132aabf422a3d1f3a7340a7d7536e09fd97da438042b2503274.scope: Deactivated successfully.
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.076 2 INFO nova.virt.libvirt.driver [-] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Instance destroyed successfully.
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.077 2 DEBUG nova.objects.instance [None req-2c36345d-d54c-4b23-b5e3-1188a5191225 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lazy-loading 'resources' on Instance uuid d59f518a-8b98-4c8c-b8f7-19f6b6809c6d obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.099 2 DEBUG nova.virt.libvirt.vif [None req-2c36345d-d54c-4b23-b5e3-1188a5191225 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:30:25Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestGettingAddress-server-597864105',display_name='tempest-TestGettingAddress-server-597864105',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testgettingaddress-server-597864105',id=134,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBB1eGJz2x2NclizHY0y1KagfJt0/XSi4q477vmnTxhDjfgu4TS7ARmj4iaatPUQRUeuKdnCSa7aN8Y00iK3sldRns4TIy1xYmuZAKRi07Qnv9+MtEFMWHsOHXiIH+9Mk5Q==',key_name='tempest-TestGettingAddress-63209992',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:30:42Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='fd801958556f4c8aab047ecdef6b5ee8',ramdisk_id='',reservation_id='r-b82006hp',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-TestGettingAddress-1355720650',owner_user_name='tempest-TestGettingAddress-1355720650-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:30:42Z,user_data=None,user_id='97ce9f1898484e0e9a1f7c84a9f0dfe3',uuid=d59f518a-8b98-4c8c-b8f7-19f6b6809c6d,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa", "address": "fa:16:3e:88:b5:22", "network": {"id": "299c5e6b-f8b7-4cca-810b-a9b2539f4246", "bridge": "br-int", "label": "tempest-network-smoke--136367555", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 
4, "meta": {}, "floating_ips": [{"address": "192.168.122.247", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap3aa2fa6d-ae", "ovs_interfaceid": "3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.099 2 DEBUG nova.network.os_vif_util [None req-2c36345d-d54c-4b23-b5e3-1188a5191225 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converting VIF {"id": "3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa", "address": "fa:16:3e:88:b5:22", "network": {"id": "299c5e6b-f8b7-4cca-810b-a9b2539f4246", "bridge": "br-int", "label": "tempest-network-smoke--136367555", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.247", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap3aa2fa6d-ae", "ovs_interfaceid": "3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.100 2 DEBUG nova.network.os_vif_util [None req-2c36345d-d54c-4b23-b5e3-1188a5191225 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:88:b5:22,bridge_name='br-int',has_traffic_filtering=True,id=3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa,network=Network(299c5e6b-f8b7-4cca-810b-a9b2539f4246),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap3aa2fa6d-ae') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.100 2 DEBUG os_vif [None req-2c36345d-d54c-4b23-b5e3-1188a5191225 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:88:b5:22,bridge_name='br-int',has_traffic_filtering=True,id=3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa,network=Network(299c5e6b-f8b7-4cca-810b-a9b2539f4246),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap3aa2fa6d-ae') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.102 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.102 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap3aa2fa6d-ae, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.103 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.105 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.129 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.131 2 INFO os_vif [None req-2c36345d-d54c-4b23-b5e3-1188a5191225 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:88:b5:22,bridge_name='br-int',has_traffic_filtering=True,id=3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa,network=Network(299c5e6b-f8b7-4cca-810b-a9b2539f4246),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap3aa2fa6d-ae')
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.132 2 DEBUG nova.virt.libvirt.vif [None req-2c36345d-d54c-4b23-b5e3-1188a5191225 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:30:25Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestGettingAddress-server-597864105',display_name='tempest-TestGettingAddress-server-597864105',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testgettingaddress-server-597864105',id=134,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBB1eGJz2x2NclizHY0y1KagfJt0/XSi4q477vmnTxhDjfgu4TS7ARmj4iaatPUQRUeuKdnCSa7aN8Y00iK3sldRns4TIy1xYmuZAKRi07Qnv9+MtEFMWHsOHXiIH+9Mk5Q==',key_name='tempest-TestGettingAddress-63209992',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:30:42Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='fd801958556f4c8aab047ecdef6b5ee8',ramdisk_id='',reservation_id='r-b82006hp',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-TestGettingAddress-1355720650',owner_user_name='tempest-TestGettingAddress-1355720650-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:30:42Z,user_data=None,user_id='97ce9f1898484e0e9a1f7c84a9f0dfe3',uuid=d59f518a-8b98-4c8c-b8f7-19f6b6809c6d,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2", "address": "fa:16:3e:7a:3b:f1", "network": {"id": "e2520108-9d67-4d82-a7a0-ba429a88c3c9", "bridge": "br-int", "label": "tempest-network-smoke--1271498361", "subnets": [{"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe7a:3bf1", "type": 
"fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}, {"cidr": "2001:db8:0:1::/64", "dns": [], "gateway": {"address": "2001:db8:0:1::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8:0:1:f816:3eff:fe7a:3bf1", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap2cddfcab-eb", "ovs_interfaceid": "2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.133 2 DEBUG nova.network.os_vif_util [None req-2c36345d-d54c-4b23-b5e3-1188a5191225 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converting VIF {"id": "2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2", "address": "fa:16:3e:7a:3b:f1", "network": {"id": "e2520108-9d67-4d82-a7a0-ba429a88c3c9", "bridge": "br-int", "label": "tempest-network-smoke--1271498361", "subnets": [{"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe7a:3bf1", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}, {"cidr": "2001:db8:0:1::/64", "dns": [], "gateway": {"address": "2001:db8:0:1::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8:0:1:f816:3eff:fe7a:3bf1", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap2cddfcab-eb", "ovs_interfaceid": "2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.133 2 DEBUG nova.network.os_vif_util [None req-2c36345d-d54c-4b23-b5e3-1188a5191225 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:7a:3b:f1,bridge_name='br-int',has_traffic_filtering=True,id=2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2,network=Network(e2520108-9d67-4d82-a7a0-ba429a88c3c9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap2cddfcab-eb') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.134 2 DEBUG os_vif [None req-2c36345d-d54c-4b23-b5e3-1188a5191225 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:7a:3b:f1,bridge_name='br-int',has_traffic_filtering=True,id=2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2,network=Network(e2520108-9d67-4d82-a7a0-ba429a88c3c9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap2cddfcab-eb') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.135 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.135 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap2cddfcab-eb, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.136 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.137 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.138 2 INFO os_vif [None req-2c36345d-d54c-4b23-b5e3-1188a5191225 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:7a:3b:f1,bridge_name='br-int',has_traffic_filtering=True,id=2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2,network=Network(e2520108-9d67-4d82-a7a0-ba429a88c3c9),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap2cddfcab-eb')
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.139 2 INFO nova.virt.libvirt.driver [None req-2c36345d-d54c-4b23-b5e3-1188a5191225 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Deleting instance files /var/lib/nova/instances/d59f518a-8b98-4c8c-b8f7-19f6b6809c6d_del
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.140 2 INFO nova.virt.libvirt.driver [None req-2c36345d-d54c-4b23-b5e3-1188a5191225 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Deletion of /var/lib/nova/instances/d59f518a-8b98-4c8c-b8f7-19f6b6809c6d_del complete
Oct 02 12:31:52 compute-0 podman[244000]: 2025-10-02 12:31:52.141914931 +0000 UTC m=+0.049998715 container remove b4953488dbf5e132aabf422a3d1f3a7340a7d7536e09fd97da438042b2503274 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 12:31:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:52.146 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e552a318-f1c3-40d3-aa82-6efb71b70639]: (4, ('Thu Oct  2 12:31:51 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246 (b4953488dbf5e132aabf422a3d1f3a7340a7d7536e09fd97da438042b2503274)\nb4953488dbf5e132aabf422a3d1f3a7340a7d7536e09fd97da438042b2503274\nThu Oct  2 12:31:52 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246 (b4953488dbf5e132aabf422a3d1f3a7340a7d7536e09fd97da438042b2503274)\nb4953488dbf5e132aabf422a3d1f3a7340a7d7536e09fd97da438042b2503274\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:52.147 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d9d2873c-438f-4e22-9a21-8b9ea6ca666f]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:52.148 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap299c5e6b-f0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.149 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.160 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:52 compute-0 kernel: tap299c5e6b-f0: left promiscuous mode
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.162 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:52.164 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[79589c3d-3187-4c65-897b-ae706deeae65]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:52.191 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6e4bdd50-4542-4004-9358-75ed0c195b7c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:52.192 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2871455f-a999-4ec6-bd8f-e7e5bc5cbdec]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:52.209 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[72f27c15-d4a5-42ba-b190-472a5131c03f]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 623810, 'reachable_time': 31596, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 244016, 'error': None, 'target': 'ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:52 compute-0 systemd[1]: run-netns-ovnmeta\x2d299c5e6b\x2df8b7\x2d4cca\x2d810b\x2da9b2539f4246.mount: Deactivated successfully.
Oct 02 12:31:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:52.211 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-299c5e6b-f8b7-4cca-810b-a9b2539f4246 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:31:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:52.211 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[b1340272-8afa-4257-8d8c-40e1633424a9]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:52.214 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2 in datapath e2520108-9d67-4d82-a7a0-ba429a88c3c9 unbound from our chassis
Oct 02 12:31:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:52.216 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network e2520108-9d67-4d82-a7a0-ba429a88c3c9, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:31:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:52.217 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f8ad316e-3600-4120-89c0-14244f11eb48]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:52.217 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9 namespace which is not needed anymore
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.266 2 INFO nova.compute.manager [None req-2c36345d-d54c-4b23-b5e3-1188a5191225 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Took 0.48 seconds to destroy the instance on the hypervisor.
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.267 2 DEBUG oslo.service.loopingcall [None req-2c36345d-d54c-4b23-b5e3-1188a5191225 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.267 2 DEBUG nova.compute.manager [-] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.267 2 DEBUG nova.network.neutron [-] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:31:52 compute-0 podman[244017]: 2025-10-02 12:31:52.298317199 +0000 UTC m=+0.064125451 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, vcs-type=git, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., io.openshift.expose-services=, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. 
This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., distribution-scope=public, vendor=Red Hat, Inc., url=https://catalog.redhat.com/en/search?searchType=containers, version=9.6, io.buildah.version=1.33.7, config_id=edpm, managed_by=edpm_ansible, release=1755695350, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, maintainer=Red Hat, Inc., com.redhat.component=ubi9-minimal-container, architecture=x86_64, io.openshift.tags=minimal rhel9, build-date=2025-08-20T13:12:41, container_name=openstack_network_exporter, name=ubi9-minimal)
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.303 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:52 compute-0 podman[244019]: 2025-10-02 12:31:52.31226492 +0000 UTC m=+0.070125485 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, container_name=multipathd, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible, org.label-schema.build-date=20251001)
Oct 02 12:31:52 compute-0 neutron-haproxy-ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9[243367]: [NOTICE]   (243371) : haproxy version is 2.8.14-c23fe91
Oct 02 12:31:52 compute-0 neutron-haproxy-ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9[243367]: [NOTICE]   (243371) : path to executable is /usr/sbin/haproxy
Oct 02 12:31:52 compute-0 neutron-haproxy-ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9[243367]: [WARNING]  (243371) : Exiting Master process...
Oct 02 12:31:52 compute-0 neutron-haproxy-ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9[243367]: [WARNING]  (243371) : Exiting Master process...
Oct 02 12:31:52 compute-0 neutron-haproxy-ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9[243367]: [ALERT]    (243371) : Current worker (243373) exited with code 143 (Terminated)
Oct 02 12:31:52 compute-0 neutron-haproxy-ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9[243367]: [WARNING]  (243371) : All workers exited. Exiting... (0)
Oct 02 12:31:52 compute-0 systemd[1]: libpod-eb2de2288d02d59280fecc1a1e9072a9d6d42b7966c9cf78e670077fed333e10.scope: Deactivated successfully.
Oct 02 12:31:52 compute-0 conmon[243367]: conmon eb2de2288d02d59280fe <nwarn>: Failed to open cgroups file: /sys/fs/cgroup/machine.slice/libpod-eb2de2288d02d59280fecc1a1e9072a9d6d42b7966c9cf78e670077fed333e10.scope/container/memory.events
Oct 02 12:31:52 compute-0 podman[244075]: 2025-10-02 12:31:52.370337745 +0000 UTC m=+0.049550144 container died eb2de2288d02d59280fecc1a1e9072a9d6d42b7966c9cf78e670077fed333e10 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.384 2 DEBUG nova.compute.manager [req-20efdee8-cb33-49a0-9f14-949396b632ba req-df35391d-62b7-46e4-8b01-d50499453832 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Received event network-vif-unplugged-2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.384 2 DEBUG oslo_concurrency.lockutils [req-20efdee8-cb33-49a0-9f14-949396b632ba req-df35391d-62b7-46e4-8b01-d50499453832 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.384 2 DEBUG oslo_concurrency.lockutils [req-20efdee8-cb33-49a0-9f14-949396b632ba req-df35391d-62b7-46e4-8b01-d50499453832 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.385 2 DEBUG oslo_concurrency.lockutils [req-20efdee8-cb33-49a0-9f14-949396b632ba req-df35391d-62b7-46e4-8b01-d50499453832 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.385 2 DEBUG nova.compute.manager [req-20efdee8-cb33-49a0-9f14-949396b632ba req-df35391d-62b7-46e4-8b01-d50499453832 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] No waiting events found dispatching network-vif-unplugged-2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.386 2 DEBUG nova.compute.manager [req-20efdee8-cb33-49a0-9f14-949396b632ba req-df35391d-62b7-46e4-8b01-d50499453832 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Received event network-vif-unplugged-2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2 for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:31:52 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-eb2de2288d02d59280fecc1a1e9072a9d6d42b7966c9cf78e670077fed333e10-userdata-shm.mount: Deactivated successfully.
Oct 02 12:31:52 compute-0 systemd[1]: var-lib-containers-storage-overlay-30eba00810e4e957780d6f714d17e0d17b29c9aab4905d0f5a3e6f907cc35099-merged.mount: Deactivated successfully.
Oct 02 12:31:52 compute-0 podman[244075]: 2025-10-02 12:31:52.409300527 +0000 UTC m=+0.088512896 container cleanup eb2de2288d02d59280fecc1a1e9072a9d6d42b7966c9cf78e670077fed333e10 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2)
Oct 02 12:31:52 compute-0 systemd[1]: libpod-conmon-eb2de2288d02d59280fecc1a1e9072a9d6d42b7966c9cf78e670077fed333e10.scope: Deactivated successfully.
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.488 2 DEBUG nova.compute.manager [req-b21c754b-b932-49db-bc43-7a4e057336a9 req-5f0e42f3-f058-48dc-ae01-2b2e4ec1cfaf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Received event network-changed-3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.488 2 DEBUG nova.compute.manager [req-b21c754b-b932-49db-bc43-7a4e057336a9 req-5f0e42f3-f058-48dc-ae01-2b2e4ec1cfaf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Refreshing instance network info cache due to event network-changed-3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.489 2 DEBUG oslo_concurrency.lockutils [req-b21c754b-b932-49db-bc43-7a4e057336a9 req-5f0e42f3-f058-48dc-ae01-2b2e4ec1cfaf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.489 2 DEBUG oslo_concurrency.lockutils [req-b21c754b-b932-49db-bc43-7a4e057336a9 req-5f0e42f3-f058-48dc-ae01-2b2e4ec1cfaf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.489 2 DEBUG nova.network.neutron [req-b21c754b-b932-49db-bc43-7a4e057336a9 req-5f0e42f3-f058-48dc-ae01-2b2e4ec1cfaf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Refreshing network info cache for port 3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.499 2 DEBUG nova.compute.manager [req-ae8e5f55-3058-4bbb-930b-cb6949f69cdc req-56d3322b-7c98-40ea-a654-5b470530b1d0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Received event network-vif-unplugged-3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.500 2 DEBUG oslo_concurrency.lockutils [req-ae8e5f55-3058-4bbb-930b-cb6949f69cdc req-56d3322b-7c98-40ea-a654-5b470530b1d0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.500 2 DEBUG oslo_concurrency.lockutils [req-ae8e5f55-3058-4bbb-930b-cb6949f69cdc req-56d3322b-7c98-40ea-a654-5b470530b1d0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.500 2 DEBUG oslo_concurrency.lockutils [req-ae8e5f55-3058-4bbb-930b-cb6949f69cdc req-56d3322b-7c98-40ea-a654-5b470530b1d0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.501 2 DEBUG nova.compute.manager [req-ae8e5f55-3058-4bbb-930b-cb6949f69cdc req-56d3322b-7c98-40ea-a654-5b470530b1d0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] No waiting events found dispatching network-vif-unplugged-3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.501 2 DEBUG nova.compute.manager [req-ae8e5f55-3058-4bbb-930b-cb6949f69cdc req-56d3322b-7c98-40ea-a654-5b470530b1d0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Received event network-vif-unplugged-3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:31:52 compute-0 podman[244107]: 2025-10-02 12:31:52.869983047 +0000 UTC m=+0.441093017 container remove eb2de2288d02d59280fecc1a1e9072a9d6d42b7966c9cf78e670077fed333e10 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, tcib_managed=true, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001)
Oct 02 12:31:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:52.877 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d2554bc5-d8db-47ae-a427-492f62ba0129]: (4, ('Thu Oct  2 12:31:52 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9 (eb2de2288d02d59280fecc1a1e9072a9d6d42b7966c9cf78e670077fed333e10)\neb2de2288d02d59280fecc1a1e9072a9d6d42b7966c9cf78e670077fed333e10\nThu Oct  2 12:31:52 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9 (eb2de2288d02d59280fecc1a1e9072a9d6d42b7966c9cf78e670077fed333e10)\neb2de2288d02d59280fecc1a1e9072a9d6d42b7966c9cf78e670077fed333e10\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:52.880 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6fb36709-634a-4c33-8c63-31b8b2e0c08b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:52.881 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tape2520108-90, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.883 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:52 compute-0 kernel: tape2520108-90: left promiscuous mode
Oct 02 12:31:52 compute-0 nova_compute[192079]: 2025-10-02 12:31:52.895 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:52.900 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[743c6b87-a47d-4a0e-9a32-caef765388b2]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:52.937 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[70ffa752-b907-4db0-a312-f2b16e7f56fe]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:52.940 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1d0bd49d-06dd-4db4-9a13-e29b22328894]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:52.960 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[bb7d5266-67f7-4cc1-bf19-5224bf484874]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 623889, 'reachable_time': 42514, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 244121, 'error': None, 'target': 'ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:52.963 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-e2520108-9d67-4d82-a7a0-ba429a88c3c9 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:31:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:31:52.963 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[cadf5db7-32d6-4392-a90a-527b71affc09]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:31:52 compute-0 systemd[1]: run-netns-ovnmeta\x2de2520108\x2d9d67\x2d4d82\x2da7a0\x2dba429a88c3c9.mount: Deactivated successfully.
Oct 02 12:31:54 compute-0 nova_compute[192079]: 2025-10-02 12:31:54.756 2 DEBUG nova.compute.manager [req-c2336114-474e-4a91-82e1-ee2faffd75a6 req-b2838b89-ea1b-4b93-b879-68316e03f253 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Received event network-vif-plugged-3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:31:54 compute-0 nova_compute[192079]: 2025-10-02 12:31:54.758 2 DEBUG oslo_concurrency.lockutils [req-c2336114-474e-4a91-82e1-ee2faffd75a6 req-b2838b89-ea1b-4b93-b879-68316e03f253 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:31:54 compute-0 nova_compute[192079]: 2025-10-02 12:31:54.758 2 DEBUG oslo_concurrency.lockutils [req-c2336114-474e-4a91-82e1-ee2faffd75a6 req-b2838b89-ea1b-4b93-b879-68316e03f253 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:31:54 compute-0 nova_compute[192079]: 2025-10-02 12:31:54.759 2 DEBUG oslo_concurrency.lockutils [req-c2336114-474e-4a91-82e1-ee2faffd75a6 req-b2838b89-ea1b-4b93-b879-68316e03f253 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:31:54 compute-0 nova_compute[192079]: 2025-10-02 12:31:54.759 2 DEBUG nova.compute.manager [req-c2336114-474e-4a91-82e1-ee2faffd75a6 req-b2838b89-ea1b-4b93-b879-68316e03f253 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] No waiting events found dispatching network-vif-plugged-3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:31:54 compute-0 nova_compute[192079]: 2025-10-02 12:31:54.760 2 WARNING nova.compute.manager [req-c2336114-474e-4a91-82e1-ee2faffd75a6 req-b2838b89-ea1b-4b93-b879-68316e03f253 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Received unexpected event network-vif-plugged-3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa for instance with vm_state active and task_state deleting.
Oct 02 12:31:54 compute-0 nova_compute[192079]: 2025-10-02 12:31:54.763 2 DEBUG nova.network.neutron [req-b21c754b-b932-49db-bc43-7a4e057336a9 req-5f0e42f3-f058-48dc-ae01-2b2e4ec1cfaf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Updated VIF entry in instance network info cache for port 3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:31:54 compute-0 nova_compute[192079]: 2025-10-02 12:31:54.764 2 DEBUG nova.network.neutron [req-b21c754b-b932-49db-bc43-7a4e057336a9 req-5f0e42f3-f058-48dc-ae01-2b2e4ec1cfaf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Updating instance_info_cache with network_info: [{"id": "3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa", "address": "fa:16:3e:88:b5:22", "network": {"id": "299c5e6b-f8b7-4cca-810b-a9b2539f4246", "bridge": "br-int", "label": "tempest-network-smoke--136367555", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap3aa2fa6d-ae", "ovs_interfaceid": "3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2", "address": "fa:16:3e:7a:3b:f1", "network": {"id": "e2520108-9d67-4d82-a7a0-ba429a88c3c9", "bridge": "br-int", "label": "tempest-network-smoke--1271498361", "subnets": [{"cidr": "2001:db8::/64", "dns": [], "gateway": {"address": "2001:db8::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8::f816:3eff:fe7a:3bf1", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}, {"cidr": "2001:db8:0:1::/64", "dns": [], "gateway": 
{"address": "2001:db8:0:1::", "type": "gateway", "version": 6, "meta": {}}, "ips": [{"address": "2001:db8:0:1:f816:3eff:fe7a:3bf1", "type": "fixed", "version": 6, "meta": {}, "floating_ips": []}], "routes": [], "version": 6, "meta": {"enable_dhcp": true, "ipv6_address_mode": "slaac"}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap2cddfcab-eb", "ovs_interfaceid": "2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:31:54 compute-0 nova_compute[192079]: 2025-10-02 12:31:54.778 2 DEBUG nova.compute.manager [req-cc60cdab-dbae-4052-bf8f-dc09fcf64894 req-8e530fa6-c943-4bf0-8a88-174155134488 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Received event network-vif-plugged-2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:31:54 compute-0 nova_compute[192079]: 2025-10-02 12:31:54.779 2 DEBUG oslo_concurrency.lockutils [req-cc60cdab-dbae-4052-bf8f-dc09fcf64894 req-8e530fa6-c943-4bf0-8a88-174155134488 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:31:54 compute-0 nova_compute[192079]: 2025-10-02 12:31:54.779 2 DEBUG oslo_concurrency.lockutils [req-cc60cdab-dbae-4052-bf8f-dc09fcf64894 req-8e530fa6-c943-4bf0-8a88-174155134488 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:31:54 compute-0 nova_compute[192079]: 2025-10-02 12:31:54.780 2 DEBUG oslo_concurrency.lockutils [req-cc60cdab-dbae-4052-bf8f-dc09fcf64894 req-8e530fa6-c943-4bf0-8a88-174155134488 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:31:54 compute-0 nova_compute[192079]: 2025-10-02 12:31:54.780 2 DEBUG nova.compute.manager [req-cc60cdab-dbae-4052-bf8f-dc09fcf64894 req-8e530fa6-c943-4bf0-8a88-174155134488 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] No waiting events found dispatching network-vif-plugged-2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:31:54 compute-0 nova_compute[192079]: 2025-10-02 12:31:54.780 2 WARNING nova.compute.manager [req-cc60cdab-dbae-4052-bf8f-dc09fcf64894 req-8e530fa6-c943-4bf0-8a88-174155134488 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Received unexpected event network-vif-plugged-2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2 for instance with vm_state active and task_state deleting.
Oct 02 12:31:54 compute-0 nova_compute[192079]: 2025-10-02 12:31:54.781 2 DEBUG nova.compute.manager [req-cc60cdab-dbae-4052-bf8f-dc09fcf64894 req-8e530fa6-c943-4bf0-8a88-174155134488 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Received event network-vif-deleted-2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:31:54 compute-0 nova_compute[192079]: 2025-10-02 12:31:54.781 2 INFO nova.compute.manager [req-cc60cdab-dbae-4052-bf8f-dc09fcf64894 req-8e530fa6-c943-4bf0-8a88-174155134488 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Neutron deleted interface 2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2; detaching it from the instance and deleting it from the info cache
Oct 02 12:31:54 compute-0 nova_compute[192079]: 2025-10-02 12:31:54.782 2 DEBUG nova.network.neutron [req-cc60cdab-dbae-4052-bf8f-dc09fcf64894 req-8e530fa6-c943-4bf0-8a88-174155134488 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Updating instance_info_cache with network_info: [{"id": "3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa", "address": "fa:16:3e:88:b5:22", "network": {"id": "299c5e6b-f8b7-4cca-810b-a9b2539f4246", "bridge": "br-int", "label": "tempest-network-smoke--136367555", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.247", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fd801958556f4c8aab047ecdef6b5ee8", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap3aa2fa6d-ae", "ovs_interfaceid": "3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:31:54 compute-0 nova_compute[192079]: 2025-10-02 12:31:54.859 2 DEBUG oslo_concurrency.lockutils [req-b21c754b-b932-49db-bc43-7a4e057336a9 req-5f0e42f3-f058-48dc-ae01-2b2e4ec1cfaf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-d59f518a-8b98-4c8c-b8f7-19f6b6809c6d" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:31:54 compute-0 nova_compute[192079]: 2025-10-02 12:31:54.891 2 DEBUG nova.compute.manager [req-cc60cdab-dbae-4052-bf8f-dc09fcf64894 req-8e530fa6-c943-4bf0-8a88-174155134488 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Detach interface failed, port_id=2cddfcab-eba7-4a20-a7df-8f7f5bbaadf2, reason: Instance d59f518a-8b98-4c8c-b8f7-19f6b6809c6d could not be found. _process_instance_vif_deleted_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10882
Oct 02 12:31:55 compute-0 nova_compute[192079]: 2025-10-02 12:31:55.242 2 DEBUG nova.network.neutron [-] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:31:55 compute-0 nova_compute[192079]: 2025-10-02 12:31:55.302 2 INFO nova.compute.manager [-] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Took 3.03 seconds to deallocate network for instance.
Oct 02 12:31:55 compute-0 nova_compute[192079]: 2025-10-02 12:31:55.429 2 DEBUG oslo_concurrency.lockutils [None req-2c36345d-d54c-4b23-b5e3-1188a5191225 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:31:55 compute-0 nova_compute[192079]: 2025-10-02 12:31:55.430 2 DEBUG oslo_concurrency.lockutils [None req-2c36345d-d54c-4b23-b5e3-1188a5191225 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:31:55 compute-0 nova_compute[192079]: 2025-10-02 12:31:55.513 2 DEBUG nova.compute.provider_tree [None req-2c36345d-d54c-4b23-b5e3-1188a5191225 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:31:55 compute-0 nova_compute[192079]: 2025-10-02 12:31:55.549 2 DEBUG nova.scheduler.client.report [None req-2c36345d-d54c-4b23-b5e3-1188a5191225 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:31:55 compute-0 nova_compute[192079]: 2025-10-02 12:31:55.617 2 DEBUG oslo_concurrency.lockutils [None req-2c36345d-d54c-4b23-b5e3-1188a5191225 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.187s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:31:55 compute-0 nova_compute[192079]: 2025-10-02 12:31:55.686 2 INFO nova.scheduler.client.report [None req-2c36345d-d54c-4b23-b5e3-1188a5191225 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Deleted allocations for instance d59f518a-8b98-4c8c-b8f7-19f6b6809c6d
Oct 02 12:31:55 compute-0 nova_compute[192079]: 2025-10-02 12:31:55.838 2 DEBUG oslo_concurrency.lockutils [None req-2c36345d-d54c-4b23-b5e3-1188a5191225 97ce9f1898484e0e9a1f7c84a9f0dfe3 fd801958556f4c8aab047ecdef6b5ee8 - - default default] Lock "d59f518a-8b98-4c8c-b8f7-19f6b6809c6d" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 4.075s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:31:56 compute-0 nova_compute[192079]: 2025-10-02 12:31:56.900 2 DEBUG nova.compute.manager [req-61821f0f-592b-406a-9fee-0e48a2059994 req-e238bab6-2fb9-41f6-ae52-8b0aa22fb472 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Received event network-vif-deleted-3aa2fa6d-ae2a-4242-ac6f-cd557f5072aa external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:31:57 compute-0 nova_compute[192079]: 2025-10-02 12:31:57.139 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:57 compute-0 nova_compute[192079]: 2025-10-02 12:31:57.304 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:31:58 compute-0 podman[244123]: 2025-10-02 12:31:58.160960379 +0000 UTC m=+0.067472554 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, tcib_managed=true, config_id=iscsid, container_name=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001)
Oct 02 12:31:58 compute-0 podman[244122]: 2025-10-02 12:31:58.174962791 +0000 UTC m=+0.074829583 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>)
Oct 02 12:31:58 compute-0 nova_compute[192079]: 2025-10-02 12:31:58.225 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:00 compute-0 nova_compute[192079]: 2025-10-02 12:32:00.419 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:00 compute-0 nova_compute[192079]: 2025-10-02 12:32:00.647 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:01 compute-0 nova_compute[192079]: 2025-10-02 12:32:01.300 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759408306.298695, 607e9c3a-4079-4261-b2c6-3cc47ae67173 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:32:01 compute-0 nova_compute[192079]: 2025-10-02 12:32:01.300 2 INFO nova.compute.manager [-] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] VM Stopped (Lifecycle Event)
Oct 02 12:32:01 compute-0 nova_compute[192079]: 2025-10-02 12:32:01.336 2 DEBUG nova.compute.manager [None req-26f8c365-67fc-4f04-a085-6ecf2411dec5 - - - - - -] [instance: 607e9c3a-4079-4261-b2c6-3cc47ae67173] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:32:02 compute-0 nova_compute[192079]: 2025-10-02 12:32:02.143 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:02.232 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:32:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:02.233 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:32:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:02.234 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:32:02 compute-0 nova_compute[192079]: 2025-10-02 12:32:02.306 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:02 compute-0 nova_compute[192079]: 2025-10-02 12:32:02.742 2 DEBUG oslo_concurrency.lockutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:32:02 compute-0 nova_compute[192079]: 2025-10-02 12:32:02.743 2 DEBUG oslo_concurrency.lockutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:32:02 compute-0 nova_compute[192079]: 2025-10-02 12:32:02.787 2 DEBUG nova.compute.manager [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:32:02 compute-0 nova_compute[192079]: 2025-10-02 12:32:02.954 2 DEBUG oslo_concurrency.lockutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:32:02 compute-0 nova_compute[192079]: 2025-10-02 12:32:02.955 2 DEBUG oslo_concurrency.lockutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:32:02 compute-0 nova_compute[192079]: 2025-10-02 12:32:02.964 2 DEBUG nova.virt.hardware [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:32:02 compute-0 nova_compute[192079]: 2025-10-02 12:32:02.964 2 INFO nova.compute.claims [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:32:03 compute-0 nova_compute[192079]: 2025-10-02 12:32:03.411 2 DEBUG nova.compute.provider_tree [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:32:03 compute-0 nova_compute[192079]: 2025-10-02 12:32:03.446 2 DEBUG nova.scheduler.client.report [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:32:03 compute-0 nova_compute[192079]: 2025-10-02 12:32:03.517 2 DEBUG oslo_concurrency.lockutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.561s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:32:03 compute-0 nova_compute[192079]: 2025-10-02 12:32:03.518 2 DEBUG nova.compute.manager [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:32:03 compute-0 nova_compute[192079]: 2025-10-02 12:32:03.630 2 DEBUG nova.compute.manager [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:32:03 compute-0 nova_compute[192079]: 2025-10-02 12:32:03.631 2 DEBUG nova.network.neutron [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:32:03 compute-0 nova_compute[192079]: 2025-10-02 12:32:03.653 2 INFO nova.virt.libvirt.driver [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:32:03 compute-0 nova_compute[192079]: 2025-10-02 12:32:03.672 2 DEBUG nova.compute.manager [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:32:04 compute-0 nova_compute[192079]: 2025-10-02 12:32:04.002 2 DEBUG nova.compute.manager [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:32:04 compute-0 nova_compute[192079]: 2025-10-02 12:32:04.004 2 DEBUG nova.virt.libvirt.driver [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:32:04 compute-0 nova_compute[192079]: 2025-10-02 12:32:04.004 2 INFO nova.virt.libvirt.driver [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Creating image(s)
Oct 02 12:32:04 compute-0 nova_compute[192079]: 2025-10-02 12:32:04.005 2 DEBUG oslo_concurrency.lockutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "/var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:32:04 compute-0 nova_compute[192079]: 2025-10-02 12:32:04.005 2 DEBUG oslo_concurrency.lockutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "/var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:32:04 compute-0 nova_compute[192079]: 2025-10-02 12:32:04.006 2 DEBUG oslo_concurrency.lockutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "/var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:32:04 compute-0 nova_compute[192079]: 2025-10-02 12:32:04.020 2 DEBUG oslo_concurrency.processutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:32:04 compute-0 nova_compute[192079]: 2025-10-02 12:32:04.083 2 DEBUG oslo_concurrency.processutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.063s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:32:04 compute-0 nova_compute[192079]: 2025-10-02 12:32:04.084 2 DEBUG oslo_concurrency.lockutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:32:04 compute-0 nova_compute[192079]: 2025-10-02 12:32:04.085 2 DEBUG oslo_concurrency.lockutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:32:04 compute-0 nova_compute[192079]: 2025-10-02 12:32:04.096 2 DEBUG oslo_concurrency.processutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:32:04 compute-0 nova_compute[192079]: 2025-10-02 12:32:04.124 2 DEBUG nova.policy [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1faa7e121a0e43ad8cb4ae5b2cfcc6a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '76c7dd40d83e4e3ca71abbebf57921b6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:32:04 compute-0 nova_compute[192079]: 2025-10-02 12:32:04.167 2 DEBUG oslo_concurrency.processutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.071s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:32:04 compute-0 nova_compute[192079]: 2025-10-02 12:32:04.169 2 DEBUG oslo_concurrency.processutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:32:04 compute-0 nova_compute[192079]: 2025-10-02 12:32:04.280 2 DEBUG oslo_concurrency.processutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk 1073741824" returned: 0 in 0.111s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:32:04 compute-0 nova_compute[192079]: 2025-10-02 12:32:04.282 2 DEBUG oslo_concurrency.lockutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.197s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:32:04 compute-0 nova_compute[192079]: 2025-10-02 12:32:04.282 2 DEBUG oslo_concurrency.processutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:32:04 compute-0 nova_compute[192079]: 2025-10-02 12:32:04.345 2 DEBUG oslo_concurrency.processutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.063s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:32:04 compute-0 nova_compute[192079]: 2025-10-02 12:32:04.347 2 DEBUG nova.virt.disk.api [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Checking if we can resize image /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:32:04 compute-0 nova_compute[192079]: 2025-10-02 12:32:04.347 2 DEBUG oslo_concurrency.processutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:32:04 compute-0 nova_compute[192079]: 2025-10-02 12:32:04.411 2 DEBUG oslo_concurrency.processutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk --force-share --output=json" returned: 0 in 0.063s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:32:04 compute-0 nova_compute[192079]: 2025-10-02 12:32:04.412 2 DEBUG nova.virt.disk.api [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Cannot resize image /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:32:04 compute-0 nova_compute[192079]: 2025-10-02 12:32:04.413 2 DEBUG nova.objects.instance [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'migration_context' on Instance uuid a0a5e290-69d3-4ce0-9533-6df7cf06c204 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:32:04 compute-0 nova_compute[192079]: 2025-10-02 12:32:04.432 2 DEBUG nova.virt.libvirt.driver [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:32:04 compute-0 nova_compute[192079]: 2025-10-02 12:32:04.433 2 DEBUG nova.virt.libvirt.driver [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Ensure instance console log exists: /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:32:04 compute-0 nova_compute[192079]: 2025-10-02 12:32:04.434 2 DEBUG oslo_concurrency.lockutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:32:04 compute-0 nova_compute[192079]: 2025-10-02 12:32:04.434 2 DEBUG oslo_concurrency.lockutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:32:04 compute-0 nova_compute[192079]: 2025-10-02 12:32:04.434 2 DEBUG oslo_concurrency.lockutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:32:05 compute-0 nova_compute[192079]: 2025-10-02 12:32:05.178 2 DEBUG nova.network.neutron [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Successfully created port: d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:32:06 compute-0 podman[244181]: 2025-10-02 12:32:06.156915389 +0000 UTC m=+0.067072277 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_metadata_agent, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, container_name=ovn_metadata_agent)
Oct 02 12:32:06 compute-0 podman[244183]: 2025-10-02 12:32:06.169296286 +0000 UTC m=+0.073998835 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 12:32:06 compute-0 podman[244182]: 2025-10-02 12:32:06.195109748 +0000 UTC m=+0.103683933 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, container_name=ovn_controller, tcib_managed=true)
Oct 02 12:32:06 compute-0 nova_compute[192079]: 2025-10-02 12:32:06.911 2 DEBUG nova.network.neutron [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Successfully updated port: d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:32:06 compute-0 nova_compute[192079]: 2025-10-02 12:32:06.932 2 DEBUG oslo_concurrency.lockutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "refresh_cache-a0a5e290-69d3-4ce0-9533-6df7cf06c204" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:32:06 compute-0 nova_compute[192079]: 2025-10-02 12:32:06.932 2 DEBUG oslo_concurrency.lockutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquired lock "refresh_cache-a0a5e290-69d3-4ce0-9533-6df7cf06c204" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:32:06 compute-0 nova_compute[192079]: 2025-10-02 12:32:06.932 2 DEBUG nova.network.neutron [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:32:07 compute-0 nova_compute[192079]: 2025-10-02 12:32:07.045 2 DEBUG nova.compute.manager [req-05cbc3ee-218d-4e0c-952c-4e3767c00a49 req-375fad1a-a001-4fba-8e9c-d0a370f1e3b3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Received event network-changed-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:32:07 compute-0 nova_compute[192079]: 2025-10-02 12:32:07.045 2 DEBUG nova.compute.manager [req-05cbc3ee-218d-4e0c-952c-4e3767c00a49 req-375fad1a-a001-4fba-8e9c-d0a370f1e3b3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Refreshing instance network info cache due to event network-changed-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:32:07 compute-0 nova_compute[192079]: 2025-10-02 12:32:07.046 2 DEBUG oslo_concurrency.lockutils [req-05cbc3ee-218d-4e0c-952c-4e3767c00a49 req-375fad1a-a001-4fba-8e9c-d0a370f1e3b3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-a0a5e290-69d3-4ce0-9533-6df7cf06c204" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:32:07 compute-0 nova_compute[192079]: 2025-10-02 12:32:07.075 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759408312.0745776, d59f518a-8b98-4c8c-b8f7-19f6b6809c6d => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:32:07 compute-0 nova_compute[192079]: 2025-10-02 12:32:07.076 2 INFO nova.compute.manager [-] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] VM Stopped (Lifecycle Event)
Oct 02 12:32:07 compute-0 nova_compute[192079]: 2025-10-02 12:32:07.097 2 DEBUG nova.compute.manager [None req-d98d48b2-6c4c-49ac-a033-c81689dfd30a - - - - - -] [instance: d59f518a-8b98-4c8c-b8f7-19f6b6809c6d] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:32:07 compute-0 nova_compute[192079]: 2025-10-02 12:32:07.146 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:07 compute-0 nova_compute[192079]: 2025-10-02 12:32:07.191 2 DEBUG nova.network.neutron [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:32:07 compute-0 nova_compute[192079]: 2025-10-02 12:32:07.308 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.147 2 DEBUG nova.network.neutron [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Updating instance_info_cache with network_info: [{"id": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "address": "fa:16:3e:5b:11:fa", "network": {"id": "574af896-2fe0-426b-87eb-93e7ba659a79", "bridge": "br-int", "label": "tempest-network-smoke--1599149382", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd3cfd23d-04", "ovs_interfaceid": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.172 2 DEBUG oslo_concurrency.lockutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Releasing lock "refresh_cache-a0a5e290-69d3-4ce0-9533-6df7cf06c204" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.172 2 DEBUG nova.compute.manager [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Instance network_info: |[{"id": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "address": "fa:16:3e:5b:11:fa", "network": {"id": "574af896-2fe0-426b-87eb-93e7ba659a79", "bridge": "br-int", "label": "tempest-network-smoke--1599149382", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd3cfd23d-04", "ovs_interfaceid": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.173 2 DEBUG oslo_concurrency.lockutils [req-05cbc3ee-218d-4e0c-952c-4e3767c00a49 req-375fad1a-a001-4fba-8e9c-d0a370f1e3b3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-a0a5e290-69d3-4ce0-9533-6df7cf06c204" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.173 2 DEBUG nova.network.neutron [req-05cbc3ee-218d-4e0c-952c-4e3767c00a49 req-375fad1a-a001-4fba-8e9c-d0a370f1e3b3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Refreshing network info cache for port d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.175 2 DEBUG nova.virt.libvirt.driver [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Start _get_guest_xml network_info=[{"id": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "address": "fa:16:3e:5b:11:fa", "network": {"id": "574af896-2fe0-426b-87eb-93e7ba659a79", "bridge": "br-int", "label": "tempest-network-smoke--1599149382", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd3cfd23d-04", "ovs_interfaceid": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.179 2 WARNING nova.virt.libvirt.driver [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.184 2 DEBUG nova.virt.libvirt.host [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.185 2 DEBUG nova.virt.libvirt.host [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.191 2 DEBUG nova.virt.libvirt.host [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.192 2 DEBUG nova.virt.libvirt.host [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.193 2 DEBUG nova.virt.libvirt.driver [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.193 2 DEBUG nova.virt.hardware [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.193 2 DEBUG nova.virt.hardware [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.193 2 DEBUG nova.virt.hardware [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.194 2 DEBUG nova.virt.hardware [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.194 2 DEBUG nova.virt.hardware [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.194 2 DEBUG nova.virt.hardware [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.194 2 DEBUG nova.virt.hardware [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.195 2 DEBUG nova.virt.hardware [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.195 2 DEBUG nova.virt.hardware [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.195 2 DEBUG nova.virt.hardware [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.196 2 DEBUG nova.virt.hardware [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.198 2 DEBUG nova.virt.libvirt.vif [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:32:01Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestNetworkAdvancedServerOps-server-1138303133',display_name='tempest-TestNetworkAdvancedServerOps-server-1138303133',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkadvancedserverops-server-1138303133',id=141,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBHZXGbdK2BsOqbaUUYa7XicNW8CV2qKZlqbls3huCtjRaED+CX4fIrUFMW9LtPV4B8c3A6SEeNHwH0MqJ7ttz8hSi/AjegEpdsa/s/FufbgfsmCM0TXi1lTy8HcOB+sZ9g==',key_name='tempest-TestNetworkAdvancedServerOps-1099183543',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='76c7dd40d83e4e3ca71abbebf57921b6',ramdisk_id='',reservation_id='r-iebv0qns',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestNetworkAdvancedServerOps-597114071',owner_user_name='tempest-TestNetworkAdvancedServerOps-597114071-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:32:03Z,user_data=None,user_id='1faa7e121a0e43ad8cb4ae5b2cfcc6a2',uuid=a0a5e290-69d3-4ce0-9533-6df7cf06c204,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "address": "fa:16:3e:5b:11:fa", "network": {"id": "574af896-2fe0-426b-87eb-93e7ba659a79", "bridge": "br-int", "label": "tempest-network-smoke--1599149382", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": 
true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd3cfd23d-04", "ovs_interfaceid": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.199 2 DEBUG nova.network.os_vif_util [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converting VIF {"id": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "address": "fa:16:3e:5b:11:fa", "network": {"id": "574af896-2fe0-426b-87eb-93e7ba659a79", "bridge": "br-int", "label": "tempest-network-smoke--1599149382", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd3cfd23d-04", "ovs_interfaceid": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.199 2 DEBUG nova.network.os_vif_util [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:5b:11:fa,bridge_name='br-int',has_traffic_filtering=True,id=d3cfd23d-04b0-4f18-b20d-14f75e69b2a3,network=Network(574af896-2fe0-426b-87eb-93e7ba659a79),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd3cfd23d-04') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.200 2 DEBUG nova.objects.instance [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'pci_devices' on Instance uuid a0a5e290-69d3-4ce0-9533-6df7cf06c204 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.216 2 DEBUG nova.virt.libvirt.driver [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:32:09 compute-0 nova_compute[192079]:   <uuid>a0a5e290-69d3-4ce0-9533-6df7cf06c204</uuid>
Oct 02 12:32:09 compute-0 nova_compute[192079]:   <name>instance-0000008d</name>
Oct 02 12:32:09 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:32:09 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:32:09 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:32:09 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:       <nova:name>tempest-TestNetworkAdvancedServerOps-server-1138303133</nova:name>
Oct 02 12:32:09 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:32:09</nova:creationTime>
Oct 02 12:32:09 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:32:09 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:32:09 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:32:09 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:32:09 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:32:09 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:32:09 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:32:09 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:32:09 compute-0 nova_compute[192079]:         <nova:user uuid="1faa7e121a0e43ad8cb4ae5b2cfcc6a2">tempest-TestNetworkAdvancedServerOps-597114071-project-member</nova:user>
Oct 02 12:32:09 compute-0 nova_compute[192079]:         <nova:project uuid="76c7dd40d83e4e3ca71abbebf57921b6">tempest-TestNetworkAdvancedServerOps-597114071</nova:project>
Oct 02 12:32:09 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:32:09 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:32:09 compute-0 nova_compute[192079]:         <nova:port uuid="d3cfd23d-04b0-4f18-b20d-14f75e69b2a3">
Oct 02 12:32:09 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.5" ipVersion="4"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:32:09 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:32:09 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:32:09 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <system>
Oct 02 12:32:09 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:32:09 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:32:09 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:32:09 compute-0 nova_compute[192079]:       <entry name="serial">a0a5e290-69d3-4ce0-9533-6df7cf06c204</entry>
Oct 02 12:32:09 compute-0 nova_compute[192079]:       <entry name="uuid">a0a5e290-69d3-4ce0-9533-6df7cf06c204</entry>
Oct 02 12:32:09 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     </system>
Oct 02 12:32:09 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:32:09 compute-0 nova_compute[192079]:   <os>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:   </os>
Oct 02 12:32:09 compute-0 nova_compute[192079]:   <features>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:   </features>
Oct 02 12:32:09 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:32:09 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:32:09 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:32:09 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:32:09 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk.config"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:32:09 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:5b:11:fa"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:       <target dev="tapd3cfd23d-04"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:32:09 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/console.log" append="off"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <video>
Oct 02 12:32:09 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     </video>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:32:09 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:32:09 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:32:09 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:32:09 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:32:09 compute-0 nova_compute[192079]: </domain>
Oct 02 12:32:09 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.217 2 DEBUG nova.compute.manager [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Preparing to wait for external event network-vif-plugged-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.218 2 DEBUG oslo_concurrency.lockutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.218 2 DEBUG oslo_concurrency.lockutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.219 2 DEBUG oslo_concurrency.lockutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.219 2 DEBUG nova.virt.libvirt.vif [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:32:01Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestNetworkAdvancedServerOps-server-1138303133',display_name='tempest-TestNetworkAdvancedServerOps-server-1138303133',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkadvancedserverops-server-1138303133',id=141,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBHZXGbdK2BsOqbaUUYa7XicNW8CV2qKZlqbls3huCtjRaED+CX4fIrUFMW9LtPV4B8c3A6SEeNHwH0MqJ7ttz8hSi/AjegEpdsa/s/FufbgfsmCM0TXi1lTy8HcOB+sZ9g==',key_name='tempest-TestNetworkAdvancedServerOps-1099183543',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='76c7dd40d83e4e3ca71abbebf57921b6',ramdisk_id='',reservation_id='r-iebv0qns',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestNetworkAdvancedServerOps-597114071',owner_user_name='tempest-TestNetworkAdvancedServerOps-597114071-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:32:03Z,user_data=None,user_id='1faa7e121a0e43ad8cb4ae5b2cfcc6a2',uuid=a0a5e290-69d3-4ce0-9533-6df7cf06c204,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "address": "fa:16:3e:5b:11:fa", "network": {"id": "574af896-2fe0-426b-87eb-93e7ba659a79", "bridge": "br-int", "label": "tempest-network-smoke--1599149382", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd3cfd23d-04", "ovs_interfaceid": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.219 2 DEBUG nova.network.os_vif_util [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converting VIF {"id": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "address": "fa:16:3e:5b:11:fa", "network": {"id": "574af896-2fe0-426b-87eb-93e7ba659a79", "bridge": "br-int", "label": "tempest-network-smoke--1599149382", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd3cfd23d-04", "ovs_interfaceid": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.220 2 DEBUG nova.network.os_vif_util [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:5b:11:fa,bridge_name='br-int',has_traffic_filtering=True,id=d3cfd23d-04b0-4f18-b20d-14f75e69b2a3,network=Network(574af896-2fe0-426b-87eb-93e7ba659a79),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd3cfd23d-04') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.220 2 DEBUG os_vif [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:5b:11:fa,bridge_name='br-int',has_traffic_filtering=True,id=d3cfd23d-04b0-4f18-b20d-14f75e69b2a3,network=Network(574af896-2fe0-426b-87eb-93e7ba659a79),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd3cfd23d-04') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.221 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.221 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.222 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.224 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.224 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapd3cfd23d-04, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.224 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapd3cfd23d-04, col_values=(('external_ids', {'iface-id': 'd3cfd23d-04b0-4f18-b20d-14f75e69b2a3', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:5b:11:fa', 'vm-uuid': 'a0a5e290-69d3-4ce0-9533-6df7cf06c204'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:32:09 compute-0 NetworkManager[51160]: <info>  [1759408329.2704] manager: (tapd3cfd23d-04): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/262)
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.270 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.273 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.276 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.276 2 INFO os_vif [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:5b:11:fa,bridge_name='br-int',has_traffic_filtering=True,id=d3cfd23d-04b0-4f18-b20d-14f75e69b2a3,network=Network(574af896-2fe0-426b-87eb-93e7ba659a79),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd3cfd23d-04')
Oct 02 12:32:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:09.413 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=36, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=35) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:32:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:09.414 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 1 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.414 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.515 2 DEBUG nova.virt.libvirt.driver [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.515 2 DEBUG nova.virt.libvirt.driver [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.516 2 DEBUG nova.virt.libvirt.driver [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] No VIF found with MAC fa:16:3e:5b:11:fa, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:32:09 compute-0 nova_compute[192079]: 2025-10-02 12:32:09.517 2 INFO nova.virt.libvirt.driver [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Using config drive
Oct 02 12:32:10 compute-0 nova_compute[192079]: 2025-10-02 12:32:10.287 2 INFO nova.virt.libvirt.driver [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Creating config drive at /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk.config
Oct 02 12:32:10 compute-0 nova_compute[192079]: 2025-10-02 12:32:10.291 2 DEBUG oslo_concurrency.processutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp2gno43jd execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:10.416 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '36'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:32:10 compute-0 nova_compute[192079]: 2025-10-02 12:32:10.419 2 DEBUG oslo_concurrency.processutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp2gno43jd" returned: 0 in 0.127s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:32:10 compute-0 kernel: tapd3cfd23d-04: entered promiscuous mode
Oct 02 12:32:10 compute-0 NetworkManager[51160]: <info>  [1759408330.4771] manager: (tapd3cfd23d-04): new Tun device (/org/freedesktop/NetworkManager/Devices/263)
Oct 02 12:32:10 compute-0 ovn_controller[94336]: 2025-10-02T12:32:10Z|00535|binding|INFO|Claiming lport d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 for this chassis.
Oct 02 12:32:10 compute-0 ovn_controller[94336]: 2025-10-02T12:32:10Z|00536|binding|INFO|d3cfd23d-04b0-4f18-b20d-14f75e69b2a3: Claiming fa:16:3e:5b:11:fa 10.100.0.5
Oct 02 12:32:10 compute-0 nova_compute[192079]: 2025-10-02 12:32:10.477 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:10 compute-0 nova_compute[192079]: 2025-10-02 12:32:10.481 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:10 compute-0 nova_compute[192079]: 2025-10-02 12:32:10.483 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:10 compute-0 systemd-udevd[244266]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:32:10 compute-0 systemd-machined[152150]: New machine qemu-68-instance-0000008d.
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:10.512 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:5b:11:fa 10.100.0.5'], port_security=['fa:16:3e:5b:11:fa 10.100.0.5'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.5/28', 'neutron:device_id': 'a0a5e290-69d3-4ce0-9533-6df7cf06c204', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-574af896-2fe0-426b-87eb-93e7ba659a79', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '76c7dd40d83e4e3ca71abbebf57921b6', 'neutron:revision_number': '2', 'neutron:security_group_ids': '24011c1c-187e-42ed-b64a-06bc43fab21b', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=1b106f5d-d8f5-4be2-bc91-66bf2d8dc8a3, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=d3cfd23d-04b0-4f18-b20d-14f75e69b2a3) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:10.513 103294 INFO neutron.agent.ovn.metadata.agent [-] Port d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 in datapath 574af896-2fe0-426b-87eb-93e7ba659a79 bound to our chassis
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:10.515 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 574af896-2fe0-426b-87eb-93e7ba659a79
Oct 02 12:32:10 compute-0 NetworkManager[51160]: <info>  [1759408330.5177] device (tapd3cfd23d-04): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:32:10 compute-0 NetworkManager[51160]: <info>  [1759408330.5183] device (tapd3cfd23d-04): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:10.526 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[22c15ce7-e3ce-424a-b7ed-87b1481dbb21]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:10.527 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap574af896-21 in ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:10.529 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap574af896-20 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:10.529 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[80994413-2f6a-4fab-9fef-31b8edd6faa0]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:10.529 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[661d777c-521c-4422-b90b-bdaf744165bc]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:10 compute-0 systemd[1]: Started Virtual Machine qemu-68-instance-0000008d.
Oct 02 12:32:10 compute-0 nova_compute[192079]: 2025-10-02 12:32:10.541 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:10.542 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[90b6a9d6-5825-4537-b357-afb47156a81d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:10 compute-0 ovn_controller[94336]: 2025-10-02T12:32:10Z|00537|binding|INFO|Setting lport d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 ovn-installed in OVS
Oct 02 12:32:10 compute-0 ovn_controller[94336]: 2025-10-02T12:32:10Z|00538|binding|INFO|Setting lport d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 up in Southbound
Oct 02 12:32:10 compute-0 nova_compute[192079]: 2025-10-02 12:32:10.544 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:10.564 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b8935794-1f9e-4f96-bffd-1471e0cab2b9]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:10.591 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[6f2b95b9-9db5-411c-ada4-23d2514d035a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:10 compute-0 NetworkManager[51160]: <info>  [1759408330.5970] manager: (tap574af896-20): new Veth device (/org/freedesktop/NetworkManager/Devices/264)
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:10.596 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[de869f76-d5ca-47e3-961c-358065757b69]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:10 compute-0 systemd-udevd[244268]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:10.625 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[1af01550-8fcb-4cfd-9596-9ac669e61b59]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:10.628 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[483e65bb-b9ed-4d3c-ae77-36ae90cda2c7]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:10 compute-0 NetworkManager[51160]: <info>  [1759408330.6473] device (tap574af896-20): carrier: link connected
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:10.652 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[91f919c7-08d4-4088-911f-16175adc6100]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:10.667 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[490e6c1e-8d79-4c94-86ec-0e6d6d1f84a0]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap574af896-21'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:aa:4f:2a'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 171], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 632827, 'reachable_time': 32963, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 244299, 'error': None, 'target': 'ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:10.686 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3800de0d-6b91-473c-853a-399793e345b3]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:feaa:4f2a'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 632827, 'tstamp': 632827}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 244300, 'error': None, 'target': 'ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:10.701 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[736e14d6-ace7-457a-a9a8-b96fe07af27a]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap574af896-21'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:aa:4f:2a'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 171], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 632827, 'reachable_time': 32963, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 244301, 'error': None, 'target': 'ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:10.730 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d090a149-5c85-4cd6-82c3-7d4a7f20c9eb]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:10.779 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3a26688f-607a-4661-a1f6-2d5e03426cbf]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:10.780 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap574af896-20, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:10.781 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:10.781 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap574af896-20, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:32:10 compute-0 NetworkManager[51160]: <info>  [1759408330.7835] manager: (tap574af896-20): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/265)
Oct 02 12:32:10 compute-0 nova_compute[192079]: 2025-10-02 12:32:10.783 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:10 compute-0 kernel: tap574af896-20: entered promiscuous mode
Oct 02 12:32:10 compute-0 nova_compute[192079]: 2025-10-02 12:32:10.786 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:10.787 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap574af896-20, col_values=(('external_ids', {'iface-id': '312f0f58-e800-4204-8cd3-57c9c3245ec8'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:32:10 compute-0 nova_compute[192079]: 2025-10-02 12:32:10.788 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:10 compute-0 ovn_controller[94336]: 2025-10-02T12:32:10Z|00539|binding|INFO|Releasing lport 312f0f58-e800-4204-8cd3-57c9c3245ec8 from this chassis (sb_readonly=0)
Oct 02 12:32:10 compute-0 nova_compute[192079]: 2025-10-02 12:32:10.789 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:10.790 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/574af896-2fe0-426b-87eb-93e7ba659a79.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/574af896-2fe0-426b-87eb-93e7ba659a79.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:10.794 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[707d881e-7505-4704-9e9c-802c672d11e3]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:10.795 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-574af896-2fe0-426b-87eb-93e7ba659a79
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/574af896-2fe0-426b-87eb-93e7ba659a79.pid.haproxy
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 574af896-2fe0-426b-87eb-93e7ba659a79
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:32:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:10.796 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79', 'env', 'PROCESS_TAG=haproxy-574af896-2fe0-426b-87eb-93e7ba659a79', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/574af896-2fe0-426b-87eb-93e7ba659a79.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:32:10 compute-0 nova_compute[192079]: 2025-10-02 12:32:10.800 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.090 2 DEBUG nova.compute.manager [req-822caef9-dd82-4062-9fdb-878a59345a4f req-1db4237c-1c0c-498a-82f9-9fbfbd997595 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Received event network-vif-plugged-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.091 2 DEBUG oslo_concurrency.lockutils [req-822caef9-dd82-4062-9fdb-878a59345a4f req-1db4237c-1c0c-498a-82f9-9fbfbd997595 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.091 2 DEBUG oslo_concurrency.lockutils [req-822caef9-dd82-4062-9fdb-878a59345a4f req-1db4237c-1c0c-498a-82f9-9fbfbd997595 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.092 2 DEBUG oslo_concurrency.lockutils [req-822caef9-dd82-4062-9fdb-878a59345a4f req-1db4237c-1c0c-498a-82f9-9fbfbd997595 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.092 2 DEBUG nova.compute.manager [req-822caef9-dd82-4062-9fdb-878a59345a4f req-1db4237c-1c0c-498a-82f9-9fbfbd997595 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Processing event network-vif-plugged-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:32:11 compute-0 podman[244340]: 2025-10-02 12:32:11.14306886 +0000 UTC m=+0.058959226 container create 2ad8bf2ee1898d6e3502f0f12f9ce759a9d0b30068f65f244580e08d6361ea33 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3, tcib_managed=true)
Oct 02 12:32:11 compute-0 systemd[1]: Started libpod-conmon-2ad8bf2ee1898d6e3502f0f12f9ce759a9d0b30068f65f244580e08d6361ea33.scope.
Oct 02 12:32:11 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:32:11 compute-0 podman[244340]: 2025-10-02 12:32:11.115250132 +0000 UTC m=+0.031140528 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:32:11 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/160a6739c43e88719ab2e763db65cc4505fd77dc004f6d20bfe8b05d739636c7/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:32:11 compute-0 podman[244340]: 2025-10-02 12:32:11.228406082 +0000 UTC m=+0.144296468 container init 2ad8bf2ee1898d6e3502f0f12f9ce759a9d0b30068f65f244580e08d6361ea33 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79, io.buildah.version=1.41.3, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:32:11 compute-0 podman[244340]: 2025-10-02 12:32:11.233574173 +0000 UTC m=+0.149464539 container start 2ad8bf2ee1898d6e3502f0f12f9ce759a9d0b30068f65f244580e08d6361ea33 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79, tcib_managed=true, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001)
Oct 02 12:32:11 compute-0 neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79[244355]: [NOTICE]   (244359) : New worker (244361) forked
Oct 02 12:32:11 compute-0 neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79[244355]: [NOTICE]   (244359) : Loading success.
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.290 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408331.2897959, a0a5e290-69d3-4ce0-9533-6df7cf06c204 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.290 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] VM Started (Lifecycle Event)
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.293 2 DEBUG nova.compute.manager [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.295 2 DEBUG nova.virt.libvirt.driver [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.299 2 DEBUG nova.network.neutron [req-05cbc3ee-218d-4e0c-952c-4e3767c00a49 req-375fad1a-a001-4fba-8e9c-d0a370f1e3b3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Updated VIF entry in instance network info cache for port d3cfd23d-04b0-4f18-b20d-14f75e69b2a3. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.299 2 DEBUG nova.network.neutron [req-05cbc3ee-218d-4e0c-952c-4e3767c00a49 req-375fad1a-a001-4fba-8e9c-d0a370f1e3b3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Updating instance_info_cache with network_info: [{"id": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "address": "fa:16:3e:5b:11:fa", "network": {"id": "574af896-2fe0-426b-87eb-93e7ba659a79", "bridge": "br-int", "label": "tempest-network-smoke--1599149382", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd3cfd23d-04", "ovs_interfaceid": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.301 2 INFO nova.virt.libvirt.driver [-] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Instance spawned successfully.
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.301 2 DEBUG nova.virt.libvirt.driver [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.327 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.328 2 DEBUG oslo_concurrency.lockutils [req-05cbc3ee-218d-4e0c-952c-4e3767c00a49 req-375fad1a-a001-4fba-8e9c-d0a370f1e3b3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-a0a5e290-69d3-4ce0-9533-6df7cf06c204" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.331 2 DEBUG nova.virt.libvirt.driver [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.332 2 DEBUG nova.virt.libvirt.driver [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.332 2 DEBUG nova.virt.libvirt.driver [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.332 2 DEBUG nova.virt.libvirt.driver [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.333 2 DEBUG nova.virt.libvirt.driver [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.333 2 DEBUG nova.virt.libvirt.driver [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.338 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.385 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.386 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408331.2905645, a0a5e290-69d3-4ce0-9533-6df7cf06c204 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.387 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] VM Paused (Lifecycle Event)
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.411 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.414 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408331.2948678, a0a5e290-69d3-4ce0-9533-6df7cf06c204 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.414 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] VM Resumed (Lifecycle Event)
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.434 2 INFO nova.compute.manager [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Took 7.43 seconds to spawn the instance on the hypervisor.
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.435 2 DEBUG nova.compute.manager [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.442 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.445 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.478 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.551 2 INFO nova.compute.manager [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Took 8.66 seconds to build instance.
Oct 02 12:32:11 compute-0 nova_compute[192079]: 2025-10-02 12:32:11.574 2 DEBUG oslo_concurrency.lockutils [None req-bb3e2931-de77-45ac-b3c2-4a6638b0a369 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 8.831s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:32:12 compute-0 nova_compute[192079]: 2025-10-02 12:32:12.310 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:13 compute-0 nova_compute[192079]: 2025-10-02 12:32:13.187 2 DEBUG nova.compute.manager [req-5209ee87-8088-4ff9-a626-47e4016f11e1 req-b5e637e9-505e-4c17-a928-b24febb9c512 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Received event network-vif-plugged-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:32:13 compute-0 nova_compute[192079]: 2025-10-02 12:32:13.188 2 DEBUG oslo_concurrency.lockutils [req-5209ee87-8088-4ff9-a626-47e4016f11e1 req-b5e637e9-505e-4c17-a928-b24febb9c512 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:32:13 compute-0 nova_compute[192079]: 2025-10-02 12:32:13.188 2 DEBUG oslo_concurrency.lockutils [req-5209ee87-8088-4ff9-a626-47e4016f11e1 req-b5e637e9-505e-4c17-a928-b24febb9c512 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:32:13 compute-0 nova_compute[192079]: 2025-10-02 12:32:13.188 2 DEBUG oslo_concurrency.lockutils [req-5209ee87-8088-4ff9-a626-47e4016f11e1 req-b5e637e9-505e-4c17-a928-b24febb9c512 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:32:13 compute-0 nova_compute[192079]: 2025-10-02 12:32:13.189 2 DEBUG nova.compute.manager [req-5209ee87-8088-4ff9-a626-47e4016f11e1 req-b5e637e9-505e-4c17-a928-b24febb9c512 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] No waiting events found dispatching network-vif-plugged-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:32:13 compute-0 nova_compute[192079]: 2025-10-02 12:32:13.189 2 WARNING nova.compute.manager [req-5209ee87-8088-4ff9-a626-47e4016f11e1 req-b5e637e9-505e-4c17-a928-b24febb9c512 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Received unexpected event network-vif-plugged-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 for instance with vm_state active and task_state None.
Oct 02 12:32:14 compute-0 nova_compute[192079]: 2025-10-02 12:32:14.271 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:15 compute-0 nova_compute[192079]: 2025-10-02 12:32:15.080 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:15 compute-0 NetworkManager[51160]: <info>  [1759408335.0806] manager: (patch-br-int-to-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/266)
Oct 02 12:32:15 compute-0 NetworkManager[51160]: <info>  [1759408335.0819] manager: (patch-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d-to-br-int): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/267)
Oct 02 12:32:15 compute-0 nova_compute[192079]: 2025-10-02 12:32:15.152 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:15 compute-0 ovn_controller[94336]: 2025-10-02T12:32:15Z|00540|binding|INFO|Releasing lport 312f0f58-e800-4204-8cd3-57c9c3245ec8 from this chassis (sb_readonly=0)
Oct 02 12:32:15 compute-0 nova_compute[192079]: 2025-10-02 12:32:15.172 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:15 compute-0 nova_compute[192079]: 2025-10-02 12:32:15.682 2 DEBUG nova.compute.manager [req-eef5600d-19cd-4415-bce1-088048b74532 req-686fb278-90e0-40e8-92ea-06ac333387e1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Received event network-changed-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:32:15 compute-0 nova_compute[192079]: 2025-10-02 12:32:15.682 2 DEBUG nova.compute.manager [req-eef5600d-19cd-4415-bce1-088048b74532 req-686fb278-90e0-40e8-92ea-06ac333387e1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Refreshing instance network info cache due to event network-changed-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:32:15 compute-0 nova_compute[192079]: 2025-10-02 12:32:15.683 2 DEBUG oslo_concurrency.lockutils [req-eef5600d-19cd-4415-bce1-088048b74532 req-686fb278-90e0-40e8-92ea-06ac333387e1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-a0a5e290-69d3-4ce0-9533-6df7cf06c204" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:32:15 compute-0 nova_compute[192079]: 2025-10-02 12:32:15.683 2 DEBUG oslo_concurrency.lockutils [req-eef5600d-19cd-4415-bce1-088048b74532 req-686fb278-90e0-40e8-92ea-06ac333387e1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-a0a5e290-69d3-4ce0-9533-6df7cf06c204" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:32:15 compute-0 nova_compute[192079]: 2025-10-02 12:32:15.683 2 DEBUG nova.network.neutron [req-eef5600d-19cd-4415-bce1-088048b74532 req-686fb278-90e0-40e8-92ea-06ac333387e1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Refreshing network info cache for port d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:32:17 compute-0 podman[244371]: 2025-10-02 12:32:17.157942807 +0000 UTC m=+0.068053713 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, container_name=ceilometer_agent_compute, 
org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:32:17 compute-0 nova_compute[192079]: 2025-10-02 12:32:17.349 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:17 compute-0 nova_compute[192079]: 2025-10-02 12:32:17.612 2 DEBUG nova.network.neutron [req-eef5600d-19cd-4415-bce1-088048b74532 req-686fb278-90e0-40e8-92ea-06ac333387e1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Updated VIF entry in instance network info cache for port d3cfd23d-04b0-4f18-b20d-14f75e69b2a3. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:32:17 compute-0 nova_compute[192079]: 2025-10-02 12:32:17.613 2 DEBUG nova.network.neutron [req-eef5600d-19cd-4415-bce1-088048b74532 req-686fb278-90e0-40e8-92ea-06ac333387e1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Updating instance_info_cache with network_info: [{"id": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "address": "fa:16:3e:5b:11:fa", "network": {"id": "574af896-2fe0-426b-87eb-93e7ba659a79", "bridge": "br-int", "label": "tempest-network-smoke--1599149382", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd3cfd23d-04", "ovs_interfaceid": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:32:17 compute-0 nova_compute[192079]: 2025-10-02 12:32:17.655 2 DEBUG oslo_concurrency.lockutils [req-eef5600d-19cd-4415-bce1-088048b74532 req-686fb278-90e0-40e8-92ea-06ac333387e1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-a0a5e290-69d3-4ce0-9533-6df7cf06c204" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:32:17 compute-0 nova_compute[192079]: 2025-10-02 12:32:17.979 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:18 compute-0 nova_compute[192079]: 2025-10-02 12:32:18.458 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:19 compute-0 nova_compute[192079]: 2025-10-02 12:32:19.273 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:20 compute-0 nova_compute[192079]: 2025-10-02 12:32:20.949 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:22 compute-0 nova_compute[192079]: 2025-10-02 12:32:22.351 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:23 compute-0 podman[244391]: 2025-10-02 12:32:23.19025236 +0000 UTC m=+0.097905296 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, distribution-scope=public, name=ubi9-minimal, release=1755695350, url=https://catalog.redhat.com/en/search?searchType=containers, config_id=edpm, vendor=Red Hat, Inc., com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, build-date=2025-08-20T13:12:41, com.redhat.component=ubi9-minimal-container, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, container_name=openstack_network_exporter, managed_by=edpm_ansible, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., version=9.6, architecture=x86_64, maintainer=Red Hat, Inc., io.buildah.version=1.33.7, io.openshift.expose-services=, io.openshift.tags=minimal rhel9, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vcs-type=git)
Oct 02 12:32:23 compute-0 podman[244392]: 2025-10-02 12:32:23.208541868 +0000 UTC m=+0.103110748 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=multipathd, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_managed=true, container_name=multipathd, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']})
Oct 02 12:32:24 compute-0 nova_compute[192079]: 2025-10-02 12:32:24.277 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:24 compute-0 nova_compute[192079]: 2025-10-02 12:32:24.471 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:24 compute-0 ovn_controller[94336]: 2025-10-02T12:32:24Z|00055|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:5b:11:fa 10.100.0.5
Oct 02 12:32:24 compute-0 ovn_controller[94336]: 2025-10-02T12:32:24Z|00056|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:5b:11:fa 10.100.0.5
Oct 02 12:32:25 compute-0 sshd-session[244447]: Connection closed by 45.148.10.240 port 54696
Oct 02 12:32:26 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:26.252 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:d0:76:2a 2001:db8:0:1:f816:3eff:fed0:762a 2001:db8::f816:3eff:fed0:762a'], port_security=[], type=localport, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': ''}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '2001:db8:0:1:f816:3eff:fed0:762a/64 2001:db8::f816:3eff:fed0:762a/64', 'neutron:device_id': 'ovnmeta-f55e0845-fc62-481d-a70d-8546faf2b8fb', 'neutron:device_owner': 'network:distributed', 'neutron:mtu': '', 'neutron:network_name': 'neutron-f55e0845-fc62-481d-a70d-8546faf2b8fb', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'neutron:revision_number': '3', 'neutron:security_group_ids': '', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=512667a6-6958-4dd6-8891-fcda7d607ab5, chassis=[], tunnel_key=1, gateway_chassis=[], requested_chassis=[], logical_port=763e1f51-8560-461a-a2f3-3c284c8e5a17) old=Port_Binding(mac=['fa:16:3e:d0:76:2a 2001:db8::f816:3eff:fed0:762a'], external_ids={'neutron:cidrs': '2001:db8::f816:3eff:fed0:762a/64', 'neutron:device_id': 'ovnmeta-f55e0845-fc62-481d-a70d-8546faf2b8fb', 'neutron:device_owner': 'network:distributed', 'neutron:mtu': '', 'neutron:network_name': 'neutron-f55e0845-fc62-481d-a70d-8546faf2b8fb', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'neutron:revision_number': '2', 'neutron:security_group_ids': '', 'neutron:subnet_pool_addr_scope4': '', 
'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:32:26 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:26.256 103294 INFO neutron.agent.ovn.metadata.agent [-] Metadata Port 763e1f51-8560-461a-a2f3-3c284c8e5a17 in datapath f55e0845-fc62-481d-a70d-8546faf2b8fb updated
Oct 02 12:32:26 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:26.259 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network f55e0845-fc62-481d-a70d-8546faf2b8fb, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:32:26 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:26.261 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[678ce0e3-e0e8-40b7-9b05-f2446e6762ba]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:27 compute-0 nova_compute[192079]: 2025-10-02 12:32:27.352 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:29 compute-0 podman[244449]: 2025-10-02 12:32:29.143976984 +0000 UTC m=+0.051897353 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, container_name=iscsid, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0)
Oct 02 12:32:29 compute-0 podman[244448]: 2025-10-02 12:32:29.151226752 +0000 UTC m=+0.055498382 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:32:29 compute-0 nova_compute[192079]: 2025-10-02 12:32:29.280 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:30 compute-0 nova_compute[192079]: 2025-10-02 12:32:30.995 2 INFO nova.compute.manager [None req-21549fe5-e3ac-4ec1-825a-168ffbabe18d 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Get console output
Oct 02 12:32:31 compute-0 nova_compute[192079]: 2025-10-02 12:32:31.000 55 INFO nova.privsep.libvirt [-] Ignored error while reading from instance console pty: can't concat NoneType to bytes
Oct 02 12:32:32 compute-0 nova_compute[192079]: 2025-10-02 12:32:32.354 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:33 compute-0 nova_compute[192079]: 2025-10-02 12:32:33.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:32:33 compute-0 nova_compute[192079]: 2025-10-02 12:32:33.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:32:34 compute-0 nova_compute[192079]: 2025-10-02 12:32:34.283 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:36 compute-0 nova_compute[192079]: 2025-10-02 12:32:36.106 2 DEBUG nova.compute.manager [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Stashing vm_state: active _prep_resize /usr/lib/python3.9/site-packages/nova/compute/manager.py:5560
Oct 02 12:32:36 compute-0 nova_compute[192079]: 2025-10-02 12:32:36.572 2 DEBUG oslo_concurrency.lockutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:32:36 compute-0 nova_compute[192079]: 2025-10-02 12:32:36.573 2 DEBUG oslo_concurrency.lockutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:32:36 compute-0 nova_compute[192079]: 2025-10-02 12:32:36.641 2 DEBUG nova.objects.instance [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'pci_requests' on Instance uuid a0a5e290-69d3-4ce0-9533-6df7cf06c204 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:32:36 compute-0 nova_compute[192079]: 2025-10-02 12:32:36.676 2 DEBUG nova.virt.hardware [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:32:36 compute-0 nova_compute[192079]: 2025-10-02 12:32:36.677 2 INFO nova.compute.claims [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:32:36 compute-0 nova_compute[192079]: 2025-10-02 12:32:36.677 2 DEBUG nova.objects.instance [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'resources' on Instance uuid a0a5e290-69d3-4ce0-9533-6df7cf06c204 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:32:36 compute-0 nova_compute[192079]: 2025-10-02 12:32:36.713 2 DEBUG nova.objects.instance [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'pci_devices' on Instance uuid a0a5e290-69d3-4ce0-9533-6df7cf06c204 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:32:36 compute-0 nova_compute[192079]: 2025-10-02 12:32:36.923 2 INFO nova.compute.resource_tracker [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Updating resource usage from migration 837d1691-f875-4534-b0af-f97e8fe7f055
Oct 02 12:32:37 compute-0 nova_compute[192079]: 2025-10-02 12:32:37.031 2 DEBUG nova.compute.provider_tree [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:32:37 compute-0 nova_compute[192079]: 2025-10-02 12:32:37.064 2 DEBUG nova.scheduler.client.report [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:32:37 compute-0 podman[244489]: 2025-10-02 12:32:37.131334512 +0000 UTC m=+0.048477512 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, 
tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.license=GPLv2)
Oct 02 12:32:37 compute-0 podman[244491]: 2025-10-02 12:32:37.147706178 +0000 UTC m=+0.059364878 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']})
Oct 02 12:32:37 compute-0 nova_compute[192079]: 2025-10-02 12:32:37.145 2 DEBUG oslo_concurrency.lockutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 0.572s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:32:37 compute-0 nova_compute[192079]: 2025-10-02 12:32:37.145 2 INFO nova.compute.manager [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Migrating
Oct 02 12:32:37 compute-0 podman[244490]: 2025-10-02 12:32:37.180157601 +0000 UTC m=+0.090086994 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, config_id=ovn_controller, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, container_name=ovn_controller, managed_by=edpm_ansible, org.label-schema.schema-version=1.0)
Oct 02 12:32:37 compute-0 nova_compute[192079]: 2025-10-02 12:32:37.255 2 DEBUG oslo_concurrency.lockutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "refresh_cache-a0a5e290-69d3-4ce0-9533-6df7cf06c204" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:32:37 compute-0 nova_compute[192079]: 2025-10-02 12:32:37.255 2 DEBUG oslo_concurrency.lockutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquired lock "refresh_cache-a0a5e290-69d3-4ce0-9533-6df7cf06c204" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:32:37 compute-0 nova_compute[192079]: 2025-10-02 12:32:37.255 2 DEBUG nova.network.neutron [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:32:37 compute-0 nova_compute[192079]: 2025-10-02 12:32:37.355 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:37 compute-0 nova_compute[192079]: 2025-10-02 12:32:37.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:32:37 compute-0 nova_compute[192079]: 2025-10-02 12:32:37.713 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:32:37 compute-0 nova_compute[192079]: 2025-10-02 12:32:37.714 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:32:37 compute-0 nova_compute[192079]: 2025-10-02 12:32:37.714 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:32:37 compute-0 nova_compute[192079]: 2025-10-02 12:32:37.714 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:32:37 compute-0 nova_compute[192079]: 2025-10-02 12:32:37.843 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:32:37 compute-0 nova_compute[192079]: 2025-10-02 12:32:37.910 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk --force-share --output=json" returned: 0 in 0.067s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:32:37 compute-0 nova_compute[192079]: 2025-10-02 12:32:37.911 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:32:37 compute-0 nova_compute[192079]: 2025-10-02 12:32:37.969 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk --force-share --output=json" returned: 0 in 0.058s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:32:38 compute-0 nova_compute[192079]: 2025-10-02 12:32:38.129 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:32:38 compute-0 nova_compute[192079]: 2025-10-02 12:32:38.130 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5562MB free_disk=73.31092834472656GB free_vcpus=7 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:32:38 compute-0 nova_compute[192079]: 2025-10-02 12:32:38.130 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:32:38 compute-0 nova_compute[192079]: 2025-10-02 12:32:38.131 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:32:38 compute-0 nova_compute[192079]: 2025-10-02 12:32:38.218 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Applying migration context for instance a0a5e290-69d3-4ce0-9533-6df7cf06c204 as it has an incoming, in-progress migration 837d1691-f875-4534-b0af-f97e8fe7f055. Migration status is pre-migrating _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:950
Oct 02 12:32:38 compute-0 nova_compute[192079]: 2025-10-02 12:32:38.219 2 INFO nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Updating resource usage from migration 837d1691-f875-4534-b0af-f97e8fe7f055
Oct 02 12:32:38 compute-0 nova_compute[192079]: 2025-10-02 12:32:38.239 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Migration 837d1691-f875-4534-b0af-f97e8fe7f055 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1640
Oct 02 12:32:38 compute-0 nova_compute[192079]: 2025-10-02 12:32:38.239 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance a0a5e290-69d3-4ce0-9533-6df7cf06c204 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:32:38 compute-0 nova_compute[192079]: 2025-10-02 12:32:38.239 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 2 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:32:38 compute-0 nova_compute[192079]: 2025-10-02 12:32:38.240 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=832MB phys_disk=79GB used_disk=2GB total_vcpus=8 used_vcpus=2 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:32:38 compute-0 nova_compute[192079]: 2025-10-02 12:32:38.521 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:32:38 compute-0 nova_compute[192079]: 2025-10-02 12:32:38.549 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:32:38 compute-0 nova_compute[192079]: 2025-10-02 12:32:38.711 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:32:38 compute-0 nova_compute[192079]: 2025-10-02 12:32:38.711 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.581s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:32:39 compute-0 nova_compute[192079]: 2025-10-02 12:32:39.151 2 DEBUG nova.network.neutron [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Updating instance_info_cache with network_info: [{"id": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "address": "fa:16:3e:5b:11:fa", "network": {"id": "574af896-2fe0-426b-87eb-93e7ba659a79", "bridge": "br-int", "label": "tempest-network-smoke--1599149382", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd3cfd23d-04", "ovs_interfaceid": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:32:39 compute-0 nova_compute[192079]: 2025-10-02 12:32:39.248 2 DEBUG oslo_concurrency.lockutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Releasing lock "refresh_cache-a0a5e290-69d3-4ce0-9533-6df7cf06c204" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:32:39 compute-0 nova_compute[192079]: 2025-10-02 12:32:39.286 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:39 compute-0 nova_compute[192079]: 2025-10-02 12:32:39.713 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:32:39 compute-0 nova_compute[192079]: 2025-10-02 12:32:39.713 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:32:39 compute-0 nova_compute[192079]: 2025-10-02 12:32:39.905 2 DEBUG nova.virt.libvirt.driver [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Starting migrate_disk_and_power_off migrate_disk_and_power_off /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:11511
Oct 02 12:32:39 compute-0 nova_compute[192079]: 2025-10-02 12:32:39.909 2 DEBUG nova.virt.libvirt.driver [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Shutting down instance from state 1 _clean_shutdown /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4071
Oct 02 12:32:40 compute-0 nova_compute[192079]: 2025-10-02 12:32:40.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:32:40 compute-0 nova_compute[192079]: 2025-10-02 12:32:40.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:32:41 compute-0 nova_compute[192079]: 2025-10-02 12:32:41.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:32:42 compute-0 kernel: tapd3cfd23d-04 (unregistering): left promiscuous mode
Oct 02 12:32:42 compute-0 NetworkManager[51160]: <info>  [1759408362.0526] device (tapd3cfd23d-04): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:32:42 compute-0 ovn_controller[94336]: 2025-10-02T12:32:42Z|00541|binding|INFO|Releasing lport d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 from this chassis (sb_readonly=0)
Oct 02 12:32:42 compute-0 ovn_controller[94336]: 2025-10-02T12:32:42Z|00542|binding|INFO|Setting lport d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 down in Southbound
Oct 02 12:32:42 compute-0 ovn_controller[94336]: 2025-10-02T12:32:42Z|00543|binding|INFO|Removing iface tapd3cfd23d-04 ovn-installed in OVS
Oct 02 12:32:42 compute-0 nova_compute[192079]: 2025-10-02 12:32:42.064 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:42 compute-0 nova_compute[192079]: 2025-10-02 12:32:42.080 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:42 compute-0 systemd[1]: machine-qemu\x2d68\x2dinstance\x2d0000008d.scope: Deactivated successfully.
Oct 02 12:32:42 compute-0 systemd[1]: machine-qemu\x2d68\x2dinstance\x2d0000008d.scope: Consumed 14.351s CPU time.
Oct 02 12:32:42 compute-0 systemd-machined[152150]: Machine qemu-68-instance-0000008d terminated.
Oct 02 12:32:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:42.179 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:5b:11:fa 10.100.0.5'], port_security=['fa:16:3e:5b:11:fa 10.100.0.5'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.5/28', 'neutron:device_id': 'a0a5e290-69d3-4ce0-9533-6df7cf06c204', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-574af896-2fe0-426b-87eb-93e7ba659a79', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '76c7dd40d83e4e3ca71abbebf57921b6', 'neutron:revision_number': '4', 'neutron:security_group_ids': '24011c1c-187e-42ed-b64a-06bc43fab21b', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com', 'neutron:port_fip': '192.168.122.194'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=1b106f5d-d8f5-4be2-bc91-66bf2d8dc8a3, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=d3cfd23d-04b0-4f18-b20d-14f75e69b2a3) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:32:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:42.180 103294 INFO neutron.agent.ovn.metadata.agent [-] Port d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 in datapath 574af896-2fe0-426b-87eb-93e7ba659a79 unbound from our chassis
Oct 02 12:32:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:42.182 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 574af896-2fe0-426b-87eb-93e7ba659a79, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:32:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:42.183 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[741a3b1a-a6a4-420f-b455-b51f6665e0fd]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:42.183 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79 namespace which is not needed anymore
Oct 02 12:32:42 compute-0 neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79[244355]: [NOTICE]   (244359) : haproxy version is 2.8.14-c23fe91
Oct 02 12:32:42 compute-0 neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79[244355]: [NOTICE]   (244359) : path to executable is /usr/sbin/haproxy
Oct 02 12:32:42 compute-0 neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79[244355]: [WARNING]  (244359) : Exiting Master process...
Oct 02 12:32:42 compute-0 neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79[244355]: [WARNING]  (244359) : Exiting Master process...
Oct 02 12:32:42 compute-0 neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79[244355]: [ALERT]    (244359) : Current worker (244361) exited with code 143 (Terminated)
Oct 02 12:32:42 compute-0 neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79[244355]: [WARNING]  (244359) : All workers exited. Exiting... (0)
Oct 02 12:32:42 compute-0 systemd[1]: libpod-2ad8bf2ee1898d6e3502f0f12f9ce759a9d0b30068f65f244580e08d6361ea33.scope: Deactivated successfully.
Oct 02 12:32:42 compute-0 conmon[244355]: conmon 2ad8bf2ee1898d6e3502 <nwarn>: Failed to open cgroups file: /sys/fs/cgroup/machine.slice/libpod-2ad8bf2ee1898d6e3502f0f12f9ce759a9d0b30068f65f244580e08d6361ea33.scope/container/memory.events
Oct 02 12:32:42 compute-0 podman[244588]: 2025-10-02 12:32:42.31682191 +0000 UTC m=+0.048596414 container died 2ad8bf2ee1898d6e3502f0f12f9ce759a9d0b30068f65f244580e08d6361ea33 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0)
Oct 02 12:32:42 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-2ad8bf2ee1898d6e3502f0f12f9ce759a9d0b30068f65f244580e08d6361ea33-userdata-shm.mount: Deactivated successfully.
Oct 02 12:32:42 compute-0 systemd[1]: var-lib-containers-storage-overlay-160a6739c43e88719ab2e763db65cc4505fd77dc004f6d20bfe8b05d739636c7-merged.mount: Deactivated successfully.
Oct 02 12:32:42 compute-0 podman[244588]: 2025-10-02 12:32:42.35724141 +0000 UTC m=+0.089015904 container cleanup 2ad8bf2ee1898d6e3502f0f12f9ce759a9d0b30068f65f244580e08d6361ea33 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3)
Oct 02 12:32:42 compute-0 nova_compute[192079]: 2025-10-02 12:32:42.359 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:42 compute-0 systemd[1]: libpod-conmon-2ad8bf2ee1898d6e3502f0f12f9ce759a9d0b30068f65f244580e08d6361ea33.scope: Deactivated successfully.
Oct 02 12:32:42 compute-0 podman[244636]: 2025-10-02 12:32:42.423139575 +0000 UTC m=+0.041563263 container remove 2ad8bf2ee1898d6e3502f0f12f9ce759a9d0b30068f65f244580e08d6361ea33 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0)
Oct 02 12:32:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:42.431 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[28130cca-7510-4f98-9552-3445059d3a72]: (4, ('Thu Oct  2 12:32:42 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79 (2ad8bf2ee1898d6e3502f0f12f9ce759a9d0b30068f65f244580e08d6361ea33)\n2ad8bf2ee1898d6e3502f0f12f9ce759a9d0b30068f65f244580e08d6361ea33\nThu Oct  2 12:32:42 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79 (2ad8bf2ee1898d6e3502f0f12f9ce759a9d0b30068f65f244580e08d6361ea33)\n2ad8bf2ee1898d6e3502f0f12f9ce759a9d0b30068f65f244580e08d6361ea33\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:42.433 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[358cc029-039d-4151-8187-78da479dea43]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:42.434 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap574af896-20, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:32:42 compute-0 nova_compute[192079]: 2025-10-02 12:32:42.436 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:42 compute-0 kernel: tap574af896-20: left promiscuous mode
Oct 02 12:32:42 compute-0 nova_compute[192079]: 2025-10-02 12:32:42.452 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:42.456 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ec53057d-0d90-4295-b9b7-750bc816d67b]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:42.485 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[56358c5e-d36c-456e-9a45-aba6b49bd1ee]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:42.486 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6e0b8b1e-0f8b-46ce-be53-06450515ca01]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:42.503 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ed1a4295-e51f-40e0-87f2-2ba5b777d2ca]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 632821, 'reachable_time': 42185, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 244655, 'error': None, 'target': 'ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:42.506 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:32:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:42.506 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[56545ccb-9d23-4487-bea4-1b617f0aa0bd]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:42 compute-0 systemd[1]: run-netns-ovnmeta\x2d574af896\x2d2fe0\x2d426b\x2d87eb\x2d93e7ba659a79.mount: Deactivated successfully.
Oct 02 12:32:42 compute-0 nova_compute[192079]: 2025-10-02 12:32:42.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:32:42 compute-0 nova_compute[192079]: 2025-10-02 12:32:42.925 2 INFO nova.virt.libvirt.driver [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Instance shutdown successfully after 3 seconds.
Oct 02 12:32:42 compute-0 nova_compute[192079]: 2025-10-02 12:32:42.934 2 INFO nova.virt.libvirt.driver [-] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Instance destroyed successfully.
Oct 02 12:32:42 compute-0 nova_compute[192079]: 2025-10-02 12:32:42.936 2 DEBUG nova.virt.libvirt.vif [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:32:01Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestNetworkAdvancedServerOps-server-1138303133',display_name='tempest-TestNetworkAdvancedServerOps-server-1138303133',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkadvancedserverops-server-1138303133',id=141,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBHZXGbdK2BsOqbaUUYa7XicNW8CV2qKZlqbls3huCtjRaED+CX4fIrUFMW9LtPV4B8c3A6SEeNHwH0MqJ7ttz8hSi/AjegEpdsa/s/FufbgfsmCM0TXi1lTy8HcOB+sZ9g==',key_name='tempest-TestNetworkAdvancedServerOps-1099183543',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:32:11Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=MigrationContext,new_flavor=Flavor(2),node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='76c7dd40d83e4e3ca71abbebf57921b6',ramdisk_id='',reservation_id='r-iebv0qns',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=ServiceList,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',old_vm_state='active',owner_project_name='tempest-TestNetworkAdvancedServerOps-597114071',owner_user_name='tempest-TestNetworkAdvancedServerOps-597114071-project-member'},tags=<?>,task_state='resize_migrating',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:32:36Z,user_data=None,user_id='1faa7e121a0e43ad8cb4ae5b2cfcc6a2',uuid=a0a5e290-69d3-4ce0-9533-6df7cf06c204,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "address": "fa:16:3e:5b:11:fa", "network": {"id": "574af896-2fe0-426b-87eb-93e7ba659a79", "bridge": "br-int", "label": "tempest-network-smoke--1599149382", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": 
"10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.194", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-network-smoke--1599149382", "vif_mac": "fa:16:3e:5b:11:fa"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd3cfd23d-04", "ovs_interfaceid": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:32:42 compute-0 nova_compute[192079]: 2025-10-02 12:32:42.937 2 DEBUG nova.network.os_vif_util [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converting VIF {"id": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "address": "fa:16:3e:5b:11:fa", "network": {"id": "574af896-2fe0-426b-87eb-93e7ba659a79", "bridge": "br-int", "label": "tempest-network-smoke--1599149382", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.194", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-network-smoke--1599149382", "vif_mac": "fa:16:3e:5b:11:fa"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd3cfd23d-04", "ovs_interfaceid": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:32:42 compute-0 nova_compute[192079]: 2025-10-02 12:32:42.938 2 DEBUG nova.network.os_vif_util [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:5b:11:fa,bridge_name='br-int',has_traffic_filtering=True,id=d3cfd23d-04b0-4f18-b20d-14f75e69b2a3,network=Network(574af896-2fe0-426b-87eb-93e7ba659a79),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd3cfd23d-04') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:32:42 compute-0 nova_compute[192079]: 2025-10-02 12:32:42.939 2 DEBUG os_vif [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:5b:11:fa,bridge_name='br-int',has_traffic_filtering=True,id=d3cfd23d-04b0-4f18-b20d-14f75e69b2a3,network=Network(574af896-2fe0-426b-87eb-93e7ba659a79),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd3cfd23d-04') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:32:42 compute-0 nova_compute[192079]: 2025-10-02 12:32:42.943 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:42 compute-0 nova_compute[192079]: 2025-10-02 12:32:42.943 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapd3cfd23d-04, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:32:42 compute-0 nova_compute[192079]: 2025-10-02 12:32:42.945 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:42 compute-0 nova_compute[192079]: 2025-10-02 12:32:42.947 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:42 compute-0 nova_compute[192079]: 2025-10-02 12:32:42.951 2 INFO os_vif [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:5b:11:fa,bridge_name='br-int',has_traffic_filtering=True,id=d3cfd23d-04b0-4f18-b20d-14f75e69b2a3,network=Network(574af896-2fe0-426b-87eb-93e7ba659a79),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd3cfd23d-04')
Oct 02 12:32:42 compute-0 nova_compute[192079]: 2025-10-02 12:32:42.957 2 DEBUG oslo_concurrency.processutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:32:43 compute-0 nova_compute[192079]: 2025-10-02 12:32:43.054 2 DEBUG oslo_concurrency.processutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk --force-share --output=json" returned: 0 in 0.097s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:32:43 compute-0 nova_compute[192079]: 2025-10-02 12:32:43.056 2 DEBUG oslo_concurrency.processutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:32:43 compute-0 nova_compute[192079]: 2025-10-02 12:32:43.127 2 DEBUG oslo_concurrency.processutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk --force-share --output=json" returned: 0 in 0.071s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:32:43 compute-0 nova_compute[192079]: 2025-10-02 12:32:43.128 2 DEBUG oslo_concurrency.processutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): cp -r /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204_resize/disk /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:32:43 compute-0 nova_compute[192079]: 2025-10-02 12:32:43.164 2 DEBUG oslo_concurrency.processutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "cp -r /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204_resize/disk /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk" returned: 0 in 0.035s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:32:43 compute-0 nova_compute[192079]: 2025-10-02 12:32:43.165 2 DEBUG oslo_concurrency.processutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): cp -r /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204_resize/disk.config /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk.config execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:32:43 compute-0 nova_compute[192079]: 2025-10-02 12:32:43.198 2 DEBUG oslo_concurrency.processutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "cp -r /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204_resize/disk.config /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk.config" returned: 0 in 0.033s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:32:43 compute-0 nova_compute[192079]: 2025-10-02 12:32:43.200 2 DEBUG oslo_concurrency.processutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): cp -r /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204_resize/disk.info /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk.info execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:32:43 compute-0 nova_compute[192079]: 2025-10-02 12:32:43.243 2 DEBUG oslo_concurrency.processutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "cp -r /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204_resize/disk.info /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk.info" returned: 0 in 0.043s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:32:43 compute-0 nova_compute[192079]: 2025-10-02 12:32:43.889 2 DEBUG nova.network.neutron [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Port d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 binding to destination host compute-0.ctlplane.example.com is already ACTIVE migrate_instance_start /usr/lib/python3.9/site-packages/nova/network/neutron.py:3171
Oct 02 12:32:44 compute-0 nova_compute[192079]: 2025-10-02 12:32:43.999 2 DEBUG oslo_concurrency.lockutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:32:44 compute-0 nova_compute[192079]: 2025-10-02 12:32:44.000 2 DEBUG oslo_concurrency.lockutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:32:44 compute-0 nova_compute[192079]: 2025-10-02 12:32:44.000 2 DEBUG oslo_concurrency.lockutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:32:44 compute-0 nova_compute[192079]: 2025-10-02 12:32:44.199 2 DEBUG nova.compute.manager [req-0365d337-b3ac-4d2b-b19b-0c86a7ec92de req-24b5a3e3-c032-40c6-8fdd-2a1a033de32b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Received event network-vif-unplugged-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:32:44 compute-0 nova_compute[192079]: 2025-10-02 12:32:44.199 2 DEBUG oslo_concurrency.lockutils [req-0365d337-b3ac-4d2b-b19b-0c86a7ec92de req-24b5a3e3-c032-40c6-8fdd-2a1a033de32b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:32:44 compute-0 nova_compute[192079]: 2025-10-02 12:32:44.200 2 DEBUG oslo_concurrency.lockutils [req-0365d337-b3ac-4d2b-b19b-0c86a7ec92de req-24b5a3e3-c032-40c6-8fdd-2a1a033de32b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:32:44 compute-0 nova_compute[192079]: 2025-10-02 12:32:44.200 2 DEBUG oslo_concurrency.lockutils [req-0365d337-b3ac-4d2b-b19b-0c86a7ec92de req-24b5a3e3-c032-40c6-8fdd-2a1a033de32b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:32:44 compute-0 nova_compute[192079]: 2025-10-02 12:32:44.200 2 DEBUG nova.compute.manager [req-0365d337-b3ac-4d2b-b19b-0c86a7ec92de req-24b5a3e3-c032-40c6-8fdd-2a1a033de32b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] No waiting events found dispatching network-vif-unplugged-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:32:44 compute-0 nova_compute[192079]: 2025-10-02 12:32:44.200 2 WARNING nova.compute.manager [req-0365d337-b3ac-4d2b-b19b-0c86a7ec92de req-24b5a3e3-c032-40c6-8fdd-2a1a033de32b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Received unexpected event network-vif-unplugged-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 for instance with vm_state active and task_state resize_migrated.
Oct 02 12:32:44 compute-0 nova_compute[192079]: 2025-10-02 12:32:44.201 2 DEBUG nova.compute.manager [req-0365d337-b3ac-4d2b-b19b-0c86a7ec92de req-24b5a3e3-c032-40c6-8fdd-2a1a033de32b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Received event network-vif-plugged-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:32:44 compute-0 nova_compute[192079]: 2025-10-02 12:32:44.201 2 DEBUG oslo_concurrency.lockutils [req-0365d337-b3ac-4d2b-b19b-0c86a7ec92de req-24b5a3e3-c032-40c6-8fdd-2a1a033de32b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:32:44 compute-0 nova_compute[192079]: 2025-10-02 12:32:44.201 2 DEBUG oslo_concurrency.lockutils [req-0365d337-b3ac-4d2b-b19b-0c86a7ec92de req-24b5a3e3-c032-40c6-8fdd-2a1a033de32b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:32:44 compute-0 nova_compute[192079]: 2025-10-02 12:32:44.201 2 DEBUG oslo_concurrency.lockutils [req-0365d337-b3ac-4d2b-b19b-0c86a7ec92de req-24b5a3e3-c032-40c6-8fdd-2a1a033de32b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:32:44 compute-0 nova_compute[192079]: 2025-10-02 12:32:44.201 2 DEBUG nova.compute.manager [req-0365d337-b3ac-4d2b-b19b-0c86a7ec92de req-24b5a3e3-c032-40c6-8fdd-2a1a033de32b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] No waiting events found dispatching network-vif-plugged-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:32:44 compute-0 nova_compute[192079]: 2025-10-02 12:32:44.202 2 WARNING nova.compute.manager [req-0365d337-b3ac-4d2b-b19b-0c86a7ec92de req-24b5a3e3-c032-40c6-8fdd-2a1a033de32b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Received unexpected event network-vif-plugged-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 for instance with vm_state active and task_state resize_migrated.
Oct 02 12:32:44 compute-0 nova_compute[192079]: 2025-10-02 12:32:44.244 2 DEBUG oslo_concurrency.lockutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "refresh_cache-a0a5e290-69d3-4ce0-9533-6df7cf06c204" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:32:44 compute-0 nova_compute[192079]: 2025-10-02 12:32:44.245 2 DEBUG oslo_concurrency.lockutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquired lock "refresh_cache-a0a5e290-69d3-4ce0-9533-6df7cf06c204" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:32:44 compute-0 nova_compute[192079]: 2025-10-02 12:32:44.245 2 DEBUG nova.network.neutron [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:32:44 compute-0 nova_compute[192079]: 2025-10-02 12:32:44.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:32:44 compute-0 nova_compute[192079]: 2025-10-02 12:32:44.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:32:44 compute-0 nova_compute[192079]: 2025-10-02 12:32:44.689 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:32:45 compute-0 nova_compute[192079]: 2025-10-02 12:32:45.621 2 DEBUG nova.network.neutron [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Updating instance_info_cache with network_info: [{"id": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "address": "fa:16:3e:5b:11:fa", "network": {"id": "574af896-2fe0-426b-87eb-93e7ba659a79", "bridge": "br-int", "label": "tempest-network-smoke--1599149382", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd3cfd23d-04", "ovs_interfaceid": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:32:45 compute-0 nova_compute[192079]: 2025-10-02 12:32:45.646 2 DEBUG oslo_concurrency.lockutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Releasing lock "refresh_cache-a0a5e290-69d3-4ce0-9533-6df7cf06c204" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:32:45 compute-0 nova_compute[192079]: 2025-10-02 12:32:45.815 2 DEBUG nova.virt.libvirt.driver [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Starting finish_migration finish_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:11698
Oct 02 12:32:45 compute-0 nova_compute[192079]: 2025-10-02 12:32:45.817 2 DEBUG nova.virt.libvirt.driver [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Instance directory exists: not creating _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4719
Oct 02 12:32:45 compute-0 nova_compute[192079]: 2025-10-02 12:32:45.817 2 INFO nova.virt.libvirt.driver [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Creating image(s)
Oct 02 12:32:45 compute-0 nova_compute[192079]: 2025-10-02 12:32:45.819 2 DEBUG nova.objects.instance [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'trusted_certs' on Instance uuid a0a5e290-69d3-4ce0-9533-6df7cf06c204 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:32:45 compute-0 nova_compute[192079]: 2025-10-02 12:32:45.839 2 DEBUG oslo_concurrency.processutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:32:45 compute-0 nova_compute[192079]: 2025-10-02 12:32:45.929 2 DEBUG oslo_concurrency.processutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.091s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:32:45 compute-0 nova_compute[192079]: 2025-10-02 12:32:45.931 2 DEBUG nova.virt.disk.api [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Checking if we can resize image /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:32:45 compute-0 nova_compute[192079]: 2025-10-02 12:32:45.931 2 DEBUG oslo_concurrency.processutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.004 2 DEBUG oslo_concurrency.processutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk --force-share --output=json" returned: 0 in 0.073s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.006 2 DEBUG nova.virt.disk.api [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Cannot resize image /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.021 2 DEBUG nova.virt.libvirt.driver [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Did not create local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4859
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.021 2 DEBUG nova.virt.libvirt.driver [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Ensure instance console log exists: /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.022 2 DEBUG oslo_concurrency.lockutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.022 2 DEBUG oslo_concurrency.lockutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.023 2 DEBUG oslo_concurrency.lockutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.026 2 DEBUG nova.virt.libvirt.driver [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Start _get_guest_xml network_info=[{"id": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "address": "fa:16:3e:5b:11:fa", "network": {"id": "574af896-2fe0-426b-87eb-93e7ba659a79", "bridge": "br-int", "label": "tempest-network-smoke--1599149382", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.194", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-network-smoke--1599149382", "vif_mac": "fa:16:3e:5b:11:fa"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd3cfd23d-04", "ovs_interfaceid": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.033 2 WARNING nova.virt.libvirt.driver [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.039 2 DEBUG nova.virt.libvirt.host [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.040 2 DEBUG nova.virt.libvirt.host [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.044 2 DEBUG nova.virt.libvirt.host [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.046 2 DEBUG nova.virt.libvirt.host [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.047 2 DEBUG nova.virt.libvirt.driver [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.047 2 DEBUG nova.virt.hardware [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:25Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9949d9da-6314-4ede-8797-6f2f0a6a64fc',id=2,is_public=True,memory_mb=192,name='m1.micro',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.048 2 DEBUG nova.virt.hardware [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.048 2 DEBUG nova.virt.hardware [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.048 2 DEBUG nova.virt.hardware [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.049 2 DEBUG nova.virt.hardware [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.049 2 DEBUG nova.virt.hardware [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.049 2 DEBUG nova.virt.hardware [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.050 2 DEBUG nova.virt.hardware [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.050 2 DEBUG nova.virt.hardware [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.050 2 DEBUG nova.virt.hardware [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.051 2 DEBUG nova.virt.hardware [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.051 2 DEBUG nova.objects.instance [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'vcpu_model' on Instance uuid a0a5e290-69d3-4ce0-9533-6df7cf06c204 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.074 2 DEBUG oslo_concurrency.processutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk.config --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.129 2 DEBUG oslo_concurrency.processutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk.config --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.130 2 DEBUG oslo_concurrency.lockutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "/var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.131 2 DEBUG oslo_concurrency.lockutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "/var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.132 2 DEBUG oslo_concurrency.lockutils [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "/var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.133 2 DEBUG nova.virt.libvirt.vif [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:32:01Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestNetworkAdvancedServerOps-server-1138303133',display_name='tempest-TestNetworkAdvancedServerOps-server-1138303133',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(2),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkadvancedserverops-server-1138303133',id=141,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=2,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBHZXGbdK2BsOqbaUUYa7XicNW8CV2qKZlqbls3huCtjRaED+CX4fIrUFMW9LtPV4B8c3A6SEeNHwH0MqJ7ttz8hSi/AjegEpdsa/s/FufbgfsmCM0TXi1lTy8HcOB+sZ9g==',key_name='tempest-TestNetworkAdvancedServerOps-1099183543',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:32:11Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=192,metadata={},migration_context=MigrationContext,new_flavor=Flavor(2),node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=Flavor(1),os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='76c7dd40d83e4e3ca71abbebf57921b6',ramdisk_id='',reservation_id='r-iebv0qns',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=ServiceList,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',old_vm_state='active',owner_project_name='tempest-TestNetworkAdvancedServerOps-597114071',owner_user_name='tempest-TestNetworkAdvancedServerOps-597114071-project-member'},tags=<?>,task_state='resize_finish',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:32:43Z,user_data=None,user_id='1faa7e121a0e43ad8cb4ae5b2cfcc6a2',uuid=a0a5e290-69d3-4ce0-9533-6df7cf06c204,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "address": "fa:16:3e:5b:11:fa", "network": {"id": "574af896-2fe0-426b-87eb-93e7ba659a79", "bridge": "br-int", "label": "tempest-network-smoke--1599149382", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": 
{"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.194", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-network-smoke--1599149382", "vif_mac": "fa:16:3e:5b:11:fa"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd3cfd23d-04", "ovs_interfaceid": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.134 2 DEBUG nova.network.os_vif_util [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converting VIF {"id": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "address": "fa:16:3e:5b:11:fa", "network": {"id": "574af896-2fe0-426b-87eb-93e7ba659a79", "bridge": "br-int", "label": "tempest-network-smoke--1599149382", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.194", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-network-smoke--1599149382", "vif_mac": "fa:16:3e:5b:11:fa"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd3cfd23d-04", "ovs_interfaceid": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.135 2 DEBUG nova.network.os_vif_util [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:5b:11:fa,bridge_name='br-int',has_traffic_filtering=True,id=d3cfd23d-04b0-4f18-b20d-14f75e69b2a3,network=Network(574af896-2fe0-426b-87eb-93e7ba659a79),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd3cfd23d-04') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.137 2 DEBUG nova.virt.libvirt.driver [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:32:46 compute-0 nova_compute[192079]:   <uuid>a0a5e290-69d3-4ce0-9533-6df7cf06c204</uuid>
Oct 02 12:32:46 compute-0 nova_compute[192079]:   <name>instance-0000008d</name>
Oct 02 12:32:46 compute-0 nova_compute[192079]:   <memory>196608</memory>
Oct 02 12:32:46 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:32:46 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:32:46 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:       <nova:name>tempest-TestNetworkAdvancedServerOps-server-1138303133</nova:name>
Oct 02 12:32:46 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:32:46</nova:creationTime>
Oct 02 12:32:46 compute-0 nova_compute[192079]:       <nova:flavor name="m1.micro">
Oct 02 12:32:46 compute-0 nova_compute[192079]:         <nova:memory>192</nova:memory>
Oct 02 12:32:46 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:32:46 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:32:46 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:32:46 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:32:46 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:32:46 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:32:46 compute-0 nova_compute[192079]:         <nova:user uuid="1faa7e121a0e43ad8cb4ae5b2cfcc6a2">tempest-TestNetworkAdvancedServerOps-597114071-project-member</nova:user>
Oct 02 12:32:46 compute-0 nova_compute[192079]:         <nova:project uuid="76c7dd40d83e4e3ca71abbebf57921b6">tempest-TestNetworkAdvancedServerOps-597114071</nova:project>
Oct 02 12:32:46 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:32:46 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:32:46 compute-0 nova_compute[192079]:         <nova:port uuid="d3cfd23d-04b0-4f18-b20d-14f75e69b2a3">
Oct 02 12:32:46 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.5" ipVersion="4"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:32:46 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:32:46 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:32:46 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <system>
Oct 02 12:32:46 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:32:46 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:32:46 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:32:46 compute-0 nova_compute[192079]:       <entry name="serial">a0a5e290-69d3-4ce0-9533-6df7cf06c204</entry>
Oct 02 12:32:46 compute-0 nova_compute[192079]:       <entry name="uuid">a0a5e290-69d3-4ce0-9533-6df7cf06c204</entry>
Oct 02 12:32:46 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     </system>
Oct 02 12:32:46 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:32:46 compute-0 nova_compute[192079]:   <os>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:   </os>
Oct 02 12:32:46 compute-0 nova_compute[192079]:   <features>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:   </features>
Oct 02 12:32:46 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:32:46 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:32:46 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:32:46 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:32:46 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/disk.config"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:32:46 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:5b:11:fa"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:       <target dev="tapd3cfd23d-04"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:32:46 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204/console.log" append="off"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <video>
Oct 02 12:32:46 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     </video>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:32:46 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:32:46 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:32:46 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:32:46 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:32:46 compute-0 nova_compute[192079]: </domain>
Oct 02 12:32:46 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.140 2 DEBUG nova.virt.libvirt.vif [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:32:01Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestNetworkAdvancedServerOps-server-1138303133',display_name='tempest-TestNetworkAdvancedServerOps-server-1138303133',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(2),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkadvancedserverops-server-1138303133',id=141,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=2,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBHZXGbdK2BsOqbaUUYa7XicNW8CV2qKZlqbls3huCtjRaED+CX4fIrUFMW9LtPV4B8c3A6SEeNHwH0MqJ7ttz8hSi/AjegEpdsa/s/FufbgfsmCM0TXi1lTy8HcOB+sZ9g==',key_name='tempest-TestNetworkAdvancedServerOps-1099183543',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:32:11Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=192,metadata={},migration_context=MigrationContext,new_flavor=Flavor(2),node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=Flavor(1),os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=1,progress=0,project_id='76c7dd40d83e4e3ca71abbebf57921b6',ramdisk_id='',reservation_id='r-iebv0qns',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=ServiceList,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',old_vm_state='active',owner_project_name='tempest-TestNetworkAdvancedServerOps-597114071',owner_user_name='tempest-TestNetworkAdvancedServerOps-597114071-project-member'},tags=<?>,task_state='resize_finish',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:32:43Z,user_data=None,user_id='1faa7e121a0e43ad8cb4ae5b2cfcc6a2',uuid=a0a5e290-69d3-4ce0-9533-6df7cf06c204,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "address": "fa:16:3e:5b:11:fa", "network": {"id": "574af896-2fe0-426b-87eb-93e7ba659a79", "bridge": "br-int", "label": "tempest-network-smoke--1599149382", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": 
{"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.194", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-network-smoke--1599149382", "vif_mac": "fa:16:3e:5b:11:fa"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd3cfd23d-04", "ovs_interfaceid": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.140 2 DEBUG nova.network.os_vif_util [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converting VIF {"id": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "address": "fa:16:3e:5b:11:fa", "network": {"id": "574af896-2fe0-426b-87eb-93e7ba659a79", "bridge": "br-int", "label": "tempest-network-smoke--1599149382", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.194", "type": "floating", "version": 4, "meta": {}}], "label": "tempest-network-smoke--1599149382", "vif_mac": "fa:16:3e:5b:11:fa"}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd3cfd23d-04", "ovs_interfaceid": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.141 2 DEBUG nova.network.os_vif_util [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:5b:11:fa,bridge_name='br-int',has_traffic_filtering=True,id=d3cfd23d-04b0-4f18-b20d-14f75e69b2a3,network=Network(574af896-2fe0-426b-87eb-93e7ba659a79),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd3cfd23d-04') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.142 2 DEBUG os_vif [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:5b:11:fa,bridge_name='br-int',has_traffic_filtering=True,id=d3cfd23d-04b0-4f18-b20d-14f75e69b2a3,network=Network(574af896-2fe0-426b-87eb-93e7ba659a79),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd3cfd23d-04') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.142 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.143 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.144 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.146 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.146 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapd3cfd23d-04, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.146 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapd3cfd23d-04, col_values=(('external_ids', {'iface-id': 'd3cfd23d-04b0-4f18-b20d-14f75e69b2a3', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:5b:11:fa', 'vm-uuid': 'a0a5e290-69d3-4ce0-9533-6df7cf06c204'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.202 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:46 compute-0 NetworkManager[51160]: <info>  [1759408366.2039] manager: (tapd3cfd23d-04): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/268)
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.205 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.210 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.211 2 INFO os_vif [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:5b:11:fa,bridge_name='br-int',has_traffic_filtering=True,id=d3cfd23d-04b0-4f18-b20d-14f75e69b2a3,network=Network(574af896-2fe0-426b-87eb-93e7ba659a79),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd3cfd23d-04')
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.269 2 DEBUG nova.virt.libvirt.driver [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.270 2 DEBUG nova.virt.libvirt.driver [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.270 2 DEBUG nova.virt.libvirt.driver [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] No VIF found with MAC fa:16:3e:5b:11:fa, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.271 2 INFO nova.virt.libvirt.driver [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Using config drive
Oct 02 12:32:46 compute-0 kernel: tapd3cfd23d-04: entered promiscuous mode
Oct 02 12:32:46 compute-0 NetworkManager[51160]: <info>  [1759408366.3293] manager: (tapd3cfd23d-04): new Tun device (/org/freedesktop/NetworkManager/Devices/269)
Oct 02 12:32:46 compute-0 ovn_controller[94336]: 2025-10-02T12:32:46Z|00544|binding|INFO|Claiming lport d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 for this chassis.
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.330 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:46 compute-0 ovn_controller[94336]: 2025-10-02T12:32:46Z|00545|binding|INFO|d3cfd23d-04b0-4f18-b20d-14f75e69b2a3: Claiming fa:16:3e:5b:11:fa 10.100.0.5
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.332 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:46.341 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:5b:11:fa 10.100.0.5'], port_security=['fa:16:3e:5b:11:fa 10.100.0.5'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.5/28', 'neutron:device_id': 'a0a5e290-69d3-4ce0-9533-6df7cf06c204', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-574af896-2fe0-426b-87eb-93e7ba659a79', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '76c7dd40d83e4e3ca71abbebf57921b6', 'neutron:revision_number': '5', 'neutron:security_group_ids': '24011c1c-187e-42ed-b64a-06bc43fab21b', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:port_fip': '192.168.122.194'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=1b106f5d-d8f5-4be2-bc91-66bf2d8dc8a3, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=d3cfd23d-04b0-4f18-b20d-14f75e69b2a3) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:46.343 103294 INFO neutron.agent.ovn.metadata.agent [-] Port d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 in datapath 574af896-2fe0-426b-87eb-93e7ba659a79 bound to our chassis
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:46.344 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 574af896-2fe0-426b-87eb-93e7ba659a79
Oct 02 12:32:46 compute-0 ovn_controller[94336]: 2025-10-02T12:32:46Z|00546|binding|INFO|Setting lport d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 ovn-installed in OVS
Oct 02 12:32:46 compute-0 ovn_controller[94336]: 2025-10-02T12:32:46Z|00547|binding|INFO|Setting lport d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 up in Southbound
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.347 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:46 compute-0 systemd-udevd[244690]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:46.358 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4e8bdc92-587d-44b9-be72-f855707f0489]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:46.359 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap574af896-21 in ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:46.360 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap574af896-20 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:46.360 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[937a7143-c54e-41d0-90d9-a880a0c6f4ed]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:46.361 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3fd5b5aa-8e03-49ea-b292-9515b338ef77]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:46 compute-0 NetworkManager[51160]: <info>  [1759408366.3701] device (tapd3cfd23d-04): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:32:46 compute-0 NetworkManager[51160]: <info>  [1759408366.3708] device (tapd3cfd23d-04): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:32:46 compute-0 systemd-machined[152150]: New machine qemu-69-instance-0000008d.
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:46.373 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[733aa4e0-ba4c-42c7-aa0e-105277fe77d6]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:46 compute-0 systemd[1]: Started Virtual Machine qemu-69-instance-0000008d.
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:46.385 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c33af289-5ef8-40fa-971f-fd023ebf3628]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:46.412 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[8dd72108-0066-44a7-841a-f0aa55263ca2]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:46.417 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c953bae6-58c6-4dc7-8fa6-0a5606b183c7]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:46 compute-0 NetworkManager[51160]: <info>  [1759408366.4186] manager: (tap574af896-20): new Veth device (/org/freedesktop/NetworkManager/Devices/270)
Oct 02 12:32:46 compute-0 systemd-udevd[244695]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:46.449 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[4d893660-da30-4fa2-adfb-ce1f71de8caf]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:46.453 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[571adbc6-da2e-4b54-abcc-4477d9848fad]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:46 compute-0 NetworkManager[51160]: <info>  [1759408366.4751] device (tap574af896-20): carrier: link connected
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:46.479 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[e949e25d-d8cf-416f-bfb8-ee7e4bbdfda9]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:46.494 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0b5163e9-ee68-4c03-a35a-c600dd83c89d]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap574af896-21'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:aa:4f:2a'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 174], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 636410, 'reachable_time': 25032, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 244724, 'error': None, 'target': 'ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:46.506 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b81d0054-4661-49c0-ab5a-06923578ff7d]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:feaa:4f2a'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 636410, 'tstamp': 636410}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 244725, 'error': None, 'target': 'ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:46.521 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1abe6ca9-e367-4fdd-91b0-35af20328596]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap574af896-21'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:aa:4f:2a'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 174], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 636410, 'reachable_time': 25032, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 244726, 'error': None, 'target': 'ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:46.544 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a0f739c5-d415-4243-820c-9160b7556c26]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:46.595 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[fb807e83-6ac4-4f43-a778-9b19a4d5c9c5]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:46.598 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap574af896-20, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:46.598 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:46.598 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap574af896-20, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:32:46 compute-0 kernel: tap574af896-20: entered promiscuous mode
Oct 02 12:32:46 compute-0 NetworkManager[51160]: <info>  [1759408366.6039] manager: (tap574af896-20): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/271)
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.601 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.602 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:46.608 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap574af896-20, col_values=(('external_ids', {'iface-id': '312f0f58-e800-4204-8cd3-57c9c3245ec8'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:32:46 compute-0 ovn_controller[94336]: 2025-10-02T12:32:46Z|00548|binding|INFO|Releasing lport 312f0f58-e800-4204-8cd3-57c9c3245ec8 from this chassis (sb_readonly=0)
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.610 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.611 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:46.612 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/574af896-2fe0-426b-87eb-93e7ba659a79.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/574af896-2fe0-426b-87eb-93e7ba659a79.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:46.613 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[417c2e1d-0bcf-4753-8971-45d807e9e7e5]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:46.614 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-574af896-2fe0-426b-87eb-93e7ba659a79
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/574af896-2fe0-426b-87eb-93e7ba659a79.pid.haproxy
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 574af896-2fe0-426b-87eb-93e7ba659a79
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:32:46 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:32:46.614 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79', 'env', 'PROCESS_TAG=haproxy-574af896-2fe0-426b-87eb-93e7ba659a79', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/574af896-2fe0-426b-87eb-93e7ba659a79.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:32:46 compute-0 nova_compute[192079]: 2025-10-02 12:32:46.623 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:46 compute-0 podman[244762]: 2025-10-02 12:32:46.948973445 +0000 UTC m=+0.045499519 container create 02b554e8c6f67a43ab6044e93c5af21e4aa2bcd3ae46a77574d8eee0732ae53b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2)
Oct 02 12:32:46 compute-0 systemd[1]: Started libpod-conmon-02b554e8c6f67a43ab6044e93c5af21e4aa2bcd3ae46a77574d8eee0732ae53b.scope.
Oct 02 12:32:47 compute-0 podman[244762]: 2025-10-02 12:32:46.923335857 +0000 UTC m=+0.019861951 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:32:47 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:32:47 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/88efe688ea47ea9e4f774a20e1d0cac5b14587b88e849fede8e67e5f148e7bb1/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:32:47 compute-0 podman[244762]: 2025-10-02 12:32:47.042846391 +0000 UTC m=+0.139372485 container init 02b554e8c6f67a43ab6044e93c5af21e4aa2bcd3ae46a77574d8eee0732ae53b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79, org.label-schema.build-date=20251001, tcib_managed=true, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0)
Oct 02 12:32:47 compute-0 podman[244762]: 2025-10-02 12:32:47.050001135 +0000 UTC m=+0.146527209 container start 02b554e8c6f67a43ab6044e93c5af21e4aa2bcd3ae46a77574d8eee0732ae53b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:32:47 compute-0 neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79[244777]: [NOTICE]   (244781) : New worker (244783) forked
Oct 02 12:32:47 compute-0 neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79[244777]: [NOTICE]   (244781) : Loading success.
Oct 02 12:32:47 compute-0 nova_compute[192079]: 2025-10-02 12:32:47.271 2 DEBUG nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Removed pending event for a0a5e290-69d3-4ce0-9533-6df7cf06c204 due to event _event_emit_delayed /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:438
Oct 02 12:32:47 compute-0 nova_compute[192079]: 2025-10-02 12:32:47.273 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408367.2714055, a0a5e290-69d3-4ce0-9533-6df7cf06c204 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:32:47 compute-0 nova_compute[192079]: 2025-10-02 12:32:47.274 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] VM Resumed (Lifecycle Event)
Oct 02 12:32:47 compute-0 nova_compute[192079]: 2025-10-02 12:32:47.278 2 DEBUG nova.compute.manager [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:32:47 compute-0 nova_compute[192079]: 2025-10-02 12:32:47.283 2 INFO nova.virt.libvirt.driver [-] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Instance running successfully.
Oct 02 12:32:47 compute-0 virtqemud[191807]: argument unsupported: QEMU guest agent is not configured
Oct 02 12:32:47 compute-0 nova_compute[192079]: 2025-10-02 12:32:47.286 2 DEBUG nova.virt.libvirt.guest [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Failed to set time: agent not configured sync_guest_time /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:200
Oct 02 12:32:47 compute-0 nova_compute[192079]: 2025-10-02 12:32:47.286 2 DEBUG nova.virt.libvirt.driver [None req-132a7e98-f4e6-4c2a-9951-19c38c576754 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] finish_migration finished successfully. finish_migration /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:11793
Oct 02 12:32:47 compute-0 nova_compute[192079]: 2025-10-02 12:32:47.307 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:32:47 compute-0 nova_compute[192079]: 2025-10-02 12:32:47.314 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: active, current task_state: resize_finish, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:32:47 compute-0 nova_compute[192079]: 2025-10-02 12:32:47.359 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:47 compute-0 nova_compute[192079]: 2025-10-02 12:32:47.362 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] During sync_power_state the instance has a pending task (resize_finish). Skip.
Oct 02 12:32:47 compute-0 nova_compute[192079]: 2025-10-02 12:32:47.362 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408367.275338, a0a5e290-69d3-4ce0-9533-6df7cf06c204 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:32:47 compute-0 nova_compute[192079]: 2025-10-02 12:32:47.363 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] VM Started (Lifecycle Event)
Oct 02 12:32:47 compute-0 nova_compute[192079]: 2025-10-02 12:32:47.413 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:32:47 compute-0 nova_compute[192079]: 2025-10-02 12:32:47.416 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Synchronizing instance power state after lifecycle event "Started"; current vm_state: active, current task_state: resize_finish, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:32:48 compute-0 nova_compute[192079]: 2025-10-02 12:32:48.052 2 DEBUG oslo_concurrency.lockutils [None req-b721095f-e23a-45d9-a7a4-87499a9876f1 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:32:48 compute-0 nova_compute[192079]: 2025-10-02 12:32:48.053 2 DEBUG oslo_concurrency.lockutils [None req-b721095f-e23a-45d9-a7a4-87499a9876f1 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:32:48 compute-0 nova_compute[192079]: 2025-10-02 12:32:48.053 2 DEBUG nova.compute.manager [None req-b721095f-e23a-45d9-a7a4-87499a9876f1 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Going to confirm migration 20 do_confirm_resize /usr/lib/python3.9/site-packages/nova/compute/manager.py:4679
Oct 02 12:32:48 compute-0 nova_compute[192079]: 2025-10-02 12:32:48.090 2 DEBUG nova.objects.instance [None req-b721095f-e23a-45d9-a7a4-87499a9876f1 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'info_cache' on Instance uuid a0a5e290-69d3-4ce0-9533-6df7cf06c204 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:32:48 compute-0 podman[244793]: 2025-10-02 12:32:48.163922221 +0000 UTC m=+0.072802242 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=ceilometer_agent_compute, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, config_id=edpm, io.buildah.version=1.41.3, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', 
'/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']})
Oct 02 12:32:48 compute-0 nova_compute[192079]: 2025-10-02 12:32:48.372 2 DEBUG oslo_concurrency.lockutils [None req-b721095f-e23a-45d9-a7a4-87499a9876f1 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "refresh_cache-a0a5e290-69d3-4ce0-9533-6df7cf06c204" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:32:48 compute-0 nova_compute[192079]: 2025-10-02 12:32:48.374 2 DEBUG oslo_concurrency.lockutils [None req-b721095f-e23a-45d9-a7a4-87499a9876f1 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquired lock "refresh_cache-a0a5e290-69d3-4ce0-9533-6df7cf06c204" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:32:48 compute-0 nova_compute[192079]: 2025-10-02 12:32:48.374 2 DEBUG nova.network.neutron [None req-b721095f-e23a-45d9-a7a4-87499a9876f1 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:32:48 compute-0 nova_compute[192079]: 2025-10-02 12:32:48.546 2 DEBUG nova.compute.manager [req-4f8b5628-ed7c-4670-921b-0307ae2c0302 req-dd58d7c4-411c-4ab4-90bb-b3766ce81b4b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Received event network-vif-plugged-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:32:48 compute-0 nova_compute[192079]: 2025-10-02 12:32:48.547 2 DEBUG oslo_concurrency.lockutils [req-4f8b5628-ed7c-4670-921b-0307ae2c0302 req-dd58d7c4-411c-4ab4-90bb-b3766ce81b4b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:32:48 compute-0 nova_compute[192079]: 2025-10-02 12:32:48.547 2 DEBUG oslo_concurrency.lockutils [req-4f8b5628-ed7c-4670-921b-0307ae2c0302 req-dd58d7c4-411c-4ab4-90bb-b3766ce81b4b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:32:48 compute-0 nova_compute[192079]: 2025-10-02 12:32:48.548 2 DEBUG oslo_concurrency.lockutils [req-4f8b5628-ed7c-4670-921b-0307ae2c0302 req-dd58d7c4-411c-4ab4-90bb-b3766ce81b4b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:32:48 compute-0 nova_compute[192079]: 2025-10-02 12:32:48.548 2 DEBUG nova.compute.manager [req-4f8b5628-ed7c-4670-921b-0307ae2c0302 req-dd58d7c4-411c-4ab4-90bb-b3766ce81b4b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] No waiting events found dispatching network-vif-plugged-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:32:48 compute-0 nova_compute[192079]: 2025-10-02 12:32:48.549 2 WARNING nova.compute.manager [req-4f8b5628-ed7c-4670-921b-0307ae2c0302 req-dd58d7c4-411c-4ab4-90bb-b3766ce81b4b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Received unexpected event network-vif-plugged-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 for instance with vm_state resized and task_state None.
Oct 02 12:32:48 compute-0 nova_compute[192079]: 2025-10-02 12:32:48.549 2 DEBUG nova.compute.manager [req-4f8b5628-ed7c-4670-921b-0307ae2c0302 req-dd58d7c4-411c-4ab4-90bb-b3766ce81b4b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Received event network-vif-plugged-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:32:48 compute-0 nova_compute[192079]: 2025-10-02 12:32:48.550 2 DEBUG oslo_concurrency.lockutils [req-4f8b5628-ed7c-4670-921b-0307ae2c0302 req-dd58d7c4-411c-4ab4-90bb-b3766ce81b4b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:32:48 compute-0 nova_compute[192079]: 2025-10-02 12:32:48.550 2 DEBUG oslo_concurrency.lockutils [req-4f8b5628-ed7c-4670-921b-0307ae2c0302 req-dd58d7c4-411c-4ab4-90bb-b3766ce81b4b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:32:48 compute-0 nova_compute[192079]: 2025-10-02 12:32:48.550 2 DEBUG oslo_concurrency.lockutils [req-4f8b5628-ed7c-4670-921b-0307ae2c0302 req-dd58d7c4-411c-4ab4-90bb-b3766ce81b4b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:32:48 compute-0 nova_compute[192079]: 2025-10-02 12:32:48.551 2 DEBUG nova.compute.manager [req-4f8b5628-ed7c-4670-921b-0307ae2c0302 req-dd58d7c4-411c-4ab4-90bb-b3766ce81b4b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] No waiting events found dispatching network-vif-plugged-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:32:48 compute-0 nova_compute[192079]: 2025-10-02 12:32:48.551 2 WARNING nova.compute.manager [req-4f8b5628-ed7c-4670-921b-0307ae2c0302 req-dd58d7c4-411c-4ab4-90bb-b3766ce81b4b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Received unexpected event network-vif-plugged-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 for instance with vm_state resized and task_state None.
Oct 02 12:32:48 compute-0 nova_compute[192079]: 2025-10-02 12:32:48.683 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:32:49 compute-0 nova_compute[192079]: 2025-10-02 12:32:49.832 2 DEBUG nova.network.neutron [None req-b721095f-e23a-45d9-a7a4-87499a9876f1 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Updating instance_info_cache with network_info: [{"id": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "address": "fa:16:3e:5b:11:fa", "network": {"id": "574af896-2fe0-426b-87eb-93e7ba659a79", "bridge": "br-int", "label": "tempest-network-smoke--1599149382", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd3cfd23d-04", "ovs_interfaceid": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:32:49 compute-0 nova_compute[192079]: 2025-10-02 12:32:49.850 2 DEBUG oslo_concurrency.lockutils [None req-b721095f-e23a-45d9-a7a4-87499a9876f1 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Releasing lock "refresh_cache-a0a5e290-69d3-4ce0-9533-6df7cf06c204" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:32:49 compute-0 nova_compute[192079]: 2025-10-02 12:32:49.850 2 DEBUG nova.objects.instance [None req-b721095f-e23a-45d9-a7a4-87499a9876f1 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'migration_context' on Instance uuid a0a5e290-69d3-4ce0-9533-6df7cf06c204 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:32:49 compute-0 nova_compute[192079]: 2025-10-02 12:32:49.870 2 DEBUG oslo_concurrency.lockutils [None req-b721095f-e23a-45d9-a7a4-87499a9876f1 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:32:49 compute-0 nova_compute[192079]: 2025-10-02 12:32:49.871 2 DEBUG oslo_concurrency.lockutils [None req-b721095f-e23a-45d9-a7a4-87499a9876f1 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:32:49 compute-0 nova_compute[192079]: 2025-10-02 12:32:49.966 2 DEBUG nova.compute.provider_tree [None req-b721095f-e23a-45d9-a7a4-87499a9876f1 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:32:49 compute-0 nova_compute[192079]: 2025-10-02 12:32:49.981 2 DEBUG nova.scheduler.client.report [None req-b721095f-e23a-45d9-a7a4-87499a9876f1 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:32:50 compute-0 nova_compute[192079]: 2025-10-02 12:32:50.018 2 DEBUG oslo_concurrency.lockutils [None req-b721095f-e23a-45d9-a7a4-87499a9876f1 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 0.147s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:32:50 compute-0 nova_compute[192079]: 2025-10-02 12:32:50.164 2 INFO nova.scheduler.client.report [None req-b721095f-e23a-45d9-a7a4-87499a9876f1 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Deleted allocation for migration 837d1691-f875-4534-b0af-f97e8fe7f055
Oct 02 12:32:50 compute-0 nova_compute[192079]: 2025-10-02 12:32:50.261 2 DEBUG oslo_concurrency.lockutils [None req-b721095f-e23a-45d9-a7a4-87499a9876f1 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204" "released" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: held 2.209s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:32:51 compute-0 nova_compute[192079]: 2025-10-02 12:32:51.204 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:52 compute-0 nova_compute[192079]: 2025-10-02 12:32:52.361 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:54 compute-0 podman[244815]: 2025-10-02 12:32:54.156053361 +0000 UTC m=+0.065590426 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, com.redhat.component=ubi9-minimal-container, architecture=x86_64, name=ubi9-minimal, distribution-scope=public, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.openshift.tags=minimal rhel9, version=9.6, url=https://catalog.redhat.com/en/search?searchType=containers, build-date=2025-08-20T13:12:41, config_id=edpm, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.expose-services=, vcs-type=git, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, vendor=Red Hat, Inc., container_name=openstack_network_exporter, release=1755695350, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.buildah.version=1.33.7, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': 
['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., managed_by=edpm_ansible, maintainer=Red Hat, Inc.)
Oct 02 12:32:54 compute-0 podman[244816]: 2025-10-02 12:32:54.184102605 +0000 UTC m=+0.091467092 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.build-date=20251001, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_id=multipathd, container_name=multipathd)
Oct 02 12:32:56 compute-0 nova_compute[192079]: 2025-10-02 12:32:56.206 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:32:57 compute-0 nova_compute[192079]: 2025-10-02 12:32:57.364 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:00 compute-0 podman[244861]: 2025-10-02 12:33:00.143930763 +0000 UTC m=+0.055299865 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:33:00 compute-0 podman[244862]: 2025-10-02 12:33:00.175174735 +0000 UTC m=+0.073581115 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, config_id=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, io.buildah.version=1.41.3, container_name=iscsid, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0)
Oct 02 12:33:00 compute-0 ovn_controller[94336]: 2025-10-02T12:33:00Z|00057|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:5b:11:fa 10.100.0.5
Oct 02 12:33:01 compute-0 nova_compute[192079]: 2025-10-02 12:33:01.208 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:01 compute-0 nova_compute[192079]: 2025-10-02 12:33:01.332 2 DEBUG oslo_concurrency.lockutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Acquiring lock "2d10392f-7700-4f17-8be6-9fe493836a58" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:01 compute-0 nova_compute[192079]: 2025-10-02 12:33:01.333 2 DEBUG oslo_concurrency.lockutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Lock "2d10392f-7700-4f17-8be6-9fe493836a58" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:01 compute-0 nova_compute[192079]: 2025-10-02 12:33:01.354 2 DEBUG nova.compute.manager [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:33:01 compute-0 nova_compute[192079]: 2025-10-02 12:33:01.500 2 DEBUG oslo_concurrency.lockutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:01 compute-0 nova_compute[192079]: 2025-10-02 12:33:01.501 2 DEBUG oslo_concurrency.lockutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:01 compute-0 nova_compute[192079]: 2025-10-02 12:33:01.507 2 DEBUG nova.virt.hardware [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:33:01 compute-0 nova_compute[192079]: 2025-10-02 12:33:01.507 2 INFO nova.compute.claims [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:33:01 compute-0 nova_compute[192079]: 2025-10-02 12:33:01.679 2 DEBUG nova.compute.provider_tree [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:33:01 compute-0 nova_compute[192079]: 2025-10-02 12:33:01.692 2 DEBUG nova.scheduler.client.report [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:33:01 compute-0 nova_compute[192079]: 2025-10-02 12:33:01.713 2 DEBUG oslo_concurrency.lockutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.212s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:01 compute-0 nova_compute[192079]: 2025-10-02 12:33:01.714 2 DEBUG nova.compute.manager [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:33:01 compute-0 nova_compute[192079]: 2025-10-02 12:33:01.769 2 DEBUG nova.compute.manager [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:33:01 compute-0 nova_compute[192079]: 2025-10-02 12:33:01.769 2 DEBUG nova.network.neutron [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:33:01 compute-0 nova_compute[192079]: 2025-10-02 12:33:01.793 2 INFO nova.virt.libvirt.driver [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:33:01 compute-0 nova_compute[192079]: 2025-10-02 12:33:01.817 2 DEBUG nova.compute.manager [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:33:01 compute-0 nova_compute[192079]: 2025-10-02 12:33:01.939 2 DEBUG nova.compute.manager [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:33:01 compute-0 nova_compute[192079]: 2025-10-02 12:33:01.941 2 DEBUG nova.virt.libvirt.driver [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:33:01 compute-0 nova_compute[192079]: 2025-10-02 12:33:01.942 2 INFO nova.virt.libvirt.driver [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Creating image(s)
Oct 02 12:33:01 compute-0 nova_compute[192079]: 2025-10-02 12:33:01.943 2 DEBUG oslo_concurrency.lockutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Acquiring lock "/var/lib/nova/instances/2d10392f-7700-4f17-8be6-9fe493836a58/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:01 compute-0 nova_compute[192079]: 2025-10-02 12:33:01.943 2 DEBUG oslo_concurrency.lockutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Lock "/var/lib/nova/instances/2d10392f-7700-4f17-8be6-9fe493836a58/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:01 compute-0 nova_compute[192079]: 2025-10-02 12:33:01.945 2 DEBUG oslo_concurrency.lockutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Lock "/var/lib/nova/instances/2d10392f-7700-4f17-8be6-9fe493836a58/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:01 compute-0 nova_compute[192079]: 2025-10-02 12:33:01.971 2 DEBUG nova.policy [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '27daa263abb54d4d8e3ae34cd1c5ccf5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a4a7099974504a798e1607c8e6a1f570', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:33:01 compute-0 nova_compute[192079]: 2025-10-02 12:33:01.975 2 DEBUG oslo_concurrency.processutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:33:02 compute-0 nova_compute[192079]: 2025-10-02 12:33:02.037 2 DEBUG oslo_concurrency.processutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.062s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:33:02 compute-0 nova_compute[192079]: 2025-10-02 12:33:02.038 2 DEBUG oslo_concurrency.lockutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:02 compute-0 nova_compute[192079]: 2025-10-02 12:33:02.039 2 DEBUG oslo_concurrency.lockutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:02 compute-0 nova_compute[192079]: 2025-10-02 12:33:02.054 2 DEBUG oslo_concurrency.processutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:33:02 compute-0 nova_compute[192079]: 2025-10-02 12:33:02.118 2 DEBUG oslo_concurrency.processutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.063s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:33:02 compute-0 nova_compute[192079]: 2025-10-02 12:33:02.119 2 DEBUG oslo_concurrency.processutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/2d10392f-7700-4f17-8be6-9fe493836a58/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:33:02 compute-0 nova_compute[192079]: 2025-10-02 12:33:02.158 2 DEBUG oslo_concurrency.processutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/2d10392f-7700-4f17-8be6-9fe493836a58/disk 1073741824" returned: 0 in 0.039s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:33:02 compute-0 nova_compute[192079]: 2025-10-02 12:33:02.159 2 DEBUG oslo_concurrency.lockutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.120s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:02 compute-0 nova_compute[192079]: 2025-10-02 12:33:02.159 2 DEBUG oslo_concurrency.processutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:33:02 compute-0 nova_compute[192079]: 2025-10-02 12:33:02.215 2 DEBUG oslo_concurrency.processutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:33:02 compute-0 nova_compute[192079]: 2025-10-02 12:33:02.217 2 DEBUG nova.virt.disk.api [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Checking if we can resize image /var/lib/nova/instances/2d10392f-7700-4f17-8be6-9fe493836a58/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:33:02 compute-0 nova_compute[192079]: 2025-10-02 12:33:02.217 2 DEBUG oslo_concurrency.processutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2d10392f-7700-4f17-8be6-9fe493836a58/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:33:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:02.234 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:02.234 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:02.235 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:02 compute-0 nova_compute[192079]: 2025-10-02 12:33:02.278 2 DEBUG oslo_concurrency.processutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2d10392f-7700-4f17-8be6-9fe493836a58/disk --force-share --output=json" returned: 0 in 0.061s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:33:02 compute-0 nova_compute[192079]: 2025-10-02 12:33:02.280 2 DEBUG nova.virt.disk.api [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Cannot resize image /var/lib/nova/instances/2d10392f-7700-4f17-8be6-9fe493836a58/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:33:02 compute-0 nova_compute[192079]: 2025-10-02 12:33:02.280 2 DEBUG nova.objects.instance [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Lazy-loading 'migration_context' on Instance uuid 2d10392f-7700-4f17-8be6-9fe493836a58 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:33:02 compute-0 nova_compute[192079]: 2025-10-02 12:33:02.301 2 DEBUG nova.virt.libvirt.driver [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:33:02 compute-0 nova_compute[192079]: 2025-10-02 12:33:02.302 2 DEBUG nova.virt.libvirt.driver [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Ensure instance console log exists: /var/lib/nova/instances/2d10392f-7700-4f17-8be6-9fe493836a58/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:33:02 compute-0 nova_compute[192079]: 2025-10-02 12:33:02.302 2 DEBUG oslo_concurrency.lockutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:02 compute-0 nova_compute[192079]: 2025-10-02 12:33:02.303 2 DEBUG oslo_concurrency.lockutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:02 compute-0 nova_compute[192079]: 2025-10-02 12:33:02.303 2 DEBUG oslo_concurrency.lockutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:02 compute-0 nova_compute[192079]: 2025-10-02 12:33:02.366 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:02 compute-0 nova_compute[192079]: 2025-10-02 12:33:02.994 2 DEBUG nova.network.neutron [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Successfully created port: d4c65816-736d-45f1-a48d-ee78e03b1bea _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:33:04 compute-0 nova_compute[192079]: 2025-10-02 12:33:04.169 2 DEBUG nova.network.neutron [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Successfully updated port: d4c65816-736d-45f1-a48d-ee78e03b1bea _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:33:04 compute-0 nova_compute[192079]: 2025-10-02 12:33:04.182 2 DEBUG oslo_concurrency.lockutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Acquiring lock "refresh_cache-2d10392f-7700-4f17-8be6-9fe493836a58" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:33:04 compute-0 nova_compute[192079]: 2025-10-02 12:33:04.183 2 DEBUG oslo_concurrency.lockutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Acquired lock "refresh_cache-2d10392f-7700-4f17-8be6-9fe493836a58" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:33:04 compute-0 nova_compute[192079]: 2025-10-02 12:33:04.183 2 DEBUG nova.network.neutron [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:33:04 compute-0 nova_compute[192079]: 2025-10-02 12:33:04.293 2 DEBUG nova.compute.manager [req-b914a6b5-df8a-44d9-a8a8-e4210c592345 req-376f07f0-c970-4856-bb9b-b09371a222e4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Received event network-changed-d4c65816-736d-45f1-a48d-ee78e03b1bea external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:33:04 compute-0 nova_compute[192079]: 2025-10-02 12:33:04.294 2 DEBUG nova.compute.manager [req-b914a6b5-df8a-44d9-a8a8-e4210c592345 req-376f07f0-c970-4856-bb9b-b09371a222e4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Refreshing instance network info cache due to event network-changed-d4c65816-736d-45f1-a48d-ee78e03b1bea. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:33:04 compute-0 nova_compute[192079]: 2025-10-02 12:33:04.295 2 DEBUG oslo_concurrency.lockutils [req-b914a6b5-df8a-44d9-a8a8-e4210c592345 req-376f07f0-c970-4856-bb9b-b09371a222e4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-2d10392f-7700-4f17-8be6-9fe493836a58" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:33:04 compute-0 nova_compute[192079]: 2025-10-02 12:33:04.340 2 DEBUG nova.network.neutron [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.407 2 DEBUG nova.network.neutron [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Updating instance_info_cache with network_info: [{"id": "d4c65816-736d-45f1-a48d-ee78e03b1bea", "address": "fa:16:3e:d4:32:59", "network": {"id": "1acf42c5-084c-4cc4-bdc5-910eec0249e3", "bridge": "br-int", "label": "tempest-ServersTestJSON-5464492-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "a4a7099974504a798e1607c8e6a1f570", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd4c65816-73", "ovs_interfaceid": "d4c65816-736d-45f1-a48d-ee78e03b1bea", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.425 2 INFO nova.compute.manager [None req-4e41ee9c-c6a7-445a-9728-cd9e6b9c0a1f 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Get console output
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.428 2 DEBUG oslo_concurrency.lockutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Releasing lock "refresh_cache-2d10392f-7700-4f17-8be6-9fe493836a58" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.429 2 DEBUG nova.compute.manager [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Instance network_info: |[{"id": "d4c65816-736d-45f1-a48d-ee78e03b1bea", "address": "fa:16:3e:d4:32:59", "network": {"id": "1acf42c5-084c-4cc4-bdc5-910eec0249e3", "bridge": "br-int", "label": "tempest-ServersTestJSON-5464492-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "a4a7099974504a798e1607c8e6a1f570", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd4c65816-73", "ovs_interfaceid": "d4c65816-736d-45f1-a48d-ee78e03b1bea", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.430 2 DEBUG oslo_concurrency.lockutils [req-b914a6b5-df8a-44d9-a8a8-e4210c592345 req-376f07f0-c970-4856-bb9b-b09371a222e4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-2d10392f-7700-4f17-8be6-9fe493836a58" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.431 2 DEBUG nova.network.neutron [req-b914a6b5-df8a-44d9-a8a8-e4210c592345 req-376f07f0-c970-4856-bb9b-b09371a222e4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Refreshing network info cache for port d4c65816-736d-45f1-a48d-ee78e03b1bea _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.434 2 DEBUG nova.virt.libvirt.driver [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Start _get_guest_xml network_info=[{"id": "d4c65816-736d-45f1-a48d-ee78e03b1bea", "address": "fa:16:3e:d4:32:59", "network": {"id": "1acf42c5-084c-4cc4-bdc5-910eec0249e3", "bridge": "br-int", "label": "tempest-ServersTestJSON-5464492-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "a4a7099974504a798e1607c8e6a1f570", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd4c65816-73", "ovs_interfaceid": "d4c65816-736d-45f1-a48d-ee78e03b1bea", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.439 55 INFO nova.privsep.libvirt [-] Ignored error while reading from instance console pty: can't concat NoneType to bytes
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.442 2 WARNING nova.virt.libvirt.driver [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.449 2 DEBUG nova.virt.libvirt.host [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.449 2 DEBUG nova.virt.libvirt.host [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.453 2 DEBUG nova.virt.libvirt.host [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.454 2 DEBUG nova.virt.libvirt.host [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.455 2 DEBUG nova.virt.libvirt.driver [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.455 2 DEBUG nova.virt.hardware [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.456 2 DEBUG nova.virt.hardware [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.456 2 DEBUG nova.virt.hardware [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.456 2 DEBUG nova.virt.hardware [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.456 2 DEBUG nova.virt.hardware [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.457 2 DEBUG nova.virt.hardware [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.457 2 DEBUG nova.virt.hardware [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.457 2 DEBUG nova.virt.hardware [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.457 2 DEBUG nova.virt.hardware [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.458 2 DEBUG nova.virt.hardware [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.458 2 DEBUG nova.virt.hardware [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.462 2 DEBUG nova.virt.libvirt.vif [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:33:00Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ServersTestJSON-server-1562978230',display_name='tempest-ServersTestJSON-server-1562978230',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverstestjson-server-1562978230',id=147,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='a4a7099974504a798e1607c8e6a1f570',ramdisk_id='',reservation_id='r-97apu7d8',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServersTestJSON-1163535506',owner_user_name='tempest-ServersTestJSON-1163535506-project-member'},tags=T
agList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:33:01Z,user_data=None,user_id='27daa263abb54d4d8e3ae34cd1c5ccf5',uuid=2d10392f-7700-4f17-8be6-9fe493836a58,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "d4c65816-736d-45f1-a48d-ee78e03b1bea", "address": "fa:16:3e:d4:32:59", "network": {"id": "1acf42c5-084c-4cc4-bdc5-910eec0249e3", "bridge": "br-int", "label": "tempest-ServersTestJSON-5464492-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "a4a7099974504a798e1607c8e6a1f570", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd4c65816-73", "ovs_interfaceid": "d4c65816-736d-45f1-a48d-ee78e03b1bea", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.462 2 DEBUG nova.network.os_vif_util [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Converting VIF {"id": "d4c65816-736d-45f1-a48d-ee78e03b1bea", "address": "fa:16:3e:d4:32:59", "network": {"id": "1acf42c5-084c-4cc4-bdc5-910eec0249e3", "bridge": "br-int", "label": "tempest-ServersTestJSON-5464492-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "a4a7099974504a798e1607c8e6a1f570", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd4c65816-73", "ovs_interfaceid": "d4c65816-736d-45f1-a48d-ee78e03b1bea", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.463 2 DEBUG nova.network.os_vif_util [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:d4:32:59,bridge_name='br-int',has_traffic_filtering=True,id=d4c65816-736d-45f1-a48d-ee78e03b1bea,network=Network(1acf42c5-084c-4cc4-bdc5-910eec0249e3),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd4c65816-73') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.464 2 DEBUG nova.objects.instance [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Lazy-loading 'pci_devices' on Instance uuid 2d10392f-7700-4f17-8be6-9fe493836a58 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.485 2 DEBUG nova.virt.libvirt.driver [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:33:05 compute-0 nova_compute[192079]:   <uuid>2d10392f-7700-4f17-8be6-9fe493836a58</uuid>
Oct 02 12:33:05 compute-0 nova_compute[192079]:   <name>instance-00000093</name>
Oct 02 12:33:05 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:33:05 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:33:05 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:33:05 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:       <nova:name>tempest-ServersTestJSON-server-1562978230</nova:name>
Oct 02 12:33:05 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:33:05</nova:creationTime>
Oct 02 12:33:05 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:33:05 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:33:05 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:33:05 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:33:05 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:33:05 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:33:05 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:33:05 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:33:05 compute-0 nova_compute[192079]:         <nova:user uuid="27daa263abb54d4d8e3ae34cd1c5ccf5">tempest-ServersTestJSON-1163535506-project-member</nova:user>
Oct 02 12:33:05 compute-0 nova_compute[192079]:         <nova:project uuid="a4a7099974504a798e1607c8e6a1f570">tempest-ServersTestJSON-1163535506</nova:project>
Oct 02 12:33:05 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:33:05 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:33:05 compute-0 nova_compute[192079]:         <nova:port uuid="d4c65816-736d-45f1-a48d-ee78e03b1bea">
Oct 02 12:33:05 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.3" ipVersion="4"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:33:05 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:33:05 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:33:05 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <system>
Oct 02 12:33:05 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:33:05 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:33:05 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:33:05 compute-0 nova_compute[192079]:       <entry name="serial">2d10392f-7700-4f17-8be6-9fe493836a58</entry>
Oct 02 12:33:05 compute-0 nova_compute[192079]:       <entry name="uuid">2d10392f-7700-4f17-8be6-9fe493836a58</entry>
Oct 02 12:33:05 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     </system>
Oct 02 12:33:05 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:33:05 compute-0 nova_compute[192079]:   <os>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:   </os>
Oct 02 12:33:05 compute-0 nova_compute[192079]:   <features>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:   </features>
Oct 02 12:33:05 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:33:05 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:33:05 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:33:05 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/2d10392f-7700-4f17-8be6-9fe493836a58/disk"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:33:05 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/2d10392f-7700-4f17-8be6-9fe493836a58/disk.config"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:33:05 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:d4:32:59"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:       <target dev="tapd4c65816-73"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:33:05 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/2d10392f-7700-4f17-8be6-9fe493836a58/console.log" append="off"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <video>
Oct 02 12:33:05 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     </video>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:33:05 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:33:05 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:33:05 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:33:05 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:33:05 compute-0 nova_compute[192079]: </domain>
Oct 02 12:33:05 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.487 2 DEBUG nova.compute.manager [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Preparing to wait for external event network-vif-plugged-d4c65816-736d-45f1-a48d-ee78e03b1bea prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.488 2 DEBUG oslo_concurrency.lockutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Acquiring lock "2d10392f-7700-4f17-8be6-9fe493836a58-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.488 2 DEBUG oslo_concurrency.lockutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Lock "2d10392f-7700-4f17-8be6-9fe493836a58-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.489 2 DEBUG oslo_concurrency.lockutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Lock "2d10392f-7700-4f17-8be6-9fe493836a58-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.490 2 DEBUG nova.virt.libvirt.vif [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:33:00Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-ServersTestJSON-server-1562978230',display_name='tempest-ServersTestJSON-server-1562978230',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverstestjson-server-1562978230',id=147,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='a4a7099974504a798e1607c8e6a1f570',ramdisk_id='',reservation_id='r-97apu7d8',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-ServersTestJSON-1163535506',owner_user_name='tempest-ServersTestJSON-1163535506-project-membe
r'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:33:01Z,user_data=None,user_id='27daa263abb54d4d8e3ae34cd1c5ccf5',uuid=2d10392f-7700-4f17-8be6-9fe493836a58,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "d4c65816-736d-45f1-a48d-ee78e03b1bea", "address": "fa:16:3e:d4:32:59", "network": {"id": "1acf42c5-084c-4cc4-bdc5-910eec0249e3", "bridge": "br-int", "label": "tempest-ServersTestJSON-5464492-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "a4a7099974504a798e1607c8e6a1f570", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd4c65816-73", "ovs_interfaceid": "d4c65816-736d-45f1-a48d-ee78e03b1bea", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.491 2 DEBUG nova.network.os_vif_util [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Converting VIF {"id": "d4c65816-736d-45f1-a48d-ee78e03b1bea", "address": "fa:16:3e:d4:32:59", "network": {"id": "1acf42c5-084c-4cc4-bdc5-910eec0249e3", "bridge": "br-int", "label": "tempest-ServersTestJSON-5464492-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "a4a7099974504a798e1607c8e6a1f570", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd4c65816-73", "ovs_interfaceid": "d4c65816-736d-45f1-a48d-ee78e03b1bea", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.492 2 DEBUG nova.network.os_vif_util [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:d4:32:59,bridge_name='br-int',has_traffic_filtering=True,id=d4c65816-736d-45f1-a48d-ee78e03b1bea,network=Network(1acf42c5-084c-4cc4-bdc5-910eec0249e3),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd4c65816-73') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.492 2 DEBUG os_vif [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:d4:32:59,bridge_name='br-int',has_traffic_filtering=True,id=d4c65816-736d-45f1-a48d-ee78e03b1bea,network=Network(1acf42c5-084c-4cc4-bdc5-910eec0249e3),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd4c65816-73') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.493 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.494 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.494 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.499 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.499 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapd4c65816-73, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.500 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapd4c65816-73, col_values=(('external_ids', {'iface-id': 'd4c65816-736d-45f1-a48d-ee78e03b1bea', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:d4:32:59', 'vm-uuid': '2d10392f-7700-4f17-8be6-9fe493836a58'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.501 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:05 compute-0 NetworkManager[51160]: <info>  [1759408385.5021] manager: (tapd4c65816-73): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/272)
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.504 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.511 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.512 2 INFO os_vif [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:d4:32:59,bridge_name='br-int',has_traffic_filtering=True,id=d4c65816-736d-45f1-a48d-ee78e03b1bea,network=Network(1acf42c5-084c-4cc4-bdc5-910eec0249e3),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd4c65816-73')
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.576 2 DEBUG nova.virt.libvirt.driver [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.576 2 DEBUG nova.virt.libvirt.driver [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.576 2 DEBUG nova.virt.libvirt.driver [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] No VIF found with MAC fa:16:3e:d4:32:59, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:33:05 compute-0 nova_compute[192079]: 2025-10-02 12:33:05.577 2 INFO nova.virt.libvirt.driver [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Using config drive
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.059 2 INFO nova.virt.libvirt.driver [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Creating config drive at /var/lib/nova/instances/2d10392f-7700-4f17-8be6-9fe493836a58/disk.config
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.065 2 DEBUG oslo_concurrency.processutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/2d10392f-7700-4f17-8be6-9fe493836a58/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp1fmy6ari execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.208 2 DEBUG oslo_concurrency.processutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/2d10392f-7700-4f17-8be6-9fe493836a58/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp1fmy6ari" returned: 0 in 0.144s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:33:06 compute-0 NetworkManager[51160]: <info>  [1759408386.2816] manager: (tapd4c65816-73): new Tun device (/org/freedesktop/NetworkManager/Devices/273)
Oct 02 12:33:06 compute-0 kernel: tapd4c65816-73: entered promiscuous mode
Oct 02 12:33:06 compute-0 ovn_controller[94336]: 2025-10-02T12:33:06Z|00549|binding|INFO|Claiming lport d4c65816-736d-45f1-a48d-ee78e03b1bea for this chassis.
Oct 02 12:33:06 compute-0 ovn_controller[94336]: 2025-10-02T12:33:06Z|00550|binding|INFO|d4c65816-736d-45f1-a48d-ee78e03b1bea: Claiming fa:16:3e:d4:32:59 10.100.0.3
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.284 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.299 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:06.298 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:d4:32:59 10.100.0.3'], port_security=['fa:16:3e:d4:32:59 10.100.0.3'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.3/28', 'neutron:device_id': '2d10392f-7700-4f17-8be6-9fe493836a58', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-1acf42c5-084c-4cc4-bdc5-910eec0249e3', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'a4a7099974504a798e1607c8e6a1f570', 'neutron:revision_number': '2', 'neutron:security_group_ids': '99e51855-93ef-45a8-a4a3-2b0a8aec1882', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=498d5b4e-c711-4633-9705-7db30a0fb056, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=5, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=d4c65816-736d-45f1-a48d-ee78e03b1bea) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:06.300 103294 INFO neutron.agent.ovn.metadata.agent [-] Port d4c65816-736d-45f1-a48d-ee78e03b1bea in datapath 1acf42c5-084c-4cc4-bdc5-910eec0249e3 bound to our chassis
Oct 02 12:33:06 compute-0 ovn_controller[94336]: 2025-10-02T12:33:06Z|00551|binding|INFO|Setting lport d4c65816-736d-45f1-a48d-ee78e03b1bea ovn-installed in OVS
Oct 02 12:33:06 compute-0 ovn_controller[94336]: 2025-10-02T12:33:06Z|00552|binding|INFO|Setting lport d4c65816-736d-45f1-a48d-ee78e03b1bea up in Southbound
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:06.302 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 1acf42c5-084c-4cc4-bdc5-910eec0249e3
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.302 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:06.314 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d87f0f98-2d99-4cd7-b315-fa1aa29e2a5f]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:06.314 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap1acf42c5-01 in ovnmeta-1acf42c5-084c-4cc4-bdc5-910eec0249e3 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:06.316 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap1acf42c5-00 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:06.316 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8ae88bcc-1d03-4d3b-9e6d-d18283c8d1ef]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:06.318 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7c4a6d21-6668-4ed1-9648-da68d679cb58]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:06 compute-0 systemd-udevd[244940]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:33:06 compute-0 systemd-machined[152150]: New machine qemu-70-instance-00000093.
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:06.329 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[ababa568-4d82-4f9a-b346-95a8349f61b0]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:06 compute-0 NetworkManager[51160]: <info>  [1759408386.3323] device (tapd4c65816-73): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:33:06 compute-0 NetworkManager[51160]: <info>  [1759408386.3332] device (tapd4c65816-73): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:33:06 compute-0 systemd[1]: Started Virtual Machine qemu-70-instance-00000093.
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:06.354 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e7146ef6-07cf-4109-8c90-9c474c0c2992]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:06.380 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[54f67bac-5e11-4ff3-b9f2-236d4eca3719]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:06 compute-0 systemd-udevd[244945]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:33:06 compute-0 NetworkManager[51160]: <info>  [1759408386.3853] manager: (tap1acf42c5-00): new Veth device (/org/freedesktop/NetworkManager/Devices/274)
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:06.385 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6ecd2034-427a-4fdd-aabc-b9133b402acf]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:06.415 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[3bf1fe06-aa17-45e8-951c-dabdcd4dc0ae]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:06.418 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[5632fec4-2121-4724-a660-e6335a0a398c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:06 compute-0 NetworkManager[51160]: <info>  [1759408386.4459] device (tap1acf42c5-00): carrier: link connected
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:06.452 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[fabb3ce9-fbd2-49f1-b4fc-13778c87dd6d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:06.476 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7c1fdf9b-5757-42bb-bb08-9896800d8d0b]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap1acf42c5-01'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:0a:5b:cd'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 176], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 638407, 'reachable_time': 33484, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 244973, 'error': None, 'target': 'ovnmeta-1acf42c5-084c-4cc4-bdc5-910eec0249e3', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:06.489 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1adf74ee-7e5b-4d0b-bc0d-951551c138b2]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe0a:5bcd'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 638407, 'tstamp': 638407}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 244975, 'error': None, 'target': 'ovnmeta-1acf42c5-084c-4cc4-bdc5-910eec0249e3', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:06.508 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[60b06c5b-8144-40dc-97ce-a5694b8d801c]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap1acf42c5-01'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:0a:5b:cd'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 176], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 638407, 'reachable_time': 33484, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 244977, 'error': None, 'target': 'ovnmeta-1acf42c5-084c-4cc4-bdc5-910eec0249e3', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:06.539 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c7f9a6f0-3e98-4982-8020-cd5af1189f72]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:06.593 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2a95d8bf-e8c6-4716-b828-40de551053d0]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:06.595 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap1acf42c5-00, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:06.595 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:06.595 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap1acf42c5-00, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.597 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:06 compute-0 NetworkManager[51160]: <info>  [1759408386.5985] manager: (tap1acf42c5-00): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/275)
Oct 02 12:33:06 compute-0 kernel: tap1acf42c5-00: entered promiscuous mode
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.601 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:06.603 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap1acf42c5-00, col_values=(('external_ids', {'iface-id': 'c198cb2e-a850-46e4-8295-a2f9c280ee53'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:33:06 compute-0 ovn_controller[94336]: 2025-10-02T12:33:06Z|00553|binding|INFO|Releasing lport c198cb2e-a850-46e4-8295-a2f9c280ee53 from this chassis (sb_readonly=0)
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.604 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.605 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:06.607 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/1acf42c5-084c-4cc4-bdc5-910eec0249e3.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/1acf42c5-084c-4cc4-bdc5-910eec0249e3.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:06.608 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[95990fd2-6cd5-496d-a250-772b79d7aec7]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:06.608 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-1acf42c5-084c-4cc4-bdc5-910eec0249e3
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/1acf42c5-084c-4cc4-bdc5-910eec0249e3.pid.haproxy
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 1acf42c5-084c-4cc4-bdc5-910eec0249e3
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:06.609 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-1acf42c5-084c-4cc4-bdc5-910eec0249e3', 'env', 'PROCESS_TAG=haproxy-1acf42c5-084c-4cc4-bdc5-910eec0249e3', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/1acf42c5-084c-4cc4-bdc5-910eec0249e3.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.616 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.678 2 DEBUG nova.compute.manager [req-cfcfae35-f650-4447-9796-e8ebd5a9aa6b req-11358b5e-6680-4adf-8d47-ae8492732678 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Received event network-vif-plugged-d4c65816-736d-45f1-a48d-ee78e03b1bea external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.679 2 DEBUG oslo_concurrency.lockutils [req-cfcfae35-f650-4447-9796-e8ebd5a9aa6b req-11358b5e-6680-4adf-8d47-ae8492732678 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "2d10392f-7700-4f17-8be6-9fe493836a58-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.679 2 DEBUG oslo_concurrency.lockutils [req-cfcfae35-f650-4447-9796-e8ebd5a9aa6b req-11358b5e-6680-4adf-8d47-ae8492732678 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2d10392f-7700-4f17-8be6-9fe493836a58-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.679 2 DEBUG oslo_concurrency.lockutils [req-cfcfae35-f650-4447-9796-e8ebd5a9aa6b req-11358b5e-6680-4adf-8d47-ae8492732678 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2d10392f-7700-4f17-8be6-9fe493836a58-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.679 2 DEBUG nova.compute.manager [req-cfcfae35-f650-4447-9796-e8ebd5a9aa6b req-11358b5e-6680-4adf-8d47-ae8492732678 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Processing event network-vif-plugged-d4c65816-736d-45f1-a48d-ee78e03b1bea _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.686 2 DEBUG nova.compute.manager [req-74169fb4-3d64-4cae-b8a5-89d5cd5220c3 req-5996fb2f-03c3-452e-8323-ec947e3e36dd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Received event network-changed-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.686 2 DEBUG nova.compute.manager [req-74169fb4-3d64-4cae-b8a5-89d5cd5220c3 req-5996fb2f-03c3-452e-8323-ec947e3e36dd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Refreshing instance network info cache due to event network-changed-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.687 2 DEBUG oslo_concurrency.lockutils [req-74169fb4-3d64-4cae-b8a5-89d5cd5220c3 req-5996fb2f-03c3-452e-8323-ec947e3e36dd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-a0a5e290-69d3-4ce0-9533-6df7cf06c204" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.687 2 DEBUG oslo_concurrency.lockutils [req-74169fb4-3d64-4cae-b8a5-89d5cd5220c3 req-5996fb2f-03c3-452e-8323-ec947e3e36dd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-a0a5e290-69d3-4ce0-9533-6df7cf06c204" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.687 2 DEBUG nova.network.neutron [req-74169fb4-3d64-4cae-b8a5-89d5cd5220c3 req-5996fb2f-03c3-452e-8323-ec947e3e36dd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Refreshing network info cache for port d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.835 2 DEBUG oslo_concurrency.lockutils [None req-a7a51d06-222e-4a9a-91eb-cdf74809ea44 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.836 2 DEBUG oslo_concurrency.lockutils [None req-a7a51d06-222e-4a9a-91eb-cdf74809ea44 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.836 2 DEBUG oslo_concurrency.lockutils [None req-a7a51d06-222e-4a9a-91eb-cdf74809ea44 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.836 2 DEBUG oslo_concurrency.lockutils [None req-a7a51d06-222e-4a9a-91eb-cdf74809ea44 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.836 2 DEBUG oslo_concurrency.lockutils [None req-a7a51d06-222e-4a9a-91eb-cdf74809ea44 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.847 2 INFO nova.compute.manager [None req-a7a51d06-222e-4a9a-91eb-cdf74809ea44 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Terminating instance
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.862 2 DEBUG nova.compute.manager [None req-a7a51d06-222e-4a9a-91eb-cdf74809ea44 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:33:06 compute-0 kernel: tapd3cfd23d-04 (unregistering): left promiscuous mode
Oct 02 12:33:06 compute-0 NetworkManager[51160]: <info>  [1759408386.8910] device (tapd3cfd23d-04): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:33:06 compute-0 ovn_controller[94336]: 2025-10-02T12:33:06Z|00554|binding|INFO|Releasing lport d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 from this chassis (sb_readonly=0)
Oct 02 12:33:06 compute-0 ovn_controller[94336]: 2025-10-02T12:33:06Z|00555|binding|INFO|Setting lport d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 down in Southbound
Oct 02 12:33:06 compute-0 ovn_controller[94336]: 2025-10-02T12:33:06Z|00556|binding|INFO|Removing iface tapd3cfd23d-04 ovn-installed in OVS
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.899 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.901 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:06.912 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:5b:11:fa 10.100.0.5'], port_security=['fa:16:3e:5b:11:fa 10.100.0.5'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.5/28', 'neutron:device_id': 'a0a5e290-69d3-4ce0-9533-6df7cf06c204', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-574af896-2fe0-426b-87eb-93e7ba659a79', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '76c7dd40d83e4e3ca71abbebf57921b6', 'neutron:revision_number': '6', 'neutron:security_group_ids': '24011c1c-187e-42ed-b64a-06bc43fab21b', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=1b106f5d-d8f5-4be2-bc91-66bf2d8dc8a3, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=d3cfd23d-04b0-4f18-b20d-14f75e69b2a3) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.913 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:06 compute-0 systemd[1]: machine-qemu\x2d69\x2dinstance\x2d0000008d.scope: Deactivated successfully.
Oct 02 12:33:06 compute-0 systemd[1]: machine-qemu\x2d69\x2dinstance\x2d0000008d.scope: Consumed 14.103s CPU time.
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.953 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408386.9530146, 2d10392f-7700-4f17-8be6-9fe493836a58 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.954 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] VM Started (Lifecycle Event)
Oct 02 12:33:06 compute-0 systemd-machined[152150]: Machine qemu-69-instance-0000008d terminated.
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.956 2 DEBUG nova.compute.manager [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.962 2 DEBUG nova.virt.libvirt.driver [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.965 2 INFO nova.virt.libvirt.driver [-] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Instance spawned successfully.
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.965 2 DEBUG nova.virt.libvirt.driver [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:33:06 compute-0 podman[245015]: 2025-10-02 12:33:06.983963836 +0000 UTC m=+0.051032360 container create 2d0d239610cb24d1c3675ed15ab86cea3699099b0377b55c4f13d191dd5a54b6 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-1acf42c5-084c-4cc4-bdc5-910eec0249e3, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.987 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.993 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.995 2 DEBUG nova.virt.libvirt.driver [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.996 2 DEBUG nova.virt.libvirt.driver [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.996 2 DEBUG nova.virt.libvirt.driver [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.996 2 DEBUG nova.virt.libvirt.driver [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.997 2 DEBUG nova.virt.libvirt.driver [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:33:06 compute-0 nova_compute[192079]: 2025-10-02 12:33:06.997 2 DEBUG nova.virt.libvirt.driver [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.010 2 DEBUG nova.network.neutron [req-b914a6b5-df8a-44d9-a8a8-e4210c592345 req-376f07f0-c970-4856-bb9b-b09371a222e4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Updated VIF entry in instance network info cache for port d4c65816-736d-45f1-a48d-ee78e03b1bea. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.011 2 DEBUG nova.network.neutron [req-b914a6b5-df8a-44d9-a8a8-e4210c592345 req-376f07f0-c970-4856-bb9b-b09371a222e4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Updating instance_info_cache with network_info: [{"id": "d4c65816-736d-45f1-a48d-ee78e03b1bea", "address": "fa:16:3e:d4:32:59", "network": {"id": "1acf42c5-084c-4cc4-bdc5-910eec0249e3", "bridge": "br-int", "label": "tempest-ServersTestJSON-5464492-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "a4a7099974504a798e1607c8e6a1f570", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd4c65816-73", "ovs_interfaceid": "d4c65816-736d-45f1-a48d-ee78e03b1bea", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:33:07 compute-0 systemd[1]: Started libpod-conmon-2d0d239610cb24d1c3675ed15ab86cea3699099b0377b55c4f13d191dd5a54b6.scope.
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.024 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.024 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408386.9531896, 2d10392f-7700-4f17-8be6-9fe493836a58 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.024 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] VM Paused (Lifecycle Event)
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.032 2 DEBUG oslo_concurrency.lockutils [req-b914a6b5-df8a-44d9-a8a8-e4210c592345 req-376f07f0-c970-4856-bb9b-b09371a222e4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-2d10392f-7700-4f17-8be6-9fe493836a58" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:33:07 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:33:07 compute-0 podman[245015]: 2025-10-02 12:33:06.952271473 +0000 UTC m=+0.019340017 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:33:07 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/f276851438072cc792ac2dcd409834de633fc629647c8a41cb27f04e2ccd9c7d/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.051 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.055 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408386.96047, 2d10392f-7700-4f17-8be6-9fe493836a58 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.055 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] VM Resumed (Lifecycle Event)
Oct 02 12:33:07 compute-0 podman[245015]: 2025-10-02 12:33:07.063753559 +0000 UTC m=+0.130822103 container init 2d0d239610cb24d1c3675ed15ab86cea3699099b0377b55c4f13d191dd5a54b6 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-1acf42c5-084c-4cc4-bdc5-910eec0249e3, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:33:07 compute-0 podman[245015]: 2025-10-02 12:33:07.072396864 +0000 UTC m=+0.139465388 container start 2d0d239610cb24d1c3675ed15ab86cea3699099b0377b55c4f13d191dd5a54b6 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-1acf42c5-084c-4cc4-bdc5-910eec0249e3, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.075 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.079 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.082 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.085 2 INFO nova.compute.manager [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Took 5.15 seconds to spawn the instance on the hypervisor.
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.085 2 DEBUG nova.compute.manager [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.088 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.096 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:33:07 compute-0 neutron-haproxy-ovnmeta-1acf42c5-084c-4cc4-bdc5-910eec0249e3[245030]: [NOTICE]   (245034) : New worker (245048) forked
Oct 02 12:33:07 compute-0 neutron-haproxy-ovnmeta-1acf42c5-084c-4cc4-bdc5-910eec0249e3[245030]: [NOTICE]   (245034) : Loading success.
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.120 2 INFO nova.virt.libvirt.driver [-] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Instance destroyed successfully.
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.120 2 DEBUG nova.objects.instance [None req-a7a51d06-222e-4a9a-91eb-cdf74809ea44 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'resources' on Instance uuid a0a5e290-69d3-4ce0-9533-6df7cf06c204 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:33:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:07.148 103294 INFO neutron.agent.ovn.metadata.agent [-] Port d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 in datapath 574af896-2fe0-426b-87eb-93e7ba659a79 unbound from our chassis
Oct 02 12:33:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:07.150 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 574af896-2fe0-426b-87eb-93e7ba659a79, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:33:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:07.150 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[64b209d2-31e4-4a2c-8362-8474fe2c56ae]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:07.151 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79 namespace which is not needed anymore
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.153 2 DEBUG nova.virt.libvirt.vif [None req-a7a51d06-222e-4a9a-91eb-cdf74809ea44 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:32:01Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestNetworkAdvancedServerOps-server-1138303133',display_name='tempest-TestNetworkAdvancedServerOps-server-1138303133',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(2),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkadvancedserverops-server-1138303133',id=141,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=2,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBHZXGbdK2BsOqbaUUYa7XicNW8CV2qKZlqbls3huCtjRaED+CX4fIrUFMW9LtPV4B8c3A6SEeNHwH0MqJ7ttz8hSi/AjegEpdsa/s/FufbgfsmCM0TXi1lTy8HcOB+sZ9g==',key_name='tempest-TestNetworkAdvancedServerOps-1099183543',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:32:47Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=192,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='76c7dd40d83e4e3ca71abbebf57921b6',ramdisk_id='',reservation_id='r-iebv0qns',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-TestNetworkAdvancedServerOps-597114071',owner_user_name='tempest-TestNetworkAdvancedServerOps-597114071-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:32:50Z,user_data=None,user_id='1faa7e121a0e43ad8cb4ae5b2cfcc6a2',uuid=a0a5e290-69d3-4ce0-9533-6df7cf06c204,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "address": "fa:16:3e:5b:11:fa", "network": {"id": "574af896-2fe0-426b-87eb-93e7ba659a79", "bridge": "br-int", "label": "tempest-network-smoke--1599149382", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd3cfd23d-04", "ovs_interfaceid": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.154 2 DEBUG nova.network.os_vif_util [None req-a7a51d06-222e-4a9a-91eb-cdf74809ea44 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converting VIF {"id": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "address": "fa:16:3e:5b:11:fa", "network": {"id": "574af896-2fe0-426b-87eb-93e7ba659a79", "bridge": "br-int", "label": "tempest-network-smoke--1599149382", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd3cfd23d-04", "ovs_interfaceid": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.154 2 DEBUG nova.network.os_vif_util [None req-a7a51d06-222e-4a9a-91eb-cdf74809ea44 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:5b:11:fa,bridge_name='br-int',has_traffic_filtering=True,id=d3cfd23d-04b0-4f18-b20d-14f75e69b2a3,network=Network(574af896-2fe0-426b-87eb-93e7ba659a79),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd3cfd23d-04') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.155 2 DEBUG os_vif [None req-a7a51d06-222e-4a9a-91eb-cdf74809ea44 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:5b:11:fa,bridge_name='br-int',has_traffic_filtering=True,id=d3cfd23d-04b0-4f18-b20d-14f75e69b2a3,network=Network(574af896-2fe0-426b-87eb-93e7ba659a79),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd3cfd23d-04') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.157 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.157 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapd3cfd23d-04, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.159 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.159 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.161 2 INFO os_vif [None req-a7a51d06-222e-4a9a-91eb-cdf74809ea44 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:5b:11:fa,bridge_name='br-int',has_traffic_filtering=True,id=d3cfd23d-04b0-4f18-b20d-14f75e69b2a3,network=Network(574af896-2fe0-426b-87eb-93e7ba659a79),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd3cfd23d-04')
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.162 2 INFO nova.virt.libvirt.driver [None req-a7a51d06-222e-4a9a-91eb-cdf74809ea44 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Deleting instance files /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204_del
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.162 2 INFO nova.virt.libvirt.driver [None req-a7a51d06-222e-4a9a-91eb-cdf74809ea44 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Deletion of /var/lib/nova/instances/a0a5e290-69d3-4ce0-9533-6df7cf06c204_del complete
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.187 2 INFO nova.compute.manager [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Took 5.75 seconds to build instance.
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.227 2 DEBUG oslo_concurrency.lockutils [None req-e97901ce-d9a3-462e-a384-7eedd0f7aa6a 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Lock "2d10392f-7700-4f17-8be6-9fe493836a58" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 5.894s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.252 2 INFO nova.compute.manager [None req-a7a51d06-222e-4a9a-91eb-cdf74809ea44 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Took 0.39 seconds to destroy the instance on the hypervisor.
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.252 2 DEBUG oslo.service.loopingcall [None req-a7a51d06-222e-4a9a-91eb-cdf74809ea44 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.252 2 DEBUG nova.compute.manager [-] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.252 2 DEBUG nova.network.neutron [-] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:33:07 compute-0 neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79[244777]: [NOTICE]   (244781) : haproxy version is 2.8.14-c23fe91
Oct 02 12:33:07 compute-0 neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79[244777]: [NOTICE]   (244781) : path to executable is /usr/sbin/haproxy
Oct 02 12:33:07 compute-0 neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79[244777]: [WARNING]  (244781) : Exiting Master process...
Oct 02 12:33:07 compute-0 neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79[244777]: [WARNING]  (244781) : Exiting Master process...
Oct 02 12:33:07 compute-0 neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79[244777]: [ALERT]    (244781) : Current worker (244783) exited with code 143 (Terminated)
Oct 02 12:33:07 compute-0 neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79[244777]: [WARNING]  (244781) : All workers exited. Exiting... (0)
Oct 02 12:33:07 compute-0 systemd[1]: libpod-02b554e8c6f67a43ab6044e93c5af21e4aa2bcd3ae46a77574d8eee0732ae53b.scope: Deactivated successfully.
Oct 02 12:33:07 compute-0 podman[245078]: 2025-10-02 12:33:07.336830302 +0000 UTC m=+0.062214284 container died 02b554e8c6f67a43ab6044e93c5af21e4aa2bcd3ae46a77574d8eee0732ae53b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_managed=true)
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.368 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:07 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-02b554e8c6f67a43ab6044e93c5af21e4aa2bcd3ae46a77574d8eee0732ae53b-userdata-shm.mount: Deactivated successfully.
Oct 02 12:33:07 compute-0 systemd[1]: var-lib-containers-storage-overlay-88efe688ea47ea9e4f774a20e1d0cac5b14587b88e849fede8e67e5f148e7bb1-merged.mount: Deactivated successfully.
Oct 02 12:33:07 compute-0 podman[245078]: 2025-10-02 12:33:07.387631565 +0000 UTC m=+0.113015547 container cleanup 02b554e8c6f67a43ab6044e93c5af21e4aa2bcd3ae46a77574d8eee0732ae53b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:33:07 compute-0 systemd[1]: libpod-conmon-02b554e8c6f67a43ab6044e93c5af21e4aa2bcd3ae46a77574d8eee0732ae53b.scope: Deactivated successfully.
Oct 02 12:33:07 compute-0 podman[245092]: 2025-10-02 12:33:07.424239272 +0000 UTC m=+0.065088863 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, managed_by=edpm_ansible, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent)
Oct 02 12:33:07 compute-0 podman[245102]: 2025-10-02 12:33:07.430222584 +0000 UTC m=+0.063992672 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']})
Oct 02 12:33:07 compute-0 podman[245135]: 2025-10-02 12:33:07.480427281 +0000 UTC m=+0.072586267 container remove 02b554e8c6f67a43ab6044e93c5af21e4aa2bcd3ae46a77574d8eee0732ae53b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3)
Oct 02 12:33:07 compute-0 podman[245096]: 2025-10-02 12:33:07.484559014 +0000 UTC m=+0.120868831 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3)
Oct 02 12:33:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:07.486 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[70a275d8-6ce6-41d3-87bd-a001ba69f9b7]: (4, ('Thu Oct  2 12:33:07 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79 (02b554e8c6f67a43ab6044e93c5af21e4aa2bcd3ae46a77574d8eee0732ae53b)\n02b554e8c6f67a43ab6044e93c5af21e4aa2bcd3ae46a77574d8eee0732ae53b\nThu Oct  2 12:33:07 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79 (02b554e8c6f67a43ab6044e93c5af21e4aa2bcd3ae46a77574d8eee0732ae53b)\n02b554e8c6f67a43ab6044e93c5af21e4aa2bcd3ae46a77574d8eee0732ae53b\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:07.488 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f9f41cd0-3054-4375-88c9-1bd166de4ff1]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:07.490 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap574af896-20, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:33:07 compute-0 kernel: tap574af896-20: left promiscuous mode
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.491 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:07 compute-0 nova_compute[192079]: 2025-10-02 12:33:07.503 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:07.506 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3d45d5b2-e10d-48e3-a901-c62aa5099f44]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:07.534 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c9ff4466-a919-4744-ab37-438497b03722]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:07.535 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7f59338a-4044-48f6-bd12-8b796b07025f]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:07.549 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[bbaef59c-dfaf-47bc-8a1c-7918b5f693f3]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 636403, 'reachable_time': 24971, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 245187, 'error': None, 'target': 'ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:07 compute-0 systemd[1]: run-netns-ovnmeta\x2d574af896\x2d2fe0\x2d426b\x2d87eb\x2d93e7ba659a79.mount: Deactivated successfully.
Oct 02 12:33:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:07.551 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-574af896-2fe0-426b-87eb-93e7ba659a79 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:33:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:07.551 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[f9c798fd-1538-445e-bab5-8ffb92808fad]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:08 compute-0 nova_compute[192079]: 2025-10-02 12:33:08.244 2 DEBUG nova.network.neutron [-] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:33:08 compute-0 nova_compute[192079]: 2025-10-02 12:33:08.269 2 INFO nova.compute.manager [-] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Took 1.02 seconds to deallocate network for instance.
Oct 02 12:33:08 compute-0 nova_compute[192079]: 2025-10-02 12:33:08.311 2 DEBUG nova.compute.manager [req-a7457e3c-637d-4d1b-967d-bf325d8a3798 req-389a9b4d-854a-4e3c-b98a-4a8aaf6d0942 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Received event network-vif-deleted-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:33:08 compute-0 nova_compute[192079]: 2025-10-02 12:33:08.344 2 DEBUG nova.network.neutron [req-74169fb4-3d64-4cae-b8a5-89d5cd5220c3 req-5996fb2f-03c3-452e-8323-ec947e3e36dd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Updated VIF entry in instance network info cache for port d3cfd23d-04b0-4f18-b20d-14f75e69b2a3. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:33:08 compute-0 nova_compute[192079]: 2025-10-02 12:33:08.344 2 DEBUG nova.network.neutron [req-74169fb4-3d64-4cae-b8a5-89d5cd5220c3 req-5996fb2f-03c3-452e-8323-ec947e3e36dd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Updating instance_info_cache with network_info: [{"id": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "address": "fa:16:3e:5b:11:fa", "network": {"id": "574af896-2fe0-426b-87eb-93e7ba659a79", "bridge": "br-int", "label": "tempest-network-smoke--1599149382", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd3cfd23d-04", "ovs_interfaceid": "d3cfd23d-04b0-4f18-b20d-14f75e69b2a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:33:08 compute-0 nova_compute[192079]: 2025-10-02 12:33:08.382 2 DEBUG oslo_concurrency.lockutils [None req-a7a51d06-222e-4a9a-91eb-cdf74809ea44 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:08 compute-0 nova_compute[192079]: 2025-10-02 12:33:08.382 2 DEBUG oslo_concurrency.lockutils [None req-a7a51d06-222e-4a9a-91eb-cdf74809ea44 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:08 compute-0 nova_compute[192079]: 2025-10-02 12:33:08.384 2 DEBUG oslo_concurrency.lockutils [req-74169fb4-3d64-4cae-b8a5-89d5cd5220c3 req-5996fb2f-03c3-452e-8323-ec947e3e36dd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-a0a5e290-69d3-4ce0-9533-6df7cf06c204" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:33:08 compute-0 nova_compute[192079]: 2025-10-02 12:33:08.390 2 DEBUG oslo_concurrency.lockutils [None req-a7a51d06-222e-4a9a-91eb-cdf74809ea44 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.007s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:08 compute-0 nova_compute[192079]: 2025-10-02 12:33:08.424 2 INFO nova.scheduler.client.report [None req-a7a51d06-222e-4a9a-91eb-cdf74809ea44 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Deleted allocations for instance a0a5e290-69d3-4ce0-9533-6df7cf06c204
Oct 02 12:33:08 compute-0 nova_compute[192079]: 2025-10-02 12:33:08.530 2 DEBUG oslo_concurrency.lockutils [None req-a7a51d06-222e-4a9a-91eb-cdf74809ea44 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.694s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:08 compute-0 nova_compute[192079]: 2025-10-02 12:33:08.772 2 DEBUG nova.compute.manager [req-8daf494c-ab84-48c5-9e00-b878e595ece7 req-1eab3db3-c2bd-404b-8b62-a11efa6e8a4c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Received event network-vif-plugged-d4c65816-736d-45f1-a48d-ee78e03b1bea external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:33:08 compute-0 nova_compute[192079]: 2025-10-02 12:33:08.772 2 DEBUG oslo_concurrency.lockutils [req-8daf494c-ab84-48c5-9e00-b878e595ece7 req-1eab3db3-c2bd-404b-8b62-a11efa6e8a4c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "2d10392f-7700-4f17-8be6-9fe493836a58-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:08 compute-0 nova_compute[192079]: 2025-10-02 12:33:08.773 2 DEBUG oslo_concurrency.lockutils [req-8daf494c-ab84-48c5-9e00-b878e595ece7 req-1eab3db3-c2bd-404b-8b62-a11efa6e8a4c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2d10392f-7700-4f17-8be6-9fe493836a58-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:08 compute-0 nova_compute[192079]: 2025-10-02 12:33:08.773 2 DEBUG oslo_concurrency.lockutils [req-8daf494c-ab84-48c5-9e00-b878e595ece7 req-1eab3db3-c2bd-404b-8b62-a11efa6e8a4c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2d10392f-7700-4f17-8be6-9fe493836a58-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:08 compute-0 nova_compute[192079]: 2025-10-02 12:33:08.773 2 DEBUG nova.compute.manager [req-8daf494c-ab84-48c5-9e00-b878e595ece7 req-1eab3db3-c2bd-404b-8b62-a11efa6e8a4c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] No waiting events found dispatching network-vif-plugged-d4c65816-736d-45f1-a48d-ee78e03b1bea pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:33:08 compute-0 nova_compute[192079]: 2025-10-02 12:33:08.773 2 WARNING nova.compute.manager [req-8daf494c-ab84-48c5-9e00-b878e595ece7 req-1eab3db3-c2bd-404b-8b62-a11efa6e8a4c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Received unexpected event network-vif-plugged-d4c65816-736d-45f1-a48d-ee78e03b1bea for instance with vm_state active and task_state None.
Oct 02 12:33:08 compute-0 nova_compute[192079]: 2025-10-02 12:33:08.773 2 DEBUG nova.compute.manager [req-8daf494c-ab84-48c5-9e00-b878e595ece7 req-1eab3db3-c2bd-404b-8b62-a11efa6e8a4c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Received event network-vif-unplugged-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:33:08 compute-0 nova_compute[192079]: 2025-10-02 12:33:08.774 2 DEBUG oslo_concurrency.lockutils [req-8daf494c-ab84-48c5-9e00-b878e595ece7 req-1eab3db3-c2bd-404b-8b62-a11efa6e8a4c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:08 compute-0 nova_compute[192079]: 2025-10-02 12:33:08.774 2 DEBUG oslo_concurrency.lockutils [req-8daf494c-ab84-48c5-9e00-b878e595ece7 req-1eab3db3-c2bd-404b-8b62-a11efa6e8a4c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:08 compute-0 nova_compute[192079]: 2025-10-02 12:33:08.774 2 DEBUG oslo_concurrency.lockutils [req-8daf494c-ab84-48c5-9e00-b878e595ece7 req-1eab3db3-c2bd-404b-8b62-a11efa6e8a4c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:08 compute-0 nova_compute[192079]: 2025-10-02 12:33:08.774 2 DEBUG nova.compute.manager [req-8daf494c-ab84-48c5-9e00-b878e595ece7 req-1eab3db3-c2bd-404b-8b62-a11efa6e8a4c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] No waiting events found dispatching network-vif-unplugged-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:33:08 compute-0 nova_compute[192079]: 2025-10-02 12:33:08.774 2 WARNING nova.compute.manager [req-8daf494c-ab84-48c5-9e00-b878e595ece7 req-1eab3db3-c2bd-404b-8b62-a11efa6e8a4c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Received unexpected event network-vif-unplugged-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 for instance with vm_state deleted and task_state None.
Oct 02 12:33:08 compute-0 nova_compute[192079]: 2025-10-02 12:33:08.775 2 DEBUG nova.compute.manager [req-8daf494c-ab84-48c5-9e00-b878e595ece7 req-1eab3db3-c2bd-404b-8b62-a11efa6e8a4c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Received event network-vif-plugged-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:33:08 compute-0 nova_compute[192079]: 2025-10-02 12:33:08.775 2 DEBUG oslo_concurrency.lockutils [req-8daf494c-ab84-48c5-9e00-b878e595ece7 req-1eab3db3-c2bd-404b-8b62-a11efa6e8a4c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:08 compute-0 nova_compute[192079]: 2025-10-02 12:33:08.775 2 DEBUG oslo_concurrency.lockutils [req-8daf494c-ab84-48c5-9e00-b878e595ece7 req-1eab3db3-c2bd-404b-8b62-a11efa6e8a4c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:08 compute-0 nova_compute[192079]: 2025-10-02 12:33:08.775 2 DEBUG oslo_concurrency.lockutils [req-8daf494c-ab84-48c5-9e00-b878e595ece7 req-1eab3db3-c2bd-404b-8b62-a11efa6e8a4c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "a0a5e290-69d3-4ce0-9533-6df7cf06c204-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:08 compute-0 nova_compute[192079]: 2025-10-02 12:33:08.775 2 DEBUG nova.compute.manager [req-8daf494c-ab84-48c5-9e00-b878e595ece7 req-1eab3db3-c2bd-404b-8b62-a11efa6e8a4c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] No waiting events found dispatching network-vif-plugged-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:33:08 compute-0 nova_compute[192079]: 2025-10-02 12:33:08.776 2 WARNING nova.compute.manager [req-8daf494c-ab84-48c5-9e00-b878e595ece7 req-1eab3db3-c2bd-404b-8b62-a11efa6e8a4c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Received unexpected event network-vif-plugged-d3cfd23d-04b0-4f18-b20d-14f75e69b2a3 for instance with vm_state deleted and task_state None.
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.452 2 DEBUG oslo_concurrency.lockutils [None req-2ba32190-2517-4963-8663-a3415ce7b149 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Acquiring lock "2d10392f-7700-4f17-8be6-9fe493836a58" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.452 2 DEBUG oslo_concurrency.lockutils [None req-2ba32190-2517-4963-8663-a3415ce7b149 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Lock "2d10392f-7700-4f17-8be6-9fe493836a58" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.453 2 DEBUG oslo_concurrency.lockutils [None req-2ba32190-2517-4963-8663-a3415ce7b149 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Acquiring lock "2d10392f-7700-4f17-8be6-9fe493836a58-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.453 2 DEBUG oslo_concurrency.lockutils [None req-2ba32190-2517-4963-8663-a3415ce7b149 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Lock "2d10392f-7700-4f17-8be6-9fe493836a58-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.453 2 DEBUG oslo_concurrency.lockutils [None req-2ba32190-2517-4963-8663-a3415ce7b149 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Lock "2d10392f-7700-4f17-8be6-9fe493836a58-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.464 2 INFO nova.compute.manager [None req-2ba32190-2517-4963-8663-a3415ce7b149 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Terminating instance
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.476 2 DEBUG nova.compute.manager [None req-2ba32190-2517-4963-8663-a3415ce7b149 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:33:10 compute-0 kernel: tapd4c65816-73 (unregistering): left promiscuous mode
Oct 02 12:33:10 compute-0 NetworkManager[51160]: <info>  [1759408390.4997] device (tapd4c65816-73): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:33:10 compute-0 ovn_controller[94336]: 2025-10-02T12:33:10Z|00557|binding|INFO|Releasing lport d4c65816-736d-45f1-a48d-ee78e03b1bea from this chassis (sb_readonly=0)
Oct 02 12:33:10 compute-0 ovn_controller[94336]: 2025-10-02T12:33:10Z|00558|binding|INFO|Setting lport d4c65816-736d-45f1-a48d-ee78e03b1bea down in Southbound
Oct 02 12:33:10 compute-0 ovn_controller[94336]: 2025-10-02T12:33:10Z|00559|binding|INFO|Removing iface tapd4c65816-73 ovn-installed in OVS
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.508 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.509 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:10.526 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:d4:32:59 10.100.0.3'], port_security=['fa:16:3e:d4:32:59 10.100.0.3'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.3/28', 'neutron:device_id': '2d10392f-7700-4f17-8be6-9fe493836a58', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-1acf42c5-084c-4cc4-bdc5-910eec0249e3', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'a4a7099974504a798e1607c8e6a1f570', 'neutron:revision_number': '4', 'neutron:security_group_ids': '99e51855-93ef-45a8-a4a3-2b0a8aec1882', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=498d5b4e-c711-4633-9705-7db30a0fb056, chassis=[], tunnel_key=5, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=d4c65816-736d-45f1-a48d-ee78e03b1bea) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:33:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:10.527 103294 INFO neutron.agent.ovn.metadata.agent [-] Port d4c65816-736d-45f1-a48d-ee78e03b1bea in datapath 1acf42c5-084c-4cc4-bdc5-910eec0249e3 unbound from our chassis
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.528 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:10.528 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 1acf42c5-084c-4cc4-bdc5-910eec0249e3, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:33:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:10.529 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[56e52f77-e257-4c5d-98da-c9607afbe204]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:10.530 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-1acf42c5-084c-4cc4-bdc5-910eec0249e3 namespace which is not needed anymore
Oct 02 12:33:10 compute-0 systemd[1]: machine-qemu\x2d70\x2dinstance\x2d00000093.scope: Deactivated successfully.
Oct 02 12:33:10 compute-0 systemd[1]: machine-qemu\x2d70\x2dinstance\x2d00000093.scope: Consumed 4.146s CPU time.
Oct 02 12:33:10 compute-0 systemd-machined[152150]: Machine qemu-70-instance-00000093 terminated.
Oct 02 12:33:10 compute-0 neutron-haproxy-ovnmeta-1acf42c5-084c-4cc4-bdc5-910eec0249e3[245030]: [NOTICE]   (245034) : haproxy version is 2.8.14-c23fe91
Oct 02 12:33:10 compute-0 neutron-haproxy-ovnmeta-1acf42c5-084c-4cc4-bdc5-910eec0249e3[245030]: [NOTICE]   (245034) : path to executable is /usr/sbin/haproxy
Oct 02 12:33:10 compute-0 neutron-haproxy-ovnmeta-1acf42c5-084c-4cc4-bdc5-910eec0249e3[245030]: [WARNING]  (245034) : Exiting Master process...
Oct 02 12:33:10 compute-0 neutron-haproxy-ovnmeta-1acf42c5-084c-4cc4-bdc5-910eec0249e3[245030]: [ALERT]    (245034) : Current worker (245048) exited with code 143 (Terminated)
Oct 02 12:33:10 compute-0 neutron-haproxy-ovnmeta-1acf42c5-084c-4cc4-bdc5-910eec0249e3[245030]: [WARNING]  (245034) : All workers exited. Exiting... (0)
Oct 02 12:33:10 compute-0 systemd[1]: libpod-2d0d239610cb24d1c3675ed15ab86cea3699099b0377b55c4f13d191dd5a54b6.scope: Deactivated successfully.
Oct 02 12:33:10 compute-0 podman[245213]: 2025-10-02 12:33:10.653182516 +0000 UTC m=+0.045788067 container died 2d0d239610cb24d1c3675ed15ab86cea3699099b0377b55c4f13d191dd5a54b6 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-1acf42c5-084c-4cc4-bdc5-910eec0249e3, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:33:10 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-2d0d239610cb24d1c3675ed15ab86cea3699099b0377b55c4f13d191dd5a54b6-userdata-shm.mount: Deactivated successfully.
Oct 02 12:33:10 compute-0 systemd[1]: var-lib-containers-storage-overlay-f276851438072cc792ac2dcd409834de633fc629647c8a41cb27f04e2ccd9c7d-merged.mount: Deactivated successfully.
Oct 02 12:33:10 compute-0 podman[245213]: 2025-10-02 12:33:10.692719082 +0000 UTC m=+0.085324633 container cleanup 2d0d239610cb24d1c3675ed15ab86cea3699099b0377b55c4f13d191dd5a54b6 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-1acf42c5-084c-4cc4-bdc5-910eec0249e3, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:33:10 compute-0 systemd[1]: libpod-conmon-2d0d239610cb24d1c3675ed15ab86cea3699099b0377b55c4f13d191dd5a54b6.scope: Deactivated successfully.
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.742 2 INFO nova.virt.libvirt.driver [-] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Instance destroyed successfully.
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.743 2 DEBUG nova.objects.instance [None req-2ba32190-2517-4963-8663-a3415ce7b149 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Lazy-loading 'resources' on Instance uuid 2d10392f-7700-4f17-8be6-9fe493836a58 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:33:10 compute-0 podman[245249]: 2025-10-02 12:33:10.761122644 +0000 UTC m=+0.045198711 container remove 2d0d239610cb24d1c3675ed15ab86cea3699099b0377b55c4f13d191dd5a54b6 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-1acf42c5-084c-4cc4-bdc5-910eec0249e3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS)
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.762 2 DEBUG nova.virt.libvirt.vif [None req-2ba32190-2517-4963-8663-a3415ce7b149 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:33:00Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-ServersTestJSON-server-1562978230',display_name='tempest-ServersTestJSON-server-1562978230',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-serverstestjson-server-1562978230',id=147,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:33:07Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='a4a7099974504a798e1607c8e6a1f570',ramdisk_id='',reservation_id='r-97apu7d8',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ra
m='0',owner_project_name='tempest-ServersTestJSON-1163535506',owner_user_name='tempest-ServersTestJSON-1163535506-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:33:07Z,user_data=None,user_id='27daa263abb54d4d8e3ae34cd1c5ccf5',uuid=2d10392f-7700-4f17-8be6-9fe493836a58,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "d4c65816-736d-45f1-a48d-ee78e03b1bea", "address": "fa:16:3e:d4:32:59", "network": {"id": "1acf42c5-084c-4cc4-bdc5-910eec0249e3", "bridge": "br-int", "label": "tempest-ServersTestJSON-5464492-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "a4a7099974504a798e1607c8e6a1f570", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd4c65816-73", "ovs_interfaceid": "d4c65816-736d-45f1-a48d-ee78e03b1bea", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.762 2 DEBUG nova.network.os_vif_util [None req-2ba32190-2517-4963-8663-a3415ce7b149 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Converting VIF {"id": "d4c65816-736d-45f1-a48d-ee78e03b1bea", "address": "fa:16:3e:d4:32:59", "network": {"id": "1acf42c5-084c-4cc4-bdc5-910eec0249e3", "bridge": "br-int", "label": "tempest-ServersTestJSON-5464492-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "a4a7099974504a798e1607c8e6a1f570", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapd4c65816-73", "ovs_interfaceid": "d4c65816-736d-45f1-a48d-ee78e03b1bea", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.763 2 DEBUG nova.network.os_vif_util [None req-2ba32190-2517-4963-8663-a3415ce7b149 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:d4:32:59,bridge_name='br-int',has_traffic_filtering=True,id=d4c65816-736d-45f1-a48d-ee78e03b1bea,network=Network(1acf42c5-084c-4cc4-bdc5-910eec0249e3),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd4c65816-73') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.764 2 DEBUG os_vif [None req-2ba32190-2517-4963-8663-a3415ce7b149 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:d4:32:59,bridge_name='br-int',has_traffic_filtering=True,id=d4c65816-736d-45f1-a48d-ee78e03b1bea,network=Network(1acf42c5-084c-4cc4-bdc5-910eec0249e3),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd4c65816-73') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.765 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:10.765 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[25cb16d1-8986-404d-b938-c15d544bc524]: (4, ('Thu Oct  2 12:33:10 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-1acf42c5-084c-4cc4-bdc5-910eec0249e3 (2d0d239610cb24d1c3675ed15ab86cea3699099b0377b55c4f13d191dd5a54b6)\n2d0d239610cb24d1c3675ed15ab86cea3699099b0377b55c4f13d191dd5a54b6\nThu Oct  2 12:33:10 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-1acf42c5-084c-4cc4-bdc5-910eec0249e3 (2d0d239610cb24d1c3675ed15ab86cea3699099b0377b55c4f13d191dd5a54b6)\n2d0d239610cb24d1c3675ed15ab86cea3699099b0377b55c4f13d191dd5a54b6\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.765 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapd4c65816-73, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.767 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:10.767 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5be8261b-c1b2-4876-865e-c2d9e9a70f88]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:10.768 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap1acf42c5-00, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.768 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.770 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:10 compute-0 kernel: tap1acf42c5-00: left promiscuous mode
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.771 2 INFO os_vif [None req-2ba32190-2517-4963-8663-a3415ce7b149 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:d4:32:59,bridge_name='br-int',has_traffic_filtering=True,id=d4c65816-736d-45f1-a48d-ee78e03b1bea,network=Network(1acf42c5-084c-4cc4-bdc5-910eec0249e3),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapd4c65816-73')
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.772 2 INFO nova.virt.libvirt.driver [None req-2ba32190-2517-4963-8663-a3415ce7b149 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Deleting instance files /var/lib/nova/instances/2d10392f-7700-4f17-8be6-9fe493836a58_del
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.773 2 INFO nova.virt.libvirt.driver [None req-2ba32190-2517-4963-8663-a3415ce7b149 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Deletion of /var/lib/nova/instances/2d10392f-7700-4f17-8be6-9fe493836a58_del complete
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.782 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:10.784 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7eba79c1-e8d9-488a-9421-8688de178493]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:10.811 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[519ca843-7bc9-4b69-abdb-f7164766c575]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:10.813 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c1d80fda-ed8e-4f0a-9d47-6dafe19c62f0]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:10.827 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[23a00fcc-da42-4d88-bb03-49490696bd44]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 638400, 'reachable_time': 17495, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 245274, 'error': None, 'target': 'ovnmeta-1acf42c5-084c-4cc4-bdc5-910eec0249e3', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:10 compute-0 systemd[1]: run-netns-ovnmeta\x2d1acf42c5\x2d084c\x2d4cc4\x2dbdc5\x2d910eec0249e3.mount: Deactivated successfully.
Oct 02 12:33:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:10.830 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-1acf42c5-084c-4cc4-bdc5-910eec0249e3 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:33:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:10.830 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[5f5725d0-a2ab-4289-8bd1-5c4621311750]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.869 2 INFO nova.compute.manager [None req-2ba32190-2517-4963-8663-a3415ce7b149 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Took 0.39 seconds to destroy the instance on the hypervisor.
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.870 2 DEBUG oslo.service.loopingcall [None req-2ba32190-2517-4963-8663-a3415ce7b149 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.871 2 DEBUG nova.compute.manager [-] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.871 2 DEBUG nova.network.neutron [-] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.938 2 DEBUG nova.compute.manager [req-8519a6e4-6aaa-4157-9737-730d9f8b47f8 req-2a811a20-dade-46f5-bd2d-b18cc1711180 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Received event network-vif-unplugged-d4c65816-736d-45f1-a48d-ee78e03b1bea external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.939 2 DEBUG oslo_concurrency.lockutils [req-8519a6e4-6aaa-4157-9737-730d9f8b47f8 req-2a811a20-dade-46f5-bd2d-b18cc1711180 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "2d10392f-7700-4f17-8be6-9fe493836a58-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.939 2 DEBUG oslo_concurrency.lockutils [req-8519a6e4-6aaa-4157-9737-730d9f8b47f8 req-2a811a20-dade-46f5-bd2d-b18cc1711180 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2d10392f-7700-4f17-8be6-9fe493836a58-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.939 2 DEBUG oslo_concurrency.lockutils [req-8519a6e4-6aaa-4157-9737-730d9f8b47f8 req-2a811a20-dade-46f5-bd2d-b18cc1711180 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2d10392f-7700-4f17-8be6-9fe493836a58-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.939 2 DEBUG nova.compute.manager [req-8519a6e4-6aaa-4157-9737-730d9f8b47f8 req-2a811a20-dade-46f5-bd2d-b18cc1711180 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] No waiting events found dispatching network-vif-unplugged-d4c65816-736d-45f1-a48d-ee78e03b1bea pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:33:10 compute-0 nova_compute[192079]: 2025-10-02 12:33:10.940 2 DEBUG nova.compute.manager [req-8519a6e4-6aaa-4157-9737-730d9f8b47f8 req-2a811a20-dade-46f5-bd2d-b18cc1711180 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Received event network-vif-unplugged-d4c65816-736d-45f1-a48d-ee78e03b1bea for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:33:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:11.091 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=37, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=36) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:33:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:11.092 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 9 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:33:11 compute-0 nova_compute[192079]: 2025-10-02 12:33:11.136 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:11 compute-0 nova_compute[192079]: 2025-10-02 12:33:11.476 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:11 compute-0 nova_compute[192079]: 2025-10-02 12:33:11.655 2 DEBUG nova.network.neutron [-] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:33:11 compute-0 nova_compute[192079]: 2025-10-02 12:33:11.674 2 INFO nova.compute.manager [-] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Took 0.80 seconds to deallocate network for instance.
Oct 02 12:33:11 compute-0 nova_compute[192079]: 2025-10-02 12:33:11.683 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:11 compute-0 nova_compute[192079]: 2025-10-02 12:33:11.711 2 DEBUG nova.compute.manager [req-f69c71f9-6a4f-45c6-b1d7-77b98d8ccffe req-0c09ae99-74ae-46f0-bac7-26b35fb38702 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Received event network-vif-deleted-d4c65816-736d-45f1-a48d-ee78e03b1bea external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:33:11 compute-0 nova_compute[192079]: 2025-10-02 12:33:11.779 2 DEBUG oslo_concurrency.lockutils [None req-2ba32190-2517-4963-8663-a3415ce7b149 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:11 compute-0 nova_compute[192079]: 2025-10-02 12:33:11.779 2 DEBUG oslo_concurrency.lockutils [None req-2ba32190-2517-4963-8663-a3415ce7b149 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:11 compute-0 nova_compute[192079]: 2025-10-02 12:33:11.831 2 DEBUG nova.compute.provider_tree [None req-2ba32190-2517-4963-8663-a3415ce7b149 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:33:11 compute-0 nova_compute[192079]: 2025-10-02 12:33:11.849 2 DEBUG nova.scheduler.client.report [None req-2ba32190-2517-4963-8663-a3415ce7b149 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:33:11 compute-0 nova_compute[192079]: 2025-10-02 12:33:11.877 2 DEBUG oslo_concurrency.lockutils [None req-2ba32190-2517-4963-8663-a3415ce7b149 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.098s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:11 compute-0 nova_compute[192079]: 2025-10-02 12:33:11.900 2 INFO nova.scheduler.client.report [None req-2ba32190-2517-4963-8663-a3415ce7b149 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Deleted allocations for instance 2d10392f-7700-4f17-8be6-9fe493836a58
Oct 02 12:33:12 compute-0 nova_compute[192079]: 2025-10-02 12:33:12.005 2 DEBUG oslo_concurrency.lockutils [None req-2ba32190-2517-4963-8663-a3415ce7b149 27daa263abb54d4d8e3ae34cd1c5ccf5 a4a7099974504a798e1607c8e6a1f570 - - default default] Lock "2d10392f-7700-4f17-8be6-9fe493836a58" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.552s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:12 compute-0 nova_compute[192079]: 2025-10-02 12:33:12.370 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:13 compute-0 nova_compute[192079]: 2025-10-02 12:33:13.030 2 DEBUG nova.compute.manager [req-314db67f-e1be-4de9-ae84-72ce33e299ff req-333da8d2-eff8-4c35-a953-341340592b82 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Received event network-vif-plugged-d4c65816-736d-45f1-a48d-ee78e03b1bea external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:33:13 compute-0 nova_compute[192079]: 2025-10-02 12:33:13.031 2 DEBUG oslo_concurrency.lockutils [req-314db67f-e1be-4de9-ae84-72ce33e299ff req-333da8d2-eff8-4c35-a953-341340592b82 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "2d10392f-7700-4f17-8be6-9fe493836a58-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:13 compute-0 nova_compute[192079]: 2025-10-02 12:33:13.031 2 DEBUG oslo_concurrency.lockutils [req-314db67f-e1be-4de9-ae84-72ce33e299ff req-333da8d2-eff8-4c35-a953-341340592b82 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2d10392f-7700-4f17-8be6-9fe493836a58-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:13 compute-0 nova_compute[192079]: 2025-10-02 12:33:13.031 2 DEBUG oslo_concurrency.lockutils [req-314db67f-e1be-4de9-ae84-72ce33e299ff req-333da8d2-eff8-4c35-a953-341340592b82 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2d10392f-7700-4f17-8be6-9fe493836a58-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:13 compute-0 nova_compute[192079]: 2025-10-02 12:33:13.032 2 DEBUG nova.compute.manager [req-314db67f-e1be-4de9-ae84-72ce33e299ff req-333da8d2-eff8-4c35-a953-341340592b82 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] No waiting events found dispatching network-vif-plugged-d4c65816-736d-45f1-a48d-ee78e03b1bea pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:33:13 compute-0 nova_compute[192079]: 2025-10-02 12:33:13.032 2 WARNING nova.compute.manager [req-314db67f-e1be-4de9-ae84-72ce33e299ff req-333da8d2-eff8-4c35-a953-341340592b82 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Received unexpected event network-vif-plugged-d4c65816-736d-45f1-a48d-ee78e03b1bea for instance with vm_state deleted and task_state None.
Oct 02 12:33:15 compute-0 nova_compute[192079]: 2025-10-02 12:33:15.767 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:33:17.111 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:33:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:33:17.111 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:33:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:33:17.112 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:33:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:33:17.112 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:33:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:33:17.112 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:33:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:33:17.112 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:33:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:33:17.113 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:33:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:33:17.113 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:33:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:33:17.113 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:33:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:33:17.113 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.capacity, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:33:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:33:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:33:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:33:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:33:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:33:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.iops, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:33:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:33:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:33:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:33:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:33:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:33:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:33:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:33:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:33:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:33:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster cpu, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:33:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:33:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:33:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:33:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:33:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:33:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:33:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:33:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster memory.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:33:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:33:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:33:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:33:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.allocation, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:33:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:33:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:33:17 compute-0 nova_compute[192079]: 2025-10-02 12:33:17.373 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:19 compute-0 podman[245275]: 2025-10-02 12:33:19.146236021 +0000 UTC m=+0.063395667 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.build-date=20251001, tcib_managed=true, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, org.label-schema.name=CentOS Stream 9 Base Image, 
org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=edpm)
Oct 02 12:33:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:20.094 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '37'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:33:20 compute-0 nova_compute[192079]: 2025-10-02 12:33:20.770 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:22 compute-0 nova_compute[192079]: 2025-10-02 12:33:22.119 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759408387.1180623, a0a5e290-69d3-4ce0-9533-6df7cf06c204 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:33:22 compute-0 nova_compute[192079]: 2025-10-02 12:33:22.119 2 INFO nova.compute.manager [-] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] VM Stopped (Lifecycle Event)
Oct 02 12:33:22 compute-0 nova_compute[192079]: 2025-10-02 12:33:22.159 2 DEBUG nova.compute.manager [None req-07809426-6aee-46b7-9f4c-3dc313b5f421 - - - - - -] [instance: a0a5e290-69d3-4ce0-9533-6df7cf06c204] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:33:22 compute-0 nova_compute[192079]: 2025-10-02 12:33:22.376 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:25 compute-0 podman[245295]: 2025-10-02 12:33:25.170295318 +0000 UTC m=+0.074741036 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.buildah.version=1.33.7, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., io.openshift.tags=minimal rhel9, architecture=x86_64, managed_by=edpm_ansible, version=9.6, url=https://catalog.redhat.com/en/search?searchType=containers, vendor=Red Hat, Inc., container_name=openstack_network_exporter, io.openshift.expose-services=, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., maintainer=Red Hat, Inc., build-date=2025-08-20T13:12:41, distribution-scope=public, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., vcs-type=git, config_id=edpm, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, release=1755695350, name=ubi9-minimal, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, com.redhat.component=ubi9-minimal-container)
Oct 02 12:33:25 compute-0 podman[245296]: 2025-10-02 12:33:25.17183359 +0000 UTC m=+0.072970379 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, container_name=multipathd, managed_by=edpm_ansible, org.label-schema.license=GPLv2, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:33:25 compute-0 nova_compute[192079]: 2025-10-02 12:33:25.741 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759408390.7397916, 2d10392f-7700-4f17-8be6-9fe493836a58 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:33:25 compute-0 nova_compute[192079]: 2025-10-02 12:33:25.741 2 INFO nova.compute.manager [-] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] VM Stopped (Lifecycle Event)
Oct 02 12:33:25 compute-0 nova_compute[192079]: 2025-10-02 12:33:25.773 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:25 compute-0 nova_compute[192079]: 2025-10-02 12:33:25.783 2 DEBUG nova.compute.manager [None req-9d33afee-8273-42a0-b08c-5a494f6720ac - - - - - -] [instance: 2d10392f-7700-4f17-8be6-9fe493836a58] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:33:27 compute-0 nova_compute[192079]: 2025-10-02 12:33:27.378 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:30 compute-0 nova_compute[192079]: 2025-10-02 12:33:30.775 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:31 compute-0 podman[245335]: 2025-10-02 12:33:31.140029727 +0000 UTC m=+0.053385864 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=iscsid, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, container_name=iscsid, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']})
Oct 02 12:33:31 compute-0 podman[245334]: 2025-10-02 12:33:31.147191193 +0000 UTC m=+0.055358279 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']})
Oct 02 12:33:32 compute-0 nova_compute[192079]: 2025-10-02 12:33:32.379 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:33 compute-0 nova_compute[192079]: 2025-10-02 12:33:33.756 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:33:33 compute-0 nova_compute[192079]: 2025-10-02 12:33:33.760 2 DEBUG oslo_concurrency.lockutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "9c817262-fee7-483c-ac98-6d7648890eb0" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:33 compute-0 nova_compute[192079]: 2025-10-02 12:33:33.761 2 DEBUG oslo_concurrency.lockutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "9c817262-fee7-483c-ac98-6d7648890eb0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:33 compute-0 nova_compute[192079]: 2025-10-02 12:33:33.809 2 DEBUG nova.compute.manager [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:33:33 compute-0 nova_compute[192079]: 2025-10-02 12:33:33.912 2 DEBUG oslo_concurrency.lockutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:33 compute-0 nova_compute[192079]: 2025-10-02 12:33:33.913 2 DEBUG oslo_concurrency.lockutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:33 compute-0 nova_compute[192079]: 2025-10-02 12:33:33.919 2 DEBUG nova.virt.hardware [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:33:33 compute-0 nova_compute[192079]: 2025-10-02 12:33:33.920 2 INFO nova.compute.claims [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.070 2 DEBUG nova.compute.provider_tree [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.093 2 DEBUG nova.scheduler.client.report [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.130 2 DEBUG oslo_concurrency.lockutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.217s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.131 2 DEBUG nova.compute.manager [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.194 2 DEBUG nova.compute.manager [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.194 2 DEBUG nova.network.neutron [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.214 2 INFO nova.virt.libvirt.driver [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.233 2 DEBUG nova.compute.manager [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.362 2 DEBUG nova.compute.manager [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.364 2 DEBUG nova.virt.libvirt.driver [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.365 2 INFO nova.virt.libvirt.driver [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Creating image(s)
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.366 2 DEBUG oslo_concurrency.lockutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "/var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.366 2 DEBUG oslo_concurrency.lockutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "/var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.368 2 DEBUG oslo_concurrency.lockutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "/var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.397 2 DEBUG nova.policy [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1faa7e121a0e43ad8cb4ae5b2cfcc6a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '76c7dd40d83e4e3ca71abbebf57921b6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.400 2 DEBUG oslo_concurrency.processutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.494 2 DEBUG oslo_concurrency.processutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.093s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.495 2 DEBUG oslo_concurrency.lockutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.496 2 DEBUG oslo_concurrency.lockutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.506 2 DEBUG oslo_concurrency.processutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.558 2 DEBUG oslo_concurrency.processutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.052s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.559 2 DEBUG oslo_concurrency.processutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.597 2 DEBUG oslo_concurrency.processutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk 1073741824" returned: 0 in 0.037s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.598 2 DEBUG oslo_concurrency.lockutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.102s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.599 2 DEBUG oslo_concurrency.processutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.654 2 DEBUG oslo_concurrency.processutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.655 2 DEBUG nova.virt.disk.api [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Checking if we can resize image /var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.656 2 DEBUG oslo_concurrency.processutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.671 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.708 2 DEBUG oslo_concurrency.processutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk --force-share --output=json" returned: 0 in 0.052s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.709 2 DEBUG nova.virt.disk.api [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Cannot resize image /var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.709 2 DEBUG nova.objects.instance [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'migration_context' on Instance uuid 9c817262-fee7-483c-ac98-6d7648890eb0 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.728 2 DEBUG nova.virt.libvirt.driver [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.728 2 DEBUG nova.virt.libvirt.driver [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Ensure instance console log exists: /var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.729 2 DEBUG oslo_concurrency.lockutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.729 2 DEBUG oslo_concurrency.lockutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:34 compute-0 nova_compute[192079]: 2025-10-02 12:33:34.730 2 DEBUG oslo_concurrency.lockutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:35 compute-0 nova_compute[192079]: 2025-10-02 12:33:35.585 2 DEBUG nova.network.neutron [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Successfully created port: c4934bb6-5047-47ec-b0c6-127b4274769a _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:33:35 compute-0 nova_compute[192079]: 2025-10-02 12:33:35.776 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:37 compute-0 nova_compute[192079]: 2025-10-02 12:33:37.078 2 DEBUG nova.network.neutron [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Successfully updated port: c4934bb6-5047-47ec-b0c6-127b4274769a _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:33:37 compute-0 nova_compute[192079]: 2025-10-02 12:33:37.099 2 DEBUG oslo_concurrency.lockutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "refresh_cache-9c817262-fee7-483c-ac98-6d7648890eb0" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:33:37 compute-0 nova_compute[192079]: 2025-10-02 12:33:37.099 2 DEBUG oslo_concurrency.lockutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquired lock "refresh_cache-9c817262-fee7-483c-ac98-6d7648890eb0" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:33:37 compute-0 nova_compute[192079]: 2025-10-02 12:33:37.099 2 DEBUG nova.network.neutron [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:33:37 compute-0 nova_compute[192079]: 2025-10-02 12:33:37.381 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:37 compute-0 nova_compute[192079]: 2025-10-02 12:33:37.932 2 DEBUG nova.network.neutron [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:33:38 compute-0 podman[245394]: 2025-10-02 12:33:38.182727967 +0000 UTC m=+0.075499207 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, 
container_name=ovn_metadata_agent, config_id=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS)
Oct 02 12:33:38 compute-0 podman[245396]: 2025-10-02 12:33:38.197968811 +0000 UTC m=+0.072032511 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 12:33:38 compute-0 podman[245395]: 2025-10-02 12:33:38.230932549 +0000 UTC m=+0.119041811 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller, container_name=ovn_controller, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, tcib_managed=true)
Oct 02 12:33:38 compute-0 nova_compute[192079]: 2025-10-02 12:33:38.565 2 DEBUG nova.compute.manager [req-6d35a76b-df60-48e7-b6dd-5527caec1077 req-8009b2c5-f6e6-4666-9684-8581fac844cf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Received event network-changed-c4934bb6-5047-47ec-b0c6-127b4274769a external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:33:38 compute-0 nova_compute[192079]: 2025-10-02 12:33:38.565 2 DEBUG nova.compute.manager [req-6d35a76b-df60-48e7-b6dd-5527caec1077 req-8009b2c5-f6e6-4666-9684-8581fac844cf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Refreshing instance network info cache due to event network-changed-c4934bb6-5047-47ec-b0c6-127b4274769a. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:33:38 compute-0 nova_compute[192079]: 2025-10-02 12:33:38.565 2 DEBUG oslo_concurrency.lockutils [req-6d35a76b-df60-48e7-b6dd-5527caec1077 req-8009b2c5-f6e6-4666-9684-8581fac844cf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-9c817262-fee7-483c-ac98-6d7648890eb0" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:33:38 compute-0 nova_compute[192079]: 2025-10-02 12:33:38.663 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:33:38 compute-0 nova_compute[192079]: 2025-10-02 12:33:38.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:33:38 compute-0 nova_compute[192079]: 2025-10-02 12:33:38.695 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:38 compute-0 nova_compute[192079]: 2025-10-02 12:33:38.696 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:38 compute-0 nova_compute[192079]: 2025-10-02 12:33:38.696 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:38 compute-0 nova_compute[192079]: 2025-10-02 12:33:38.696 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:33:38 compute-0 nova_compute[192079]: 2025-10-02 12:33:38.911 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:33:38 compute-0 nova_compute[192079]: 2025-10-02 12:33:38.912 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5695MB free_disk=73.33972930908203GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:33:38 compute-0 nova_compute[192079]: 2025-10-02 12:33:38.913 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:38 compute-0 nova_compute[192079]: 2025-10-02 12:33:38.913 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:38 compute-0 nova_compute[192079]: 2025-10-02 12:33:38.990 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance 9c817262-fee7-483c-ac98-6d7648890eb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:33:38 compute-0 nova_compute[192079]: 2025-10-02 12:33:38.990 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 1 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:33:38 compute-0 nova_compute[192079]: 2025-10-02 12:33:38.990 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=640MB phys_disk=79GB used_disk=1GB total_vcpus=8 used_vcpus=1 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:33:39 compute-0 nova_compute[192079]: 2025-10-02 12:33:39.033 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:33:39 compute-0 nova_compute[192079]: 2025-10-02 12:33:39.047 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:33:39 compute-0 nova_compute[192079]: 2025-10-02 12:33:39.129 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:33:39 compute-0 nova_compute[192079]: 2025-10-02 12:33:39.129 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.216s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:40 compute-0 nova_compute[192079]: 2025-10-02 12:33:40.778 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.057 2 DEBUG nova.network.neutron [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Updating instance_info_cache with network_info: [{"id": "c4934bb6-5047-47ec-b0c6-127b4274769a", "address": "fa:16:3e:11:3b:3b", "network": {"id": "d5d28c5b-6eab-4c56-bc64-1dd8250f45c6", "bridge": "br-int", "label": "tempest-network-smoke--1759416782", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4934bb6-50", "ovs_interfaceid": "c4934bb6-5047-47ec-b0c6-127b4274769a", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.081 2 DEBUG oslo_concurrency.lockutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Releasing lock "refresh_cache-9c817262-fee7-483c-ac98-6d7648890eb0" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.081 2 DEBUG nova.compute.manager [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Instance network_info: |[{"id": "c4934bb6-5047-47ec-b0c6-127b4274769a", "address": "fa:16:3e:11:3b:3b", "network": {"id": "d5d28c5b-6eab-4c56-bc64-1dd8250f45c6", "bridge": "br-int", "label": "tempest-network-smoke--1759416782", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4934bb6-50", "ovs_interfaceid": "c4934bb6-5047-47ec-b0c6-127b4274769a", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.082 2 DEBUG oslo_concurrency.lockutils [req-6d35a76b-df60-48e7-b6dd-5527caec1077 req-8009b2c5-f6e6-4666-9684-8581fac844cf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-9c817262-fee7-483c-ac98-6d7648890eb0" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.082 2 DEBUG nova.network.neutron [req-6d35a76b-df60-48e7-b6dd-5527caec1077 req-8009b2c5-f6e6-4666-9684-8581fac844cf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Refreshing network info cache for port c4934bb6-5047-47ec-b0c6-127b4274769a _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.084 2 DEBUG nova.virt.libvirt.driver [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Start _get_guest_xml network_info=[{"id": "c4934bb6-5047-47ec-b0c6-127b4274769a", "address": "fa:16:3e:11:3b:3b", "network": {"id": "d5d28c5b-6eab-4c56-bc64-1dd8250f45c6", "bridge": "br-int", "label": "tempest-network-smoke--1759416782", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4934bb6-50", "ovs_interfaceid": "c4934bb6-5047-47ec-b0c6-127b4274769a", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.088 2 WARNING nova.virt.libvirt.driver [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.092 2 DEBUG nova.virt.libvirt.host [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.093 2 DEBUG nova.virt.libvirt.host [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.096 2 DEBUG nova.virt.libvirt.host [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.097 2 DEBUG nova.virt.libvirt.host [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.098 2 DEBUG nova.virt.libvirt.driver [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.098 2 DEBUG nova.virt.hardware [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.098 2 DEBUG nova.virt.hardware [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.099 2 DEBUG nova.virt.hardware [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.099 2 DEBUG nova.virt.hardware [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.099 2 DEBUG nova.virt.hardware [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.099 2 DEBUG nova.virt.hardware [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.099 2 DEBUG nova.virt.hardware [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.100 2 DEBUG nova.virt.hardware [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.100 2 DEBUG nova.virt.hardware [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.100 2 DEBUG nova.virt.hardware [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.100 2 DEBUG nova.virt.hardware [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.103 2 DEBUG nova.virt.libvirt.vif [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:33:32Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestNetworkAdvancedServerOps-server-1842419854',display_name='tempest-TestNetworkAdvancedServerOps-server-1842419854',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkadvancedserverops-server-1842419854',id=150,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBBcmc6POLJD4DWxVpjM7Q+mpn1vqWiz84SJ4bJOoXuhI8e1ZYxo5xDkFJVaGxPGGCkjomFU7VMydgd3IiJebhoaGTMBDztV9vB5kp4HC2Ekh6aB6IjhW19nhgZQ5E8+LRw==',key_name='tempest-TestNetworkAdvancedServerOps-467970098',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='76c7dd40d83e4e3ca71abbebf57921b6',ramdisk_id='',reservation_id='r-aaoru9xk',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestNetworkAdvancedServerOps-597114071',owner_user_name='tempest-TestNetworkAdvancedServerOps-597114071-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:33:34Z,user_data=None,user_id='1faa7e121a0e43ad8cb4ae5b2cfcc6a2',uuid=9c817262-fee7-483c-ac98-6d7648890eb0,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "c4934bb6-5047-47ec-b0c6-127b4274769a", "address": "fa:16:3e:11:3b:3b", "network": {"id": "d5d28c5b-6eab-4c56-bc64-1dd8250f45c6", "bridge": "br-int", "label": "tempest-network-smoke--1759416782", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": 
true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4934bb6-50", "ovs_interfaceid": "c4934bb6-5047-47ec-b0c6-127b4274769a", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.103 2 DEBUG nova.network.os_vif_util [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converting VIF {"id": "c4934bb6-5047-47ec-b0c6-127b4274769a", "address": "fa:16:3e:11:3b:3b", "network": {"id": "d5d28c5b-6eab-4c56-bc64-1dd8250f45c6", "bridge": "br-int", "label": "tempest-network-smoke--1759416782", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4934bb6-50", "ovs_interfaceid": "c4934bb6-5047-47ec-b0c6-127b4274769a", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.104 2 DEBUG nova.network.os_vif_util [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:11:3b:3b,bridge_name='br-int',has_traffic_filtering=True,id=c4934bb6-5047-47ec-b0c6-127b4274769a,network=Network(d5d28c5b-6eab-4c56-bc64-1dd8250f45c6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapc4934bb6-50') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.105 2 DEBUG nova.objects.instance [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'pci_devices' on Instance uuid 9c817262-fee7-483c-ac98-6d7648890eb0 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.124 2 DEBUG nova.virt.libvirt.driver [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:33:41 compute-0 nova_compute[192079]:   <uuid>9c817262-fee7-483c-ac98-6d7648890eb0</uuid>
Oct 02 12:33:41 compute-0 nova_compute[192079]:   <name>instance-00000096</name>
Oct 02 12:33:41 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:33:41 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:33:41 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:33:41 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:       <nova:name>tempest-TestNetworkAdvancedServerOps-server-1842419854</nova:name>
Oct 02 12:33:41 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:33:41</nova:creationTime>
Oct 02 12:33:41 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:33:41 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:33:41 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:33:41 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:33:41 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:33:41 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:33:41 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:33:41 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:33:41 compute-0 nova_compute[192079]:         <nova:user uuid="1faa7e121a0e43ad8cb4ae5b2cfcc6a2">tempest-TestNetworkAdvancedServerOps-597114071-project-member</nova:user>
Oct 02 12:33:41 compute-0 nova_compute[192079]:         <nova:project uuid="76c7dd40d83e4e3ca71abbebf57921b6">tempest-TestNetworkAdvancedServerOps-597114071</nova:project>
Oct 02 12:33:41 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:33:41 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:33:41 compute-0 nova_compute[192079]:         <nova:port uuid="c4934bb6-5047-47ec-b0c6-127b4274769a">
Oct 02 12:33:41 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.5" ipVersion="4"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:33:41 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:33:41 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:33:41 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <system>
Oct 02 12:33:41 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:33:41 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:33:41 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:33:41 compute-0 nova_compute[192079]:       <entry name="serial">9c817262-fee7-483c-ac98-6d7648890eb0</entry>
Oct 02 12:33:41 compute-0 nova_compute[192079]:       <entry name="uuid">9c817262-fee7-483c-ac98-6d7648890eb0</entry>
Oct 02 12:33:41 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     </system>
Oct 02 12:33:41 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:33:41 compute-0 nova_compute[192079]:   <os>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:   </os>
Oct 02 12:33:41 compute-0 nova_compute[192079]:   <features>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:   </features>
Oct 02 12:33:41 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:33:41 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:33:41 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:33:41 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:33:41 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk.config"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:33:41 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:11:3b:3b"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:       <target dev="tapc4934bb6-50"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:33:41 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/console.log" append="off"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <video>
Oct 02 12:33:41 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     </video>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:33:41 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:33:41 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:33:41 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:33:41 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:33:41 compute-0 nova_compute[192079]: </domain>
Oct 02 12:33:41 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.126 2 DEBUG nova.compute.manager [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Preparing to wait for external event network-vif-plugged-c4934bb6-5047-47ec-b0c6-127b4274769a prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.126 2 DEBUG oslo_concurrency.lockutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.126 2 DEBUG oslo_concurrency.lockutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.126 2 DEBUG oslo_concurrency.lockutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.127 2 DEBUG nova.virt.libvirt.vif [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:33:32Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestNetworkAdvancedServerOps-server-1842419854',display_name='tempest-TestNetworkAdvancedServerOps-server-1842419854',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkadvancedserverops-server-1842419854',id=150,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBBcmc6POLJD4DWxVpjM7Q+mpn1vqWiz84SJ4bJOoXuhI8e1ZYxo5xDkFJVaGxPGGCkjomFU7VMydgd3IiJebhoaGTMBDztV9vB5kp4HC2Ekh6aB6IjhW19nhgZQ5E8+LRw==',key_name='tempest-TestNetworkAdvancedServerOps-467970098',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='76c7dd40d83e4e3ca71abbebf57921b6',ramdisk_id='',reservation_id='r-aaoru9xk',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestNetworkAdvancedServerOps-597114071',owner_user_name='tempest-TestNetworkAdvancedServerOps-597114071-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:33:34Z,user_data=None,user_id='1faa7e121a0e43ad8cb4ae5b2cfcc6a2',uuid=9c817262-fee7-483c-ac98-6d7648890eb0,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "c4934bb6-5047-47ec-b0c6-127b4274769a", "address": "fa:16:3e:11:3b:3b", "network": {"id": "d5d28c5b-6eab-4c56-bc64-1dd8250f45c6", "bridge": "br-int", "label": "tempest-network-smoke--1759416782", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4934bb6-50", "ovs_interfaceid": "c4934bb6-5047-47ec-b0c6-127b4274769a", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.128 2 DEBUG nova.network.os_vif_util [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converting VIF {"id": "c4934bb6-5047-47ec-b0c6-127b4274769a", "address": "fa:16:3e:11:3b:3b", "network": {"id": "d5d28c5b-6eab-4c56-bc64-1dd8250f45c6", "bridge": "br-int", "label": "tempest-network-smoke--1759416782", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4934bb6-50", "ovs_interfaceid": "c4934bb6-5047-47ec-b0c6-127b4274769a", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.128 2 DEBUG nova.network.os_vif_util [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:11:3b:3b,bridge_name='br-int',has_traffic_filtering=True,id=c4934bb6-5047-47ec-b0c6-127b4274769a,network=Network(d5d28c5b-6eab-4c56-bc64-1dd8250f45c6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapc4934bb6-50') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.129 2 DEBUG os_vif [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:11:3b:3b,bridge_name='br-int',has_traffic_filtering=True,id=c4934bb6-5047-47ec-b0c6-127b4274769a,network=Network(d5d28c5b-6eab-4c56-bc64-1dd8250f45c6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapc4934bb6-50') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.129 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.130 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.130 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.131 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.133 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.134 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapc4934bb6-50, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.134 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapc4934bb6-50, col_values=(('external_ids', {'iface-id': 'c4934bb6-5047-47ec-b0c6-127b4274769a', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:11:3b:3b', 'vm-uuid': '9c817262-fee7-483c-ac98-6d7648890eb0'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.136 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:41 compute-0 NetworkManager[51160]: <info>  [1759408421.1368] manager: (tapc4934bb6-50): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/276)
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.138 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.145 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.145 2 INFO os_vif [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:11:3b:3b,bridge_name='br-int',has_traffic_filtering=True,id=c4934bb6-5047-47ec-b0c6-127b4274769a,network=Network(d5d28c5b-6eab-4c56-bc64-1dd8250f45c6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapc4934bb6-50')
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.228 2 DEBUG nova.virt.libvirt.driver [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.229 2 DEBUG nova.virt.libvirt.driver [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.229 2 DEBUG nova.virt.libvirt.driver [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] No VIF found with MAC fa:16:3e:11:3b:3b, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.230 2 INFO nova.virt.libvirt.driver [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Using config drive
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.811 2 INFO nova.virt.libvirt.driver [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Creating config drive at /var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk.config
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.818 2 DEBUG oslo_concurrency.processutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpbbdu5l3g execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:33:41 compute-0 nova_compute[192079]: 2025-10-02 12:33:41.950 2 DEBUG oslo_concurrency.processutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpbbdu5l3g" returned: 0 in 0.132s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:33:42 compute-0 kernel: tapc4934bb6-50: entered promiscuous mode
Oct 02 12:33:42 compute-0 NetworkManager[51160]: <info>  [1759408422.0113] manager: (tapc4934bb6-50): new Tun device (/org/freedesktop/NetworkManager/Devices/277)
Oct 02 12:33:42 compute-0 nova_compute[192079]: 2025-10-02 12:33:42.012 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:42 compute-0 ovn_controller[94336]: 2025-10-02T12:33:42Z|00560|binding|INFO|Claiming lport c4934bb6-5047-47ec-b0c6-127b4274769a for this chassis.
Oct 02 12:33:42 compute-0 ovn_controller[94336]: 2025-10-02T12:33:42Z|00561|binding|INFO|c4934bb6-5047-47ec-b0c6-127b4274769a: Claiming fa:16:3e:11:3b:3b 10.100.0.5
Oct 02 12:33:42 compute-0 nova_compute[192079]: 2025-10-02 12:33:42.020 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:42.028 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:11:3b:3b 10.100.0.5'], port_security=['fa:16:3e:11:3b:3b 10.100.0.5'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.5/28', 'neutron:device_id': '9c817262-fee7-483c-ac98-6d7648890eb0', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '76c7dd40d83e4e3ca71abbebf57921b6', 'neutron:revision_number': '2', 'neutron:security_group_ids': '96cf165f-4eb7-4b43-884d-a9b6e5a897e1', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=9097c5a0-e0b8-419e-918d-de3827bd6390, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=c4934bb6-5047-47ec-b0c6-127b4274769a) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:42.029 103294 INFO neutron.agent.ovn.metadata.agent [-] Port c4934bb6-5047-47ec-b0c6-127b4274769a in datapath d5d28c5b-6eab-4c56-bc64-1dd8250f45c6 bound to our chassis
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:42.031 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network d5d28c5b-6eab-4c56-bc64-1dd8250f45c6
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:42.045 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[020b0c48-3036-4eb9-98f5-d8090d1a570e]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:42.047 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tapd5d28c5b-61 in ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:33:42 compute-0 systemd-udevd[245481]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:42.048 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tapd5d28c5b-60 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:42.048 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3b630d7c-8d3d-43a7-8c8f-b36b90061fd1]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:42.049 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7544b321-7220-4bb8-8d4c-f6d818ddc116]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:42 compute-0 systemd-machined[152150]: New machine qemu-71-instance-00000096.
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:42.063 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[0387e968-7d22-45da-889e-ff7d303ed36c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:42 compute-0 NetworkManager[51160]: <info>  [1759408422.0648] device (tapc4934bb6-50): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:33:42 compute-0 NetworkManager[51160]: <info>  [1759408422.0656] device (tapc4934bb6-50): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:33:42 compute-0 nova_compute[192079]: 2025-10-02 12:33:42.073 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:42 compute-0 ovn_controller[94336]: 2025-10-02T12:33:42Z|00562|binding|INFO|Setting lport c4934bb6-5047-47ec-b0c6-127b4274769a ovn-installed in OVS
Oct 02 12:33:42 compute-0 ovn_controller[94336]: 2025-10-02T12:33:42Z|00563|binding|INFO|Setting lport c4934bb6-5047-47ec-b0c6-127b4274769a up in Southbound
Oct 02 12:33:42 compute-0 nova_compute[192079]: 2025-10-02 12:33:42.078 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:42 compute-0 systemd[1]: Started Virtual Machine qemu-71-instance-00000096.
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:42.090 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[177d5f43-6be8-4aa1-b6b3-aeb0f6468b01]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:42.118 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[4653ed19-4cc7-4c70-8162-dfa9582444a4]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:42 compute-0 systemd-udevd[245484]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:42.124 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[62fb97ed-456f-43f5-9306-2fa1f60aad99]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:42 compute-0 NetworkManager[51160]: <info>  [1759408422.1262] manager: (tapd5d28c5b-60): new Veth device (/org/freedesktop/NetworkManager/Devices/278)
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:42.155 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[eb068ea0-9678-49bd-87ae-61587b4131ad]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:42.158 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[76979858-2a64-4d4e-8523-b9437bd4e026]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:42 compute-0 NetworkManager[51160]: <info>  [1759408422.1826] device (tapd5d28c5b-60): carrier: link connected
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:42.188 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[11775725-7939-4971-bae4-d5474d2af994]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:42.205 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[106ea383-59b9-4a68-a9ef-0892974be271]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapd5d28c5b-61'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:09:71:68'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 180], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 641981, 'reachable_time': 21312, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 245513, 'error': None, 'target': 'ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:42.220 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[39fa60d9-5b62-4f8e-b250-15d2258f17e3]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe09:7168'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 641981, 'tstamp': 641981}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 245514, 'error': None, 'target': 'ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:42.237 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8a0432ca-8517-4e07-a983-e152e183db5b]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapd5d28c5b-61'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:09:71:68'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 2, 'tx_packets': 1, 'rx_bytes': 196, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 2, 'tx_packets': 1, 'rx_bytes': 196, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 180], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 641981, 'reachable_time': 21312, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 2, 'inoctets': 168, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 2, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 168, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 2, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 245515, 'error': None, 'target': 'ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:42.265 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[64a6fc0c-dc81-4910-a157-204682e45f10]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:42.328 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d1561104-2277-49be-a4aa-7aa1b28890b5]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:42.331 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapd5d28c5b-60, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:42.331 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:42.331 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapd5d28c5b-60, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:33:42 compute-0 nova_compute[192079]: 2025-10-02 12:33:42.333 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:42 compute-0 NetworkManager[51160]: <info>  [1759408422.3339] manager: (tapd5d28c5b-60): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/279)
Oct 02 12:33:42 compute-0 kernel: tapd5d28c5b-60: entered promiscuous mode
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:42.342 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tapd5d28c5b-60, col_values=(('external_ids', {'iface-id': '410dd08f-af3d-48c2-b0fd-475c37ff4bed'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:33:42 compute-0 nova_compute[192079]: 2025-10-02 12:33:42.343 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:42 compute-0 ovn_controller[94336]: 2025-10-02T12:33:42Z|00564|binding|INFO|Releasing lport 410dd08f-af3d-48c2-b0fd-475c37ff4bed from this chassis (sb_readonly=0)
Oct 02 12:33:42 compute-0 nova_compute[192079]: 2025-10-02 12:33:42.344 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:42.357 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/d5d28c5b-6eab-4c56-bc64-1dd8250f45c6.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/d5d28c5b-6eab-4c56-bc64-1dd8250f45c6.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:33:42 compute-0 nova_compute[192079]: 2025-10-02 12:33:42.359 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:42.360 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[92834bcb-3083-42c4-859a-aea690a70663]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:42.361 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/d5d28c5b-6eab-4c56-bc64-1dd8250f45c6.pid.haproxy
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID d5d28c5b-6eab-4c56-bc64-1dd8250f45c6
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:33:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:42.362 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6', 'env', 'PROCESS_TAG=haproxy-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/d5d28c5b-6eab-4c56-bc64-1dd8250f45c6.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:33:42 compute-0 nova_compute[192079]: 2025-10-02 12:33:42.383 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:42 compute-0 nova_compute[192079]: 2025-10-02 12:33:42.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:33:42 compute-0 nova_compute[192079]: 2025-10-02 12:33:42.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:33:42 compute-0 podman[245547]: 2025-10-02 12:33:42.725982182 +0000 UTC m=+0.047343650 container create b29aed02c1b812072bf3b768f1e9aeb77072b509da6445056a7648a1aa1199fd (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS)
Oct 02 12:33:42 compute-0 podman[245547]: 2025-10-02 12:33:42.702023069 +0000 UTC m=+0.023384557 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:33:42 compute-0 systemd[1]: Started libpod-conmon-b29aed02c1b812072bf3b768f1e9aeb77072b509da6445056a7648a1aa1199fd.scope.
Oct 02 12:33:42 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:33:42 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/23bf39bbb3daa16a07390a05580ebf15054917020bee0f15cb0ffb4c22357eda/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:33:42 compute-0 podman[245547]: 2025-10-02 12:33:42.849631318 +0000 UTC m=+0.170992836 container init b29aed02c1b812072bf3b768f1e9aeb77072b509da6445056a7648a1aa1199fd (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, tcib_managed=true)
Oct 02 12:33:42 compute-0 podman[245547]: 2025-10-02 12:33:42.856569766 +0000 UTC m=+0.177931244 container start b29aed02c1b812072bf3b768f1e9aeb77072b509da6445056a7648a1aa1199fd (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6, org.label-schema.schema-version=1.0, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2)
Oct 02 12:33:42 compute-0 neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6[245562]: [NOTICE]   (245566) : New worker (245568) forked
Oct 02 12:33:42 compute-0 neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6[245562]: [NOTICE]   (245566) : Loading success.
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.189 2 DEBUG nova.compute.manager [req-d003fa8e-56de-4bc0-b2d1-c880c950be65 req-3a53d539-9cc3-4480-bfc7-023041b3927b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Received event network-vif-plugged-c4934bb6-5047-47ec-b0c6-127b4274769a external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.190 2 DEBUG oslo_concurrency.lockutils [req-d003fa8e-56de-4bc0-b2d1-c880c950be65 req-3a53d539-9cc3-4480-bfc7-023041b3927b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.191 2 DEBUG oslo_concurrency.lockutils [req-d003fa8e-56de-4bc0-b2d1-c880c950be65 req-3a53d539-9cc3-4480-bfc7-023041b3927b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.191 2 DEBUG oslo_concurrency.lockutils [req-d003fa8e-56de-4bc0-b2d1-c880c950be65 req-3a53d539-9cc3-4480-bfc7-023041b3927b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.192 2 DEBUG nova.compute.manager [req-d003fa8e-56de-4bc0-b2d1-c880c950be65 req-3a53d539-9cc3-4480-bfc7-023041b3927b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Processing event network-vif-plugged-c4934bb6-5047-47ec-b0c6-127b4274769a _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.635 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408423.6347535, 9c817262-fee7-483c-ac98-6d7648890eb0 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.636 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] VM Started (Lifecycle Event)
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.639 2 DEBUG nova.compute.manager [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.643 2 DEBUG nova.virt.libvirt.driver [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.647 2 INFO nova.virt.libvirt.driver [-] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Instance spawned successfully.
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.647 2 DEBUG nova.virt.libvirt.driver [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.659 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.662 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.671 2 DEBUG nova.virt.libvirt.driver [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.672 2 DEBUG nova.virt.libvirt.driver [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.672 2 DEBUG nova.virt.libvirt.driver [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.673 2 DEBUG nova.virt.libvirt.driver [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.673 2 DEBUG nova.virt.libvirt.driver [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.674 2 DEBUG nova.virt.libvirt.driver [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.708 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.709 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408423.634925, 9c817262-fee7-483c-ac98-6d7648890eb0 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.709 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] VM Paused (Lifecycle Event)
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.738 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.742 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408423.6429224, 9c817262-fee7-483c-ac98-6d7648890eb0 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.742 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] VM Resumed (Lifecycle Event)
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.851 2 INFO nova.compute.manager [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Took 9.49 seconds to spawn the instance on the hypervisor.
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.852 2 DEBUG nova.compute.manager [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.852 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.857 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.879 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.926 2 INFO nova.compute.manager [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Took 10.06 seconds to build instance.
Oct 02 12:33:43 compute-0 nova_compute[192079]: 2025-10-02 12:33:43.950 2 DEBUG oslo_concurrency.lockutils [None req-1b35463e-f4e3-417f-b8f3-17bca8427930 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "9c817262-fee7-483c-ac98-6d7648890eb0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 10.189s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:44 compute-0 nova_compute[192079]: 2025-10-02 12:33:44.011 2 DEBUG nova.network.neutron [req-6d35a76b-df60-48e7-b6dd-5527caec1077 req-8009b2c5-f6e6-4666-9684-8581fac844cf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Updated VIF entry in instance network info cache for port c4934bb6-5047-47ec-b0c6-127b4274769a. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:33:44 compute-0 nova_compute[192079]: 2025-10-02 12:33:44.012 2 DEBUG nova.network.neutron [req-6d35a76b-df60-48e7-b6dd-5527caec1077 req-8009b2c5-f6e6-4666-9684-8581fac844cf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Updating instance_info_cache with network_info: [{"id": "c4934bb6-5047-47ec-b0c6-127b4274769a", "address": "fa:16:3e:11:3b:3b", "network": {"id": "d5d28c5b-6eab-4c56-bc64-1dd8250f45c6", "bridge": "br-int", "label": "tempest-network-smoke--1759416782", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4934bb6-50", "ovs_interfaceid": "c4934bb6-5047-47ec-b0c6-127b4274769a", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:33:44 compute-0 nova_compute[192079]: 2025-10-02 12:33:44.034 2 DEBUG oslo_concurrency.lockutils [req-6d35a76b-df60-48e7-b6dd-5527caec1077 req-8009b2c5-f6e6-4666-9684-8581fac844cf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-9c817262-fee7-483c-ac98-6d7648890eb0" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:33:44 compute-0 nova_compute[192079]: 2025-10-02 12:33:44.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:33:44 compute-0 nova_compute[192079]: 2025-10-02 12:33:44.667 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:33:44 compute-0 nova_compute[192079]: 2025-10-02 12:33:44.668 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:33:44 compute-0 nova_compute[192079]: 2025-10-02 12:33:44.989 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "refresh_cache-9c817262-fee7-483c-ac98-6d7648890eb0" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:33:44 compute-0 nova_compute[192079]: 2025-10-02 12:33:44.990 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquired lock "refresh_cache-9c817262-fee7-483c-ac98-6d7648890eb0" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:33:44 compute-0 nova_compute[192079]: 2025-10-02 12:33:44.990 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Forcefully refreshing network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2004
Oct 02 12:33:44 compute-0 nova_compute[192079]: 2025-10-02 12:33:44.990 2 DEBUG nova.objects.instance [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lazy-loading 'info_cache' on Instance uuid 9c817262-fee7-483c-ac98-6d7648890eb0 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:33:45 compute-0 nova_compute[192079]: 2025-10-02 12:33:45.306 2 DEBUG nova.compute.manager [req-676656d0-8533-426f-b8c9-6b7d7433392b req-0a85d28a-88b4-455f-adcb-422446ca5d2c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Received event network-vif-plugged-c4934bb6-5047-47ec-b0c6-127b4274769a external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:33:45 compute-0 nova_compute[192079]: 2025-10-02 12:33:45.307 2 DEBUG oslo_concurrency.lockutils [req-676656d0-8533-426f-b8c9-6b7d7433392b req-0a85d28a-88b4-455f-adcb-422446ca5d2c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:45 compute-0 nova_compute[192079]: 2025-10-02 12:33:45.307 2 DEBUG oslo_concurrency.lockutils [req-676656d0-8533-426f-b8c9-6b7d7433392b req-0a85d28a-88b4-455f-adcb-422446ca5d2c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:45 compute-0 nova_compute[192079]: 2025-10-02 12:33:45.307 2 DEBUG oslo_concurrency.lockutils [req-676656d0-8533-426f-b8c9-6b7d7433392b req-0a85d28a-88b4-455f-adcb-422446ca5d2c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:45 compute-0 nova_compute[192079]: 2025-10-02 12:33:45.307 2 DEBUG nova.compute.manager [req-676656d0-8533-426f-b8c9-6b7d7433392b req-0a85d28a-88b4-455f-adcb-422446ca5d2c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] No waiting events found dispatching network-vif-plugged-c4934bb6-5047-47ec-b0c6-127b4274769a pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:33:45 compute-0 nova_compute[192079]: 2025-10-02 12:33:45.308 2 WARNING nova.compute.manager [req-676656d0-8533-426f-b8c9-6b7d7433392b req-0a85d28a-88b4-455f-adcb-422446ca5d2c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Received unexpected event network-vif-plugged-c4934bb6-5047-47ec-b0c6-127b4274769a for instance with vm_state active and task_state None.
Oct 02 12:33:46 compute-0 nova_compute[192079]: 2025-10-02 12:33:46.137 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:47 compute-0 nova_compute[192079]: 2025-10-02 12:33:47.385 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:47 compute-0 nova_compute[192079]: 2025-10-02 12:33:47.999 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Updating instance_info_cache with network_info: [{"id": "c4934bb6-5047-47ec-b0c6-127b4274769a", "address": "fa:16:3e:11:3b:3b", "network": {"id": "d5d28c5b-6eab-4c56-bc64-1dd8250f45c6", "bridge": "br-int", "label": "tempest-network-smoke--1759416782", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4934bb6-50", "ovs_interfaceid": "c4934bb6-5047-47ec-b0c6-127b4274769a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:33:48 compute-0 nova_compute[192079]: 2025-10-02 12:33:48.022 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Releasing lock "refresh_cache-9c817262-fee7-483c-ac98-6d7648890eb0" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:33:48 compute-0 nova_compute[192079]: 2025-10-02 12:33:48.022 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Updated the network info_cache for instance _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9929
Oct 02 12:33:48 compute-0 nova_compute[192079]: 2025-10-02 12:33:48.683 2 DEBUG oslo_concurrency.lockutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:48 compute-0 nova_compute[192079]: 2025-10-02 12:33:48.684 2 DEBUG oslo_concurrency.lockutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:48 compute-0 nova_compute[192079]: 2025-10-02 12:33:48.778 2 DEBUG nova.compute.manager [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:33:49 compute-0 nova_compute[192079]: 2025-10-02 12:33:49.002 2 DEBUG oslo_concurrency.lockutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:49 compute-0 nova_compute[192079]: 2025-10-02 12:33:49.003 2 DEBUG oslo_concurrency.lockutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:49 compute-0 nova_compute[192079]: 2025-10-02 12:33:49.014 2 DEBUG nova.virt.hardware [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:33:49 compute-0 nova_compute[192079]: 2025-10-02 12:33:49.015 2 INFO nova.compute.claims [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:33:49 compute-0 nova_compute[192079]: 2025-10-02 12:33:49.336 2 DEBUG nova.compute.provider_tree [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:33:49 compute-0 nova_compute[192079]: 2025-10-02 12:33:49.348 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:49 compute-0 NetworkManager[51160]: <info>  [1759408429.3487] manager: (patch-br-int-to-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/280)
Oct 02 12:33:49 compute-0 NetworkManager[51160]: <info>  [1759408429.3499] manager: (patch-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d-to-br-int): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/281)
Oct 02 12:33:49 compute-0 nova_compute[192079]: 2025-10-02 12:33:49.383 2 DEBUG nova.scheduler.client.report [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:33:49 compute-0 nova_compute[192079]: 2025-10-02 12:33:49.429 2 DEBUG oslo_concurrency.lockutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.425s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:49 compute-0 nova_compute[192079]: 2025-10-02 12:33:49.430 2 DEBUG nova.compute.manager [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:33:49 compute-0 nova_compute[192079]: 2025-10-02 12:33:49.517 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:49 compute-0 ovn_controller[94336]: 2025-10-02T12:33:49Z|00565|binding|INFO|Releasing lport 410dd08f-af3d-48c2-b0fd-475c37ff4bed from this chassis (sb_readonly=0)
Oct 02 12:33:49 compute-0 nova_compute[192079]: 2025-10-02 12:33:49.537 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:49 compute-0 nova_compute[192079]: 2025-10-02 12:33:49.594 2 DEBUG nova.compute.manager [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:33:49 compute-0 nova_compute[192079]: 2025-10-02 12:33:49.596 2 DEBUG nova.network.neutron [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:33:49 compute-0 nova_compute[192079]: 2025-10-02 12:33:49.712 2 INFO nova.virt.libvirt.driver [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:33:49 compute-0 nova_compute[192079]: 2025-10-02 12:33:49.759 2 DEBUG nova.compute.manager [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:33:49 compute-0 nova_compute[192079]: 2025-10-02 12:33:49.931 2 DEBUG nova.compute.manager [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:33:49 compute-0 nova_compute[192079]: 2025-10-02 12:33:49.934 2 DEBUG nova.virt.libvirt.driver [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:33:49 compute-0 nova_compute[192079]: 2025-10-02 12:33:49.935 2 INFO nova.virt.libvirt.driver [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Creating image(s)
Oct 02 12:33:49 compute-0 nova_compute[192079]: 2025-10-02 12:33:49.935 2 DEBUG oslo_concurrency.lockutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "/var/lib/nova/instances/10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:49 compute-0 nova_compute[192079]: 2025-10-02 12:33:49.936 2 DEBUG oslo_concurrency.lockutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "/var/lib/nova/instances/10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:49 compute-0 nova_compute[192079]: 2025-10-02 12:33:49.937 2 DEBUG oslo_concurrency.lockutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "/var/lib/nova/instances/10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:49 compute-0 nova_compute[192079]: 2025-10-02 12:33:49.954 2 DEBUG oslo_concurrency.processutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:33:49 compute-0 nova_compute[192079]: 2025-10-02 12:33:49.990 2 DEBUG nova.policy [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:33:50 compute-0 nova_compute[192079]: 2025-10-02 12:33:50.031 2 DEBUG oslo_concurrency.processutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.077s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:33:50 compute-0 nova_compute[192079]: 2025-10-02 12:33:50.032 2 DEBUG oslo_concurrency.lockutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:50 compute-0 nova_compute[192079]: 2025-10-02 12:33:50.033 2 DEBUG oslo_concurrency.lockutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:50 compute-0 nova_compute[192079]: 2025-10-02 12:33:50.044 2 DEBUG oslo_concurrency.processutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:33:50 compute-0 nova_compute[192079]: 2025-10-02 12:33:50.099 2 DEBUG oslo_concurrency.processutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:33:50 compute-0 nova_compute[192079]: 2025-10-02 12:33:50.101 2 DEBUG oslo_concurrency.processutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:33:50 compute-0 nova_compute[192079]: 2025-10-02 12:33:50.141 2 DEBUG oslo_concurrency.processutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a/disk 1073741824" returned: 0 in 0.040s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:33:50 compute-0 nova_compute[192079]: 2025-10-02 12:33:50.142 2 DEBUG oslo_concurrency.lockutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.109s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:50 compute-0 nova_compute[192079]: 2025-10-02 12:33:50.143 2 DEBUG oslo_concurrency.processutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:33:50 compute-0 podman[245589]: 2025-10-02 12:33:50.153400304 +0000 UTC m=+0.064729013 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_id=edpm, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, 
org.label-schema.license=GPLv2, container_name=ceilometer_agent_compute, org.label-schema.build-date=20251001)
Oct 02 12:33:50 compute-0 nova_compute[192079]: 2025-10-02 12:33:50.202 2 DEBUG oslo_concurrency.processutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.059s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:33:50 compute-0 nova_compute[192079]: 2025-10-02 12:33:50.209 2 DEBUG nova.virt.disk.api [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Checking if we can resize image /var/lib/nova/instances/10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:33:50 compute-0 nova_compute[192079]: 2025-10-02 12:33:50.210 2 DEBUG oslo_concurrency.processutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:33:50 compute-0 nova_compute[192079]: 2025-10-02 12:33:50.272 2 DEBUG oslo_concurrency.processutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a/disk --force-share --output=json" returned: 0 in 0.062s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:33:50 compute-0 nova_compute[192079]: 2025-10-02 12:33:50.273 2 DEBUG nova.virt.disk.api [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Cannot resize image /var/lib/nova/instances/10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:33:50 compute-0 nova_compute[192079]: 2025-10-02 12:33:50.274 2 DEBUG nova.objects.instance [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lazy-loading 'migration_context' on Instance uuid 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:33:50 compute-0 nova_compute[192079]: 2025-10-02 12:33:50.297 2 DEBUG nova.compute.manager [req-19a6589d-1dc3-48c0-b45a-4c41eeab99fe req-c3b2e914-3406-4e73-bbe4-9c6bad9690eb 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Received event network-changed-c4934bb6-5047-47ec-b0c6-127b4274769a external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:33:50 compute-0 nova_compute[192079]: 2025-10-02 12:33:50.298 2 DEBUG nova.compute.manager [req-19a6589d-1dc3-48c0-b45a-4c41eeab99fe req-c3b2e914-3406-4e73-bbe4-9c6bad9690eb 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Refreshing instance network info cache due to event network-changed-c4934bb6-5047-47ec-b0c6-127b4274769a. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:33:50 compute-0 nova_compute[192079]: 2025-10-02 12:33:50.298 2 DEBUG oslo_concurrency.lockutils [req-19a6589d-1dc3-48c0-b45a-4c41eeab99fe req-c3b2e914-3406-4e73-bbe4-9c6bad9690eb 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-9c817262-fee7-483c-ac98-6d7648890eb0" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:33:50 compute-0 nova_compute[192079]: 2025-10-02 12:33:50.299 2 DEBUG oslo_concurrency.lockutils [req-19a6589d-1dc3-48c0-b45a-4c41eeab99fe req-c3b2e914-3406-4e73-bbe4-9c6bad9690eb 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-9c817262-fee7-483c-ac98-6d7648890eb0" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:33:50 compute-0 nova_compute[192079]: 2025-10-02 12:33:50.299 2 DEBUG nova.network.neutron [req-19a6589d-1dc3-48c0-b45a-4c41eeab99fe req-c3b2e914-3406-4e73-bbe4-9c6bad9690eb 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Refreshing network info cache for port c4934bb6-5047-47ec-b0c6-127b4274769a _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:33:50 compute-0 nova_compute[192079]: 2025-10-02 12:33:50.337 2 DEBUG nova.virt.libvirt.driver [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:33:50 compute-0 nova_compute[192079]: 2025-10-02 12:33:50.337 2 DEBUG nova.virt.libvirt.driver [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Ensure instance console log exists: /var/lib/nova/instances/10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:33:50 compute-0 nova_compute[192079]: 2025-10-02 12:33:50.338 2 DEBUG oslo_concurrency.lockutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:50 compute-0 nova_compute[192079]: 2025-10-02 12:33:50.338 2 DEBUG oslo_concurrency.lockutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:50 compute-0 nova_compute[192079]: 2025-10-02 12:33:50.339 2 DEBUG oslo_concurrency.lockutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:51 compute-0 nova_compute[192079]: 2025-10-02 12:33:51.140 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:51 compute-0 nova_compute[192079]: 2025-10-02 12:33:51.346 2 DEBUG nova.network.neutron [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Successfully created port: 6fc2406f-6b10-4cc3-a5a7-090de3a926bf _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:33:51 compute-0 nova_compute[192079]: 2025-10-02 12:33:51.932 2 DEBUG nova.network.neutron [req-19a6589d-1dc3-48c0-b45a-4c41eeab99fe req-c3b2e914-3406-4e73-bbe4-9c6bad9690eb 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Updated VIF entry in instance network info cache for port c4934bb6-5047-47ec-b0c6-127b4274769a. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:33:51 compute-0 nova_compute[192079]: 2025-10-02 12:33:51.933 2 DEBUG nova.network.neutron [req-19a6589d-1dc3-48c0-b45a-4c41eeab99fe req-c3b2e914-3406-4e73-bbe4-9c6bad9690eb 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Updating instance_info_cache with network_info: [{"id": "c4934bb6-5047-47ec-b0c6-127b4274769a", "address": "fa:16:3e:11:3b:3b", "network": {"id": "d5d28c5b-6eab-4c56-bc64-1dd8250f45c6", "bridge": "br-int", "label": "tempest-network-smoke--1759416782", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4934bb6-50", "ovs_interfaceid": "c4934bb6-5047-47ec-b0c6-127b4274769a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:33:52 compute-0 nova_compute[192079]: 2025-10-02 12:33:52.179 2 DEBUG oslo_concurrency.lockutils [req-19a6589d-1dc3-48c0-b45a-4c41eeab99fe req-c3b2e914-3406-4e73-bbe4-9c6bad9690eb 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-9c817262-fee7-483c-ac98-6d7648890eb0" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:33:52 compute-0 nova_compute[192079]: 2025-10-02 12:33:52.389 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:52 compute-0 nova_compute[192079]: 2025-10-02 12:33:52.683 2 DEBUG nova.network.neutron [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Successfully updated port: 6fc2406f-6b10-4cc3-a5a7-090de3a926bf _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:33:52 compute-0 nova_compute[192079]: 2025-10-02 12:33:52.703 2 DEBUG oslo_concurrency.lockutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "refresh_cache-10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:33:52 compute-0 nova_compute[192079]: 2025-10-02 12:33:52.703 2 DEBUG oslo_concurrency.lockutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquired lock "refresh_cache-10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:33:52 compute-0 nova_compute[192079]: 2025-10-02 12:33:52.703 2 DEBUG nova.network.neutron [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:33:52 compute-0 nova_compute[192079]: 2025-10-02 12:33:52.755 2 DEBUG nova.compute.manager [req-19a33eab-9831-45a8-bc70-10635cfc33d7 req-87c7dc64-1d92-4945-b84a-14b98817a2b7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Received event network-changed-6fc2406f-6b10-4cc3-a5a7-090de3a926bf external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:33:52 compute-0 nova_compute[192079]: 2025-10-02 12:33:52.756 2 DEBUG nova.compute.manager [req-19a33eab-9831-45a8-bc70-10635cfc33d7 req-87c7dc64-1d92-4945-b84a-14b98817a2b7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Refreshing instance network info cache due to event network-changed-6fc2406f-6b10-4cc3-a5a7-090de3a926bf. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:33:52 compute-0 nova_compute[192079]: 2025-10-02 12:33:52.756 2 DEBUG oslo_concurrency.lockutils [req-19a33eab-9831-45a8-bc70-10635cfc33d7 req-87c7dc64-1d92-4945-b84a-14b98817a2b7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:33:52 compute-0 nova_compute[192079]: 2025-10-02 12:33:52.921 2 DEBUG nova.network.neutron [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.570 2 DEBUG nova.network.neutron [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Updating instance_info_cache with network_info: [{"id": "6fc2406f-6b10-4cc3-a5a7-090de3a926bf", "address": "fa:16:3e:fe:89:d4", "network": {"id": "e1db9fd6-3b23-47bd-a491-8e04b76ccc0a", "bridge": "br-int", "label": "tempest-network-smoke--559146252", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap6fc2406f-6b", "ovs_interfaceid": "6fc2406f-6b10-4cc3-a5a7-090de3a926bf", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.589 2 DEBUG oslo_concurrency.lockutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Releasing lock "refresh_cache-10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.590 2 DEBUG nova.compute.manager [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Instance network_info: |[{"id": "6fc2406f-6b10-4cc3-a5a7-090de3a926bf", "address": "fa:16:3e:fe:89:d4", "network": {"id": "e1db9fd6-3b23-47bd-a491-8e04b76ccc0a", "bridge": "br-int", "label": "tempest-network-smoke--559146252", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap6fc2406f-6b", "ovs_interfaceid": "6fc2406f-6b10-4cc3-a5a7-090de3a926bf", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.590 2 DEBUG oslo_concurrency.lockutils [req-19a33eab-9831-45a8-bc70-10635cfc33d7 req-87c7dc64-1d92-4945-b84a-14b98817a2b7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.590 2 DEBUG nova.network.neutron [req-19a33eab-9831-45a8-bc70-10635cfc33d7 req-87c7dc64-1d92-4945-b84a-14b98817a2b7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Refreshing network info cache for port 6fc2406f-6b10-4cc3-a5a7-090de3a926bf _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.593 2 DEBUG nova.virt.libvirt.driver [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Start _get_guest_xml network_info=[{"id": "6fc2406f-6b10-4cc3-a5a7-090de3a926bf", "address": "fa:16:3e:fe:89:d4", "network": {"id": "e1db9fd6-3b23-47bd-a491-8e04b76ccc0a", "bridge": "br-int", "label": "tempest-network-smoke--559146252", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap6fc2406f-6b", "ovs_interfaceid": "6fc2406f-6b10-4cc3-a5a7-090de3a926bf", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.596 2 WARNING nova.virt.libvirt.driver [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.603 2 DEBUG nova.virt.libvirt.host [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.604 2 DEBUG nova.virt.libvirt.host [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.612 2 DEBUG nova.virt.libvirt.host [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.612 2 DEBUG nova.virt.libvirt.host [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.613 2 DEBUG nova.virt.libvirt.driver [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.614 2 DEBUG nova.virt.hardware [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.614 2 DEBUG nova.virt.hardware [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.614 2 DEBUG nova.virt.hardware [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.615 2 DEBUG nova.virt.hardware [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.615 2 DEBUG nova.virt.hardware [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.615 2 DEBUG nova.virt.hardware [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.616 2 DEBUG nova.virt.hardware [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.616 2 DEBUG nova.virt.hardware [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.616 2 DEBUG nova.virt.hardware [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.617 2 DEBUG nova.virt.hardware [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.617 2 DEBUG nova.virt.hardware [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.620 2 DEBUG nova.virt.libvirt.vif [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:33:47Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestNetworkBasicOps-server-316138620',display_name='tempest-TestNetworkBasicOps-server-316138620',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkbasicops-server-316138620',id=152,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBCZJ044bOF+4CbUAqlAd/fAKzGJ3BjJQ1O3aAWUjn6R/ZF20KqJILd3qify46f7iuXSfM7SXhCJSXuQOr57zgsxMPL+k0QdvGYXa4GlPAiCAJ2rRRSs//k7mKcwWNdtJgQ==',key_name='tempest-TestNetworkBasicOps-1887469931',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='6e2a4899168a47618e377cb3ac85ddd2',ramdisk_id='',reservation_id='r-281286p4',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestNetworkBasicOps-1323893370',owner_user_name='tempest-TestNetworkBasicOps-1323893370-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:33:49Z,user_data=None,user_id='a1898fdf056c4a249c33590f26d4d845',uuid=10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "6fc2406f-6b10-4cc3-a5a7-090de3a926bf", "address": "fa:16:3e:fe:89:d4", "network": {"id": "e1db9fd6-3b23-47bd-a491-8e04b76ccc0a", "bridge": "br-int", "label": "tempest-network-smoke--559146252", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": 
{"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap6fc2406f-6b", "ovs_interfaceid": "6fc2406f-6b10-4cc3-a5a7-090de3a926bf", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.620 2 DEBUG nova.network.os_vif_util [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converting VIF {"id": "6fc2406f-6b10-4cc3-a5a7-090de3a926bf", "address": "fa:16:3e:fe:89:d4", "network": {"id": "e1db9fd6-3b23-47bd-a491-8e04b76ccc0a", "bridge": "br-int", "label": "tempest-network-smoke--559146252", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap6fc2406f-6b", "ovs_interfaceid": "6fc2406f-6b10-4cc3-a5a7-090de3a926bf", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.621 2 DEBUG nova.network.os_vif_util [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:fe:89:d4,bridge_name='br-int',has_traffic_filtering=True,id=6fc2406f-6b10-4cc3-a5a7-090de3a926bf,network=Network(e1db9fd6-3b23-47bd-a491-8e04b76ccc0a),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap6fc2406f-6b') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.622 2 DEBUG nova.objects.instance [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lazy-loading 'pci_devices' on Instance uuid 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.636 2 DEBUG nova.virt.libvirt.driver [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:33:53 compute-0 nova_compute[192079]:   <uuid>10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a</uuid>
Oct 02 12:33:53 compute-0 nova_compute[192079]:   <name>instance-00000098</name>
Oct 02 12:33:53 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:33:53 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:33:53 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:33:53 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:       <nova:name>tempest-TestNetworkBasicOps-server-316138620</nova:name>
Oct 02 12:33:53 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:33:53</nova:creationTime>
Oct 02 12:33:53 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:33:53 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:33:53 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:33:53 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:33:53 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:33:53 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:33:53 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:33:53 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:33:53 compute-0 nova_compute[192079]:         <nova:user uuid="a1898fdf056c4a249c33590f26d4d845">tempest-TestNetworkBasicOps-1323893370-project-member</nova:user>
Oct 02 12:33:53 compute-0 nova_compute[192079]:         <nova:project uuid="6e2a4899168a47618e377cb3ac85ddd2">tempest-TestNetworkBasicOps-1323893370</nova:project>
Oct 02 12:33:53 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:33:53 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:33:53 compute-0 nova_compute[192079]:         <nova:port uuid="6fc2406f-6b10-4cc3-a5a7-090de3a926bf">
Oct 02 12:33:53 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.7" ipVersion="4"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:33:53 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:33:53 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:33:53 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <system>
Oct 02 12:33:53 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:33:53 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:33:53 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:33:53 compute-0 nova_compute[192079]:       <entry name="serial">10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a</entry>
Oct 02 12:33:53 compute-0 nova_compute[192079]:       <entry name="uuid">10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a</entry>
Oct 02 12:33:53 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     </system>
Oct 02 12:33:53 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:33:53 compute-0 nova_compute[192079]:   <os>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:   </os>
Oct 02 12:33:53 compute-0 nova_compute[192079]:   <features>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:   </features>
Oct 02 12:33:53 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:33:53 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:33:53 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:33:53 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a/disk"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:33:53 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a/disk.config"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:33:53 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:fe:89:d4"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:       <target dev="tap6fc2406f-6b"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:33:53 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a/console.log" append="off"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <video>
Oct 02 12:33:53 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     </video>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:33:53 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:33:53 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:33:53 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:33:53 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:33:53 compute-0 nova_compute[192079]: </domain>
Oct 02 12:33:53 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.642 2 DEBUG nova.compute.manager [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Preparing to wait for external event network-vif-plugged-6fc2406f-6b10-4cc3-a5a7-090de3a926bf prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.642 2 DEBUG oslo_concurrency.lockutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.642 2 DEBUG oslo_concurrency.lockutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.643 2 DEBUG oslo_concurrency.lockutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.644 2 DEBUG nova.virt.libvirt.vif [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:33:47Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestNetworkBasicOps-server-316138620',display_name='tempest-TestNetworkBasicOps-server-316138620',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkbasicops-server-316138620',id=152,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBCZJ044bOF+4CbUAqlAd/fAKzGJ3BjJQ1O3aAWUjn6R/ZF20KqJILd3qify46f7iuXSfM7SXhCJSXuQOr57zgsxMPL+k0QdvGYXa4GlPAiCAJ2rRRSs//k7mKcwWNdtJgQ==',key_name='tempest-TestNetworkBasicOps-1887469931',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='6e2a4899168a47618e377cb3ac85ddd2',ramdisk_id='',reservation_id='r-281286p4',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestNetworkBasicOps-1323893370',owner_user_name='tempest-TestNetworkBasicOps-1323893370-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:33:49Z,user_data=None,user_id='a1898fdf056c4a249c33590f26d4d845',uuid=10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "6fc2406f-6b10-4cc3-a5a7-090de3a926bf", "address": "fa:16:3e:fe:89:d4", "network": {"id": "e1db9fd6-3b23-47bd-a491-8e04b76ccc0a", "bridge": "br-int", "label": "tempest-network-smoke--559146252", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": 
{"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap6fc2406f-6b", "ovs_interfaceid": "6fc2406f-6b10-4cc3-a5a7-090de3a926bf", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.644 2 DEBUG nova.network.os_vif_util [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converting VIF {"id": "6fc2406f-6b10-4cc3-a5a7-090de3a926bf", "address": "fa:16:3e:fe:89:d4", "network": {"id": "e1db9fd6-3b23-47bd-a491-8e04b76ccc0a", "bridge": "br-int", "label": "tempest-network-smoke--559146252", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap6fc2406f-6b", "ovs_interfaceid": "6fc2406f-6b10-4cc3-a5a7-090de3a926bf", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.645 2 DEBUG nova.network.os_vif_util [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:fe:89:d4,bridge_name='br-int',has_traffic_filtering=True,id=6fc2406f-6b10-4cc3-a5a7-090de3a926bf,network=Network(e1db9fd6-3b23-47bd-a491-8e04b76ccc0a),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap6fc2406f-6b') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.645 2 DEBUG os_vif [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:fe:89:d4,bridge_name='br-int',has_traffic_filtering=True,id=6fc2406f-6b10-4cc3-a5a7-090de3a926bf,network=Network(e1db9fd6-3b23-47bd-a491-8e04b76ccc0a),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap6fc2406f-6b') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.646 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.647 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.647 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.651 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.651 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap6fc2406f-6b, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.652 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap6fc2406f-6b, col_values=(('external_ids', {'iface-id': '6fc2406f-6b10-4cc3-a5a7-090de3a926bf', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:fe:89:d4', 'vm-uuid': '10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.653 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:53 compute-0 NetworkManager[51160]: <info>  [1759408433.6544] manager: (tap6fc2406f-6b): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/282)
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.655 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.659 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.660 2 INFO os_vif [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:fe:89:d4,bridge_name='br-int',has_traffic_filtering=True,id=6fc2406f-6b10-4cc3-a5a7-090de3a926bf,network=Network(e1db9fd6-3b23-47bd-a491-8e04b76ccc0a),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap6fc2406f-6b')
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.713 2 DEBUG nova.virt.libvirt.driver [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.714 2 DEBUG nova.virt.libvirt.driver [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.714 2 DEBUG nova.virt.libvirt.driver [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] No VIF found with MAC fa:16:3e:fe:89:d4, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:33:53 compute-0 nova_compute[192079]: 2025-10-02 12:33:53.715 2 INFO nova.virt.libvirt.driver [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Using config drive
Oct 02 12:33:54 compute-0 nova_compute[192079]: 2025-10-02 12:33:54.058 2 INFO nova.virt.libvirt.driver [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Creating config drive at /var/lib/nova/instances/10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a/disk.config
Oct 02 12:33:54 compute-0 nova_compute[192079]: 2025-10-02 12:33:54.063 2 DEBUG oslo_concurrency.processutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpk7_y673c execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:33:54 compute-0 nova_compute[192079]: 2025-10-02 12:33:54.189 2 DEBUG oslo_concurrency.processutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpk7_y673c" returned: 0 in 0.126s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:33:54 compute-0 NetworkManager[51160]: <info>  [1759408434.2444] manager: (tap6fc2406f-6b): new Tun device (/org/freedesktop/NetworkManager/Devices/283)
Oct 02 12:33:54 compute-0 kernel: tap6fc2406f-6b: entered promiscuous mode
Oct 02 12:33:54 compute-0 nova_compute[192079]: 2025-10-02 12:33:54.250 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:54 compute-0 ovn_controller[94336]: 2025-10-02T12:33:54Z|00566|binding|INFO|Claiming lport 6fc2406f-6b10-4cc3-a5a7-090de3a926bf for this chassis.
Oct 02 12:33:54 compute-0 ovn_controller[94336]: 2025-10-02T12:33:54Z|00567|binding|INFO|6fc2406f-6b10-4cc3-a5a7-090de3a926bf: Claiming fa:16:3e:fe:89:d4 10.100.0.7
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:54.257 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:fe:89:d4 10.100.0.7'], port_security=['fa:16:3e:fe:89:d4 10.100.0.7'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.7/28', 'neutron:device_id': '10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-e1db9fd6-3b23-47bd-a491-8e04b76ccc0a', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'neutron:revision_number': '2', 'neutron:security_group_ids': '4dfabd8c-e266-4906-bd78-314554158351', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=00403231-2053-4b5b-8468-60aa92bc86a7, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=6fc2406f-6b10-4cc3-a5a7-090de3a926bf) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:54.259 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 6fc2406f-6b10-4cc3-a5a7-090de3a926bf in datapath e1db9fd6-3b23-47bd-a491-8e04b76ccc0a bound to our chassis
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:54.260 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network e1db9fd6-3b23-47bd-a491-8e04b76ccc0a
Oct 02 12:33:54 compute-0 nova_compute[192079]: 2025-10-02 12:33:54.266 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:54 compute-0 ovn_controller[94336]: 2025-10-02T12:33:54Z|00568|binding|INFO|Setting lport 6fc2406f-6b10-4cc3-a5a7-090de3a926bf ovn-installed in OVS
Oct 02 12:33:54 compute-0 ovn_controller[94336]: 2025-10-02T12:33:54Z|00569|binding|INFO|Setting lport 6fc2406f-6b10-4cc3-a5a7-090de3a926bf up in Southbound
Oct 02 12:33:54 compute-0 nova_compute[192079]: 2025-10-02 12:33:54.271 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:54 compute-0 nova_compute[192079]: 2025-10-02 12:33:54.272 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:54.272 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d9e849ab-a71e-474b-8714-b14302d999a0]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:54.273 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tape1db9fd6-31 in ovnmeta-e1db9fd6-3b23-47bd-a491-8e04b76ccc0a namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:54.275 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tape1db9fd6-30 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:54.275 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0a7e73f6-bb9a-463e-8197-cda9d7b4f86c]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:54 compute-0 nova_compute[192079]: 2025-10-02 12:33:54.281 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:54.278 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[dfb250ad-3f25-4719-9607-73f7a22dafa1]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:54 compute-0 systemd-udevd[245638]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:54.295 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[d2b19601-1471-4f78-b93a-b6bd3aff562e]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:54 compute-0 NetworkManager[51160]: <info>  [1759408434.3010] device (tap6fc2406f-6b): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:33:54 compute-0 NetworkManager[51160]: <info>  [1759408434.3029] device (tap6fc2406f-6b): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:33:54 compute-0 systemd-machined[152150]: New machine qemu-72-instance-00000098.
Oct 02 12:33:54 compute-0 systemd[1]: Started Virtual Machine qemu-72-instance-00000098.
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:54.321 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5815611e-6c65-4c12-9eae-5f2a8419f089]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:54.349 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[e7242e36-db9e-4d5a-beb1-b886359593a9]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:54.354 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[262ef681-0634-4347-9b8b-ff8c060eb47d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:54 compute-0 systemd-udevd[245643]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:33:54 compute-0 NetworkManager[51160]: <info>  [1759408434.3571] manager: (tape1db9fd6-30): new Veth device (/org/freedesktop/NetworkManager/Devices/284)
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:54.389 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[0de963f6-6801-4840-b76c-0872fc93f843]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:54.392 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[4a4f733c-968a-4861-ad5f-08e780b6b510]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:54 compute-0 NetworkManager[51160]: <info>  [1759408434.4100] device (tape1db9fd6-30): carrier: link connected
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:54.414 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[15d34389-1b9a-425e-ae43-8b6ab98910ee]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:54.430 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[fef32aac-f59b-4cd9-9066-5e23c70f552a]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tape1db9fd6-31'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:d2:ef:0c'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 182], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 643203, 'reachable_time': 25606, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 245671, 'error': None, 'target': 'ovnmeta-e1db9fd6-3b23-47bd-a491-8e04b76ccc0a', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:54.447 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c220479e-f7a7-464a-9b1a-22f1d8d2324c]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fed2:ef0c'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 643203, 'tstamp': 643203}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 245672, 'error': None, 'target': 'ovnmeta-e1db9fd6-3b23-47bd-a491-8e04b76ccc0a', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:54.463 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[21e9ce23-063e-4aa5-956c-7fc1da60e598]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tape1db9fd6-31'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:d2:ef:0c'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 2, 'rx_bytes': 110, 'tx_bytes': 176, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 2, 'rx_bytes': 110, 'tx_bytes': 176, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 182], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 643203, 'reachable_time': 25606, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 2, 'outoctets': 148, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 2, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 148, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 2, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 245673, 'error': None, 'target': 'ovnmeta-e1db9fd6-3b23-47bd-a491-8e04b76ccc0a', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:54.492 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f27fc155-7748-458e-a6e0-90d2eb8afc61]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:54 compute-0 nova_compute[192079]: 2025-10-02 12:33:54.515 2 DEBUG nova.compute.manager [req-354b5a05-f758-4c1b-95e2-4f873ebdc9e8 req-b0fde70c-106f-4976-b566-28266b29bc7d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Received event network-vif-plugged-6fc2406f-6b10-4cc3-a5a7-090de3a926bf external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:33:54 compute-0 nova_compute[192079]: 2025-10-02 12:33:54.516 2 DEBUG oslo_concurrency.lockutils [req-354b5a05-f758-4c1b-95e2-4f873ebdc9e8 req-b0fde70c-106f-4976-b566-28266b29bc7d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:54 compute-0 nova_compute[192079]: 2025-10-02 12:33:54.517 2 DEBUG oslo_concurrency.lockutils [req-354b5a05-f758-4c1b-95e2-4f873ebdc9e8 req-b0fde70c-106f-4976-b566-28266b29bc7d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:54 compute-0 nova_compute[192079]: 2025-10-02 12:33:54.517 2 DEBUG oslo_concurrency.lockutils [req-354b5a05-f758-4c1b-95e2-4f873ebdc9e8 req-b0fde70c-106f-4976-b566-28266b29bc7d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:54 compute-0 nova_compute[192079]: 2025-10-02 12:33:54.518 2 DEBUG nova.compute.manager [req-354b5a05-f758-4c1b-95e2-4f873ebdc9e8 req-b0fde70c-106f-4976-b566-28266b29bc7d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Processing event network-vif-plugged-6fc2406f-6b10-4cc3-a5a7-090de3a926bf _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:54.546 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[70b3474c-3a9c-4721-a6ad-f70eb7841834]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:54.548 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tape1db9fd6-30, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:54.548 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:54.549 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tape1db9fd6-30, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:33:54 compute-0 NetworkManager[51160]: <info>  [1759408434.5517] manager: (tape1db9fd6-30): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/285)
Oct 02 12:33:54 compute-0 kernel: tape1db9fd6-30: entered promiscuous mode
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:54.553 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tape1db9fd6-30, col_values=(('external_ids', {'iface-id': '19523e8b-8d03-4c24-9feb-c50955a083af'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:33:54 compute-0 ovn_controller[94336]: 2025-10-02T12:33:54Z|00570|binding|INFO|Releasing lport 19523e8b-8d03-4c24-9feb-c50955a083af from this chassis (sb_readonly=0)
Oct 02 12:33:54 compute-0 nova_compute[192079]: 2025-10-02 12:33:54.553 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:54 compute-0 nova_compute[192079]: 2025-10-02 12:33:54.568 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:54.568 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/e1db9fd6-3b23-47bd-a491-8e04b76ccc0a.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/e1db9fd6-3b23-47bd-a491-8e04b76ccc0a.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:54.569 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1d6e462f-9719-4040-adee-ace8a230ed26]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:54.570 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-e1db9fd6-3b23-47bd-a491-8e04b76ccc0a
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/e1db9fd6-3b23-47bd-a491-8e04b76ccc0a.pid.haproxy
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID e1db9fd6-3b23-47bd-a491-8e04b76ccc0a
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:33:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:33:54.571 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-e1db9fd6-3b23-47bd-a491-8e04b76ccc0a', 'env', 'PROCESS_TAG=haproxy-e1db9fd6-3b23-47bd-a491-8e04b76ccc0a', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/e1db9fd6-3b23-47bd-a491-8e04b76ccc0a.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:33:54 compute-0 nova_compute[192079]: 2025-10-02 12:33:54.616 2 DEBUG nova.network.neutron [req-19a33eab-9831-45a8-bc70-10635cfc33d7 req-87c7dc64-1d92-4945-b84a-14b98817a2b7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Updated VIF entry in instance network info cache for port 6fc2406f-6b10-4cc3-a5a7-090de3a926bf. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:33:54 compute-0 nova_compute[192079]: 2025-10-02 12:33:54.624 2 DEBUG nova.network.neutron [req-19a33eab-9831-45a8-bc70-10635cfc33d7 req-87c7dc64-1d92-4945-b84a-14b98817a2b7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Updating instance_info_cache with network_info: [{"id": "6fc2406f-6b10-4cc3-a5a7-090de3a926bf", "address": "fa:16:3e:fe:89:d4", "network": {"id": "e1db9fd6-3b23-47bd-a491-8e04b76ccc0a", "bridge": "br-int", "label": "tempest-network-smoke--559146252", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap6fc2406f-6b", "ovs_interfaceid": "6fc2406f-6b10-4cc3-a5a7-090de3a926bf", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:33:54 compute-0 nova_compute[192079]: 2025-10-02 12:33:54.638 2 DEBUG oslo_concurrency.lockutils [req-19a33eab-9831-45a8-bc70-10635cfc33d7 req-87c7dc64-1d92-4945-b84a-14b98817a2b7 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:33:54 compute-0 podman[245705]: 2025-10-02 12:33:54.935200343 +0000 UTC m=+0.057294420 container create a8376c27ba2363fddc0ba3e9b2b7b44f0a667517f705e17de2fa1607fe532fc7 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-e1db9fd6-3b23-47bd-a491-8e04b76ccc0a, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0)
Oct 02 12:33:54 compute-0 systemd[1]: Started libpod-conmon-a8376c27ba2363fddc0ba3e9b2b7b44f0a667517f705e17de2fa1607fe532fc7.scope.
Oct 02 12:33:54 compute-0 podman[245705]: 2025-10-02 12:33:54.908701902 +0000 UTC m=+0.030795999 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:33:55 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:33:55 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/60cb7848c5101fb2592b6eec444b4cefc63212b634cc86402b2439d65cba57f8/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:33:55 compute-0 podman[245705]: 2025-10-02 12:33:55.021129243 +0000 UTC m=+0.143223320 container init a8376c27ba2363fddc0ba3e9b2b7b44f0a667517f705e17de2fa1607fe532fc7 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-e1db9fd6-3b23-47bd-a491-8e04b76ccc0a, org.label-schema.license=GPLv2, tcib_managed=true, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:33:55 compute-0 podman[245705]: 2025-10-02 12:33:55.026749536 +0000 UTC m=+0.148843613 container start a8376c27ba2363fddc0ba3e9b2b7b44f0a667517f705e17de2fa1607fe532fc7 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-e1db9fd6-3b23-47bd-a491-8e04b76ccc0a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, tcib_managed=true, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:33:55 compute-0 neutron-haproxy-ovnmeta-e1db9fd6-3b23-47bd-a491-8e04b76ccc0a[245728]: [NOTICE]   (245734) : New worker (245736) forked
Oct 02 12:33:55 compute-0 neutron-haproxy-ovnmeta-e1db9fd6-3b23-47bd-a491-8e04b76ccc0a[245728]: [NOTICE]   (245734) : Loading success.
Oct 02 12:33:55 compute-0 ovn_controller[94336]: 2025-10-02T12:33:55Z|00058|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:11:3b:3b 10.100.0.5
Oct 02 12:33:55 compute-0 ovn_controller[94336]: 2025-10-02T12:33:55Z|00059|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:11:3b:3b 10.100.0.5
Oct 02 12:33:56 compute-0 podman[245754]: 2025-10-02 12:33:56.154671082 +0000 UTC m=+0.055925374 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_id=multipathd, container_name=multipathd, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:33:56 compute-0 podman[245753]: 2025-10-02 12:33:56.158816935 +0000 UTC m=+0.062598216 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.openshift.expose-services=, io.openshift.tags=minimal rhel9, url=https://catalog.redhat.com/en/search?searchType=containers, vendor=Red Hat, Inc., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, distribution-scope=public, name=ubi9-minimal, version=9.6, maintainer=Red Hat, Inc., managed_by=edpm_ansible, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., release=1755695350, io.buildah.version=1.33.7, vcs-type=git, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, container_name=openstack_network_exporter, build-date=2025-08-20T13:12:41, config_id=edpm, com.redhat.component=ubi9-minimal-container, architecture=x86_64, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly.)
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.440 2 DEBUG nova.compute.manager [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.441 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408436.4408293, 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.441 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] VM Started (Lifecycle Event)
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.445 2 DEBUG nova.virt.libvirt.driver [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.448 2 INFO nova.virt.libvirt.driver [-] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Instance spawned successfully.
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.448 2 DEBUG nova.virt.libvirt.driver [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.463 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.469 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.472 2 DEBUG nova.virt.libvirt.driver [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.473 2 DEBUG nova.virt.libvirt.driver [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.473 2 DEBUG nova.virt.libvirt.driver [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.474 2 DEBUG nova.virt.libvirt.driver [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.474 2 DEBUG nova.virt.libvirt.driver [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.475 2 DEBUG nova.virt.libvirt.driver [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.503 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.504 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408436.4426231, 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.504 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] VM Paused (Lifecycle Event)
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.533 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.536 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408436.4445148, 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.537 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] VM Resumed (Lifecycle Event)
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.560 2 INFO nova.compute.manager [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Took 6.63 seconds to spawn the instance on the hypervisor.
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.560 2 DEBUG nova.compute.manager [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.561 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.569 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.600 2 DEBUG nova.compute.manager [req-df51ab87-3985-4e9b-a3fd-5c89bb47486a req-404f79af-c85e-4077-a54c-c82954f35bb3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Received event network-vif-plugged-6fc2406f-6b10-4cc3-a5a7-090de3a926bf external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.601 2 DEBUG oslo_concurrency.lockutils [req-df51ab87-3985-4e9b-a3fd-5c89bb47486a req-404f79af-c85e-4077-a54c-c82954f35bb3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.601 2 DEBUG oslo_concurrency.lockutils [req-df51ab87-3985-4e9b-a3fd-5c89bb47486a req-404f79af-c85e-4077-a54c-c82954f35bb3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.602 2 DEBUG oslo_concurrency.lockutils [req-df51ab87-3985-4e9b-a3fd-5c89bb47486a req-404f79af-c85e-4077-a54c-c82954f35bb3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.602 2 DEBUG nova.compute.manager [req-df51ab87-3985-4e9b-a3fd-5c89bb47486a req-404f79af-c85e-4077-a54c-c82954f35bb3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] No waiting events found dispatching network-vif-plugged-6fc2406f-6b10-4cc3-a5a7-090de3a926bf pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.603 2 WARNING nova.compute.manager [req-df51ab87-3985-4e9b-a3fd-5c89bb47486a req-404f79af-c85e-4077-a54c-c82954f35bb3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Received unexpected event network-vif-plugged-6fc2406f-6b10-4cc3-a5a7-090de3a926bf for instance with vm_state building and task_state spawning.
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.605 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.672 2 INFO nova.compute.manager [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Took 7.73 seconds to build instance.
Oct 02 12:33:56 compute-0 nova_compute[192079]: 2025-10-02 12:33:56.691 2 DEBUG oslo_concurrency.lockutils [None req-bc689a30-00ee-497a-90a2-a0645415b12b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 8.007s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:33:57 compute-0 nova_compute[192079]: 2025-10-02 12:33:57.389 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:33:58 compute-0 nova_compute[192079]: 2025-10-02 12:33:58.654 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:00 compute-0 nova_compute[192079]: 2025-10-02 12:34:00.925 2 DEBUG nova.compute.manager [req-8707b30f-12af-4ef3-baf8-5f692278fbc3 req-f04ff134-22fb-4e8f-a6d1-d96b92f7c812 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Received event network-changed-6fc2406f-6b10-4cc3-a5a7-090de3a926bf external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:34:00 compute-0 nova_compute[192079]: 2025-10-02 12:34:00.926 2 DEBUG nova.compute.manager [req-8707b30f-12af-4ef3-baf8-5f692278fbc3 req-f04ff134-22fb-4e8f-a6d1-d96b92f7c812 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Refreshing instance network info cache due to event network-changed-6fc2406f-6b10-4cc3-a5a7-090de3a926bf. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:34:00 compute-0 nova_compute[192079]: 2025-10-02 12:34:00.927 2 DEBUG oslo_concurrency.lockutils [req-8707b30f-12af-4ef3-baf8-5f692278fbc3 req-f04ff134-22fb-4e8f-a6d1-d96b92f7c812 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:34:00 compute-0 nova_compute[192079]: 2025-10-02 12:34:00.927 2 DEBUG oslo_concurrency.lockutils [req-8707b30f-12af-4ef3-baf8-5f692278fbc3 req-f04ff134-22fb-4e8f-a6d1-d96b92f7c812 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:34:00 compute-0 nova_compute[192079]: 2025-10-02 12:34:00.927 2 DEBUG nova.network.neutron [req-8707b30f-12af-4ef3-baf8-5f692278fbc3 req-f04ff134-22fb-4e8f-a6d1-d96b92f7c812 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Refreshing network info cache for port 6fc2406f-6b10-4cc3-a5a7-090de3a926bf _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:34:01 compute-0 nova_compute[192079]: 2025-10-02 12:34:01.605 2 INFO nova.compute.manager [None req-f42fe1ef-3303-4e80-beb0-9910029ced6f 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Get console output
Oct 02 12:34:01 compute-0 nova_compute[192079]: 2025-10-02 12:34:01.611 55 INFO nova.privsep.libvirt [-] Ignored error while reading from instance console pty: can't concat NoneType to bytes
Oct 02 12:34:01 compute-0 nova_compute[192079]: 2025-10-02 12:34:01.914 2 DEBUG oslo_concurrency.lockutils [None req-bf1b2c53-cb82-4cde-b574-f9ad1c2e587c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "9c817262-fee7-483c-ac98-6d7648890eb0" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:34:01 compute-0 nova_compute[192079]: 2025-10-02 12:34:01.915 2 DEBUG oslo_concurrency.lockutils [None req-bf1b2c53-cb82-4cde-b574-f9ad1c2e587c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "9c817262-fee7-483c-ac98-6d7648890eb0" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:34:01 compute-0 nova_compute[192079]: 2025-10-02 12:34:01.915 2 DEBUG nova.compute.manager [None req-bf1b2c53-cb82-4cde-b574-f9ad1c2e587c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:34:01 compute-0 nova_compute[192079]: 2025-10-02 12:34:01.919 2 DEBUG nova.compute.manager [None req-bf1b2c53-cb82-4cde-b574-f9ad1c2e587c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 do_stop_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3338
Oct 02 12:34:01 compute-0 nova_compute[192079]: 2025-10-02 12:34:01.919 2 DEBUG nova.objects.instance [None req-bf1b2c53-cb82-4cde-b574-f9ad1c2e587c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'flavor' on Instance uuid 9c817262-fee7-483c-ac98-6d7648890eb0 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:34:01 compute-0 nova_compute[192079]: 2025-10-02 12:34:01.947 2 DEBUG nova.objects.instance [None req-bf1b2c53-cb82-4cde-b574-f9ad1c2e587c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'info_cache' on Instance uuid 9c817262-fee7-483c-ac98-6d7648890eb0 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:34:01 compute-0 nova_compute[192079]: 2025-10-02 12:34:01.986 2 DEBUG nova.virt.libvirt.driver [None req-bf1b2c53-cb82-4cde-b574-f9ad1c2e587c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Shutting down instance from state 1 _clean_shutdown /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4071
Oct 02 12:34:02 compute-0 podman[245793]: 2025-10-02 12:34:02.140104718 +0000 UTC m=+0.058766391 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:34:02 compute-0 podman[245794]: 2025-10-02 12:34:02.141833895 +0000 UTC m=+0.057707052 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, container_name=iscsid, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:34:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:02.236 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:34:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:02.236 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:34:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:02.237 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.347 2 DEBUG nova.network.neutron [req-8707b30f-12af-4ef3-baf8-5f692278fbc3 req-f04ff134-22fb-4e8f-a6d1-d96b92f7c812 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Updated VIF entry in instance network info cache for port 6fc2406f-6b10-4cc3-a5a7-090de3a926bf. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.348 2 DEBUG nova.network.neutron [req-8707b30f-12af-4ef3-baf8-5f692278fbc3 req-f04ff134-22fb-4e8f-a6d1-d96b92f7c812 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Updating instance_info_cache with network_info: [{"id": "6fc2406f-6b10-4cc3-a5a7-090de3a926bf", "address": "fa:16:3e:fe:89:d4", "network": {"id": "e1db9fd6-3b23-47bd-a491-8e04b76ccc0a", "bridge": "br-int", "label": "tempest-network-smoke--559146252", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap6fc2406f-6b", "ovs_interfaceid": "6fc2406f-6b10-4cc3-a5a7-090de3a926bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.390 2 DEBUG oslo_concurrency.lockutils [req-8707b30f-12af-4ef3-baf8-5f692278fbc3 req-f04ff134-22fb-4e8f-a6d1-d96b92f7c812 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.391 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._run_image_cache_manager_pass run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.665 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "storage-registry-lock" by "nova.virt.storage_users.register_storage_use.<locals>.do_register_storage_use" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.666 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "storage-registry-lock" acquired by "nova.virt.storage_users.register_storage_use.<locals>.do_register_storage_use" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.666 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "storage-registry-lock" "released" by "nova.virt.storage_users.register_storage_use.<locals>.do_register_storage_use" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.667 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "storage-registry-lock" by "nova.virt.storage_users.get_storage_users.<locals>.do_get_storage_users" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.667 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "storage-registry-lock" acquired by "nova.virt.storage_users.get_storage_users.<locals>.do_get_storage_users" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.667 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "storage-registry-lock" "released" by "nova.virt.storage_users.get_storage_users.<locals>.do_get_storage_users" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.687 2 DEBUG nova.virt.libvirt.imagecache [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Adding ephemeral_1_0706d66 into backend ephemeral images _store_ephemeral_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/imagecache.py:100
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.701 2 DEBUG nova.virt.libvirt.imagecache [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Verify base images _age_and_verify_cached_images /usr/lib/python3.9/site-packages/nova/virt/libvirt/imagecache.py:314
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.702 2 DEBUG nova.virt.libvirt.imagecache [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Image id cf60d86d-f1d5-4be4-976e-7488dbdcf0b2 yields fingerprint 068b233e8d7f49e215e2900dde7d25b776cad955 _age_and_verify_cached_images /usr/lib/python3.9/site-packages/nova/virt/libvirt/imagecache.py:319
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.702 2 INFO nova.virt.libvirt.imagecache [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] image cf60d86d-f1d5-4be4-976e-7488dbdcf0b2 at (/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955): checking
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.703 2 DEBUG nova.virt.libvirt.imagecache [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] image cf60d86d-f1d5-4be4-976e-7488dbdcf0b2 at (/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955): image is in use _mark_in_use /usr/lib/python3.9/site-packages/nova/virt/libvirt/imagecache.py:279
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.704 2 DEBUG nova.virt.libvirt.imagecache [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Image id  yields fingerprint da39a3ee5e6b4b0d3255bfef95601890afd80709 _age_and_verify_cached_images /usr/lib/python3.9/site-packages/nova/virt/libvirt/imagecache.py:319
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.705 2 DEBUG nova.virt.libvirt.imagecache [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] 9c817262-fee7-483c-ac98-6d7648890eb0 is a valid instance name _list_backing_images /usr/lib/python3.9/site-packages/nova/virt/libvirt/imagecache.py:126
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.705 2 DEBUG nova.virt.libvirt.imagecache [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] 9c817262-fee7-483c-ac98-6d7648890eb0 has a disk file _list_backing_images /usr/lib/python3.9/site-packages/nova/virt/libvirt/imagecache.py:129
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.706 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.790 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk --force-share --output=json" returned: 0 in 0.084s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.791 2 DEBUG nova.virt.libvirt.imagecache [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance 9c817262-fee7-483c-ac98-6d7648890eb0 is backed by 068b233e8d7f49e215e2900dde7d25b776cad955 _list_backing_images /usr/lib/python3.9/site-packages/nova/virt/libvirt/imagecache.py:141
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.791 2 DEBUG nova.virt.libvirt.imagecache [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a is a valid instance name _list_backing_images /usr/lib/python3.9/site-packages/nova/virt/libvirt/imagecache.py:126
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.792 2 DEBUG nova.virt.libvirt.imagecache [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a has a disk file _list_backing_images /usr/lib/python3.9/site-packages/nova/virt/libvirt/imagecache.py:129
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.792 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.844 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a/disk --force-share --output=json" returned: 0 in 0.052s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.845 2 DEBUG nova.virt.libvirt.imagecache [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a is backed by 068b233e8d7f49e215e2900dde7d25b776cad955 _list_backing_images /usr/lib/python3.9/site-packages/nova/virt/libvirt/imagecache.py:141
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.846 2 WARNING nova.virt.libvirt.imagecache [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Unknown base file: /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.846 2 WARNING nova.virt.libvirt.imagecache [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Unknown base file: /var/lib/nova/instances/_base/1c97c4192acfe97009a420fda390ab0403d0e46a
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.846 2 INFO nova.virt.libvirt.imagecache [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Active base files: /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.846 2 INFO nova.virt.libvirt.imagecache [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Removable base files: /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2 /var/lib/nova/instances/_base/1c97c4192acfe97009a420fda390ab0403d0e46a
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.847 2 INFO nova.virt.libvirt.imagecache [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Base, swap or ephemeral file too young to remove: /var/lib/nova/instances/_base/d7f074efa852dc950deac120296f6eecf48a40d2
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.847 2 INFO nova.virt.libvirt.imagecache [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Base, swap or ephemeral file too young to remove: /var/lib/nova/instances/_base/1c97c4192acfe97009a420fda390ab0403d0e46a
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.848 2 DEBUG nova.virt.libvirt.imagecache [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Verification complete _age_and_verify_cached_images /usr/lib/python3.9/site-packages/nova/virt/libvirt/imagecache.py:350
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.848 2 DEBUG nova.virt.libvirt.imagecache [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Verify swap images _age_and_verify_swap_images /usr/lib/python3.9/site-packages/nova/virt/libvirt/imagecache.py:299
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.848 2 DEBUG nova.virt.libvirt.imagecache [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Verify ephemeral images _age_and_verify_ephemeral_images /usr/lib/python3.9/site-packages/nova/virt/libvirt/imagecache.py:284
Oct 02 12:34:02 compute-0 nova_compute[192079]: 2025-10-02 12:34:02.848 2 INFO nova.virt.libvirt.imagecache [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Base, swap or ephemeral file too young to remove: /var/lib/nova/instances/_base/ephemeral_1_0706d66
Oct 02 12:34:03 compute-0 nova_compute[192079]: 2025-10-02 12:34:03.659 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:04 compute-0 kernel: tapc4934bb6-50 (unregistering): left promiscuous mode
Oct 02 12:34:04 compute-0 NetworkManager[51160]: <info>  [1759408444.1781] device (tapc4934bb6-50): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:34:04 compute-0 ovn_controller[94336]: 2025-10-02T12:34:04Z|00571|binding|INFO|Releasing lport c4934bb6-5047-47ec-b0c6-127b4274769a from this chassis (sb_readonly=0)
Oct 02 12:34:04 compute-0 nova_compute[192079]: 2025-10-02 12:34:04.192 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:04 compute-0 ovn_controller[94336]: 2025-10-02T12:34:04Z|00572|binding|INFO|Setting lport c4934bb6-5047-47ec-b0c6-127b4274769a down in Southbound
Oct 02 12:34:04 compute-0 ovn_controller[94336]: 2025-10-02T12:34:04Z|00573|binding|INFO|Removing iface tapc4934bb6-50 ovn-installed in OVS
Oct 02 12:34:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:04.202 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:11:3b:3b 10.100.0.5'], port_security=['fa:16:3e:11:3b:3b 10.100.0.5'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.5/28', 'neutron:device_id': '9c817262-fee7-483c-ac98-6d7648890eb0', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '76c7dd40d83e4e3ca71abbebf57921b6', 'neutron:revision_number': '4', 'neutron:security_group_ids': '96cf165f-4eb7-4b43-884d-a9b6e5a897e1', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com', 'neutron:port_fip': '192.168.122.232'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=9097c5a0-e0b8-419e-918d-de3827bd6390, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=c4934bb6-5047-47ec-b0c6-127b4274769a) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:34:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:04.203 103294 INFO neutron.agent.ovn.metadata.agent [-] Port c4934bb6-5047-47ec-b0c6-127b4274769a in datapath d5d28c5b-6eab-4c56-bc64-1dd8250f45c6 unbound from our chassis
Oct 02 12:34:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:04.204 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network d5d28c5b-6eab-4c56-bc64-1dd8250f45c6, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:34:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:04.206 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[81c3f5fd-543a-4645-a0ff-6f01c4687b0e]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:04 compute-0 nova_compute[192079]: 2025-10-02 12:34:04.207 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:04.207 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6 namespace which is not needed anymore
Oct 02 12:34:04 compute-0 nova_compute[192079]: 2025-10-02 12:34:04.212 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:04 compute-0 systemd[1]: machine-qemu\x2d71\x2dinstance\x2d00000096.scope: Deactivated successfully.
Oct 02 12:34:04 compute-0 systemd[1]: machine-qemu\x2d71\x2dinstance\x2d00000096.scope: Consumed 14.219s CPU time.
Oct 02 12:34:04 compute-0 systemd-machined[152150]: Machine qemu-71-instance-00000096 terminated.
Oct 02 12:34:04 compute-0 neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6[245562]: [NOTICE]   (245566) : haproxy version is 2.8.14-c23fe91
Oct 02 12:34:04 compute-0 neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6[245562]: [NOTICE]   (245566) : path to executable is /usr/sbin/haproxy
Oct 02 12:34:04 compute-0 neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6[245562]: [ALERT]    (245566) : Current worker (245568) exited with code 143 (Terminated)
Oct 02 12:34:04 compute-0 neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6[245562]: [WARNING]  (245566) : All workers exited. Exiting... (0)
Oct 02 12:34:04 compute-0 systemd[1]: libpod-b29aed02c1b812072bf3b768f1e9aeb77072b509da6445056a7648a1aa1199fd.scope: Deactivated successfully.
Oct 02 12:34:04 compute-0 podman[245864]: 2025-10-02 12:34:04.35622792 +0000 UTC m=+0.048048389 container died b29aed02c1b812072bf3b768f1e9aeb77072b509da6445056a7648a1aa1199fd (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:34:04 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-b29aed02c1b812072bf3b768f1e9aeb77072b509da6445056a7648a1aa1199fd-userdata-shm.mount: Deactivated successfully.
Oct 02 12:34:04 compute-0 systemd[1]: var-lib-containers-storage-overlay-23bf39bbb3daa16a07390a05580ebf15054917020bee0f15cb0ffb4c22357eda-merged.mount: Deactivated successfully.
Oct 02 12:34:04 compute-0 podman[245864]: 2025-10-02 12:34:04.392758145 +0000 UTC m=+0.084578604 container cleanup b29aed02c1b812072bf3b768f1e9aeb77072b509da6445056a7648a1aa1199fd (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2)
Oct 02 12:34:04 compute-0 systemd[1]: libpod-conmon-b29aed02c1b812072bf3b768f1e9aeb77072b509da6445056a7648a1aa1199fd.scope: Deactivated successfully.
Oct 02 12:34:04 compute-0 nova_compute[192079]: 2025-10-02 12:34:04.441 2 DEBUG nova.compute.manager [req-bce99841-33cc-4710-95be-11bfaee4341d req-fd079d4b-74cf-45a0-8006-861f3225a8bc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Received event network-vif-unplugged-c4934bb6-5047-47ec-b0c6-127b4274769a external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:34:04 compute-0 nova_compute[192079]: 2025-10-02 12:34:04.442 2 DEBUG oslo_concurrency.lockutils [req-bce99841-33cc-4710-95be-11bfaee4341d req-fd079d4b-74cf-45a0-8006-861f3225a8bc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:34:04 compute-0 nova_compute[192079]: 2025-10-02 12:34:04.443 2 DEBUG oslo_concurrency.lockutils [req-bce99841-33cc-4710-95be-11bfaee4341d req-fd079d4b-74cf-45a0-8006-861f3225a8bc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:34:04 compute-0 nova_compute[192079]: 2025-10-02 12:34:04.443 2 DEBUG oslo_concurrency.lockutils [req-bce99841-33cc-4710-95be-11bfaee4341d req-fd079d4b-74cf-45a0-8006-861f3225a8bc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:34:04 compute-0 nova_compute[192079]: 2025-10-02 12:34:04.444 2 DEBUG nova.compute.manager [req-bce99841-33cc-4710-95be-11bfaee4341d req-fd079d4b-74cf-45a0-8006-861f3225a8bc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] No waiting events found dispatching network-vif-unplugged-c4934bb6-5047-47ec-b0c6-127b4274769a pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:34:04 compute-0 nova_compute[192079]: 2025-10-02 12:34:04.444 2 WARNING nova.compute.manager [req-bce99841-33cc-4710-95be-11bfaee4341d req-fd079d4b-74cf-45a0-8006-861f3225a8bc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Received unexpected event network-vif-unplugged-c4934bb6-5047-47ec-b0c6-127b4274769a for instance with vm_state active and task_state powering-off.
Oct 02 12:34:04 compute-0 podman[245893]: 2025-10-02 12:34:04.470575263 +0000 UTC m=+0.054661259 container remove b29aed02c1b812072bf3b768f1e9aeb77072b509da6445056a7648a1aa1199fd (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001)
Oct 02 12:34:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:04.477 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[71becbde-f00f-48aa-a9c2-6a9ebd3abf19]: (4, ('Thu Oct  2 12:34:04 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6 (b29aed02c1b812072bf3b768f1e9aeb77072b509da6445056a7648a1aa1199fd)\nb29aed02c1b812072bf3b768f1e9aeb77072b509da6445056a7648a1aa1199fd\nThu Oct  2 12:34:04 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6 (b29aed02c1b812072bf3b768f1e9aeb77072b509da6445056a7648a1aa1199fd)\nb29aed02c1b812072bf3b768f1e9aeb77072b509da6445056a7648a1aa1199fd\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:04.478 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9ddae7ce-a67f-4c13-964f-27a8b0d6a411]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:04.479 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapd5d28c5b-60, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:34:04 compute-0 nova_compute[192079]: 2025-10-02 12:34:04.481 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:04 compute-0 kernel: tapd5d28c5b-60: left promiscuous mode
Oct 02 12:34:04 compute-0 nova_compute[192079]: 2025-10-02 12:34:04.493 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:04 compute-0 nova_compute[192079]: 2025-10-02 12:34:04.498 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:04.502 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6a0134cf-234d-492e-b6e5-225bdbd64d80]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:04.537 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[81417b7f-f6f2-41eb-aa38-d38594960cd1]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:04.539 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[538f09f0-4902-42f7-9739-2f641a2de888]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:04.559 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1590f2d0-ce45-49ab-a8bb-7a4b430824f6]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 641974, 'reachable_time': 17496, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 245931, 'error': None, 'target': 'ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:04.562 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:34:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:04.562 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[5e05475c-2c7a-4e89-ab4e-e85f6cc89e50]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:04 compute-0 systemd[1]: run-netns-ovnmeta\x2dd5d28c5b\x2d6eab\x2d4c56\x2dbc64\x2d1dd8250f45c6.mount: Deactivated successfully.
Oct 02 12:34:05 compute-0 nova_compute[192079]: 2025-10-02 12:34:05.001 2 INFO nova.virt.libvirt.driver [None req-bf1b2c53-cb82-4cde-b574-f9ad1c2e587c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Instance shutdown successfully after 3 seconds.
Oct 02 12:34:05 compute-0 nova_compute[192079]: 2025-10-02 12:34:05.008 2 INFO nova.virt.libvirt.driver [-] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Instance destroyed successfully.
Oct 02 12:34:05 compute-0 nova_compute[192079]: 2025-10-02 12:34:05.009 2 DEBUG nova.objects.instance [None req-bf1b2c53-cb82-4cde-b574-f9ad1c2e587c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'numa_topology' on Instance uuid 9c817262-fee7-483c-ac98-6d7648890eb0 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:34:05 compute-0 nova_compute[192079]: 2025-10-02 12:34:05.027 2 DEBUG nova.compute.manager [None req-bf1b2c53-cb82-4cde-b574-f9ad1c2e587c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:34:05 compute-0 nova_compute[192079]: 2025-10-02 12:34:05.160 2 DEBUG oslo_concurrency.lockutils [None req-bf1b2c53-cb82-4cde-b574-f9ad1c2e587c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "9c817262-fee7-483c-ac98-6d7648890eb0" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 3.245s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:34:06 compute-0 nova_compute[192079]: 2025-10-02 12:34:06.579 2 DEBUG nova.compute.manager [req-d760b337-a899-41db-926f-e3fce3891d4d req-fd531642-4ec9-4c7e-abe1-c240036f7723 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Received event network-vif-plugged-c4934bb6-5047-47ec-b0c6-127b4274769a external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:34:06 compute-0 nova_compute[192079]: 2025-10-02 12:34:06.580 2 DEBUG oslo_concurrency.lockutils [req-d760b337-a899-41db-926f-e3fce3891d4d req-fd531642-4ec9-4c7e-abe1-c240036f7723 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:34:06 compute-0 nova_compute[192079]: 2025-10-02 12:34:06.581 2 DEBUG oslo_concurrency.lockutils [req-d760b337-a899-41db-926f-e3fce3891d4d req-fd531642-4ec9-4c7e-abe1-c240036f7723 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:34:06 compute-0 nova_compute[192079]: 2025-10-02 12:34:06.582 2 DEBUG oslo_concurrency.lockutils [req-d760b337-a899-41db-926f-e3fce3891d4d req-fd531642-4ec9-4c7e-abe1-c240036f7723 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:34:06 compute-0 nova_compute[192079]: 2025-10-02 12:34:06.582 2 DEBUG nova.compute.manager [req-d760b337-a899-41db-926f-e3fce3891d4d req-fd531642-4ec9-4c7e-abe1-c240036f7723 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] No waiting events found dispatching network-vif-plugged-c4934bb6-5047-47ec-b0c6-127b4274769a pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:34:06 compute-0 nova_compute[192079]: 2025-10-02 12:34:06.583 2 WARNING nova.compute.manager [req-d760b337-a899-41db-926f-e3fce3891d4d req-fd531642-4ec9-4c7e-abe1-c240036f7723 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Received unexpected event network-vif-plugged-c4934bb6-5047-47ec-b0c6-127b4274769a for instance with vm_state stopped and task_state None.
Oct 02 12:34:07 compute-0 nova_compute[192079]: 2025-10-02 12:34:07.372 2 INFO nova.compute.manager [None req-b54fcb56-e25d-4c47-b70f-914fbabe0a3c 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Get console output
Oct 02 12:34:07 compute-0 nova_compute[192079]: 2025-10-02 12:34:07.393 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:07 compute-0 nova_compute[192079]: 2025-10-02 12:34:07.681 2 DEBUG nova.objects.instance [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'flavor' on Instance uuid 9c817262-fee7-483c-ac98-6d7648890eb0 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:34:07 compute-0 nova_compute[192079]: 2025-10-02 12:34:07.703 2 DEBUG nova.objects.instance [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'info_cache' on Instance uuid 9c817262-fee7-483c-ac98-6d7648890eb0 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:34:07 compute-0 nova_compute[192079]: 2025-10-02 12:34:07.730 2 DEBUG oslo_concurrency.lockutils [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "refresh_cache-9c817262-fee7-483c-ac98-6d7648890eb0" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:34:07 compute-0 nova_compute[192079]: 2025-10-02 12:34:07.731 2 DEBUG oslo_concurrency.lockutils [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquired lock "refresh_cache-9c817262-fee7-483c-ac98-6d7648890eb0" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:34:07 compute-0 nova_compute[192079]: 2025-10-02 12:34:07.731 2 DEBUG nova.network.neutron [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:34:08 compute-0 nova_compute[192079]: 2025-10-02 12:34:08.663 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:09 compute-0 podman[245960]: 2025-10-02 12:34:09.181861193 +0000 UTC m=+0.071580500 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>)
Oct 02 12:34:09 compute-0 ovn_controller[94336]: 2025-10-02T12:34:09Z|00060|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:fe:89:d4 10.100.0.7
Oct 02 12:34:09 compute-0 ovn_controller[94336]: 2025-10-02T12:34:09Z|00061|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:fe:89:d4 10.100.0.7
Oct 02 12:34:09 compute-0 podman[245959]: 2025-10-02 12:34:09.225757477 +0000 UTC m=+0.120579343 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, tcib_managed=true, config_id=ovn_controller, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_controller)
Oct 02 12:34:09 compute-0 podman[245958]: 2025-10-02 12:34:09.233275482 +0000 UTC m=+0.131157812 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_metadata_agent, 
org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.443 2 DEBUG nova.network.neutron [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Updating instance_info_cache with network_info: [{"id": "c4934bb6-5047-47ec-b0c6-127b4274769a", "address": "fa:16:3e:11:3b:3b", "network": {"id": "d5d28c5b-6eab-4c56-bc64-1dd8250f45c6", "bridge": "br-int", "label": "tempest-network-smoke--1759416782", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4934bb6-50", "ovs_interfaceid": "c4934bb6-5047-47ec-b0c6-127b4274769a", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.462 2 DEBUG oslo_concurrency.lockutils [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Releasing lock "refresh_cache-9c817262-fee7-483c-ac98-6d7648890eb0" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.490 2 INFO nova.virt.libvirt.driver [-] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Instance destroyed successfully.
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.490 2 DEBUG nova.objects.instance [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'numa_topology' on Instance uuid 9c817262-fee7-483c-ac98-6d7648890eb0 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.505 2 DEBUG nova.objects.instance [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'resources' on Instance uuid 9c817262-fee7-483c-ac98-6d7648890eb0 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.520 2 DEBUG nova.virt.libvirt.vif [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:33:32Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestNetworkAdvancedServerOps-server-1842419854',display_name='tempest-TestNetworkAdvancedServerOps-server-1842419854',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkadvancedserverops-server-1842419854',id=150,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBBcmc6POLJD4DWxVpjM7Q+mpn1vqWiz84SJ4bJOoXuhI8e1ZYxo5xDkFJVaGxPGGCkjomFU7VMydgd3IiJebhoaGTMBDztV9vB5kp4HC2Ekh6aB6IjhW19nhgZQ5E8+LRw==',key_name='tempest-TestNetworkAdvancedServerOps-467970098',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:33:43Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=4,progress=0,project_id='76c7dd40d83e4e3ca71abbebf57921b6',ramdisk_id='',reservation_id='r-aaoru9xk',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=<?>,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-TestNetworkAdvancedServerOps-597114071',owner_user_name='tempest-TestNetworkAdvancedServerOps-597114071-project-member'},tags=<?>,task_state='powering-on',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:34:05Z,user_data=None,user_id='1faa7e121a0e43ad8cb4ae5b2cfcc6a2',uuid=9c817262-fee7-483c-ac98-6d7648890eb0,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='stopped') vif={"id": "c4934bb6-5047-47ec-b0c6-127b4274769a", "address": "fa:16:3e:11:3b:3b", "network": {"id": "d5d28c5b-6eab-4c56-bc64-1dd8250f45c6", "bridge": "br-int", "label": "tempest-network-smoke--1759416782", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": 
"fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4934bb6-50", "ovs_interfaceid": "c4934bb6-5047-47ec-b0c6-127b4274769a", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.520 2 DEBUG nova.network.os_vif_util [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converting VIF {"id": "c4934bb6-5047-47ec-b0c6-127b4274769a", "address": "fa:16:3e:11:3b:3b", "network": {"id": "d5d28c5b-6eab-4c56-bc64-1dd8250f45c6", "bridge": "br-int", "label": "tempest-network-smoke--1759416782", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4934bb6-50", "ovs_interfaceid": "c4934bb6-5047-47ec-b0c6-127b4274769a", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.521 2 DEBUG nova.network.os_vif_util [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:11:3b:3b,bridge_name='br-int',has_traffic_filtering=True,id=c4934bb6-5047-47ec-b0c6-127b4274769a,network=Network(d5d28c5b-6eab-4c56-bc64-1dd8250f45c6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapc4934bb6-50') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.521 2 DEBUG os_vif [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:11:3b:3b,bridge_name='br-int',has_traffic_filtering=True,id=c4934bb6-5047-47ec-b0c6-127b4274769a,network=Network(d5d28c5b-6eab-4c56-bc64-1dd8250f45c6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapc4934bb6-50') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.523 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.524 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapc4934bb6-50, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.527 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.529 2 INFO os_vif [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:11:3b:3b,bridge_name='br-int',has_traffic_filtering=True,id=c4934bb6-5047-47ec-b0c6-127b4274769a,network=Network(d5d28c5b-6eab-4c56-bc64-1dd8250f45c6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapc4934bb6-50')
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.536 2 DEBUG nova.virt.libvirt.driver [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Start _get_guest_xml network_info=[{"id": "c4934bb6-5047-47ec-b0c6-127b4274769a", "address": "fa:16:3e:11:3b:3b", "network": {"id": "d5d28c5b-6eab-4c56-bc64-1dd8250f45c6", "bridge": "br-int", "label": "tempest-network-smoke--1759416782", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4934bb6-50", "ovs_interfaceid": "c4934bb6-5047-47ec-b0c6-127b4274769a", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum=<?>,container_format='bare',created_at=<?>,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=1,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=<?>,status=<?>,tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.541 2 WARNING nova.virt.libvirt.driver [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.549 2 DEBUG nova.virt.libvirt.host [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.550 2 DEBUG nova.virt.libvirt.host [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.553 2 DEBUG nova.virt.libvirt.host [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.553 2 DEBUG nova.virt.libvirt.host [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.555 2 DEBUG nova.virt.libvirt.driver [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.555 2 DEBUG nova.virt.hardware [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=<?>,container_format='bare',created_at=<?>,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=1,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=<?>,status=<?>,tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.555 2 DEBUG nova.virt.hardware [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.556 2 DEBUG nova.virt.hardware [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.556 2 DEBUG nova.virt.hardware [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.556 2 DEBUG nova.virt.hardware [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.557 2 DEBUG nova.virt.hardware [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.557 2 DEBUG nova.virt.hardware [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.557 2 DEBUG nova.virt.hardware [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.558 2 DEBUG nova.virt.hardware [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.558 2 DEBUG nova.virt.hardware [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.558 2 DEBUG nova.virt.hardware [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.559 2 DEBUG nova.objects.instance [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'vcpu_model' on Instance uuid 9c817262-fee7-483c-ac98-6d7648890eb0 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.578 2 DEBUG oslo_concurrency.processutils [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk.config --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.649 2 DEBUG oslo_concurrency.processutils [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk.config --force-share --output=json" returned: 0 in 0.071s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.650 2 DEBUG oslo_concurrency.lockutils [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "/var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.650 2 DEBUG oslo_concurrency.lockutils [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "/var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.651 2 DEBUG oslo_concurrency.lockutils [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "/var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.652 2 DEBUG nova.virt.libvirt.vif [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:33:32Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestNetworkAdvancedServerOps-server-1842419854',display_name='tempest-TestNetworkAdvancedServerOps-server-1842419854',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkadvancedserverops-server-1842419854',id=150,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBBcmc6POLJD4DWxVpjM7Q+mpn1vqWiz84SJ4bJOoXuhI8e1ZYxo5xDkFJVaGxPGGCkjomFU7VMydgd3IiJebhoaGTMBDztV9vB5kp4HC2Ekh6aB6IjhW19nhgZQ5E8+LRw==',key_name='tempest-TestNetworkAdvancedServerOps-467970098',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:33:43Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=4,progress=0,project_id='76c7dd40d83e4e3ca71abbebf57921b6',ramdisk_id='',reservation_id='r-aaoru9xk',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=<?>,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-TestNetworkAdvancedServerOps-597114071',owner_user_name='tempest-TestNetworkAdvancedServerOps-597114071-project-member'},tags=<?>,task_state='powering-on',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:34:05Z,user_data=None,user_id='1faa7e121a0e43ad8cb4ae5b2cfcc6a2',uuid=9c817262-fee7-483c-ac98-6d7648890eb0,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='stopped') vif={"id": "c4934bb6-5047-47ec-b0c6-127b4274769a", "address": "fa:16:3e:11:3b:3b", "network": {"id": "d5d28c5b-6eab-4c56-bc64-1dd8250f45c6", "bridge": "br-int", "label": "tempest-network-smoke--1759416782", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", 
"type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4934bb6-50", "ovs_interfaceid": "c4934bb6-5047-47ec-b0c6-127b4274769a", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.652 2 DEBUG nova.network.os_vif_util [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converting VIF {"id": "c4934bb6-5047-47ec-b0c6-127b4274769a", "address": "fa:16:3e:11:3b:3b", "network": {"id": "d5d28c5b-6eab-4c56-bc64-1dd8250f45c6", "bridge": "br-int", "label": "tempest-network-smoke--1759416782", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4934bb6-50", "ovs_interfaceid": "c4934bb6-5047-47ec-b0c6-127b4274769a", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.653 2 DEBUG nova.network.os_vif_util [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:11:3b:3b,bridge_name='br-int',has_traffic_filtering=True,id=c4934bb6-5047-47ec-b0c6-127b4274769a,network=Network(d5d28c5b-6eab-4c56-bc64-1dd8250f45c6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapc4934bb6-50') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.654 2 DEBUG nova.objects.instance [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'pci_devices' on Instance uuid 9c817262-fee7-483c-ac98-6d7648890eb0 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.676 2 DEBUG nova.virt.libvirt.driver [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:34:09 compute-0 nova_compute[192079]:   <uuid>9c817262-fee7-483c-ac98-6d7648890eb0</uuid>
Oct 02 12:34:09 compute-0 nova_compute[192079]:   <name>instance-00000096</name>
Oct 02 12:34:09 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:34:09 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:34:09 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:34:09 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:       <nova:name>tempest-TestNetworkAdvancedServerOps-server-1842419854</nova:name>
Oct 02 12:34:09 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:34:09</nova:creationTime>
Oct 02 12:34:09 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:34:09 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:34:09 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:34:09 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:34:09 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:34:09 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:34:09 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:34:09 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:34:09 compute-0 nova_compute[192079]:         <nova:user uuid="1faa7e121a0e43ad8cb4ae5b2cfcc6a2">tempest-TestNetworkAdvancedServerOps-597114071-project-member</nova:user>
Oct 02 12:34:09 compute-0 nova_compute[192079]:         <nova:project uuid="76c7dd40d83e4e3ca71abbebf57921b6">tempest-TestNetworkAdvancedServerOps-597114071</nova:project>
Oct 02 12:34:09 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:34:09 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:34:09 compute-0 nova_compute[192079]:         <nova:port uuid="c4934bb6-5047-47ec-b0c6-127b4274769a">
Oct 02 12:34:09 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.5" ipVersion="4"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:34:09 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:34:09 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:34:09 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <system>
Oct 02 12:34:09 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:34:09 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:34:09 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:34:09 compute-0 nova_compute[192079]:       <entry name="serial">9c817262-fee7-483c-ac98-6d7648890eb0</entry>
Oct 02 12:34:09 compute-0 nova_compute[192079]:       <entry name="uuid">9c817262-fee7-483c-ac98-6d7648890eb0</entry>
Oct 02 12:34:09 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     </system>
Oct 02 12:34:09 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:34:09 compute-0 nova_compute[192079]:   <os>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:   </os>
Oct 02 12:34:09 compute-0 nova_compute[192079]:   <features>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:   </features>
Oct 02 12:34:09 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:34:09 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:34:09 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:34:09 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:34:09 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk.config"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:34:09 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:11:3b:3b"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:       <target dev="tapc4934bb6-50"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:34:09 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/console.log" append="off"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <video>
Oct 02 12:34:09 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     </video>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <input type="keyboard" bus="usb"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:34:09 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:34:09 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:34:09 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:34:09 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:34:09 compute-0 nova_compute[192079]: </domain>
Oct 02 12:34:09 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.679 2 DEBUG oslo_concurrency.processutils [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.737 2 DEBUG oslo_concurrency.processutils [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk --force-share --output=json" returned: 0 in 0.058s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.739 2 DEBUG oslo_concurrency.processutils [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.796 2 DEBUG oslo_concurrency.processutils [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk --force-share --output=json" returned: 0 in 0.057s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.799 2 DEBUG nova.objects.instance [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'trusted_certs' on Instance uuid 9c817262-fee7-483c-ac98-6d7648890eb0 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.814 2 DEBUG oslo_concurrency.processutils [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.867 2 DEBUG oslo_concurrency.processutils [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.053s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.868 2 DEBUG nova.virt.disk.api [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Checking if we can resize image /var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.868 2 DEBUG oslo_concurrency.processutils [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.922 2 DEBUG oslo_concurrency.processutils [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk --force-share --output=json" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.923 2 DEBUG nova.virt.disk.api [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Cannot resize image /var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.924 2 DEBUG nova.objects.instance [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'migration_context' on Instance uuid 9c817262-fee7-483c-ac98-6d7648890eb0 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.942 2 DEBUG nova.virt.libvirt.vif [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:33:32Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestNetworkAdvancedServerOps-server-1842419854',display_name='tempest-TestNetworkAdvancedServerOps-server-1842419854',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkadvancedserverops-server-1842419854',id=150,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBBcmc6POLJD4DWxVpjM7Q+mpn1vqWiz84SJ4bJOoXuhI8e1ZYxo5xDkFJVaGxPGGCkjomFU7VMydgd3IiJebhoaGTMBDztV9vB5kp4HC2Ekh6aB6IjhW19nhgZQ5E8+LRw==',key_name='tempest-TestNetworkAdvancedServerOps-467970098',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:33:43Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=<?>,power_state=4,progress=0,project_id='76c7dd40d83e4e3ca71abbebf57921b6',ramdisk_id='',reservation_id='r-aaoru9xk',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=<?>,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-TestNetworkAdvancedServerOps-597114071',owner_user_name='tempest-TestNetworkAdvancedServerOps-597114071-project-member'},tags=<?>,task_state='powering-on',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:34:05Z,user_data=None,user_id='1faa7e121a0e43ad8cb4ae5b2cfcc6a2',uuid=9c817262-fee7-483c-ac98-6d7648890eb0,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='stopped') vif={"id": "c4934bb6-5047-47ec-b0c6-127b4274769a", "address": "fa:16:3e:11:3b:3b", "network": {"id": "d5d28c5b-6eab-4c56-bc64-1dd8250f45c6", "bridge": "br-int", "label": "tempest-network-smoke--1759416782", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4934bb6-50", "ovs_interfaceid": "c4934bb6-5047-47ec-b0c6-127b4274769a", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.943 2 DEBUG nova.network.os_vif_util [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converting VIF {"id": "c4934bb6-5047-47ec-b0c6-127b4274769a", "address": "fa:16:3e:11:3b:3b", "network": {"id": "d5d28c5b-6eab-4c56-bc64-1dd8250f45c6", "bridge": "br-int", "label": "tempest-network-smoke--1759416782", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4934bb6-50", "ovs_interfaceid": "c4934bb6-5047-47ec-b0c6-127b4274769a", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.944 2 DEBUG nova.network.os_vif_util [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:11:3b:3b,bridge_name='br-int',has_traffic_filtering=True,id=c4934bb6-5047-47ec-b0c6-127b4274769a,network=Network(d5d28c5b-6eab-4c56-bc64-1dd8250f45c6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapc4934bb6-50') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.944 2 DEBUG os_vif [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:11:3b:3b,bridge_name='br-int',has_traffic_filtering=True,id=c4934bb6-5047-47ec-b0c6-127b4274769a,network=Network(d5d28c5b-6eab-4c56-bc64-1dd8250f45c6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapc4934bb6-50') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.945 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.945 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.946 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.948 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.948 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapc4934bb6-50, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.949 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapc4934bb6-50, col_values=(('external_ids', {'iface-id': 'c4934bb6-5047-47ec-b0c6-127b4274769a', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:11:3b:3b', 'vm-uuid': '9c817262-fee7-483c-ac98-6d7648890eb0'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.950 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:09 compute-0 NetworkManager[51160]: <info>  [1759408449.9518] manager: (tapc4934bb6-50): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/286)
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.955 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.956 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:09 compute-0 nova_compute[192079]: 2025-10-02 12:34:09.957 2 INFO os_vif [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:11:3b:3b,bridge_name='br-int',has_traffic_filtering=True,id=c4934bb6-5047-47ec-b0c6-127b4274769a,network=Network(d5d28c5b-6eab-4c56-bc64-1dd8250f45c6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapc4934bb6-50')
Oct 02 12:34:10 compute-0 kernel: tapc4934bb6-50: entered promiscuous mode
Oct 02 12:34:10 compute-0 NetworkManager[51160]: <info>  [1759408450.0372] manager: (tapc4934bb6-50): new Tun device (/org/freedesktop/NetworkManager/Devices/287)
Oct 02 12:34:10 compute-0 nova_compute[192079]: 2025-10-02 12:34:10.039 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:10 compute-0 ovn_controller[94336]: 2025-10-02T12:34:10Z|00574|binding|INFO|Claiming lport c4934bb6-5047-47ec-b0c6-127b4274769a for this chassis.
Oct 02 12:34:10 compute-0 ovn_controller[94336]: 2025-10-02T12:34:10Z|00575|binding|INFO|c4934bb6-5047-47ec-b0c6-127b4274769a: Claiming fa:16:3e:11:3b:3b 10.100.0.5
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:10.047 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:11:3b:3b 10.100.0.5'], port_security=['fa:16:3e:11:3b:3b 10.100.0.5'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.5/28', 'neutron:device_id': '9c817262-fee7-483c-ac98-6d7648890eb0', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '76c7dd40d83e4e3ca71abbebf57921b6', 'neutron:revision_number': '5', 'neutron:security_group_ids': '96cf165f-4eb7-4b43-884d-a9b6e5a897e1', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:port_fip': '192.168.122.232'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=9097c5a0-e0b8-419e-918d-de3827bd6390, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=c4934bb6-5047-47ec-b0c6-127b4274769a) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:10.048 103294 INFO neutron.agent.ovn.metadata.agent [-] Port c4934bb6-5047-47ec-b0c6-127b4274769a in datapath d5d28c5b-6eab-4c56-bc64-1dd8250f45c6 bound to our chassis
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:10.050 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network d5d28c5b-6eab-4c56-bc64-1dd8250f45c6
Oct 02 12:34:10 compute-0 ovn_controller[94336]: 2025-10-02T12:34:10Z|00576|binding|INFO|Setting lport c4934bb6-5047-47ec-b0c6-127b4274769a ovn-installed in OVS
Oct 02 12:34:10 compute-0 ovn_controller[94336]: 2025-10-02T12:34:10Z|00577|binding|INFO|Setting lport c4934bb6-5047-47ec-b0c6-127b4274769a up in Southbound
Oct 02 12:34:10 compute-0 nova_compute[192079]: 2025-10-02 12:34:10.055 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:10 compute-0 nova_compute[192079]: 2025-10-02 12:34:10.058 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:10.061 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[cac46f48-143b-4e69-b0d9-bbc0c24fbc3a]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:10.061 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tapd5d28c5b-61 in ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:10.063 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tapd5d28c5b-60 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:10.063 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7dee4d48-f39f-4548-ab3d-e2aafc811cf3]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:10.064 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[fd318450-a73a-4f83-8f0b-a5f0a6684143]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:10 compute-0 systemd-udevd[246053]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:10.075 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[f3594cf5-f7d3-4b41-91c1-5ad494c72526]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:10 compute-0 systemd-machined[152150]: New machine qemu-73-instance-00000096.
Oct 02 12:34:10 compute-0 NetworkManager[51160]: <info>  [1759408450.0865] device (tapc4934bb6-50): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:34:10 compute-0 NetworkManager[51160]: <info>  [1759408450.0874] device (tapc4934bb6-50): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:10.100 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d8e04278-321c-43a3-873b-fb47fcb9c1a0]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:10 compute-0 systemd[1]: Started Virtual Machine qemu-73-instance-00000096.
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:10.132 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[71b6cd52-6c01-422d-a5a6-9c1eb165475b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:10 compute-0 NetworkManager[51160]: <info>  [1759408450.1377] manager: (tapd5d28c5b-60): new Veth device (/org/freedesktop/NetworkManager/Devices/288)
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:10.136 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[612568a4-b499-4d77-9c50-f0b942ca8c51]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:10.170 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[46d02ead-e34a-4f70-8882-1b8026dc4550]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:10.173 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[9694db42-c795-4bca-b79a-cbcddc1cd548]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:10 compute-0 NetworkManager[51160]: <info>  [1759408450.2005] device (tapd5d28c5b-60): carrier: link connected
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:10.206 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[11826fb5-d370-4491-bf54-dac8d481e528]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:10.222 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[db4b91bc-bc90-4b61-af36-ede594156f1d]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapd5d28c5b-61'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:09:71:68'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 185], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 644782, 'reachable_time': 41014, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 246086, 'error': None, 'target': 'ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:10.241 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8dcb4927-c411-4b7b-bf6b-ad25b4cd4343]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe09:7168'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 644782, 'tstamp': 644782}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 246087, 'error': None, 'target': 'ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:10.256 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[118f81b8-94f0-4d04-8859-b76f022d26c0]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapd5d28c5b-61'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:09:71:68'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 185], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 644782, 'reachable_time': 41014, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 246088, 'error': None, 'target': 'ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:10.293 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1a8091ea-2a43-41f4-8e13-108dd7dd565e]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:10 compute-0 nova_compute[192079]: 2025-10-02 12:34:10.315 2 DEBUG nova.compute.manager [req-4306d0ab-f52f-4bb6-ba02-3aad844f088d req-7b3dc327-2315-4a0d-ba70-a4773330f523 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Received event network-vif-plugged-c4934bb6-5047-47ec-b0c6-127b4274769a external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:34:10 compute-0 nova_compute[192079]: 2025-10-02 12:34:10.315 2 DEBUG oslo_concurrency.lockutils [req-4306d0ab-f52f-4bb6-ba02-3aad844f088d req-7b3dc327-2315-4a0d-ba70-a4773330f523 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:34:10 compute-0 nova_compute[192079]: 2025-10-02 12:34:10.316 2 DEBUG oslo_concurrency.lockutils [req-4306d0ab-f52f-4bb6-ba02-3aad844f088d req-7b3dc327-2315-4a0d-ba70-a4773330f523 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:34:10 compute-0 nova_compute[192079]: 2025-10-02 12:34:10.316 2 DEBUG oslo_concurrency.lockutils [req-4306d0ab-f52f-4bb6-ba02-3aad844f088d req-7b3dc327-2315-4a0d-ba70-a4773330f523 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:34:10 compute-0 nova_compute[192079]: 2025-10-02 12:34:10.316 2 DEBUG nova.compute.manager [req-4306d0ab-f52f-4bb6-ba02-3aad844f088d req-7b3dc327-2315-4a0d-ba70-a4773330f523 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] No waiting events found dispatching network-vif-plugged-c4934bb6-5047-47ec-b0c6-127b4274769a pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:34:10 compute-0 nova_compute[192079]: 2025-10-02 12:34:10.316 2 WARNING nova.compute.manager [req-4306d0ab-f52f-4bb6-ba02-3aad844f088d req-7b3dc327-2315-4a0d-ba70-a4773330f523 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Received unexpected event network-vif-plugged-c4934bb6-5047-47ec-b0c6-127b4274769a for instance with vm_state stopped and task_state powering-on.
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:10.352 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[469a79d3-f3d6-4fe1-9fd4-1b901564b242]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:10.353 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapd5d28c5b-60, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:10.353 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:10.353 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapd5d28c5b-60, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:34:10 compute-0 kernel: tapd5d28c5b-60: entered promiscuous mode
Oct 02 12:34:10 compute-0 NetworkManager[51160]: <info>  [1759408450.3558] manager: (tapd5d28c5b-60): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/289)
Oct 02 12:34:10 compute-0 nova_compute[192079]: 2025-10-02 12:34:10.355 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:10 compute-0 nova_compute[192079]: 2025-10-02 12:34:10.358 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:10.359 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tapd5d28c5b-60, col_values=(('external_ids', {'iface-id': '410dd08f-af3d-48c2-b0fd-475c37ff4bed'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:34:10 compute-0 nova_compute[192079]: 2025-10-02 12:34:10.360 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:10 compute-0 nova_compute[192079]: 2025-10-02 12:34:10.363 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:10.364 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/d5d28c5b-6eab-4c56-bc64-1dd8250f45c6.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/d5d28c5b-6eab-4c56-bc64-1dd8250f45c6.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:34:10 compute-0 ovn_controller[94336]: 2025-10-02T12:34:10Z|00578|binding|INFO|Releasing lport 410dd08f-af3d-48c2-b0fd-475c37ff4bed from this chassis (sb_readonly=0)
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:10.366 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1fb9d56b-33f1-41bc-85c7-8a510d5b97cf]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:10.367 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/d5d28c5b-6eab-4c56-bc64-1dd8250f45c6.pid.haproxy
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID d5d28c5b-6eab-4c56-bc64-1dd8250f45c6
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:34:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:10.369 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6', 'env', 'PROCESS_TAG=haproxy-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/d5d28c5b-6eab-4c56-bc64-1dd8250f45c6.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:34:10 compute-0 nova_compute[192079]: 2025-10-02 12:34:10.376 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:10 compute-0 podman[246127]: 2025-10-02 12:34:10.747737502 +0000 UTC m=+0.051183725 container create 8c717c6428c8e0a5c8e5a3f32cc8604c28beb1f08e4fb9c23391af843b37e31f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001)
Oct 02 12:34:10 compute-0 systemd[1]: Started libpod-conmon-8c717c6428c8e0a5c8e5a3f32cc8604c28beb1f08e4fb9c23391af843b37e31f.scope.
Oct 02 12:34:10 compute-0 nova_compute[192079]: 2025-10-02 12:34:10.805 2 DEBUG nova.compute.manager [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:34:10 compute-0 nova_compute[192079]: 2025-10-02 12:34:10.807 2 DEBUG nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Removed pending event for 9c817262-fee7-483c-ac98-6d7648890eb0 due to event _event_emit_delayed /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:438
Oct 02 12:34:10 compute-0 nova_compute[192079]: 2025-10-02 12:34:10.807 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408450.806294, 9c817262-fee7-483c-ac98-6d7648890eb0 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:34:10 compute-0 nova_compute[192079]: 2025-10-02 12:34:10.807 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] VM Resumed (Lifecycle Event)
Oct 02 12:34:10 compute-0 nova_compute[192079]: 2025-10-02 12:34:10.812 2 INFO nova.virt.libvirt.driver [-] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Instance rebooted successfully.
Oct 02 12:34:10 compute-0 nova_compute[192079]: 2025-10-02 12:34:10.813 2 DEBUG nova.compute.manager [None req-792e852e-67b4-42e4-a62f-74c391971f4a 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:34:10 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:34:10 compute-0 podman[246127]: 2025-10-02 12:34:10.721734833 +0000 UTC m=+0.025181076 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:34:10 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/9cf9fde5aa8b5f02e1d05f9c5158d0371319c043b3c1caf20376c52fa9483ed8/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:34:10 compute-0 podman[246127]: 2025-10-02 12:34:10.832956771 +0000 UTC m=+0.136403074 container init 8c717c6428c8e0a5c8e5a3f32cc8604c28beb1f08e4fb9c23391af843b37e31f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3, tcib_managed=true)
Oct 02 12:34:10 compute-0 podman[246127]: 2025-10-02 12:34:10.839010546 +0000 UTC m=+0.142456809 container start 8c717c6428c8e0a5c8e5a3f32cc8604c28beb1f08e4fb9c23391af843b37e31f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:34:10 compute-0 neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6[246142]: [NOTICE]   (246146) : New worker (246148) forked
Oct 02 12:34:10 compute-0 neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6[246142]: [NOTICE]   (246146) : Loading success.
Oct 02 12:34:10 compute-0 nova_compute[192079]: 2025-10-02 12:34:10.902 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:34:10 compute-0 nova_compute[192079]: 2025-10-02 12:34:10.906 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: stopped, current task_state: powering-on, current DB power_state: 4, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:34:10 compute-0 nova_compute[192079]: 2025-10-02 12:34:10.934 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] During sync_power_state the instance has a pending task (powering-on). Skip.
Oct 02 12:34:10 compute-0 nova_compute[192079]: 2025-10-02 12:34:10.934 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408450.8063846, 9c817262-fee7-483c-ac98-6d7648890eb0 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:34:10 compute-0 nova_compute[192079]: 2025-10-02 12:34:10.934 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] VM Started (Lifecycle Event)
Oct 02 12:34:10 compute-0 nova_compute[192079]: 2025-10-02 12:34:10.957 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:34:10 compute-0 nova_compute[192079]: 2025-10-02 12:34:10.960 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Synchronizing instance power state after lifecycle event "Started"; current vm_state: active, current task_state: None, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:34:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:11.877 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=38, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=37) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:34:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:11.879 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 3 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:34:11 compute-0 nova_compute[192079]: 2025-10-02 12:34:11.882 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:12 compute-0 nova_compute[192079]: 2025-10-02 12:34:12.396 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:12 compute-0 nova_compute[192079]: 2025-10-02 12:34:12.422 2 DEBUG nova.compute.manager [req-34656bdc-43a0-4988-b66f-9e7f4554063f req-87ac2353-d95c-4198-b02e-648858a81dcf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Received event network-vif-plugged-c4934bb6-5047-47ec-b0c6-127b4274769a external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:34:12 compute-0 nova_compute[192079]: 2025-10-02 12:34:12.422 2 DEBUG oslo_concurrency.lockutils [req-34656bdc-43a0-4988-b66f-9e7f4554063f req-87ac2353-d95c-4198-b02e-648858a81dcf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:34:12 compute-0 nova_compute[192079]: 2025-10-02 12:34:12.423 2 DEBUG oslo_concurrency.lockutils [req-34656bdc-43a0-4988-b66f-9e7f4554063f req-87ac2353-d95c-4198-b02e-648858a81dcf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:34:12 compute-0 nova_compute[192079]: 2025-10-02 12:34:12.423 2 DEBUG oslo_concurrency.lockutils [req-34656bdc-43a0-4988-b66f-9e7f4554063f req-87ac2353-d95c-4198-b02e-648858a81dcf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:34:12 compute-0 nova_compute[192079]: 2025-10-02 12:34:12.423 2 DEBUG nova.compute.manager [req-34656bdc-43a0-4988-b66f-9e7f4554063f req-87ac2353-d95c-4198-b02e-648858a81dcf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] No waiting events found dispatching network-vif-plugged-c4934bb6-5047-47ec-b0c6-127b4274769a pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:34:12 compute-0 nova_compute[192079]: 2025-10-02 12:34:12.424 2 WARNING nova.compute.manager [req-34656bdc-43a0-4988-b66f-9e7f4554063f req-87ac2353-d95c-4198-b02e-648858a81dcf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Received unexpected event network-vif-plugged-c4934bb6-5047-47ec-b0c6-127b4274769a for instance with vm_state active and task_state None.
Oct 02 12:34:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:14.880 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '38'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:34:14 compute-0 nova_compute[192079]: 2025-10-02 12:34:14.951 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:15 compute-0 nova_compute[192079]: 2025-10-02 12:34:15.450 2 INFO nova.compute.manager [None req-35ea43cb-5c02-438d-9332-57d161d72d30 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Get console output
Oct 02 12:34:15 compute-0 nova_compute[192079]: 2025-10-02 12:34:15.457 55 INFO nova.privsep.libvirt [-] Ignored error while reading from instance console pty: can't concat NoneType to bytes
Oct 02 12:34:15 compute-0 nova_compute[192079]: 2025-10-02 12:34:15.889 2 INFO nova.compute.manager [None req-14637151-82f1-4259-ae2c-32ff93eb52ab a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Get console output
Oct 02 12:34:15 compute-0 nova_compute[192079]: 2025-10-02 12:34:15.893 55 INFO nova.privsep.libvirt [-] Ignored error while reading from instance console pty: can't concat NoneType to bytes
Oct 02 12:34:16 compute-0 nova_compute[192079]: 2025-10-02 12:34:16.703 2 DEBUG oslo_concurrency.lockutils [None req-85e737d4-31cf-49b6-ab05-b9f70ef95b94 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:34:16 compute-0 nova_compute[192079]: 2025-10-02 12:34:16.704 2 DEBUG oslo_concurrency.lockutils [None req-85e737d4-31cf-49b6-ab05-b9f70ef95b94 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:34:16 compute-0 nova_compute[192079]: 2025-10-02 12:34:16.704 2 DEBUG oslo_concurrency.lockutils [None req-85e737d4-31cf-49b6-ab05-b9f70ef95b94 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:34:16 compute-0 nova_compute[192079]: 2025-10-02 12:34:16.704 2 DEBUG oslo_concurrency.lockutils [None req-85e737d4-31cf-49b6-ab05-b9f70ef95b94 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:34:16 compute-0 nova_compute[192079]: 2025-10-02 12:34:16.704 2 DEBUG oslo_concurrency.lockutils [None req-85e737d4-31cf-49b6-ab05-b9f70ef95b94 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:34:16 compute-0 nova_compute[192079]: 2025-10-02 12:34:16.715 2 INFO nova.compute.manager [None req-85e737d4-31cf-49b6-ab05-b9f70ef95b94 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Terminating instance
Oct 02 12:34:16 compute-0 nova_compute[192079]: 2025-10-02 12:34:16.727 2 DEBUG nova.compute.manager [None req-85e737d4-31cf-49b6-ab05-b9f70ef95b94 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:34:16 compute-0 kernel: tap6fc2406f-6b (unregistering): left promiscuous mode
Oct 02 12:34:16 compute-0 NetworkManager[51160]: <info>  [1759408456.7591] device (tap6fc2406f-6b): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:34:16 compute-0 nova_compute[192079]: 2025-10-02 12:34:16.789 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:16 compute-0 ovn_controller[94336]: 2025-10-02T12:34:16Z|00579|binding|INFO|Releasing lport 6fc2406f-6b10-4cc3-a5a7-090de3a926bf from this chassis (sb_readonly=0)
Oct 02 12:34:16 compute-0 ovn_controller[94336]: 2025-10-02T12:34:16Z|00580|binding|INFO|Setting lport 6fc2406f-6b10-4cc3-a5a7-090de3a926bf down in Southbound
Oct 02 12:34:16 compute-0 ovn_controller[94336]: 2025-10-02T12:34:16Z|00581|binding|INFO|Removing iface tap6fc2406f-6b ovn-installed in OVS
Oct 02 12:34:16 compute-0 nova_compute[192079]: 2025-10-02 12:34:16.800 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:16.806 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:fe:89:d4 10.100.0.7'], port_security=['fa:16:3e:fe:89:d4 10.100.0.7'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.7/28', 'neutron:device_id': '10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-e1db9fd6-3b23-47bd-a491-8e04b76ccc0a', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'neutron:revision_number': '4', 'neutron:security_group_ids': '4dfabd8c-e266-4906-bd78-314554158351', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=00403231-2053-4b5b-8468-60aa92bc86a7, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=6fc2406f-6b10-4cc3-a5a7-090de3a926bf) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:34:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:16.807 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 6fc2406f-6b10-4cc3-a5a7-090de3a926bf in datapath e1db9fd6-3b23-47bd-a491-8e04b76ccc0a unbound from our chassis
Oct 02 12:34:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:16.809 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network e1db9fd6-3b23-47bd-a491-8e04b76ccc0a, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:34:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:16.810 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2f78ebcd-57fc-40d4-b637-c9ec120077f3]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:16.810 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-e1db9fd6-3b23-47bd-a491-8e04b76ccc0a namespace which is not needed anymore
Oct 02 12:34:16 compute-0 nova_compute[192079]: 2025-10-02 12:34:16.819 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:16 compute-0 systemd[1]: machine-qemu\x2d72\x2dinstance\x2d00000098.scope: Deactivated successfully.
Oct 02 12:34:16 compute-0 systemd[1]: machine-qemu\x2d72\x2dinstance\x2d00000098.scope: Consumed 14.856s CPU time.
Oct 02 12:34:16 compute-0 systemd-machined[152150]: Machine qemu-72-instance-00000098 terminated.
Oct 02 12:34:16 compute-0 nova_compute[192079]: 2025-10-02 12:34:16.947 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:16 compute-0 nova_compute[192079]: 2025-10-02 12:34:16.952 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:16 compute-0 neutron-haproxy-ovnmeta-e1db9fd6-3b23-47bd-a491-8e04b76ccc0a[245728]: [NOTICE]   (245734) : haproxy version is 2.8.14-c23fe91
Oct 02 12:34:16 compute-0 neutron-haproxy-ovnmeta-e1db9fd6-3b23-47bd-a491-8e04b76ccc0a[245728]: [NOTICE]   (245734) : path to executable is /usr/sbin/haproxy
Oct 02 12:34:16 compute-0 neutron-haproxy-ovnmeta-e1db9fd6-3b23-47bd-a491-8e04b76ccc0a[245728]: [WARNING]  (245734) : Exiting Master process...
Oct 02 12:34:16 compute-0 neutron-haproxy-ovnmeta-e1db9fd6-3b23-47bd-a491-8e04b76ccc0a[245728]: [ALERT]    (245734) : Current worker (245736) exited with code 143 (Terminated)
Oct 02 12:34:16 compute-0 neutron-haproxy-ovnmeta-e1db9fd6-3b23-47bd-a491-8e04b76ccc0a[245728]: [WARNING]  (245734) : All workers exited. Exiting... (0)
Oct 02 12:34:16 compute-0 systemd[1]: libpod-a8376c27ba2363fddc0ba3e9b2b7b44f0a667517f705e17de2fa1607fe532fc7.scope: Deactivated successfully.
Oct 02 12:34:16 compute-0 podman[246181]: 2025-10-02 12:34:16.965106812 +0000 UTC m=+0.064188298 container died a8376c27ba2363fddc0ba3e9b2b7b44f0a667517f705e17de2fa1607fe532fc7 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-e1db9fd6-3b23-47bd-a491-8e04b76ccc0a, tcib_managed=true, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:34:16 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-a8376c27ba2363fddc0ba3e9b2b7b44f0a667517f705e17de2fa1607fe532fc7-userdata-shm.mount: Deactivated successfully.
Oct 02 12:34:16 compute-0 nova_compute[192079]: 2025-10-02 12:34:16.992 2 INFO nova.virt.libvirt.driver [-] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Instance destroyed successfully.
Oct 02 12:34:16 compute-0 nova_compute[192079]: 2025-10-02 12:34:16.993 2 DEBUG nova.objects.instance [None req-85e737d4-31cf-49b6-ab05-b9f70ef95b94 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lazy-loading 'resources' on Instance uuid 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:34:16 compute-0 systemd[1]: var-lib-containers-storage-overlay-60cb7848c5101fb2592b6eec444b4cefc63212b634cc86402b2439d65cba57f8-merged.mount: Deactivated successfully.
Oct 02 12:34:17 compute-0 podman[246181]: 2025-10-02 12:34:17.005497721 +0000 UTC m=+0.104579207 container cleanup a8376c27ba2363fddc0ba3e9b2b7b44f0a667517f705e17de2fa1607fe532fc7 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-e1db9fd6-3b23-47bd-a491-8e04b76ccc0a, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:34:17 compute-0 systemd[1]: libpod-conmon-a8376c27ba2363fddc0ba3e9b2b7b44f0a667517f705e17de2fa1607fe532fc7.scope: Deactivated successfully.
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.013 2 DEBUG nova.virt.libvirt.vif [None req-85e737d4-31cf-49b6-ab05-b9f70ef95b94 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:33:47Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestNetworkBasicOps-server-316138620',display_name='tempest-TestNetworkBasicOps-server-316138620',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkbasicops-server-316138620',id=152,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBCZJ044bOF+4CbUAqlAd/fAKzGJ3BjJQ1O3aAWUjn6R/ZF20KqJILd3qify46f7iuXSfM7SXhCJSXuQOr57zgsxMPL+k0QdvGYXa4GlPAiCAJ2rRRSs//k7mKcwWNdtJgQ==',key_name='tempest-TestNetworkBasicOps-1887469931',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:33:56Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='6e2a4899168a47618e377cb3ac85ddd2',ramdisk_id='',reservation_id='r-281286p4',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-TestNetworkBasicOps-1323893370',owner_user_name='tempest-TestNetworkBasicOps-1323893370-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:33:56Z,user_data=None,user_id='a1898fdf056c4a249c33590f26d4d845',uuid=10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "6fc2406f-6b10-4cc3-a5a7-090de3a926bf", "address": "fa:16:3e:fe:89:d4", "network": {"id": "e1db9fd6-3b23-47bd-a491-8e04b76ccc0a", "bridge": "br-int", "label": "tempest-network-smoke--559146252", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", 
"version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap6fc2406f-6b", "ovs_interfaceid": "6fc2406f-6b10-4cc3-a5a7-090de3a926bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.014 2 DEBUG nova.network.os_vif_util [None req-85e737d4-31cf-49b6-ab05-b9f70ef95b94 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converting VIF {"id": "6fc2406f-6b10-4cc3-a5a7-090de3a926bf", "address": "fa:16:3e:fe:89:d4", "network": {"id": "e1db9fd6-3b23-47bd-a491-8e04b76ccc0a", "bridge": "br-int", "label": "tempest-network-smoke--559146252", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap6fc2406f-6b", "ovs_interfaceid": "6fc2406f-6b10-4cc3-a5a7-090de3a926bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.014 2 DEBUG nova.network.os_vif_util [None req-85e737d4-31cf-49b6-ab05-b9f70ef95b94 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:fe:89:d4,bridge_name='br-int',has_traffic_filtering=True,id=6fc2406f-6b10-4cc3-a5a7-090de3a926bf,network=Network(e1db9fd6-3b23-47bd-a491-8e04b76ccc0a),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap6fc2406f-6b') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.015 2 DEBUG os_vif [None req-85e737d4-31cf-49b6-ab05-b9f70ef95b94 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:fe:89:d4,bridge_name='br-int',has_traffic_filtering=True,id=6fc2406f-6b10-4cc3-a5a7-090de3a926bf,network=Network(e1db9fd6-3b23-47bd-a491-8e04b76ccc0a),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap6fc2406f-6b') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.016 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.017 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap6fc2406f-6b, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.019 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.022 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.024 2 INFO os_vif [None req-85e737d4-31cf-49b6-ab05-b9f70ef95b94 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:fe:89:d4,bridge_name='br-int',has_traffic_filtering=True,id=6fc2406f-6b10-4cc3-a5a7-090de3a926bf,network=Network(e1db9fd6-3b23-47bd-a491-8e04b76ccc0a),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap6fc2406f-6b')
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.025 2 INFO nova.virt.libvirt.driver [None req-85e737d4-31cf-49b6-ab05-b9f70ef95b94 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Deleting instance files /var/lib/nova/instances/10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a_del
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.026 2 INFO nova.virt.libvirt.driver [None req-85e737d4-31cf-49b6-ab05-b9f70ef95b94 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Deletion of /var/lib/nova/instances/10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a_del complete
Oct 02 12:34:17 compute-0 podman[246224]: 2025-10-02 12:34:17.070323757 +0000 UTC m=+0.042787997 container remove a8376c27ba2363fddc0ba3e9b2b7b44f0a667517f705e17de2fa1607fe532fc7 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-e1db9fd6-3b23-47bd-a491-8e04b76ccc0a, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0)
Oct 02 12:34:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:17.076 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c802a0ed-e769-4ef9-9a86-6cc93413c36f]: (4, ('Thu Oct  2 12:34:16 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-e1db9fd6-3b23-47bd-a491-8e04b76ccc0a (a8376c27ba2363fddc0ba3e9b2b7b44f0a667517f705e17de2fa1607fe532fc7)\na8376c27ba2363fddc0ba3e9b2b7b44f0a667517f705e17de2fa1607fe532fc7\nThu Oct  2 12:34:17 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-e1db9fd6-3b23-47bd-a491-8e04b76ccc0a (a8376c27ba2363fddc0ba3e9b2b7b44f0a667517f705e17de2fa1607fe532fc7)\na8376c27ba2363fddc0ba3e9b2b7b44f0a667517f705e17de2fa1607fe532fc7\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:17.078 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[69602977-37ce-44ec-9650-667a917e9c33]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:17.079 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tape1db9fd6-30, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.081 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:17 compute-0 kernel: tape1db9fd6-30: left promiscuous mode
Oct 02 12:34:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:17.089 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b265358a-3e6c-4ef9-80b5-85143cd63af4]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.090 2 INFO nova.compute.manager [None req-85e737d4-31cf-49b6-ab05-b9f70ef95b94 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Took 0.36 seconds to destroy the instance on the hypervisor.
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.091 2 DEBUG oslo.service.loopingcall [None req-85e737d4-31cf-49b6-ab05-b9f70ef95b94 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.091 2 DEBUG nova.compute.manager [-] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.091 2 DEBUG nova.network.neutron [-] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.098 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:17.110 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a4388ccf-c388-4bc3-ae19-ab96755874e6]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:17.112 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[714d2f2d-efc5-4cc7-a5ad-1359296c96d6]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:17.130 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4491f97a-c520-426c-aa0e-fb15b0db08f8]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 643197, 'reachable_time': 20753, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 246239, 'error': None, 'target': 'ovnmeta-e1db9fd6-3b23-47bd-a491-8e04b76ccc0a', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:17.133 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-e1db9fd6-3b23-47bd-a491-8e04b76ccc0a deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:34:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:17.133 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[dfb5397a-1f11-47a4-8a31-66b657cb6bfe]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:17 compute-0 systemd[1]: run-netns-ovnmeta\x2de1db9fd6\x2d3b23\x2d47bd\x2da491\x2d8e04b76ccc0a.mount: Deactivated successfully.
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.423 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.771 2 DEBUG nova.network.neutron [-] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.789 2 INFO nova.compute.manager [-] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Took 0.70 seconds to deallocate network for instance.
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.836 2 DEBUG nova.compute.manager [req-998e5b8d-86ce-4a01-85b6-a7cb0bb1a196 req-f5c0af6c-8411-4854-99f8-583c74ca185f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Received event network-changed-6fc2406f-6b10-4cc3-a5a7-090de3a926bf external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.836 2 DEBUG nova.compute.manager [req-998e5b8d-86ce-4a01-85b6-a7cb0bb1a196 req-f5c0af6c-8411-4854-99f8-583c74ca185f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Refreshing instance network info cache due to event network-changed-6fc2406f-6b10-4cc3-a5a7-090de3a926bf. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.836 2 DEBUG oslo_concurrency.lockutils [req-998e5b8d-86ce-4a01-85b6-a7cb0bb1a196 req-f5c0af6c-8411-4854-99f8-583c74ca185f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.837 2 DEBUG oslo_concurrency.lockutils [req-998e5b8d-86ce-4a01-85b6-a7cb0bb1a196 req-f5c0af6c-8411-4854-99f8-583c74ca185f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.837 2 DEBUG nova.network.neutron [req-998e5b8d-86ce-4a01-85b6-a7cb0bb1a196 req-f5c0af6c-8411-4854-99f8-583c74ca185f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Refreshing network info cache for port 6fc2406f-6b10-4cc3-a5a7-090de3a926bf _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.854 2 DEBUG oslo_concurrency.lockutils [None req-85e737d4-31cf-49b6-ab05-b9f70ef95b94 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.855 2 DEBUG oslo_concurrency.lockutils [None req-85e737d4-31cf-49b6-ab05-b9f70ef95b94 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.923 2 DEBUG nova.compute.provider_tree [None req-85e737d4-31cf-49b6-ab05-b9f70ef95b94 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.939 2 DEBUG nova.scheduler.client.report [None req-85e737d4-31cf-49b6-ab05-b9f70ef95b94 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.959 2 DEBUG oslo_concurrency.lockutils [None req-85e737d4-31cf-49b6-ab05-b9f70ef95b94 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.105s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.986 2 INFO nova.scheduler.client.report [None req-85e737d4-31cf-49b6-ab05-b9f70ef95b94 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Deleted allocations for instance 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a
Oct 02 12:34:17 compute-0 nova_compute[192079]: 2025-10-02 12:34:17.991 2 DEBUG nova.network.neutron [req-998e5b8d-86ce-4a01-85b6-a7cb0bb1a196 req-f5c0af6c-8411-4854-99f8-583c74ca185f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:34:18 compute-0 nova_compute[192079]: 2025-10-02 12:34:18.039 2 DEBUG nova.compute.manager [req-2c2a8e14-6b4d-49c2-92b5-12de764bd7c7 req-4dc8ef5f-ca0b-4250-9d2d-fd3a8f5924cf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Received event network-vif-deleted-6fc2406f-6b10-4cc3-a5a7-090de3a926bf external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:34:18 compute-0 nova_compute[192079]: 2025-10-02 12:34:18.070 2 DEBUG oslo_concurrency.lockutils [None req-85e737d4-31cf-49b6-ab05-b9f70ef95b94 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.367s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:34:18 compute-0 nova_compute[192079]: 2025-10-02 12:34:18.284 2 DEBUG nova.network.neutron [req-998e5b8d-86ce-4a01-85b6-a7cb0bb1a196 req-f5c0af6c-8411-4854-99f8-583c74ca185f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:34:18 compute-0 nova_compute[192079]: 2025-10-02 12:34:18.299 2 DEBUG oslo_concurrency.lockutils [req-998e5b8d-86ce-4a01-85b6-a7cb0bb1a196 req-f5c0af6c-8411-4854-99f8-583c74ca185f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:34:18 compute-0 nova_compute[192079]: 2025-10-02 12:34:18.300 2 DEBUG nova.compute.manager [req-998e5b8d-86ce-4a01-85b6-a7cb0bb1a196 req-f5c0af6c-8411-4854-99f8-583c74ca185f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Received event network-vif-unplugged-6fc2406f-6b10-4cc3-a5a7-090de3a926bf external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:34:18 compute-0 nova_compute[192079]: 2025-10-02 12:34:18.300 2 DEBUG oslo_concurrency.lockutils [req-998e5b8d-86ce-4a01-85b6-a7cb0bb1a196 req-f5c0af6c-8411-4854-99f8-583c74ca185f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:34:18 compute-0 nova_compute[192079]: 2025-10-02 12:34:18.300 2 DEBUG oslo_concurrency.lockutils [req-998e5b8d-86ce-4a01-85b6-a7cb0bb1a196 req-f5c0af6c-8411-4854-99f8-583c74ca185f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:34:18 compute-0 nova_compute[192079]: 2025-10-02 12:34:18.301 2 DEBUG oslo_concurrency.lockutils [req-998e5b8d-86ce-4a01-85b6-a7cb0bb1a196 req-f5c0af6c-8411-4854-99f8-583c74ca185f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:34:18 compute-0 nova_compute[192079]: 2025-10-02 12:34:18.301 2 DEBUG nova.compute.manager [req-998e5b8d-86ce-4a01-85b6-a7cb0bb1a196 req-f5c0af6c-8411-4854-99f8-583c74ca185f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] No waiting events found dispatching network-vif-unplugged-6fc2406f-6b10-4cc3-a5a7-090de3a926bf pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:34:18 compute-0 nova_compute[192079]: 2025-10-02 12:34:18.301 2 DEBUG nova.compute.manager [req-998e5b8d-86ce-4a01-85b6-a7cb0bb1a196 req-f5c0af6c-8411-4854-99f8-583c74ca185f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Received event network-vif-unplugged-6fc2406f-6b10-4cc3-a5a7-090de3a926bf for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:34:18 compute-0 nova_compute[192079]: 2025-10-02 12:34:18.301 2 DEBUG nova.compute.manager [req-998e5b8d-86ce-4a01-85b6-a7cb0bb1a196 req-f5c0af6c-8411-4854-99f8-583c74ca185f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Received event network-vif-plugged-6fc2406f-6b10-4cc3-a5a7-090de3a926bf external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:34:18 compute-0 nova_compute[192079]: 2025-10-02 12:34:18.301 2 DEBUG oslo_concurrency.lockutils [req-998e5b8d-86ce-4a01-85b6-a7cb0bb1a196 req-f5c0af6c-8411-4854-99f8-583c74ca185f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:34:18 compute-0 nova_compute[192079]: 2025-10-02 12:34:18.302 2 DEBUG oslo_concurrency.lockutils [req-998e5b8d-86ce-4a01-85b6-a7cb0bb1a196 req-f5c0af6c-8411-4854-99f8-583c74ca185f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:34:18 compute-0 nova_compute[192079]: 2025-10-02 12:34:18.302 2 DEBUG oslo_concurrency.lockutils [req-998e5b8d-86ce-4a01-85b6-a7cb0bb1a196 req-f5c0af6c-8411-4854-99f8-583c74ca185f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:34:18 compute-0 nova_compute[192079]: 2025-10-02 12:34:18.302 2 DEBUG nova.compute.manager [req-998e5b8d-86ce-4a01-85b6-a7cb0bb1a196 req-f5c0af6c-8411-4854-99f8-583c74ca185f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] No waiting events found dispatching network-vif-plugged-6fc2406f-6b10-4cc3-a5a7-090de3a926bf pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:34:18 compute-0 nova_compute[192079]: 2025-10-02 12:34:18.302 2 WARNING nova.compute.manager [req-998e5b8d-86ce-4a01-85b6-a7cb0bb1a196 req-f5c0af6c-8411-4854-99f8-583c74ca185f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Received unexpected event network-vif-plugged-6fc2406f-6b10-4cc3-a5a7-090de3a926bf for instance with vm_state active and task_state deleting.
Oct 02 12:34:21 compute-0 podman[246241]: 2025-10-02 12:34:21.179050242 +0000 UTC m=+0.073117032 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=ceilometer_agent_compute, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, 
tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible, tcib_managed=true)
Oct 02 12:34:22 compute-0 nova_compute[192079]: 2025-10-02 12:34:22.021 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:22 compute-0 nova_compute[192079]: 2025-10-02 12:34:22.425 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:22 compute-0 ovn_controller[94336]: 2025-10-02T12:34:22Z|00582|binding|INFO|Releasing lport 410dd08f-af3d-48c2-b0fd-475c37ff4bed from this chassis (sb_readonly=0)
Oct 02 12:34:23 compute-0 nova_compute[192079]: 2025-10-02 12:34:23.080 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:23 compute-0 ovn_controller[94336]: 2025-10-02T12:34:23Z|00062|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:11:3b:3b 10.100.0.5
Oct 02 12:34:25 compute-0 ovn_controller[94336]: 2025-10-02T12:34:25Z|00583|binding|INFO|Releasing lport 410dd08f-af3d-48c2-b0fd-475c37ff4bed from this chassis (sb_readonly=0)
Oct 02 12:34:25 compute-0 nova_compute[192079]: 2025-10-02 12:34:25.128 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:26 compute-0 ovn_controller[94336]: 2025-10-02T12:34:26Z|00584|binding|INFO|Releasing lport 410dd08f-af3d-48c2-b0fd-475c37ff4bed from this chassis (sb_readonly=0)
Oct 02 12:34:26 compute-0 nova_compute[192079]: 2025-10-02 12:34:26.515 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:27 compute-0 nova_compute[192079]: 2025-10-02 12:34:27.028 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:27 compute-0 podman[246269]: 2025-10-02 12:34:27.156450279 +0000 UTC m=+0.060239701 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, url=https://catalog.redhat.com/en/search?searchType=containers, architecture=x86_64, maintainer=Red Hat, Inc., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., name=ubi9-minimal, vcs-type=git, io.openshift.expose-services=, managed_by=edpm_ansible, vendor=Red Hat, Inc., com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, distribution-scope=public, io.buildah.version=1.33.7, io.openshift.tags=minimal rhel9, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., com.redhat.component=ubi9-minimal-container, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, release=1755695350, config_id=edpm, build-date=2025-08-20T13:12:41, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, version=9.6, container_name=openstack_network_exporter)
Oct 02 12:34:27 compute-0 podman[246270]: 2025-10-02 12:34:27.175988911 +0000 UTC m=+0.065717611 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, container_name=multipathd, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, config_id=multipathd, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:34:27 compute-0 nova_compute[192079]: 2025-10-02 12:34:27.428 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:28 compute-0 nova_compute[192079]: 2025-10-02 12:34:28.977 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:29 compute-0 nova_compute[192079]: 2025-10-02 12:34:29.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_incomplete_migrations run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:34:29 compute-0 nova_compute[192079]: 2025-10-02 12:34:29.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Cleaning up deleted instances with incomplete migration  _cleanup_incomplete_migrations /usr/lib/python3.9/site-packages/nova/compute/manager.py:11183
Oct 02 12:34:30 compute-0 nova_compute[192079]: 2025-10-02 12:34:30.448 2 INFO nova.compute.manager [None req-fa52ab26-95ea-4888-9c86-8db67ea368c1 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Get console output
Oct 02 12:34:30 compute-0 nova_compute[192079]: 2025-10-02 12:34:30.453 55 INFO nova.privsep.libvirt [-] Ignored error while reading from instance console pty: can't concat NoneType to bytes
Oct 02 12:34:31 compute-0 nova_compute[192079]: 2025-10-02 12:34:31.729 2 DEBUG nova.compute.manager [req-74eaa99c-414d-46f6-a12a-ea491686aed6 req-1dec1083-6b45-4651-887b-6ba4df7e9a3d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Received event network-changed-c4934bb6-5047-47ec-b0c6-127b4274769a external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:34:31 compute-0 nova_compute[192079]: 2025-10-02 12:34:31.729 2 DEBUG nova.compute.manager [req-74eaa99c-414d-46f6-a12a-ea491686aed6 req-1dec1083-6b45-4651-887b-6ba4df7e9a3d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Refreshing instance network info cache due to event network-changed-c4934bb6-5047-47ec-b0c6-127b4274769a. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:34:31 compute-0 nova_compute[192079]: 2025-10-02 12:34:31.730 2 DEBUG oslo_concurrency.lockutils [req-74eaa99c-414d-46f6-a12a-ea491686aed6 req-1dec1083-6b45-4651-887b-6ba4df7e9a3d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-9c817262-fee7-483c-ac98-6d7648890eb0" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:34:31 compute-0 nova_compute[192079]: 2025-10-02 12:34:31.730 2 DEBUG oslo_concurrency.lockutils [req-74eaa99c-414d-46f6-a12a-ea491686aed6 req-1dec1083-6b45-4651-887b-6ba4df7e9a3d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-9c817262-fee7-483c-ac98-6d7648890eb0" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:34:31 compute-0 nova_compute[192079]: 2025-10-02 12:34:31.731 2 DEBUG nova.network.neutron [req-74eaa99c-414d-46f6-a12a-ea491686aed6 req-1dec1083-6b45-4651-887b-6ba4df7e9a3d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Refreshing network info cache for port c4934bb6-5047-47ec-b0c6-127b4274769a _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:34:31 compute-0 nova_compute[192079]: 2025-10-02 12:34:31.809 2 DEBUG oslo_concurrency.lockutils [None req-86f773ed-7bc3-4cc6-ac68-bfd3f2ae8095 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "9c817262-fee7-483c-ac98-6d7648890eb0" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:34:31 compute-0 nova_compute[192079]: 2025-10-02 12:34:31.810 2 DEBUG oslo_concurrency.lockutils [None req-86f773ed-7bc3-4cc6-ac68-bfd3f2ae8095 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "9c817262-fee7-483c-ac98-6d7648890eb0" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:34:31 compute-0 nova_compute[192079]: 2025-10-02 12:34:31.811 2 DEBUG oslo_concurrency.lockutils [None req-86f773ed-7bc3-4cc6-ac68-bfd3f2ae8095 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:34:31 compute-0 nova_compute[192079]: 2025-10-02 12:34:31.811 2 DEBUG oslo_concurrency.lockutils [None req-86f773ed-7bc3-4cc6-ac68-bfd3f2ae8095 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:34:31 compute-0 nova_compute[192079]: 2025-10-02 12:34:31.812 2 DEBUG oslo_concurrency.lockutils [None req-86f773ed-7bc3-4cc6-ac68-bfd3f2ae8095 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:34:31 compute-0 nova_compute[192079]: 2025-10-02 12:34:31.827 2 INFO nova.compute.manager [None req-86f773ed-7bc3-4cc6-ac68-bfd3f2ae8095 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Terminating instance
Oct 02 12:34:31 compute-0 nova_compute[192079]: 2025-10-02 12:34:31.840 2 DEBUG nova.compute.manager [None req-86f773ed-7bc3-4cc6-ac68-bfd3f2ae8095 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:34:31 compute-0 kernel: tapc4934bb6-50 (unregistering): left promiscuous mode
Oct 02 12:34:31 compute-0 NetworkManager[51160]: <info>  [1759408471.8761] device (tapc4934bb6-50): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:34:31 compute-0 ovn_controller[94336]: 2025-10-02T12:34:31Z|00585|binding|INFO|Releasing lport c4934bb6-5047-47ec-b0c6-127b4274769a from this chassis (sb_readonly=0)
Oct 02 12:34:31 compute-0 ovn_controller[94336]: 2025-10-02T12:34:31Z|00586|binding|INFO|Setting lport c4934bb6-5047-47ec-b0c6-127b4274769a down in Southbound
Oct 02 12:34:31 compute-0 ovn_controller[94336]: 2025-10-02T12:34:31Z|00587|binding|INFO|Removing iface tapc4934bb6-50 ovn-installed in OVS
Oct 02 12:34:31 compute-0 nova_compute[192079]: 2025-10-02 12:34:31.884 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:31.897 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:11:3b:3b 10.100.0.5'], port_security=['fa:16:3e:11:3b:3b 10.100.0.5'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.5/28', 'neutron:device_id': '9c817262-fee7-483c-ac98-6d7648890eb0', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '76c7dd40d83e4e3ca71abbebf57921b6', 'neutron:revision_number': '6', 'neutron:security_group_ids': '96cf165f-4eb7-4b43-884d-a9b6e5a897e1', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=9097c5a0-e0b8-419e-918d-de3827bd6390, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=c4934bb6-5047-47ec-b0c6-127b4274769a) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:34:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:31.898 103294 INFO neutron.agent.ovn.metadata.agent [-] Port c4934bb6-5047-47ec-b0c6-127b4274769a in datapath d5d28c5b-6eab-4c56-bc64-1dd8250f45c6 unbound from our chassis
Oct 02 12:34:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:31.899 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network d5d28c5b-6eab-4c56-bc64-1dd8250f45c6, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:34:31 compute-0 nova_compute[192079]: 2025-10-02 12:34:31.900 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:31.900 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5f8ffb24-11f3-4264-9c6e-88d84d6eb5f9]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:31.901 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6 namespace which is not needed anymore
Oct 02 12:34:31 compute-0 systemd[1]: machine-qemu\x2d73\x2dinstance\x2d00000096.scope: Deactivated successfully.
Oct 02 12:34:31 compute-0 systemd[1]: machine-qemu\x2d73\x2dinstance\x2d00000096.scope: Consumed 13.859s CPU time.
Oct 02 12:34:31 compute-0 systemd-machined[152150]: Machine qemu-73-instance-00000096 terminated.
Oct 02 12:34:31 compute-0 nova_compute[192079]: 2025-10-02 12:34:31.991 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759408456.9898155, 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:34:31 compute-0 nova_compute[192079]: 2025-10-02 12:34:31.991 2 INFO nova.compute.manager [-] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] VM Stopped (Lifecycle Event)
Oct 02 12:34:32 compute-0 nova_compute[192079]: 2025-10-02 12:34:32.023 2 DEBUG nova.compute.manager [None req-8bb0cc34-2887-43bf-a2fd-a48a7a2cfa4f - - - - - -] [instance: 10fdb5a2-8b66-4704-b6c5-e0b9bbd6550a] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:34:32 compute-0 neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6[246142]: [NOTICE]   (246146) : haproxy version is 2.8.14-c23fe91
Oct 02 12:34:32 compute-0 neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6[246142]: [NOTICE]   (246146) : path to executable is /usr/sbin/haproxy
Oct 02 12:34:32 compute-0 neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6[246142]: [WARNING]  (246146) : Exiting Master process...
Oct 02 12:34:32 compute-0 neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6[246142]: [WARNING]  (246146) : Exiting Master process...
Oct 02 12:34:32 compute-0 nova_compute[192079]: 2025-10-02 12:34:32.030 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:32 compute-0 neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6[246142]: [ALERT]    (246146) : Current worker (246148) exited with code 143 (Terminated)
Oct 02 12:34:32 compute-0 neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6[246142]: [WARNING]  (246146) : All workers exited. Exiting... (0)
Oct 02 12:34:32 compute-0 systemd[1]: libpod-8c717c6428c8e0a5c8e5a3f32cc8604c28beb1f08e4fb9c23391af843b37e31f.scope: Deactivated successfully.
Oct 02 12:34:32 compute-0 podman[246332]: 2025-10-02 12:34:32.040622294 +0000 UTC m=+0.048693256 container died 8c717c6428c8e0a5c8e5a3f32cc8604c28beb1f08e4fb9c23391af843b37e31f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS)
Oct 02 12:34:32 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-8c717c6428c8e0a5c8e5a3f32cc8604c28beb1f08e4fb9c23391af843b37e31f-userdata-shm.mount: Deactivated successfully.
Oct 02 12:34:32 compute-0 systemd[1]: var-lib-containers-storage-overlay-9cf9fde5aa8b5f02e1d05f9c5158d0371319c043b3c1caf20376c52fa9483ed8-merged.mount: Deactivated successfully.
Oct 02 12:34:32 compute-0 podman[246332]: 2025-10-02 12:34:32.081402784 +0000 UTC m=+0.089473746 container cleanup 8c717c6428c8e0a5c8e5a3f32cc8604c28beb1f08e4fb9c23391af843b37e31f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, tcib_managed=true)
Oct 02 12:34:32 compute-0 systemd[1]: libpod-conmon-8c717c6428c8e0a5c8e5a3f32cc8604c28beb1f08e4fb9c23391af843b37e31f.scope: Deactivated successfully.
Oct 02 12:34:32 compute-0 nova_compute[192079]: 2025-10-02 12:34:32.112 2 INFO nova.virt.libvirt.driver [-] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Instance destroyed successfully.
Oct 02 12:34:32 compute-0 nova_compute[192079]: 2025-10-02 12:34:32.113 2 DEBUG nova.objects.instance [None req-86f773ed-7bc3-4cc6-ac68-bfd3f2ae8095 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lazy-loading 'resources' on Instance uuid 9c817262-fee7-483c-ac98-6d7648890eb0 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:34:32 compute-0 nova_compute[192079]: 2025-10-02 12:34:32.132 2 DEBUG nova.virt.libvirt.vif [None req-86f773ed-7bc3-4cc6-ac68-bfd3f2ae8095 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:33:32Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestNetworkAdvancedServerOps-server-1842419854',display_name='tempest-TestNetworkAdvancedServerOps-server-1842419854',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkadvancedserverops-server-1842419854',id=150,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBBcmc6POLJD4DWxVpjM7Q+mpn1vqWiz84SJ4bJOoXuhI8e1ZYxo5xDkFJVaGxPGGCkjomFU7VMydgd3IiJebhoaGTMBDztV9vB5kp4HC2Ekh6aB6IjhW19nhgZQ5E8+LRw==',key_name='tempest-TestNetworkAdvancedServerOps-467970098',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:33:43Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='76c7dd40d83e4e3ca71abbebf57921b6',ramdisk_id='',reservation_id='r-aaoru9xk',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-TestNetworkAdvancedServerOps-597114071',owner_user_name='tempest-TestNetworkAdvancedServerOps-597114071-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:34:10Z,user_data=None,user_id='1faa7e121a0e43ad8cb4ae5b2cfcc6a2',uuid=9c817262-fee7-483c-ac98-6d7648890eb0,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "c4934bb6-5047-47ec-b0c6-127b4274769a", "address": "fa:16:3e:11:3b:3b", "network": {"id": "d5d28c5b-6eab-4c56-bc64-1dd8250f45c6", "bridge": "br-int", "label": "tempest-network-smoke--1759416782", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", 
"type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4934bb6-50", "ovs_interfaceid": "c4934bb6-5047-47ec-b0c6-127b4274769a", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:34:32 compute-0 nova_compute[192079]: 2025-10-02 12:34:32.132 2 DEBUG nova.network.os_vif_util [None req-86f773ed-7bc3-4cc6-ac68-bfd3f2ae8095 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converting VIF {"id": "c4934bb6-5047-47ec-b0c6-127b4274769a", "address": "fa:16:3e:11:3b:3b", "network": {"id": "d5d28c5b-6eab-4c56-bc64-1dd8250f45c6", "bridge": "br-int", "label": "tempest-network-smoke--1759416782", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4934bb6-50", "ovs_interfaceid": "c4934bb6-5047-47ec-b0c6-127b4274769a", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:34:32 compute-0 nova_compute[192079]: 2025-10-02 12:34:32.133 2 DEBUG nova.network.os_vif_util [None req-86f773ed-7bc3-4cc6-ac68-bfd3f2ae8095 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:11:3b:3b,bridge_name='br-int',has_traffic_filtering=True,id=c4934bb6-5047-47ec-b0c6-127b4274769a,network=Network(d5d28c5b-6eab-4c56-bc64-1dd8250f45c6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapc4934bb6-50') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:34:32 compute-0 nova_compute[192079]: 2025-10-02 12:34:32.133 2 DEBUG os_vif [None req-86f773ed-7bc3-4cc6-ac68-bfd3f2ae8095 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:11:3b:3b,bridge_name='br-int',has_traffic_filtering=True,id=c4934bb6-5047-47ec-b0c6-127b4274769a,network=Network(d5d28c5b-6eab-4c56-bc64-1dd8250f45c6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapc4934bb6-50') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:34:32 compute-0 nova_compute[192079]: 2025-10-02 12:34:32.135 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:32 compute-0 nova_compute[192079]: 2025-10-02 12:34:32.135 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapc4934bb6-50, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:34:32 compute-0 nova_compute[192079]: 2025-10-02 12:34:32.136 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:32 compute-0 nova_compute[192079]: 2025-10-02 12:34:32.138 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:34:32 compute-0 nova_compute[192079]: 2025-10-02 12:34:32.140 2 INFO os_vif [None req-86f773ed-7bc3-4cc6-ac68-bfd3f2ae8095 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:11:3b:3b,bridge_name='br-int',has_traffic_filtering=True,id=c4934bb6-5047-47ec-b0c6-127b4274769a,network=Network(d5d28c5b-6eab-4c56-bc64-1dd8250f45c6),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapc4934bb6-50')
Oct 02 12:34:32 compute-0 nova_compute[192079]: 2025-10-02 12:34:32.141 2 INFO nova.virt.libvirt.driver [None req-86f773ed-7bc3-4cc6-ac68-bfd3f2ae8095 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Deleting instance files /var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0_del
Oct 02 12:34:32 compute-0 nova_compute[192079]: 2025-10-02 12:34:32.142 2 INFO nova.virt.libvirt.driver [None req-86f773ed-7bc3-4cc6-ac68-bfd3f2ae8095 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Deletion of /var/lib/nova/instances/9c817262-fee7-483c-ac98-6d7648890eb0_del complete
Oct 02 12:34:32 compute-0 podman[246372]: 2025-10-02 12:34:32.16390851 +0000 UTC m=+0.050138105 container remove 8c717c6428c8e0a5c8e5a3f32cc8604c28beb1f08e4fb9c23391af843b37e31f (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 12:34:32 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:32.168 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[eec0956a-6fdd-4ce2-a89a-119386d85a11]: (4, ('Thu Oct  2 12:34:31 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6 (8c717c6428c8e0a5c8e5a3f32cc8604c28beb1f08e4fb9c23391af843b37e31f)\n8c717c6428c8e0a5c8e5a3f32cc8604c28beb1f08e4fb9c23391af843b37e31f\nThu Oct  2 12:34:32 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6 (8c717c6428c8e0a5c8e5a3f32cc8604c28beb1f08e4fb9c23391af843b37e31f)\n8c717c6428c8e0a5c8e5a3f32cc8604c28beb1f08e4fb9c23391af843b37e31f\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:32 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:32.170 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[67b8d78a-6c86-4da0-82d1-4fca221b3405]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:32 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:32.170 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapd5d28c5b-60, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:34:32 compute-0 nova_compute[192079]: 2025-10-02 12:34:32.173 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:32 compute-0 kernel: tapd5d28c5b-60: left promiscuous mode
Oct 02 12:34:32 compute-0 nova_compute[192079]: 2025-10-02 12:34:32.184 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:32 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:32.187 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6776c65f-5284-4175-a117-8233d9b8d027]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:32 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:32.216 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[85a9c698-3590-48b2-806f-a46f5ff343ee]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:32 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:32.217 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ecdfca01-7939-40f2-b51e-7f3b70dd7474]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:32 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:32.244 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e6feb815-fd2a-4080-b48e-a5eb516f5a43]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 644775, 'reachable_time': 38793, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 246412, 'error': None, 'target': 'ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:32 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:32.247 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-d5d28c5b-6eab-4c56-bc64-1dd8250f45c6 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:34:32 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:32.247 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[b6eeae0f-c180-47c8-a072-d4a5cf63c9fb]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:32 compute-0 systemd[1]: run-netns-ovnmeta\x2dd5d28c5b\x2d6eab\x2d4c56\x2dbc64\x2d1dd8250f45c6.mount: Deactivated successfully.
Oct 02 12:34:32 compute-0 podman[246388]: 2025-10-02 12:34:32.256956294 +0000 UTC m=+0.055078531 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter)
Oct 02 12:34:32 compute-0 nova_compute[192079]: 2025-10-02 12:34:32.285 2 DEBUG nova.compute.manager [req-706b5c3b-095c-4447-a4b2-8d3a611d38eb req-388b08a5-7b3c-49fb-80f7-8e66742f946a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Received event network-vif-unplugged-c4934bb6-5047-47ec-b0c6-127b4274769a external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:34:32 compute-0 nova_compute[192079]: 2025-10-02 12:34:32.285 2 DEBUG oslo_concurrency.lockutils [req-706b5c3b-095c-4447-a4b2-8d3a611d38eb req-388b08a5-7b3c-49fb-80f7-8e66742f946a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:34:32 compute-0 nova_compute[192079]: 2025-10-02 12:34:32.285 2 DEBUG oslo_concurrency.lockutils [req-706b5c3b-095c-4447-a4b2-8d3a611d38eb req-388b08a5-7b3c-49fb-80f7-8e66742f946a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:34:32 compute-0 nova_compute[192079]: 2025-10-02 12:34:32.285 2 DEBUG oslo_concurrency.lockutils [req-706b5c3b-095c-4447-a4b2-8d3a611d38eb req-388b08a5-7b3c-49fb-80f7-8e66742f946a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:34:32 compute-0 nova_compute[192079]: 2025-10-02 12:34:32.286 2 DEBUG nova.compute.manager [req-706b5c3b-095c-4447-a4b2-8d3a611d38eb req-388b08a5-7b3c-49fb-80f7-8e66742f946a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] No waiting events found dispatching network-vif-unplugged-c4934bb6-5047-47ec-b0c6-127b4274769a pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:34:32 compute-0 nova_compute[192079]: 2025-10-02 12:34:32.286 2 DEBUG nova.compute.manager [req-706b5c3b-095c-4447-a4b2-8d3a611d38eb req-388b08a5-7b3c-49fb-80f7-8e66742f946a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Received event network-vif-unplugged-c4934bb6-5047-47ec-b0c6-127b4274769a for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:34:32 compute-0 nova_compute[192079]: 2025-10-02 12:34:32.290 2 INFO nova.compute.manager [None req-86f773ed-7bc3-4cc6-ac68-bfd3f2ae8095 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Took 0.45 seconds to destroy the instance on the hypervisor.
Oct 02 12:34:32 compute-0 nova_compute[192079]: 2025-10-02 12:34:32.290 2 DEBUG oslo.service.loopingcall [None req-86f773ed-7bc3-4cc6-ac68-bfd3f2ae8095 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:34:32 compute-0 nova_compute[192079]: 2025-10-02 12:34:32.290 2 DEBUG nova.compute.manager [-] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:34:32 compute-0 nova_compute[192079]: 2025-10-02 12:34:32.291 2 DEBUG nova.network.neutron [-] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:34:32 compute-0 podman[246389]: 2025-10-02 12:34:32.303878201 +0000 UTC m=+0.100011613 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_managed=true, container_name=iscsid, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001)
Oct 02 12:34:32 compute-0 nova_compute[192079]: 2025-10-02 12:34:32.428 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:33 compute-0 nova_compute[192079]: 2025-10-02 12:34:33.088 2 DEBUG nova.network.neutron [-] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:34:33 compute-0 nova_compute[192079]: 2025-10-02 12:34:33.111 2 INFO nova.compute.manager [-] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Took 0.82 seconds to deallocate network for instance.
Oct 02 12:34:33 compute-0 nova_compute[192079]: 2025-10-02 12:34:33.175 2 DEBUG nova.compute.manager [req-6ffd5e70-55f0-4392-97d4-61b6ebe96ee6 req-7de1aa22-a0a7-4f0b-aad6-93a943728efa 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Received event network-vif-deleted-c4934bb6-5047-47ec-b0c6-127b4274769a external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:34:33 compute-0 nova_compute[192079]: 2025-10-02 12:34:33.215 2 DEBUG oslo_concurrency.lockutils [None req-86f773ed-7bc3-4cc6-ac68-bfd3f2ae8095 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:34:33 compute-0 nova_compute[192079]: 2025-10-02 12:34:33.215 2 DEBUG oslo_concurrency.lockutils [None req-86f773ed-7bc3-4cc6-ac68-bfd3f2ae8095 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:34:33 compute-0 nova_compute[192079]: 2025-10-02 12:34:33.217 2 DEBUG nova.network.neutron [req-74eaa99c-414d-46f6-a12a-ea491686aed6 req-1dec1083-6b45-4651-887b-6ba4df7e9a3d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Updated VIF entry in instance network info cache for port c4934bb6-5047-47ec-b0c6-127b4274769a. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:34:33 compute-0 nova_compute[192079]: 2025-10-02 12:34:33.218 2 DEBUG nova.network.neutron [req-74eaa99c-414d-46f6-a12a-ea491686aed6 req-1dec1083-6b45-4651-887b-6ba4df7e9a3d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Updating instance_info_cache with network_info: [{"id": "c4934bb6-5047-47ec-b0c6-127b4274769a", "address": "fa:16:3e:11:3b:3b", "network": {"id": "d5d28c5b-6eab-4c56-bc64-1dd8250f45c6", "bridge": "br-int", "label": "tempest-network-smoke--1759416782", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "76c7dd40d83e4e3ca71abbebf57921b6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapc4934bb6-50", "ovs_interfaceid": "c4934bb6-5047-47ec-b0c6-127b4274769a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:34:33 compute-0 nova_compute[192079]: 2025-10-02 12:34:33.235 2 DEBUG oslo_concurrency.lockutils [req-74eaa99c-414d-46f6-a12a-ea491686aed6 req-1dec1083-6b45-4651-887b-6ba4df7e9a3d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-9c817262-fee7-483c-ac98-6d7648890eb0" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:34:33 compute-0 nova_compute[192079]: 2025-10-02 12:34:33.275 2 DEBUG nova.compute.provider_tree [None req-86f773ed-7bc3-4cc6-ac68-bfd3f2ae8095 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:34:33 compute-0 nova_compute[192079]: 2025-10-02 12:34:33.294 2 DEBUG nova.scheduler.client.report [None req-86f773ed-7bc3-4cc6-ac68-bfd3f2ae8095 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:34:33 compute-0 nova_compute[192079]: 2025-10-02 12:34:33.318 2 DEBUG oslo_concurrency.lockutils [None req-86f773ed-7bc3-4cc6-ac68-bfd3f2ae8095 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.103s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:34:33 compute-0 nova_compute[192079]: 2025-10-02 12:34:33.343 2 INFO nova.scheduler.client.report [None req-86f773ed-7bc3-4cc6-ac68-bfd3f2ae8095 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Deleted allocations for instance 9c817262-fee7-483c-ac98-6d7648890eb0
Oct 02 12:34:33 compute-0 nova_compute[192079]: 2025-10-02 12:34:33.444 2 DEBUG oslo_concurrency.lockutils [None req-86f773ed-7bc3-4cc6-ac68-bfd3f2ae8095 1faa7e121a0e43ad8cb4ae5b2cfcc6a2 76c7dd40d83e4e3ca71abbebf57921b6 - - default default] Lock "9c817262-fee7-483c-ac98-6d7648890eb0" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.634s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:34:33 compute-0 nova_compute[192079]: 2025-10-02 12:34:33.955 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:34 compute-0 nova_compute[192079]: 2025-10-02 12:34:34.435 2 DEBUG nova.compute.manager [req-fb559e24-9e81-49ff-ac80-ff213ccbbcc0 req-609c19fa-5ac0-4c6d-b89b-58b7e6b18e40 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Received event network-vif-plugged-c4934bb6-5047-47ec-b0c6-127b4274769a external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:34:34 compute-0 nova_compute[192079]: 2025-10-02 12:34:34.437 2 DEBUG oslo_concurrency.lockutils [req-fb559e24-9e81-49ff-ac80-ff213ccbbcc0 req-609c19fa-5ac0-4c6d-b89b-58b7e6b18e40 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:34:34 compute-0 nova_compute[192079]: 2025-10-02 12:34:34.438 2 DEBUG oslo_concurrency.lockutils [req-fb559e24-9e81-49ff-ac80-ff213ccbbcc0 req-609c19fa-5ac0-4c6d-b89b-58b7e6b18e40 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:34:34 compute-0 nova_compute[192079]: 2025-10-02 12:34:34.438 2 DEBUG oslo_concurrency.lockutils [req-fb559e24-9e81-49ff-ac80-ff213ccbbcc0 req-609c19fa-5ac0-4c6d-b89b-58b7e6b18e40 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "9c817262-fee7-483c-ac98-6d7648890eb0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:34:34 compute-0 nova_compute[192079]: 2025-10-02 12:34:34.438 2 DEBUG nova.compute.manager [req-fb559e24-9e81-49ff-ac80-ff213ccbbcc0 req-609c19fa-5ac0-4c6d-b89b-58b7e6b18e40 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] No waiting events found dispatching network-vif-plugged-c4934bb6-5047-47ec-b0c6-127b4274769a pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:34:34 compute-0 nova_compute[192079]: 2025-10-02 12:34:34.438 2 WARNING nova.compute.manager [req-fb559e24-9e81-49ff-ac80-ff213ccbbcc0 req-609c19fa-5ac0-4c6d-b89b-58b7e6b18e40 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Received unexpected event network-vif-plugged-c4934bb6-5047-47ec-b0c6-127b4274769a for instance with vm_state deleted and task_state None.
Oct 02 12:34:34 compute-0 nova_compute[192079]: 2025-10-02 12:34:34.686 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:34:34 compute-0 nova_compute[192079]: 2025-10-02 12:34:34.686 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:34:35 compute-0 nova_compute[192079]: 2025-10-02 12:34:35.637 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:36 compute-0 nova_compute[192079]: 2025-10-02 12:34:36.224 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:36 compute-0 nova_compute[192079]: 2025-10-02 12:34:36.352 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:37 compute-0 nova_compute[192079]: 2025-10-02 12:34:37.138 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:37 compute-0 nova_compute[192079]: 2025-10-02 12:34:37.430 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:39 compute-0 nova_compute[192079]: 2025-10-02 12:34:39.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:34:40 compute-0 podman[246436]: 2025-10-02 12:34:40.167903681 +0000 UTC m=+0.081195812 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_metadata_agent, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, container_name=ovn_metadata_agent, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, io.buildah.version=1.41.3)
Oct 02 12:34:40 compute-0 podman[246438]: 2025-10-02 12:34:40.171039776 +0000 UTC m=+0.079146377 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']})
Oct 02 12:34:40 compute-0 podman[246437]: 2025-10-02 12:34:40.187906185 +0000 UTC m=+0.095967674 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller, tcib_managed=true, container_name=ovn_controller, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS)
Oct 02 12:34:40 compute-0 nova_compute[192079]: 2025-10-02 12:34:40.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:34:40 compute-0 nova_compute[192079]: 2025-10-02 12:34:40.682 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:34:40 compute-0 nova_compute[192079]: 2025-10-02 12:34:40.682 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:34:40 compute-0 nova_compute[192079]: 2025-10-02 12:34:40.682 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:34:40 compute-0 nova_compute[192079]: 2025-10-02 12:34:40.682 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:34:40 compute-0 nova_compute[192079]: 2025-10-02 12:34:40.817 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:34:40 compute-0 nova_compute[192079]: 2025-10-02 12:34:40.818 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5727MB free_disk=73.33991241455078GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:34:40 compute-0 nova_compute[192079]: 2025-10-02 12:34:40.818 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:34:40 compute-0 nova_compute[192079]: 2025-10-02 12:34:40.818 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:34:40 compute-0 nova_compute[192079]: 2025-10-02 12:34:40.892 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:34:40 compute-0 nova_compute[192079]: 2025-10-02 12:34:40.893 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:34:40 compute-0 nova_compute[192079]: 2025-10-02 12:34:40.913 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:34:40 compute-0 nova_compute[192079]: 2025-10-02 12:34:40.928 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:34:40 compute-0 nova_compute[192079]: 2025-10-02 12:34:40.950 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:34:40 compute-0 nova_compute[192079]: 2025-10-02 12:34:40.950 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.132s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:34:42 compute-0 nova_compute[192079]: 2025-10-02 12:34:42.143 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:42 compute-0 nova_compute[192079]: 2025-10-02 12:34:42.432 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:42 compute-0 nova_compute[192079]: 2025-10-02 12:34:42.770 2 DEBUG oslo_concurrency.lockutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "29e46585-0d8d-450d-b3de-d6d103b90a58" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:34:42 compute-0 nova_compute[192079]: 2025-10-02 12:34:42.771 2 DEBUG oslo_concurrency.lockutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "29e46585-0d8d-450d-b3de-d6d103b90a58" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:34:42 compute-0 nova_compute[192079]: 2025-10-02 12:34:42.798 2 DEBUG nova.compute.manager [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:34:42 compute-0 nova_compute[192079]: 2025-10-02 12:34:42.896 2 DEBUG oslo_concurrency.lockutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:34:42 compute-0 nova_compute[192079]: 2025-10-02 12:34:42.896 2 DEBUG oslo_concurrency.lockutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:34:42 compute-0 nova_compute[192079]: 2025-10-02 12:34:42.903 2 DEBUG nova.virt.hardware [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:34:42 compute-0 nova_compute[192079]: 2025-10-02 12:34:42.904 2 INFO nova.compute.claims [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:34:42 compute-0 nova_compute[192079]: 2025-10-02 12:34:42.950 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:34:42 compute-0 nova_compute[192079]: 2025-10-02 12:34:42.951 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.036 2 DEBUG nova.compute.provider_tree [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.051 2 DEBUG nova.scheduler.client.report [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.076 2 DEBUG oslo_concurrency.lockutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.180s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.077 2 DEBUG nova.compute.manager [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.135 2 DEBUG nova.compute.manager [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.136 2 DEBUG nova.network.neutron [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.176 2 INFO nova.virt.libvirt.driver [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.219 2 DEBUG nova.compute.manager [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.329 2 DEBUG nova.compute.manager [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.331 2 DEBUG nova.virt.libvirt.driver [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.331 2 INFO nova.virt.libvirt.driver [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Creating image(s)
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.332 2 DEBUG oslo_concurrency.lockutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "/var/lib/nova/instances/29e46585-0d8d-450d-b3de-d6d103b90a58/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.332 2 DEBUG oslo_concurrency.lockutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "/var/lib/nova/instances/29e46585-0d8d-450d-b3de-d6d103b90a58/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.333 2 DEBUG oslo_concurrency.lockutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "/var/lib/nova/instances/29e46585-0d8d-450d-b3de-d6d103b90a58/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.344 2 DEBUG oslo_concurrency.processutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.399 2 DEBUG oslo_concurrency.processutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.400 2 DEBUG oslo_concurrency.lockutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.400 2 DEBUG oslo_concurrency.lockutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.411 2 DEBUG oslo_concurrency.processutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.464 2 DEBUG oslo_concurrency.processutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.052s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.465 2 DEBUG oslo_concurrency.processutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/29e46585-0d8d-450d-b3de-d6d103b90a58/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.493 2 DEBUG nova.policy [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.502 2 DEBUG oslo_concurrency.processutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/29e46585-0d8d-450d-b3de-d6d103b90a58/disk 1073741824" returned: 0 in 0.037s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.503 2 DEBUG oslo_concurrency.lockutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.103s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.504 2 DEBUG oslo_concurrency.processutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.564 2 DEBUG oslo_concurrency.processutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.060s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.566 2 DEBUG nova.virt.disk.api [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Checking if we can resize image /var/lib/nova/instances/29e46585-0d8d-450d-b3de-d6d103b90a58/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.567 2 DEBUG oslo_concurrency.processutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/29e46585-0d8d-450d-b3de-d6d103b90a58/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.625 2 DEBUG oslo_concurrency.processutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/29e46585-0d8d-450d-b3de-d6d103b90a58/disk --force-share --output=json" returned: 0 in 0.058s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.627 2 DEBUG nova.virt.disk.api [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Cannot resize image /var/lib/nova/instances/29e46585-0d8d-450d-b3de-d6d103b90a58/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.627 2 DEBUG nova.objects.instance [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lazy-loading 'migration_context' on Instance uuid 29e46585-0d8d-450d-b3de-d6d103b90a58 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.642 2 DEBUG nova.virt.libvirt.driver [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.643 2 DEBUG nova.virt.libvirt.driver [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Ensure instance console log exists: /var/lib/nova/instances/29e46585-0d8d-450d-b3de-d6d103b90a58/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.643 2 DEBUG oslo_concurrency.lockutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.644 2 DEBUG oslo_concurrency.lockutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.644 2 DEBUG oslo_concurrency.lockutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:34:43 compute-0 nova_compute[192079]: 2025-10-02 12:34:43.664 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:34:44 compute-0 nova_compute[192079]: 2025-10-02 12:34:44.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:34:44 compute-0 nova_compute[192079]: 2025-10-02 12:34:44.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:34:44 compute-0 nova_compute[192079]: 2025-10-02 12:34:44.667 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:34:44 compute-0 nova_compute[192079]: 2025-10-02 12:34:44.683 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Skipping network cache update for instance because it is Building. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9871
Oct 02 12:34:44 compute-0 nova_compute[192079]: 2025-10-02 12:34:44.683 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:34:44 compute-0 nova_compute[192079]: 2025-10-02 12:34:44.684 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:34:45 compute-0 nova_compute[192079]: 2025-10-02 12:34:45.026 2 DEBUG nova.network.neutron [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Successfully created port: a36441d3-2588-4fff-9190-68df21897dec _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:34:47 compute-0 nova_compute[192079]: 2025-10-02 12:34:47.110 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759408472.1089559, 9c817262-fee7-483c-ac98-6d7648890eb0 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:34:47 compute-0 nova_compute[192079]: 2025-10-02 12:34:47.111 2 INFO nova.compute.manager [-] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] VM Stopped (Lifecycle Event)
Oct 02 12:34:47 compute-0 nova_compute[192079]: 2025-10-02 12:34:47.140 2 DEBUG nova.compute.manager [None req-3b33ca63-67c7-4a9a-abb1-d3ee62398d7c - - - - - -] [instance: 9c817262-fee7-483c-ac98-6d7648890eb0] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:34:47 compute-0 nova_compute[192079]: 2025-10-02 12:34:47.147 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:47 compute-0 nova_compute[192079]: 2025-10-02 12:34:47.434 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:48 compute-0 nova_compute[192079]: 2025-10-02 12:34:48.058 2 DEBUG nova.network.neutron [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Successfully updated port: a36441d3-2588-4fff-9190-68df21897dec _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:34:48 compute-0 nova_compute[192079]: 2025-10-02 12:34:48.084 2 DEBUG oslo_concurrency.lockutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "refresh_cache-29e46585-0d8d-450d-b3de-d6d103b90a58" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:34:48 compute-0 nova_compute[192079]: 2025-10-02 12:34:48.084 2 DEBUG oslo_concurrency.lockutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquired lock "refresh_cache-29e46585-0d8d-450d-b3de-d6d103b90a58" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:34:48 compute-0 nova_compute[192079]: 2025-10-02 12:34:48.084 2 DEBUG nova.network.neutron [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:34:48 compute-0 nova_compute[192079]: 2025-10-02 12:34:48.281 2 DEBUG nova.compute.manager [req-d3d99b4e-d875-44df-9efd-f346fc32e3b5 req-7593bdf4-fdd2-4bdd-83a8-2bd0ec849b0f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Received event network-changed-a36441d3-2588-4fff-9190-68df21897dec external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:34:48 compute-0 nova_compute[192079]: 2025-10-02 12:34:48.281 2 DEBUG nova.compute.manager [req-d3d99b4e-d875-44df-9efd-f346fc32e3b5 req-7593bdf4-fdd2-4bdd-83a8-2bd0ec849b0f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Refreshing instance network info cache due to event network-changed-a36441d3-2588-4fff-9190-68df21897dec. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:34:48 compute-0 nova_compute[192079]: 2025-10-02 12:34:48.282 2 DEBUG oslo_concurrency.lockutils [req-d3d99b4e-d875-44df-9efd-f346fc32e3b5 req-7593bdf4-fdd2-4bdd-83a8-2bd0ec849b0f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-29e46585-0d8d-450d-b3de-d6d103b90a58" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:34:48 compute-0 nova_compute[192079]: 2025-10-02 12:34:48.326 2 DEBUG nova.network.neutron [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.424 2 DEBUG nova.network.neutron [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Updating instance_info_cache with network_info: [{"id": "a36441d3-2588-4fff-9190-68df21897dec", "address": "fa:16:3e:6e:e8:1d", "network": {"id": "6512ce78-9132-4dd4-88c2-d82efca10339", "bridge": "br-int", "label": "tempest-network-smoke--937181", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa36441d3-25", "ovs_interfaceid": "a36441d3-2588-4fff-9190-68df21897dec", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.456 2 DEBUG oslo_concurrency.lockutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Releasing lock "refresh_cache-29e46585-0d8d-450d-b3de-d6d103b90a58" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.457 2 DEBUG nova.compute.manager [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Instance network_info: |[{"id": "a36441d3-2588-4fff-9190-68df21897dec", "address": "fa:16:3e:6e:e8:1d", "network": {"id": "6512ce78-9132-4dd4-88c2-d82efca10339", "bridge": "br-int", "label": "tempest-network-smoke--937181", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa36441d3-25", "ovs_interfaceid": "a36441d3-2588-4fff-9190-68df21897dec", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.458 2 DEBUG oslo_concurrency.lockutils [req-d3d99b4e-d875-44df-9efd-f346fc32e3b5 req-7593bdf4-fdd2-4bdd-83a8-2bd0ec849b0f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-29e46585-0d8d-450d-b3de-d6d103b90a58" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.459 2 DEBUG nova.network.neutron [req-d3d99b4e-d875-44df-9efd-f346fc32e3b5 req-7593bdf4-fdd2-4bdd-83a8-2bd0ec849b0f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Refreshing network info cache for port a36441d3-2588-4fff-9190-68df21897dec _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.464 2 DEBUG nova.virt.libvirt.driver [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Start _get_guest_xml network_info=[{"id": "a36441d3-2588-4fff-9190-68df21897dec", "address": "fa:16:3e:6e:e8:1d", "network": {"id": "6512ce78-9132-4dd4-88c2-d82efca10339", "bridge": "br-int", "label": "tempest-network-smoke--937181", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa36441d3-25", "ovs_interfaceid": "a36441d3-2588-4fff-9190-68df21897dec", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.471 2 WARNING nova.virt.libvirt.driver [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.480 2 DEBUG nova.virt.libvirt.host [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.481 2 DEBUG nova.virt.libvirt.host [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.486 2 DEBUG nova.virt.libvirt.host [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.486 2 DEBUG nova.virt.libvirt.host [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.487 2 DEBUG nova.virt.libvirt.driver [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.487 2 DEBUG nova.virt.hardware [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.488 2 DEBUG nova.virt.hardware [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.488 2 DEBUG nova.virt.hardware [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.488 2 DEBUG nova.virt.hardware [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.488 2 DEBUG nova.virt.hardware [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.489 2 DEBUG nova.virt.hardware [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.489 2 DEBUG nova.virt.hardware [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.489 2 DEBUG nova.virt.hardware [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.489 2 DEBUG nova.virt.hardware [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.489 2 DEBUG nova.virt.hardware [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.490 2 DEBUG nova.virt.hardware [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.494 2 DEBUG nova.virt.libvirt.vif [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:34:41Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestNetworkBasicOps-server-1074026563',display_name='tempest-TestNetworkBasicOps-server-1074026563',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkbasicops-server-1074026563',id=153,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBIruC8NBbt0fiPM5vjUcW7I4aWOaOqn0nOX/moE5SBYvSbwgZuheNp45snU/Zu/Yc1PtgYRP83VhLxhPzxMwsidZgk5yFQ8uQoKwlTkw6XHGAZZ+9OGwmu29t+3aBvQGYQ==',key_name='tempest-TestNetworkBasicOps-1331898182',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='6e2a4899168a47618e377cb3ac85ddd2',ramdisk_id='',reservation_id='r-isys9n10',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestNetworkBasicOps-1323893370',owner_user_name='tempest-TestNetworkBasicOps-1323893370-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:34:43Z,user_data=None,user_id='a1898fdf056c4a249c33590f26d4d845',uuid=29e46585-0d8d-450d-b3de-d6d103b90a58,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "a36441d3-2588-4fff-9190-68df21897dec", "address": "fa:16:3e:6e:e8:1d", "network": {"id": "6512ce78-9132-4dd4-88c2-d82efca10339", "bridge": "br-int", "label": "tempest-network-smoke--937181", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": 
false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa36441d3-25", "ovs_interfaceid": "a36441d3-2588-4fff-9190-68df21897dec", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.494 2 DEBUG nova.network.os_vif_util [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converting VIF {"id": "a36441d3-2588-4fff-9190-68df21897dec", "address": "fa:16:3e:6e:e8:1d", "network": {"id": "6512ce78-9132-4dd4-88c2-d82efca10339", "bridge": "br-int", "label": "tempest-network-smoke--937181", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa36441d3-25", "ovs_interfaceid": "a36441d3-2588-4fff-9190-68df21897dec", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.495 2 DEBUG nova.network.os_vif_util [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:6e:e8:1d,bridge_name='br-int',has_traffic_filtering=True,id=a36441d3-2588-4fff-9190-68df21897dec,network=Network(6512ce78-9132-4dd4-88c2-d82efca10339),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapa36441d3-25') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.496 2 DEBUG nova.objects.instance [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lazy-loading 'pci_devices' on Instance uuid 29e46585-0d8d-450d-b3de-d6d103b90a58 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.520 2 DEBUG nova.virt.libvirt.driver [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:34:49 compute-0 nova_compute[192079]:   <uuid>29e46585-0d8d-450d-b3de-d6d103b90a58</uuid>
Oct 02 12:34:49 compute-0 nova_compute[192079]:   <name>instance-00000099</name>
Oct 02 12:34:49 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:34:49 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:34:49 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:34:49 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:       <nova:name>tempest-TestNetworkBasicOps-server-1074026563</nova:name>
Oct 02 12:34:49 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:34:49</nova:creationTime>
Oct 02 12:34:49 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:34:49 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:34:49 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:34:49 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:34:49 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:34:49 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:34:49 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:34:49 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:34:49 compute-0 nova_compute[192079]:         <nova:user uuid="a1898fdf056c4a249c33590f26d4d845">tempest-TestNetworkBasicOps-1323893370-project-member</nova:user>
Oct 02 12:34:49 compute-0 nova_compute[192079]:         <nova:project uuid="6e2a4899168a47618e377cb3ac85ddd2">tempest-TestNetworkBasicOps-1323893370</nova:project>
Oct 02 12:34:49 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:34:49 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:34:49 compute-0 nova_compute[192079]:         <nova:port uuid="a36441d3-2588-4fff-9190-68df21897dec">
Oct 02 12:34:49 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.9" ipVersion="4"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:34:49 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:34:49 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:34:49 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <system>
Oct 02 12:34:49 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:34:49 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:34:49 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:34:49 compute-0 nova_compute[192079]:       <entry name="serial">29e46585-0d8d-450d-b3de-d6d103b90a58</entry>
Oct 02 12:34:49 compute-0 nova_compute[192079]:       <entry name="uuid">29e46585-0d8d-450d-b3de-d6d103b90a58</entry>
Oct 02 12:34:49 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     </system>
Oct 02 12:34:49 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:34:49 compute-0 nova_compute[192079]:   <os>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:   </os>
Oct 02 12:34:49 compute-0 nova_compute[192079]:   <features>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:   </features>
Oct 02 12:34:49 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:34:49 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:34:49 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:34:49 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/29e46585-0d8d-450d-b3de-d6d103b90a58/disk"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:34:49 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/29e46585-0d8d-450d-b3de-d6d103b90a58/disk.config"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:34:49 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:6e:e8:1d"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:       <target dev="tapa36441d3-25"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:34:49 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/29e46585-0d8d-450d-b3de-d6d103b90a58/console.log" append="off"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <video>
Oct 02 12:34:49 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     </video>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:34:49 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:34:49 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:34:49 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:34:49 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:34:49 compute-0 nova_compute[192079]: </domain>
Oct 02 12:34:49 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.521 2 DEBUG nova.compute.manager [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Preparing to wait for external event network-vif-plugged-a36441d3-2588-4fff-9190-68df21897dec prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.522 2 DEBUG oslo_concurrency.lockutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "29e46585-0d8d-450d-b3de-d6d103b90a58-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.522 2 DEBUG oslo_concurrency.lockutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "29e46585-0d8d-450d-b3de-d6d103b90a58-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.522 2 DEBUG oslo_concurrency.lockutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "29e46585-0d8d-450d-b3de-d6d103b90a58-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.523 2 DEBUG nova.virt.libvirt.vif [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:34:41Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestNetworkBasicOps-server-1074026563',display_name='tempest-TestNetworkBasicOps-server-1074026563',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkbasicops-server-1074026563',id=153,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBIruC8NBbt0fiPM5vjUcW7I4aWOaOqn0nOX/moE5SBYvSbwgZuheNp45snU/Zu/Yc1PtgYRP83VhLxhPzxMwsidZgk5yFQ8uQoKwlTkw6XHGAZZ+9OGwmu29t+3aBvQGYQ==',key_name='tempest-TestNetworkBasicOps-1331898182',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='6e2a4899168a47618e377cb3ac85ddd2',ramdisk_id='',reservation_id='r-isys9n10',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestNetworkBasicOps-1323893370',owner_user_name='tempest-TestNetworkBasicOps-1323893370-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:34:43Z,user_data=None,user_id='a1898fdf056c4a249c33590f26d4d845',uuid=29e46585-0d8d-450d-b3de-d6d103b90a58,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "a36441d3-2588-4fff-9190-68df21897dec", "address": "fa:16:3e:6e:e8:1d", "network": {"id": "6512ce78-9132-4dd4-88c2-d82efca10339", "bridge": "br-int", "label": "tempest-network-smoke--937181", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": 
{"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa36441d3-25", "ovs_interfaceid": "a36441d3-2588-4fff-9190-68df21897dec", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.523 2 DEBUG nova.network.os_vif_util [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converting VIF {"id": "a36441d3-2588-4fff-9190-68df21897dec", "address": "fa:16:3e:6e:e8:1d", "network": {"id": "6512ce78-9132-4dd4-88c2-d82efca10339", "bridge": "br-int", "label": "tempest-network-smoke--937181", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa36441d3-25", "ovs_interfaceid": "a36441d3-2588-4fff-9190-68df21897dec", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.524 2 DEBUG nova.network.os_vif_util [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:6e:e8:1d,bridge_name='br-int',has_traffic_filtering=True,id=a36441d3-2588-4fff-9190-68df21897dec,network=Network(6512ce78-9132-4dd4-88c2-d82efca10339),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapa36441d3-25') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.524 2 DEBUG os_vif [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:6e:e8:1d,bridge_name='br-int',has_traffic_filtering=True,id=a36441d3-2588-4fff-9190-68df21897dec,network=Network(6512ce78-9132-4dd4-88c2-d82efca10339),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapa36441d3-25') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.524 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.525 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.525 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.528 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.528 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapa36441d3-25, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.528 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapa36441d3-25, col_values=(('external_ids', {'iface-id': 'a36441d3-2588-4fff-9190-68df21897dec', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:6e:e8:1d', 'vm-uuid': '29e46585-0d8d-450d-b3de-d6d103b90a58'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.530 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:49 compute-0 NetworkManager[51160]: <info>  [1759408489.5311] manager: (tapa36441d3-25): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/290)
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.532 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.536 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.537 2 INFO os_vif [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:6e:e8:1d,bridge_name='br-int',has_traffic_filtering=True,id=a36441d3-2588-4fff-9190-68df21897dec,network=Network(6512ce78-9132-4dd4-88c2-d82efca10339),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapa36441d3-25')
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.605 2 DEBUG nova.virt.libvirt.driver [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.605 2 DEBUG nova.virt.libvirt.driver [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.605 2 DEBUG nova.virt.libvirt.driver [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] No VIF found with MAC fa:16:3e:6e:e8:1d, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:34:49 compute-0 nova_compute[192079]: 2025-10-02 12:34:49.606 2 INFO nova.virt.libvirt.driver [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Using config drive
Oct 02 12:34:50 compute-0 nova_compute[192079]: 2025-10-02 12:34:50.206 2 INFO nova.virt.libvirt.driver [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Creating config drive at /var/lib/nova/instances/29e46585-0d8d-450d-b3de-d6d103b90a58/disk.config
Oct 02 12:34:50 compute-0 nova_compute[192079]: 2025-10-02 12:34:50.210 2 DEBUG oslo_concurrency.processutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/29e46585-0d8d-450d-b3de-d6d103b90a58/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp5yy6dg6j execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:34:50 compute-0 nova_compute[192079]: 2025-10-02 12:34:50.350 2 DEBUG oslo_concurrency.processutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/29e46585-0d8d-450d-b3de-d6d103b90a58/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp5yy6dg6j" returned: 0 in 0.140s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:34:50 compute-0 kernel: tapa36441d3-25: entered promiscuous mode
Oct 02 12:34:50 compute-0 ovn_controller[94336]: 2025-10-02T12:34:50Z|00588|binding|INFO|Claiming lport a36441d3-2588-4fff-9190-68df21897dec for this chassis.
Oct 02 12:34:50 compute-0 nova_compute[192079]: 2025-10-02 12:34:50.428 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:50 compute-0 NetworkManager[51160]: <info>  [1759408490.4294] manager: (tapa36441d3-25): new Tun device (/org/freedesktop/NetworkManager/Devices/291)
Oct 02 12:34:50 compute-0 ovn_controller[94336]: 2025-10-02T12:34:50Z|00589|binding|INFO|a36441d3-2588-4fff-9190-68df21897dec: Claiming fa:16:3e:6e:e8:1d 10.100.0.9
Oct 02 12:34:50 compute-0 nova_compute[192079]: 2025-10-02 12:34:50.433 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:50.444 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:6e:e8:1d 10.100.0.9'], port_security=['fa:16:3e:6e:e8:1d 10.100.0.9'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.9/28', 'neutron:device_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-6512ce78-9132-4dd4-88c2-d82efca10339', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'neutron:revision_number': '2', 'neutron:security_group_ids': '36435018-4a6e-494b-8da7-cfcae8505cf6', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=360bc7aa-2f85-45e8-93bc-76083b104e89, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=a36441d3-2588-4fff-9190-68df21897dec) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:50.446 103294 INFO neutron.agent.ovn.metadata.agent [-] Port a36441d3-2588-4fff-9190-68df21897dec in datapath 6512ce78-9132-4dd4-88c2-d82efca10339 bound to our chassis
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:50.448 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 6512ce78-9132-4dd4-88c2-d82efca10339
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:50.461 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[10e3eb51-1112-493f-a2dc-a38e810562a5]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:50.462 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap6512ce78-91 in ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:34:50 compute-0 systemd-udevd[246540]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:50.464 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap6512ce78-90 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:50.464 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4f6b110f-9575-4081-9f9c-3a09d56680b3]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:50.464 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[90e0edd6-2e04-40e7-a30f-cc3672ec68b5]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:50 compute-0 systemd-machined[152150]: New machine qemu-74-instance-00000099.
Oct 02 12:34:50 compute-0 NetworkManager[51160]: <info>  [1759408490.4779] device (tapa36441d3-25): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:50.478 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[5604d31d-237c-47ad-833c-c1a4c26f0157]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:50 compute-0 NetworkManager[51160]: <info>  [1759408490.4799] device (tapa36441d3-25): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:34:50 compute-0 nova_compute[192079]: 2025-10-02 12:34:50.486 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:50 compute-0 systemd[1]: Started Virtual Machine qemu-74-instance-00000099.
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:50.495 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6e5f7d51-0075-410c-979d-db31db8cd07a]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:50 compute-0 ovn_controller[94336]: 2025-10-02T12:34:50Z|00590|binding|INFO|Setting lport a36441d3-2588-4fff-9190-68df21897dec ovn-installed in OVS
Oct 02 12:34:50 compute-0 ovn_controller[94336]: 2025-10-02T12:34:50Z|00591|binding|INFO|Setting lport a36441d3-2588-4fff-9190-68df21897dec up in Southbound
Oct 02 12:34:50 compute-0 nova_compute[192079]: 2025-10-02 12:34:50.499 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:50.542 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[d1ecb1d2-53bc-4c5a-9241-4083d65b4a6b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:50 compute-0 systemd-udevd[246544]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:50.549 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[837ef42b-0816-4faf-831e-0d083b41a99a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:50 compute-0 NetworkManager[51160]: <info>  [1759408490.5524] manager: (tap6512ce78-90): new Veth device (/org/freedesktop/NetworkManager/Devices/292)
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:50.588 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[719111ae-eb93-46c6-bb68-ac9080ff37c7]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:50.591 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[a8300260-de51-4505-bb81-bf84abd552c5]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:50 compute-0 NetworkManager[51160]: <info>  [1759408490.6189] device (tap6512ce78-90): carrier: link connected
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:50.625 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[4b85fe2c-931c-400b-84e8-c2356c6eed65]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:50.643 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a0d42516-a384-4981-b699-482f894e2f74]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap6512ce78-91'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:3c:04:0d'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 2, 'rx_bytes': 110, 'tx_bytes': 176, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 2, 'rx_bytes': 110, 'tx_bytes': 176, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 189], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 648824, 'reachable_time': 37638, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 2, 'outoctets': 148, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 2, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 148, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 2, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 246573, 'error': None, 'target': 'ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:50.663 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5d2772c9-138b-4533-9307-bc4cf8bd848f]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe3c:40d'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 648824, 'tstamp': 648824}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 246574, 'error': None, 'target': 'ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:50.690 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b517a7b3-b500-49c5-91aa-734b87ed9696]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap6512ce78-91'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:3c:04:0d'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 2, 'rx_bytes': 110, 'tx_bytes': 176, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 2, 'rx_bytes': 110, 'tx_bytes': 176, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 189], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 648824, 'reachable_time': 37638, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 2, 'outoctets': 148, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 2, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 148, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 2, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 246575, 'error': None, 'target': 'ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:50.733 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[47e047bf-e1ac-4203-b558-8e7f2e6bc8e4]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:50.821 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c321dd3d-d86b-4334-bdac-9ae03151d602]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:50.822 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap6512ce78-90, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:50.823 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:50.823 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap6512ce78-90, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:34:50 compute-0 NetworkManager[51160]: <info>  [1759408490.8257] manager: (tap6512ce78-90): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/293)
Oct 02 12:34:50 compute-0 kernel: tap6512ce78-90: entered promiscuous mode
Oct 02 12:34:50 compute-0 nova_compute[192079]: 2025-10-02 12:34:50.825 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:50.829 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap6512ce78-90, col_values=(('external_ids', {'iface-id': '23796c0f-f19b-4655-83fb-cbec481641fa'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:34:50 compute-0 nova_compute[192079]: 2025-10-02 12:34:50.830 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:50 compute-0 ovn_controller[94336]: 2025-10-02T12:34:50Z|00592|binding|INFO|Releasing lport 23796c0f-f19b-4655-83fb-cbec481641fa from this chassis (sb_readonly=0)
Oct 02 12:34:50 compute-0 nova_compute[192079]: 2025-10-02 12:34:50.850 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:50.851 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/6512ce78-9132-4dd4-88c2-d82efca10339.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/6512ce78-9132-4dd4-88c2-d82efca10339.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:50.852 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2741b5a8-1da8-4f84-b109-b9e7470679e7]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:50.852 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-6512ce78-9132-4dd4-88c2-d82efca10339
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/6512ce78-9132-4dd4-88c2-d82efca10339.pid.haproxy
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 6512ce78-9132-4dd4-88c2-d82efca10339
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:34:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:34:50.853 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339', 'env', 'PROCESS_TAG=haproxy-6512ce78-9132-4dd4-88c2-d82efca10339', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/6512ce78-9132-4dd4-88c2-d82efca10339.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:34:51 compute-0 nova_compute[192079]: 2025-10-02 12:34:51.190 2 DEBUG nova.network.neutron [req-d3d99b4e-d875-44df-9efd-f346fc32e3b5 req-7593bdf4-fdd2-4bdd-83a8-2bd0ec849b0f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Updated VIF entry in instance network info cache for port a36441d3-2588-4fff-9190-68df21897dec. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:34:51 compute-0 nova_compute[192079]: 2025-10-02 12:34:51.192 2 DEBUG nova.network.neutron [req-d3d99b4e-d875-44df-9efd-f346fc32e3b5 req-7593bdf4-fdd2-4bdd-83a8-2bd0ec849b0f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Updating instance_info_cache with network_info: [{"id": "a36441d3-2588-4fff-9190-68df21897dec", "address": "fa:16:3e:6e:e8:1d", "network": {"id": "6512ce78-9132-4dd4-88c2-d82efca10339", "bridge": "br-int", "label": "tempest-network-smoke--937181", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa36441d3-25", "ovs_interfaceid": "a36441d3-2588-4fff-9190-68df21897dec", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:34:51 compute-0 nova_compute[192079]: 2025-10-02 12:34:51.211 2 DEBUG oslo_concurrency.lockutils [req-d3d99b4e-d875-44df-9efd-f346fc32e3b5 req-7593bdf4-fdd2-4bdd-83a8-2bd0ec849b0f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-29e46585-0d8d-450d-b3de-d6d103b90a58" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:34:51 compute-0 podman[246607]: 2025-10-02 12:34:51.287695115 +0000 UTC m=+0.071375905 container create b4c37ab82e9b424e554c6b8d4c505775e75fec335a8fe5d1996a9397464f3805 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS)
Oct 02 12:34:51 compute-0 systemd[1]: Started libpod-conmon-b4c37ab82e9b424e554c6b8d4c505775e75fec335a8fe5d1996a9397464f3805.scope.
Oct 02 12:34:51 compute-0 podman[246607]: 2025-10-02 12:34:51.240187322 +0000 UTC m=+0.023868162 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:34:51 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:34:51 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/9f8ec0911cce4a26d0406caf72028e05f1f30238b65d2ab43f66cb07a4e681ef/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:34:51 compute-0 podman[246607]: 2025-10-02 12:34:51.373873191 +0000 UTC m=+0.157553951 container init b4c37ab82e9b424e554c6b8d4c505775e75fec335a8fe5d1996a9397464f3805 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.build-date=20251001)
Oct 02 12:34:51 compute-0 podman[246607]: 2025-10-02 12:34:51.380292836 +0000 UTC m=+0.163973596 container start b4c37ab82e9b424e554c6b8d4c505775e75fec335a8fe5d1996a9397464f3805 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:34:51 compute-0 podman[246626]: 2025-10-02 12:34:51.388443248 +0000 UTC m=+0.055727508 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', 
'/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:34:51 compute-0 neutron-haproxy-ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339[246630]: [NOTICE]   (246652) : New worker (246655) forked
Oct 02 12:34:51 compute-0 neutron-haproxy-ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339[246630]: [NOTICE]   (246652) : Loading success.
Oct 02 12:34:51 compute-0 nova_compute[192079]: 2025-10-02 12:34:51.679 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:34:51 compute-0 nova_compute[192079]: 2025-10-02 12:34:51.792 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408491.791936, 29e46585-0d8d-450d-b3de-d6d103b90a58 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:34:51 compute-0 nova_compute[192079]: 2025-10-02 12:34:51.792 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] VM Started (Lifecycle Event)
Oct 02 12:34:51 compute-0 nova_compute[192079]: 2025-10-02 12:34:51.821 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:34:51 compute-0 nova_compute[192079]: 2025-10-02 12:34:51.825 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408491.793188, 29e46585-0d8d-450d-b3de-d6d103b90a58 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:34:51 compute-0 nova_compute[192079]: 2025-10-02 12:34:51.825 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] VM Paused (Lifecycle Event)
Oct 02 12:34:51 compute-0 nova_compute[192079]: 2025-10-02 12:34:51.859 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:34:51 compute-0 nova_compute[192079]: 2025-10-02 12:34:51.862 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:34:51 compute-0 nova_compute[192079]: 2025-10-02 12:34:51.888 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:34:52 compute-0 nova_compute[192079]: 2025-10-02 12:34:52.436 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:54 compute-0 nova_compute[192079]: 2025-10-02 12:34:54.531 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:55 compute-0 nova_compute[192079]: 2025-10-02 12:34:55.088 2 DEBUG nova.compute.manager [req-9882add0-e94d-4ca4-b56e-f4f813b57e06 req-fb05543a-0902-4567-a6e7-81c6c1834a5d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Received event network-vif-plugged-a36441d3-2588-4fff-9190-68df21897dec external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:34:55 compute-0 nova_compute[192079]: 2025-10-02 12:34:55.089 2 DEBUG oslo_concurrency.lockutils [req-9882add0-e94d-4ca4-b56e-f4f813b57e06 req-fb05543a-0902-4567-a6e7-81c6c1834a5d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "29e46585-0d8d-450d-b3de-d6d103b90a58-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:34:55 compute-0 nova_compute[192079]: 2025-10-02 12:34:55.089 2 DEBUG oslo_concurrency.lockutils [req-9882add0-e94d-4ca4-b56e-f4f813b57e06 req-fb05543a-0902-4567-a6e7-81c6c1834a5d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "29e46585-0d8d-450d-b3de-d6d103b90a58-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:34:55 compute-0 nova_compute[192079]: 2025-10-02 12:34:55.090 2 DEBUG oslo_concurrency.lockutils [req-9882add0-e94d-4ca4-b56e-f4f813b57e06 req-fb05543a-0902-4567-a6e7-81c6c1834a5d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "29e46585-0d8d-450d-b3de-d6d103b90a58-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:34:55 compute-0 nova_compute[192079]: 2025-10-02 12:34:55.090 2 DEBUG nova.compute.manager [req-9882add0-e94d-4ca4-b56e-f4f813b57e06 req-fb05543a-0902-4567-a6e7-81c6c1834a5d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Processing event network-vif-plugged-a36441d3-2588-4fff-9190-68df21897dec _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:34:55 compute-0 nova_compute[192079]: 2025-10-02 12:34:55.091 2 DEBUG nova.compute.manager [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Instance event wait completed in 3 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:34:55 compute-0 nova_compute[192079]: 2025-10-02 12:34:55.097 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408495.0969756, 29e46585-0d8d-450d-b3de-d6d103b90a58 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:34:55 compute-0 nova_compute[192079]: 2025-10-02 12:34:55.098 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] VM Resumed (Lifecycle Event)
Oct 02 12:34:55 compute-0 nova_compute[192079]: 2025-10-02 12:34:55.102 2 DEBUG nova.virt.libvirt.driver [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:34:55 compute-0 nova_compute[192079]: 2025-10-02 12:34:55.107 2 INFO nova.virt.libvirt.driver [-] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Instance spawned successfully.
Oct 02 12:34:55 compute-0 nova_compute[192079]: 2025-10-02 12:34:55.108 2 DEBUG nova.virt.libvirt.driver [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:34:55 compute-0 nova_compute[192079]: 2025-10-02 12:34:55.135 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:34:55 compute-0 nova_compute[192079]: 2025-10-02 12:34:55.142 2 DEBUG nova.virt.libvirt.driver [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:34:55 compute-0 nova_compute[192079]: 2025-10-02 12:34:55.143 2 DEBUG nova.virt.libvirt.driver [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:34:55 compute-0 nova_compute[192079]: 2025-10-02 12:34:55.144 2 DEBUG nova.virt.libvirt.driver [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:34:55 compute-0 nova_compute[192079]: 2025-10-02 12:34:55.144 2 DEBUG nova.virt.libvirt.driver [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:34:55 compute-0 nova_compute[192079]: 2025-10-02 12:34:55.145 2 DEBUG nova.virt.libvirt.driver [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:34:55 compute-0 nova_compute[192079]: 2025-10-02 12:34:55.146 2 DEBUG nova.virt.libvirt.driver [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:34:55 compute-0 nova_compute[192079]: 2025-10-02 12:34:55.153 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:34:55 compute-0 nova_compute[192079]: 2025-10-02 12:34:55.191 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:34:55 compute-0 nova_compute[192079]: 2025-10-02 12:34:55.241 2 INFO nova.compute.manager [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Took 11.91 seconds to spawn the instance on the hypervisor.
Oct 02 12:34:55 compute-0 nova_compute[192079]: 2025-10-02 12:34:55.242 2 DEBUG nova.compute.manager [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:34:55 compute-0 nova_compute[192079]: 2025-10-02 12:34:55.316 2 INFO nova.compute.manager [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Took 12.45 seconds to build instance.
Oct 02 12:34:55 compute-0 nova_compute[192079]: 2025-10-02 12:34:55.329 2 DEBUG oslo_concurrency.lockutils [None req-2a164edf-de75-4070-9f20-950dd1fbf352 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "29e46585-0d8d-450d-b3de-d6d103b90a58" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 12.558s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:34:57 compute-0 nova_compute[192079]: 2025-10-02 12:34:57.258 2 DEBUG nova.compute.manager [req-4ddcf326-f58f-44e7-9100-681d1e48dc30 req-0c9b4d16-93eb-4acb-914e-3bfcd99962d5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Received event network-vif-plugged-a36441d3-2588-4fff-9190-68df21897dec external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:34:57 compute-0 nova_compute[192079]: 2025-10-02 12:34:57.258 2 DEBUG oslo_concurrency.lockutils [req-4ddcf326-f58f-44e7-9100-681d1e48dc30 req-0c9b4d16-93eb-4acb-914e-3bfcd99962d5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "29e46585-0d8d-450d-b3de-d6d103b90a58-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:34:57 compute-0 nova_compute[192079]: 2025-10-02 12:34:57.259 2 DEBUG oslo_concurrency.lockutils [req-4ddcf326-f58f-44e7-9100-681d1e48dc30 req-0c9b4d16-93eb-4acb-914e-3bfcd99962d5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "29e46585-0d8d-450d-b3de-d6d103b90a58-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:34:57 compute-0 nova_compute[192079]: 2025-10-02 12:34:57.259 2 DEBUG oslo_concurrency.lockutils [req-4ddcf326-f58f-44e7-9100-681d1e48dc30 req-0c9b4d16-93eb-4acb-914e-3bfcd99962d5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "29e46585-0d8d-450d-b3de-d6d103b90a58-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:34:57 compute-0 nova_compute[192079]: 2025-10-02 12:34:57.259 2 DEBUG nova.compute.manager [req-4ddcf326-f58f-44e7-9100-681d1e48dc30 req-0c9b4d16-93eb-4acb-914e-3bfcd99962d5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] No waiting events found dispatching network-vif-plugged-a36441d3-2588-4fff-9190-68df21897dec pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:34:57 compute-0 nova_compute[192079]: 2025-10-02 12:34:57.259 2 WARNING nova.compute.manager [req-4ddcf326-f58f-44e7-9100-681d1e48dc30 req-0c9b4d16-93eb-4acb-914e-3bfcd99962d5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Received unexpected event network-vif-plugged-a36441d3-2588-4fff-9190-68df21897dec for instance with vm_state active and task_state None.
Oct 02 12:34:57 compute-0 nova_compute[192079]: 2025-10-02 12:34:57.438 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:58 compute-0 podman[246665]: 2025-10-02 12:34:58.156811758 +0000 UTC m=+0.064219240 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, vcs-type=git, vendor=Red Hat, Inc., config_id=edpm, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., container_name=openstack_network_exporter, io.openshift.expose-services=, io.openshift.tags=minimal rhel9, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, architecture=x86_64, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, distribution-scope=public, name=ubi9-minimal, 
build-date=2025-08-20T13:12:41, io.buildah.version=1.33.7, managed_by=edpm_ansible, release=1755695350, version=9.6, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., maintainer=Red Hat, Inc., com.redhat.component=ubi9-minimal-container, url=https://catalog.redhat.com/en/search?searchType=containers, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal)
Oct 02 12:34:58 compute-0 podman[246666]: 2025-10-02 12:34:58.15982572 +0000 UTC m=+0.060913340 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=multipathd, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, tcib_managed=true, container_name=multipathd)
Oct 02 12:34:58 compute-0 NetworkManager[51160]: <info>  [1759408498.6613] manager: (patch-br-int-to-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/294)
Oct 02 12:34:58 compute-0 NetworkManager[51160]: <info>  [1759408498.6624] manager: (patch-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d-to-br-int): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/295)
Oct 02 12:34:58 compute-0 nova_compute[192079]: 2025-10-02 12:34:58.660 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:58 compute-0 nova_compute[192079]: 2025-10-02 12:34:58.847 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:58 compute-0 ovn_controller[94336]: 2025-10-02T12:34:58Z|00593|binding|INFO|Releasing lport 23796c0f-f19b-4655-83fb-cbec481641fa from this chassis (sb_readonly=0)
Oct 02 12:34:58 compute-0 nova_compute[192079]: 2025-10-02 12:34:58.883 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:34:59 compute-0 nova_compute[192079]: 2025-10-02 12:34:59.354 2 DEBUG nova.compute.manager [req-5ab82f80-bdea-4280-995c-97ce0589f4cf req-1a3b4302-5a26-4386-8aa8-83daad75736e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Received event network-changed-a36441d3-2588-4fff-9190-68df21897dec external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:34:59 compute-0 nova_compute[192079]: 2025-10-02 12:34:59.355 2 DEBUG nova.compute.manager [req-5ab82f80-bdea-4280-995c-97ce0589f4cf req-1a3b4302-5a26-4386-8aa8-83daad75736e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Refreshing instance network info cache due to event network-changed-a36441d3-2588-4fff-9190-68df21897dec. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:34:59 compute-0 nova_compute[192079]: 2025-10-02 12:34:59.355 2 DEBUG oslo_concurrency.lockutils [req-5ab82f80-bdea-4280-995c-97ce0589f4cf req-1a3b4302-5a26-4386-8aa8-83daad75736e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-29e46585-0d8d-450d-b3de-d6d103b90a58" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:34:59 compute-0 nova_compute[192079]: 2025-10-02 12:34:59.355 2 DEBUG oslo_concurrency.lockutils [req-5ab82f80-bdea-4280-995c-97ce0589f4cf req-1a3b4302-5a26-4386-8aa8-83daad75736e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-29e46585-0d8d-450d-b3de-d6d103b90a58" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:34:59 compute-0 nova_compute[192079]: 2025-10-02 12:34:59.355 2 DEBUG nova.network.neutron [req-5ab82f80-bdea-4280-995c-97ce0589f4cf req-1a3b4302-5a26-4386-8aa8-83daad75736e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Refreshing network info cache for port a36441d3-2588-4fff-9190-68df21897dec _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:34:59 compute-0 nova_compute[192079]: 2025-10-02 12:34:59.534 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:00 compute-0 nova_compute[192079]: 2025-10-02 12:35:00.474 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_power_states run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:35:00 compute-0 nova_compute[192079]: 2025-10-02 12:35:00.507 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Triggering sync for uuid 29e46585-0d8d-450d-b3de-d6d103b90a58 _sync_power_states /usr/lib/python3.9/site-packages/nova/compute/manager.py:10268
Oct 02 12:35:00 compute-0 nova_compute[192079]: 2025-10-02 12:35:00.508 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "29e46585-0d8d-450d-b3de-d6d103b90a58" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:35:00 compute-0 nova_compute[192079]: 2025-10-02 12:35:00.508 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "29e46585-0d8d-450d-b3de-d6d103b90a58" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:35:00 compute-0 nova_compute[192079]: 2025-10-02 12:35:00.561 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "29e46585-0d8d-450d-b3de-d6d103b90a58" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.053s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:35:00 compute-0 nova_compute[192079]: 2025-10-02 12:35:00.675 2 DEBUG nova.network.neutron [req-5ab82f80-bdea-4280-995c-97ce0589f4cf req-1a3b4302-5a26-4386-8aa8-83daad75736e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Updated VIF entry in instance network info cache for port a36441d3-2588-4fff-9190-68df21897dec. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:35:00 compute-0 nova_compute[192079]: 2025-10-02 12:35:00.675 2 DEBUG nova.network.neutron [req-5ab82f80-bdea-4280-995c-97ce0589f4cf req-1a3b4302-5a26-4386-8aa8-83daad75736e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Updating instance_info_cache with network_info: [{"id": "a36441d3-2588-4fff-9190-68df21897dec", "address": "fa:16:3e:6e:e8:1d", "network": {"id": "6512ce78-9132-4dd4-88c2-d82efca10339", "bridge": "br-int", "label": "tempest-network-smoke--937181", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.233", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa36441d3-25", "ovs_interfaceid": "a36441d3-2588-4fff-9190-68df21897dec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:35:00 compute-0 nova_compute[192079]: 2025-10-02 12:35:00.705 2 DEBUG oslo_concurrency.lockutils [req-5ab82f80-bdea-4280-995c-97ce0589f4cf req-1a3b4302-5a26-4386-8aa8-83daad75736e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-29e46585-0d8d-450d-b3de-d6d103b90a58" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:35:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:02.238 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:35:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:02.238 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:35:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:02.239 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:35:02 compute-0 nova_compute[192079]: 2025-10-02 12:35:02.474 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:03 compute-0 podman[246704]: 2025-10-02 12:35:03.160776955 +0000 UTC m=+0.062644166 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']})
Oct 02 12:35:03 compute-0 podman[246705]: 2025-10-02 12:35:03.18446641 +0000 UTC m=+0.094062752 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0)
Oct 02 12:35:04 compute-0 nova_compute[192079]: 2025-10-02 12:35:04.538 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:07 compute-0 nova_compute[192079]: 2025-10-02 12:35:07.477 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:07 compute-0 ovn_controller[94336]: 2025-10-02T12:35:07Z|00063|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:6e:e8:1d 10.100.0.9
Oct 02 12:35:07 compute-0 ovn_controller[94336]: 2025-10-02T12:35:07Z|00064|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:6e:e8:1d 10.100.0.9
Oct 02 12:35:08 compute-0 nova_compute[192079]: 2025-10-02 12:35:08.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:35:09 compute-0 nova_compute[192079]: 2025-10-02 12:35:09.541 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:11 compute-0 podman[246761]: 2025-10-02 12:35:11.175881188 +0000 UTC m=+0.076129824 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, config_id=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', 
'/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_metadata_agent, managed_by=edpm_ansible, org.label-schema.license=GPLv2)
Oct 02 12:35:11 compute-0 podman[246763]: 2025-10-02 12:35:11.210064749 +0000 UTC m=+0.106874811 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']})
Oct 02 12:35:11 compute-0 podman[246762]: 2025-10-02 12:35:11.240429845 +0000 UTC m=+0.137142694 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, config_id=ovn_controller, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:35:12 compute-0 nova_compute[192079]: 2025-10-02 12:35:12.481 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:13 compute-0 nova_compute[192079]: 2025-10-02 12:35:13.725 2 INFO nova.compute.manager [None req-2d90af22-9cb4-4743-9ddc-e7b129deaa8d a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Get console output
Oct 02 12:35:13 compute-0 nova_compute[192079]: 2025-10-02 12:35:13.730 55 INFO nova.privsep.libvirt [-] Ignored error while reading from instance console pty: can't concat NoneType to bytes
Oct 02 12:35:14 compute-0 nova_compute[192079]: 2025-10-02 12:35:14.543 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:14 compute-0 nova_compute[192079]: 2025-10-02 12:35:14.683 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._run_pending_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:35:14 compute-0 nova_compute[192079]: 2025-10-02 12:35:14.683 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Cleaning up deleted instances _run_pending_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:11145
Oct 02 12:35:14 compute-0 nova_compute[192079]: 2025-10-02 12:35:14.699 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] There are 0 instances to clean _run_pending_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:11154
Oct 02 12:35:15 compute-0 nova_compute[192079]: 2025-10-02 12:35:15.286 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:15 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:15.286 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=39, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=38) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:35:15 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:15.288 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 7 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:35:15 compute-0 nova_compute[192079]: 2025-10-02 12:35:15.333 2 DEBUG nova.compute.manager [req-0790b4e6-a720-4138-a8cb-99e0c7ca3f9d req-3540f493-4694-4472-abb9-90916e65f50c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Received event network-changed-a36441d3-2588-4fff-9190-68df21897dec external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:35:15 compute-0 nova_compute[192079]: 2025-10-02 12:35:15.334 2 DEBUG nova.compute.manager [req-0790b4e6-a720-4138-a8cb-99e0c7ca3f9d req-3540f493-4694-4472-abb9-90916e65f50c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Refreshing instance network info cache due to event network-changed-a36441d3-2588-4fff-9190-68df21897dec. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:35:15 compute-0 nova_compute[192079]: 2025-10-02 12:35:15.334 2 DEBUG oslo_concurrency.lockutils [req-0790b4e6-a720-4138-a8cb-99e0c7ca3f9d req-3540f493-4694-4472-abb9-90916e65f50c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-29e46585-0d8d-450d-b3de-d6d103b90a58" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:35:15 compute-0 nova_compute[192079]: 2025-10-02 12:35:15.335 2 DEBUG oslo_concurrency.lockutils [req-0790b4e6-a720-4138-a8cb-99e0c7ca3f9d req-3540f493-4694-4472-abb9-90916e65f50c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-29e46585-0d8d-450d-b3de-d6d103b90a58" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:35:15 compute-0 nova_compute[192079]: 2025-10-02 12:35:15.335 2 DEBUG nova.network.neutron [req-0790b4e6-a720-4138-a8cb-99e0c7ca3f9d req-3540f493-4694-4472-abb9-90916e65f50c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Refreshing network info cache for port a36441d3-2588-4fff-9190-68df21897dec _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:35:16 compute-0 nova_compute[192079]: 2025-10-02 12:35:16.436 2 DEBUG nova.network.neutron [req-0790b4e6-a720-4138-a8cb-99e0c7ca3f9d req-3540f493-4694-4472-abb9-90916e65f50c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Updated VIF entry in instance network info cache for port a36441d3-2588-4fff-9190-68df21897dec. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:35:16 compute-0 nova_compute[192079]: 2025-10-02 12:35:16.437 2 DEBUG nova.network.neutron [req-0790b4e6-a720-4138-a8cb-99e0c7ca3f9d req-3540f493-4694-4472-abb9-90916e65f50c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Updating instance_info_cache with network_info: [{"id": "a36441d3-2588-4fff-9190-68df21897dec", "address": "fa:16:3e:6e:e8:1d", "network": {"id": "6512ce78-9132-4dd4-88c2-d82efca10339", "bridge": "br-int", "label": "tempest-network-smoke--937181", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa36441d3-25", "ovs_interfaceid": "a36441d3-2588-4fff-9190-68df21897dec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:35:16 compute-0 nova_compute[192079]: 2025-10-02 12:35:16.473 2 DEBUG oslo_concurrency.lockutils [req-0790b4e6-a720-4138-a8cb-99e0c7ca3f9d req-3540f493-4694-4472-abb9-90916e65f50c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-29e46585-0d8d-450d-b3de-d6d103b90a58" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.113 12 DEBUG ceilometer.compute.discovery [-] instance data: {'id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'name': 'tempest-TestNetworkBasicOps-server-1074026563', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'os_type': 'hvm', 'architecture': 'x86_64', 'OS-EXT-SRV-ATTR:instance_name': 'instance-00000099', 'OS-EXT-SRV-ATTR:host': 'compute-0.ctlplane.example.com', 'OS-EXT-STS:vm_state': 'running', 'tenant_id': '6e2a4899168a47618e377cb3ac85ddd2', 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'hostId': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'status': 'active', 'metadata': {}} discover_libvirt_polling /usr/lib/python3.9/site-packages/ceilometer/compute/discovery.py:228
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.114 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes.delta in the context of pollsters
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.116 12 DEBUG ceilometer.compute.virt.libvirt.inspector [-] No delta meter predecessor for 29e46585-0d8d-450d-b3de-d6d103b90a58 / tapa36441d3-25 inspect_vnics /usr/lib/python3.9/site-packages/ceilometer/compute/virt/libvirt/inspector.py:136
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.116 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/network.outgoing.bytes.delta volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'f4a1c0ca-4830-452d-89cf-9d1850d06791', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.bytes.delta', 'counter_type': 'delta', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'instance-00000099-29e46585-0d8d-450d-b3de-d6d103b90a58-tapa36441d3-25', 'timestamp': '2025-10-02T12:35:17.114243', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'tapa36441d3-25', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:6e:e8:1d', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapa36441d3-25'}, 'message_id': '4047d35e-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.80130728, 'message_signature': '3927afd307b618ef3011e1a14c5da284a0cb0c9e8b694fc0d1587e012bfe9313'}]}, 'timestamp': '2025-10-02 12:35:17.117386', '_unique_id': 'c5047ae7edb5448ba9f6cfce7c84d7f6'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.118 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.119 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets.error in the context of pollsters
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.119 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/network.incoming.packets.error volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'd15fa479-0991-42d1-96ba-9d9c4f5ab991', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets.error', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'instance-00000099-29e46585-0d8d-450d-b3de-d6d103b90a58-tapa36441d3-25', 'timestamp': '2025-10-02T12:35:17.119387', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'tapa36441d3-25', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:6e:e8:1d', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapa36441d3-25'}, 'message_id': '40482cfa-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.80130728, 'message_signature': '1e4e14334db4dde15e1209b57eb017dd658f3844fd5bf4070df448ac60842f47'}]}, 'timestamp': '2025-10-02 12:35:17.119626', '_unique_id': '43a0eb20d655424ba439f4719d523096'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.120 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.usage in the context of pollsters
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.132 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/disk.device.usage volume: 29949952 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.132 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/disk.device.usage volume: 485376 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'afb86a48-8d15-4b1e-b1d8-5899449e26b6', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 29949952, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': '29e46585-0d8d-450d-b3de-d6d103b90a58-vda', 'timestamp': '2025-10-02T12:35:17.120925', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'instance-00000099', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '404a2654-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.807994133, 'message_signature': '0996cd111fa6f2889b58f6af91cd0a811c112a6cae801b06349dc355f648a41e'}, {'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 485376, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 
'29e46585-0d8d-450d-b3de-d6d103b90a58-sda', 'timestamp': '2025-10-02T12:35:17.120925', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'instance-00000099', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '404a3086-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.807994133, 'message_signature': '146641e51d4140a38450b0f0e3d9a4f089e358f6e75ce332f7a9264c7f0119da'}]}, 'timestamp': '2025-10-02 12:35:17.132820', '_unique_id': 'e00dc5c581f44a84b07f6d6a3946d8af'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.133 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.134 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets.drop in the context of pollsters
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.134 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/network.outgoing.packets.drop volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '70a9b486-26e8-4f99-a5be-82c6acdd20d5', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets.drop', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'instance-00000099-29e46585-0d8d-450d-b3de-d6d103b90a58-tapa36441d3-25', 'timestamp': '2025-10-02T12:35:17.134380', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'tapa36441d3-25', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:6e:e8:1d', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapa36441d3-25'}, 'message_id': '404a76ae-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.80130728, 'message_signature': '4860cab689e334ecc5846f9f490925c142518d03cc455f8cf49bb6bf32821b59'}]}, 'timestamp': '2025-10-02 12:35:17.134664', '_unique_id': 'fcb78f6237be454cb8ad4898d86213a5'}: kombu.exceptions.OperationalError: [Errno 111] Connection 
refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.135 12 INFO ceilometer.polling.manager [-] Polling pollster cpu in the context of pollsters
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.152 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/cpu volume: 11770000000 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'e44a570f-1453-4cda-a362-9a69a485988e', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'cpu', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 11770000000, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'timestamp': '2025-10-02T12:35:17.135744', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'instance-00000099', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'cpu_number': 1}, 'message_id': '404d3fec-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.839353947, 'message_signature': '553edc9e3cc1544f670d54ec8adb911597772d06fd836996173095a7d6bdbfc4'}]}, 'timestamp': '2025-10-02 12:35:17.152974', '_unique_id': '635c861282254ce0a7682bdf5805a95c'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.153 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.155 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.iops in the context of pollsters
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.155 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for PerDeviceDiskIOPSPollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.155 12 ERROR ceilometer.polling.manager [-] Prevent pollster disk.device.iops from polling [<NovaLikeServer: tempest-TestNetworkBasicOps-server-1074026563>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-TestNetworkBasicOps-server-1074026563>]
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.155 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.bytes in the context of pollsters
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.178 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/disk.device.write.bytes volume: 72921088 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.178 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/disk.device.write.bytes volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '7728af3f-2abb-4ceb-bbe1-7b833cee412e', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 72921088, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': '29e46585-0d8d-450d-b3de-d6d103b90a58-vda', 'timestamp': '2025-10-02T12:35:17.155800', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'instance-00000099', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '405131ce-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.842868112, 'message_signature': '37278453ad178797dd9c4965f3ac33c4690d9188977e331a78e76bda33d92fc9'}, {'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 
'resource_id': '29e46585-0d8d-450d-b3de-d6d103b90a58-sda', 'timestamp': '2025-10-02T12:35:17.155800', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'instance-00000099', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '40513fe8-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.842868112, 'message_signature': 'd679290abad8ded7db4d52b9f63005fc8317bc955deb5685d089832c5bfd06da'}]}, 'timestamp': '2025-10-02 12:35:17.179158', '_unique_id': 'a04f33bd32264720a0493a156adf0a6a'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.latency in the context of pollsters
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.180 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for PerDeviceDiskLatencyPollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.181 12 ERROR ceilometer.polling.manager [-] Prevent pollster disk.device.latency from polling [<NovaLikeServer: tempest-TestNetworkBasicOps-server-1074026563>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-TestNetworkBasicOps-server-1074026563>]
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.181 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.capacity in the context of pollsters
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.181 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/disk.device.capacity volume: 1073741824 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.181 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/disk.device.capacity volume: 485376 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'ef1e4573-15bf-4c53-8ac3-72af6a7aec76', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 1073741824, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': '29e46585-0d8d-450d-b3de-d6d103b90a58-vda', 'timestamp': '2025-10-02T12:35:17.181265', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'instance-00000099', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '40519d8a-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.807994133, 'message_signature': '95161d6bac73d6b2f913e82392e3793b419ee1ae8c2cc4bf05fecbb937007a77'}, {'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 485376, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 
'29e46585-0d8d-450d-b3de-d6d103b90a58-sda', 'timestamp': '2025-10-02T12:35:17.181265', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'instance-00000099', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '4051a582-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.807994133, 'message_signature': '16481ee7ad97d71ceadad4e3a5700ab3abe5ada344cf1e32b3ec8871780432d1'}]}, 'timestamp': '2025-10-02 12:35:17.181685', '_unique_id': 'd74a5d837dd84db3852aa9974c927ee8'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.requests in the context of pollsters
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.182 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/disk.device.write.requests volume: 315 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/disk.device.write.requests volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '4f14b5ab-56d2-4617-bf52-2f757aec9c8e', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 315, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': '29e46585-0d8d-450d-b3de-d6d103b90a58-vda', 'timestamp': '2025-10-02T12:35:17.182814', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'instance-00000099', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '4051db6a-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.842868112, 'message_signature': '89c48ed52269ff929c3defffd79b540c5faa74a641e56ed66d82c33fe0c421cd'}, {'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 0, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': 
None, 'resource_id': '29e46585-0d8d-450d-b3de-d6d103b90a58-sda', 'timestamp': '2025-10-02T12:35:17.182814', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'instance-00000099', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '4051e4c0-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.842868112, 'message_signature': 'b5a8abae05395d8aa13966281de1cb1614ffb26cca359a65a913c60d930409c3'}]}, 'timestamp': '2025-10-02 12:35:17.183319', '_unique_id': 'b6662ecef76b4245b476f96c191c298b'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.183 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.184 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets.drop in the context of pollsters
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.184 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/network.incoming.packets.drop volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '82da5420-daa8-41fb-b7f3-6815672d1dbc', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets.drop', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'instance-00000099-29e46585-0d8d-450d-b3de-d6d103b90a58-tapa36441d3-25', 'timestamp': '2025-10-02T12:35:17.184419', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'tapa36441d3-25', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:6e:e8:1d', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapa36441d3-25'}, 'message_id': '40521904-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.80130728, 'message_signature': '3e7aa6f0ec35d6500b100438ea16b9f90120482e6806d79b38e7f5f00f362ea8'}]}, 'timestamp': '2025-10-02 12:35:17.184646', '_unique_id': 'f34cf3b9d7704c219309ef86af480b9f'}: kombu.exceptions.OperationalError: [Errno 111] Connection 
refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.requests in the context of pollsters
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/disk.device.read.requests volume: 1099 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.185 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/disk.device.read.requests volume: 108 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '61ae83a1-8691-4d99-8e7f-9bc0ee3ffbf3', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 1099, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': '29e46585-0d8d-450d-b3de-d6d103b90a58-vda', 'timestamp': '2025-10-02T12:35:17.185743', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'instance-00000099', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '40524c8a-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.842868112, 'message_signature': '4da7798f2392e7f8ac04f4614c98a018f7317dd4710b7fee48cbe492cdc42d79'}, {'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 108, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': 
None, 'resource_id': '29e46585-0d8d-450d-b3de-d6d103b90a58-sda', 'timestamp': '2025-10-02T12:35:17.185743', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'instance-00000099', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '40525504-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.842868112, 'message_signature': '93a956d507641951fc6df291e884bbf94a1d79aa5f4ccdd6bfcc3728f511e98f'}]}, 'timestamp': '2025-10-02 12:35:17.186203', '_unique_id': '657cf1eec18f4fc3afc2f87d98e8738f'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.186 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.187 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes.rate in the context of pollsters
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.187 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for IncomingBytesRatePollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.187 12 ERROR ceilometer.polling.manager [-] Prevent pollster network.incoming.bytes.rate from polling [<NovaLikeServer: tempest-TestNetworkBasicOps-server-1074026563>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-TestNetworkBasicOps-server-1074026563>]
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.187 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.bytes in the context of pollsters
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.187 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/disk.device.read.bytes volume: 30534144 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.187 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/disk.device.read.bytes volume: 274750 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'a7c9f66b-e561-4921-b5f9-4eff4046d520', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 30534144, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': '29e46585-0d8d-450d-b3de-d6d103b90a58-vda', 'timestamp': '2025-10-02T12:35:17.187737', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'instance-00000099', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '40529b86-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.842868112, 'message_signature': '06eac8457937e1ac3cf07475b21a620525ec1f08a7e96139ace9a5df1f8d3e23'}, {'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 274750, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 
'resource_id': '29e46585-0d8d-450d-b3de-d6d103b90a58-sda', 'timestamp': '2025-10-02T12:35:17.187737', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'instance-00000099', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '4052a45a-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.842868112, 'message_signature': '808551abd26cd13aa60967b7298cbec86579ce0acf4ab9c4809e26d61ca5ab18'}]}, 'timestamp': '2025-10-02 12:35:17.188198', '_unique_id': 'eb524574946d42abab2da3803abcaf96'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.188 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.189 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.latency in the context of pollsters
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.189 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/disk.device.read.latency volume: 547641735 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.189 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/disk.device.read.latency volume: 33761042 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '7a98ac51-5376-4006-922e-fa54c2c00bea', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 547641735, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': '29e46585-0d8d-450d-b3de-d6d103b90a58-vda', 'timestamp': '2025-10-02T12:35:17.189336', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'instance-00000099', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '4052d8ee-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.842868112, 'message_signature': 'fd3048e81bc8a88ed675aeb9428709c2a478632888ee3fe23e72317347c854df'}, {'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 33761042, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 
'resource_id': '29e46585-0d8d-450d-b3de-d6d103b90a58-sda', 'timestamp': '2025-10-02T12:35:17.189336', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'instance-00000099', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '4052e064-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.842868112, 'message_signature': 'e4ca960b6d05ac0ae624d1439c7db23d4d35386184724e2a72093bdce9259463'}]}, 'timestamp': '2025-10-02 12:35:17.189764', '_unique_id': 'a97b6ecdf6504313827c2c056ada24dd'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.190 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.191 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes.rate in the context of pollsters
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.191 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for OutgoingBytesRatePollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.191 12 ERROR ceilometer.polling.manager [-] Prevent pollster network.outgoing.bytes.rate from polling [<NovaLikeServer: tempest-TestNetworkBasicOps-server-1074026563>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-TestNetworkBasicOps-server-1074026563>]
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.191 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets in the context of pollsters
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.191 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/network.outgoing.packets volume: 109 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '09176cdd-8e90-433f-a76e-e59f8558f34c', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 109, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'instance-00000099-29e46585-0d8d-450d-b3de-d6d103b90a58-tapa36441d3-25', 'timestamp': '2025-10-02T12:35:17.191520', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'tapa36441d3-25', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:6e:e8:1d', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapa36441d3-25'}, 'message_id': '40532fce-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.80130728, 'message_signature': '13b0022ff6c3dbd9aecd6037a71a8be6bf5910f4d0b44c60093a7a2af79ffe6a'}]}, 'timestamp': '2025-10-02 12:35:17.191819', '_unique_id': 'e3046b8d66f54b209a330aa6a016497e'}: kombu.exceptions.OperationalError: [Errno 111] Connection 
refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.192 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.193 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.allocation in the context of pollsters
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.193 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/disk.device.allocation volume: 30679040 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.193 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/disk.device.allocation volume: 487424 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '231f5361-195c-4697-87c0-ed23dc24d0df', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 30679040, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': '29e46585-0d8d-450d-b3de-d6d103b90a58-vda', 'timestamp': '2025-10-02T12:35:17.193197', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'instance-00000099', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '4053707e-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.807994133, 'message_signature': 'acadc57bc71cc21131284a559add9958176a9dd549d8e4b802e7bcb946e9b905'}, {'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 487424, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 
'29e46585-0d8d-450d-b3de-d6d103b90a58-sda', 'timestamp': '2025-10-02T12:35:17.193197', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'instance-00000099', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '405379de-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.807994133, 'message_signature': '1817d026473df44979f96337ed9afb996813bd55563ecbe522a989f615490a7e'}]}, 'timestamp': '2025-10-02 12:35:17.193693', '_unique_id': 'd5d0c0988df743448703597966f59487'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.194 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes in the context of pollsters
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/network.incoming.bytes volume: 19424 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '37bb63a1-0b41-4c45-9197-18d93ebbaf20', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 19424, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'instance-00000099-29e46585-0d8d-450d-b3de-d6d103b90a58-tapa36441d3-25', 'timestamp': '2025-10-02T12:35:17.195096', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'tapa36441d3-25', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:6e:e8:1d', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapa36441d3-25'}, 'message_id': '4053bb1a-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.80130728, 'message_signature': '3fa361ae593575961336f3b3ff96cabe87d49115fbfce1fe106f7f88121e2095'}]}, 'timestamp': '2025-10-02 12:35:17.195389', '_unique_id': '6aec9bde377241828ce512055f38add1'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.195 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.196 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes.delta in the context of pollsters
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.196 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/network.incoming.bytes.delta volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '1f827096-db34-42f8-8dcf-86ae4236f51a', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.bytes.delta', 'counter_type': 'delta', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'instance-00000099-29e46585-0d8d-450d-b3de-d6d103b90a58-tapa36441d3-25', 'timestamp': '2025-10-02T12:35:17.196762', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'tapa36441d3-25', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:6e:e8:1d', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapa36441d3-25'}, 'message_id': '4053fc24-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.80130728, 'message_signature': '6fc232ffd15be5b083ab9bf544a53c24f57c6a0352cc3bdfc00641f88281f8ea'}]}, 'timestamp': '2025-10-02 12:35:17.197077', '_unique_id': '343803e144b54232a447e0fc4cfba8f8'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.197 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.198 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets.error in the context of pollsters
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.198 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/network.outgoing.packets.error volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '4f6f845c-6bd1-41cc-b32f-92dcef46278c', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets.error', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'instance-00000099-29e46585-0d8d-450d-b3de-d6d103b90a58-tapa36441d3-25', 'timestamp': '2025-10-02T12:35:17.198377', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'tapa36441d3-25', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:6e:e8:1d', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapa36441d3-25'}, 'message_id': '40543a9a-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.80130728, 'message_signature': 'ef16def51440de3a2001de65c5d305bbac5e1e7b1b2f552a329166f0cd3d4eb1'}]}, 'timestamp': '2025-10-02 12:35:17.198645', '_unique_id': '1acb4f1cbfb34b3a8de1c3a009f47dcf'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.199 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes in the context of pollsters
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/network.outgoing.bytes volume: 16014 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '0168e9ba-2c3c-41c4-a7d9-82c0c72303f2', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 16014, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'instance-00000099-29e46585-0d8d-450d-b3de-d6d103b90a58-tapa36441d3-25', 'timestamp': '2025-10-02T12:35:17.200083', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'tapa36441d3-25', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:6e:e8:1d', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapa36441d3-25'}, 'message_id': '40547e2e-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.80130728, 'message_signature': '2f8b786d9bfd489c542411d15f567887b7a91bfc8042e8cc6ea09f00f4218104'}]}, 'timestamp': '2025-10-02 12:35:17.200376', '_unique_id': '73ba7f98222c402891b88c54038b8a54'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.200 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.201 12 INFO ceilometer.polling.manager [-] Polling pollster memory.usage in the context of pollsters
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.201 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/memory.usage volume: 42.6171875 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '2beab5f3-0987-4d1c-abbc-4acdd8723740', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'memory.usage', 'counter_type': 'gauge', 'counter_unit': 'MB', 'counter_volume': 42.6171875, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'timestamp': '2025-10-02T12:35:17.201453', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'instance-00000099', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1}, 'message_id': '4054b236-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.839353947, 'message_signature': '13af3829a994b76f631ad862238386a19a91374f68cdacf6892f761b688e6328'}]}, 'timestamp': '2025-10-02 12:35:17.201667', '_unique_id': '46024e094e9b4559b7bad8a6a4daf399'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.latency in the context of pollsters
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.202 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/disk.device.write.latency volume: 1301168540 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/disk.device.write.latency volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '0510ec10-71f3-4730-a7ab-2c01a3982c18', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 1301168540, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': '29e46585-0d8d-450d-b3de-d6d103b90a58-vda', 'timestamp': '2025-10-02T12:35:17.202776', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'instance-00000099', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '4054e5d0-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.842868112, 'message_signature': '62bd58b996c68e796f004ca44674c049e4e55e94627e8a2682fc6ade1867e7b6'}, {'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 0, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 
'resource_id': '29e46585-0d8d-450d-b3de-d6d103b90a58-sda', 'timestamp': '2025-10-02T12:35:17.202776', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'instance-00000099', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '4054ee36-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.842868112, 'message_signature': '4bea0d4367eb4b10c012edc46710197d5bf04581b607b126126bf580e4a2776d'}]}, 'timestamp': '2025-10-02 12:35:17.203196', '_unique_id': '5847f7ba1843465683c0f9d95610f3ab'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.203 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets in the context of pollsters
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 DEBUG ceilometer.compute.pollsters [-] 29e46585-0d8d-450d-b3de-d6d103b90a58/network.incoming.packets volume: 106 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '97a2eee7-8e1a-4e6c-b933-e1fa6a821885', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 106, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'instance-00000099-29e46585-0d8d-450d-b3de-d6d103b90a58-tapa36441d3-25', 'timestamp': '2025-10-02T12:35:17.204246', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1074026563', 'name': 'tapa36441d3-25', 'instance_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:6e:e8:1d', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapa36441d3-25'}, 'message_id': '40551ff0-9f8c-11f0-af18-fa163efc5e78', 'monotonic_time': 6514.80130728, 'message_signature': '8aa7ee542a83f2ab40e539198b2324196bc64c347101c559ff63a8f31298b74f'}]}, 'timestamp': '2025-10-02 12:35:17.204483', '_unique_id': '34d60a84798d45259c8cdaaf9f4000a7'}: kombu.exceptions.OperationalError: [Errno 111] Connection 
refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:35:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:35:17.204 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:35:17 compute-0 nova_compute[192079]: 2025-10-02 12:35:17.482 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:19 compute-0 nova_compute[192079]: 2025-10-02 12:35:19.548 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.083 2 DEBUG oslo_concurrency.lockutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "68b1a0ef-5b1f-4d43-b759-e385618171ff" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.084 2 DEBUG oslo_concurrency.lockutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "68b1a0ef-5b1f-4d43-b759-e385618171ff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.117 2 DEBUG nova.compute.manager [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:35:22 compute-0 podman[246827]: 2025-10-02 12:35:22.160206976 +0000 UTC m=+0.077541702 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible, org.label-schema.build-date=20251001, config_id=edpm, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, 
org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0)
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.253 2 DEBUG oslo_concurrency.lockutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.253 2 DEBUG oslo_concurrency.lockutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.261 2 DEBUG nova.virt.hardware [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.262 2 INFO nova.compute.claims [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:35:22 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:22.290 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '39'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.413 2 DEBUG nova.compute.provider_tree [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.429 2 DEBUG nova.scheduler.client.report [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.447 2 DEBUG oslo_concurrency.lockutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.194s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.448 2 DEBUG nova.compute.manager [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.486 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.503 2 DEBUG nova.compute.manager [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.504 2 DEBUG nova.network.neutron [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.524 2 INFO nova.virt.libvirt.driver [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.548 2 DEBUG nova.compute.manager [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.678 2 DEBUG nova.compute.manager [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.680 2 DEBUG nova.virt.libvirt.driver [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.680 2 INFO nova.virt.libvirt.driver [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Creating image(s)
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.681 2 DEBUG oslo_concurrency.lockutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "/var/lib/nova/instances/68b1a0ef-5b1f-4d43-b759-e385618171ff/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.681 2 DEBUG oslo_concurrency.lockutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "/var/lib/nova/instances/68b1a0ef-5b1f-4d43-b759-e385618171ff/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.682 2 DEBUG oslo_concurrency.lockutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "/var/lib/nova/instances/68b1a0ef-5b1f-4d43-b759-e385618171ff/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.700 2 DEBUG nova.policy [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.703 2 DEBUG oslo_concurrency.processutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.788 2 DEBUG oslo_concurrency.processutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.085s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.790 2 DEBUG oslo_concurrency.lockutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.790 2 DEBUG oslo_concurrency.lockutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.806 2 DEBUG oslo_concurrency.processutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.885 2 DEBUG oslo_concurrency.processutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.079s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.886 2 DEBUG oslo_concurrency.processutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/68b1a0ef-5b1f-4d43-b759-e385618171ff/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.919 2 DEBUG oslo_concurrency.processutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/68b1a0ef-5b1f-4d43-b759-e385618171ff/disk 1073741824" returned: 0 in 0.033s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.921 2 DEBUG oslo_concurrency.lockutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.130s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.921 2 DEBUG oslo_concurrency.processutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.976 2 DEBUG oslo_concurrency.processutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.977 2 DEBUG nova.virt.disk.api [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Checking if we can resize image /var/lib/nova/instances/68b1a0ef-5b1f-4d43-b759-e385618171ff/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:35:22 compute-0 nova_compute[192079]: 2025-10-02 12:35:22.977 2 DEBUG oslo_concurrency.processutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/68b1a0ef-5b1f-4d43-b759-e385618171ff/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:35:23 compute-0 nova_compute[192079]: 2025-10-02 12:35:23.066 2 DEBUG oslo_concurrency.processutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/68b1a0ef-5b1f-4d43-b759-e385618171ff/disk --force-share --output=json" returned: 0 in 0.089s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:35:23 compute-0 nova_compute[192079]: 2025-10-02 12:35:23.067 2 DEBUG nova.virt.disk.api [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Cannot resize image /var/lib/nova/instances/68b1a0ef-5b1f-4d43-b759-e385618171ff/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:35:23 compute-0 nova_compute[192079]: 2025-10-02 12:35:23.068 2 DEBUG nova.objects.instance [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lazy-loading 'migration_context' on Instance uuid 68b1a0ef-5b1f-4d43-b759-e385618171ff obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:35:23 compute-0 nova_compute[192079]: 2025-10-02 12:35:23.081 2 DEBUG nova.virt.libvirt.driver [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:35:23 compute-0 nova_compute[192079]: 2025-10-02 12:35:23.082 2 DEBUG nova.virt.libvirt.driver [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Ensure instance console log exists: /var/lib/nova/instances/68b1a0ef-5b1f-4d43-b759-e385618171ff/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:35:23 compute-0 nova_compute[192079]: 2025-10-02 12:35:23.082 2 DEBUG oslo_concurrency.lockutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:35:23 compute-0 nova_compute[192079]: 2025-10-02 12:35:23.082 2 DEBUG oslo_concurrency.lockutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:35:23 compute-0 nova_compute[192079]: 2025-10-02 12:35:23.083 2 DEBUG oslo_concurrency.lockutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:35:23 compute-0 nova_compute[192079]: 2025-10-02 12:35:23.511 2 DEBUG nova.network.neutron [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Successfully created port: 5e4c5bee-3b4f-4d75-bcce-f96469aea319 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:35:24 compute-0 nova_compute[192079]: 2025-10-02 12:35:24.285 2 DEBUG nova.network.neutron [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Successfully updated port: 5e4c5bee-3b4f-4d75-bcce-f96469aea319 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:35:24 compute-0 nova_compute[192079]: 2025-10-02 12:35:24.307 2 DEBUG oslo_concurrency.lockutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "refresh_cache-68b1a0ef-5b1f-4d43-b759-e385618171ff" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:35:24 compute-0 nova_compute[192079]: 2025-10-02 12:35:24.307 2 DEBUG oslo_concurrency.lockutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquired lock "refresh_cache-68b1a0ef-5b1f-4d43-b759-e385618171ff" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:35:24 compute-0 nova_compute[192079]: 2025-10-02 12:35:24.307 2 DEBUG nova.network.neutron [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:35:24 compute-0 nova_compute[192079]: 2025-10-02 12:35:24.383 2 DEBUG nova.compute.manager [req-d94a602d-cbc7-494c-9176-924261fb9d32 req-aadb2359-5fea-4410-9eda-dc737c5f4080 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Received event network-changed-5e4c5bee-3b4f-4d75-bcce-f96469aea319 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:35:24 compute-0 nova_compute[192079]: 2025-10-02 12:35:24.383 2 DEBUG nova.compute.manager [req-d94a602d-cbc7-494c-9176-924261fb9d32 req-aadb2359-5fea-4410-9eda-dc737c5f4080 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Refreshing instance network info cache due to event network-changed-5e4c5bee-3b4f-4d75-bcce-f96469aea319. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:35:24 compute-0 nova_compute[192079]: 2025-10-02 12:35:24.383 2 DEBUG oslo_concurrency.lockutils [req-d94a602d-cbc7-494c-9176-924261fb9d32 req-aadb2359-5fea-4410-9eda-dc737c5f4080 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-68b1a0ef-5b1f-4d43-b759-e385618171ff" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:35:24 compute-0 nova_compute[192079]: 2025-10-02 12:35:24.491 2 DEBUG nova.network.neutron [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:35:24 compute-0 nova_compute[192079]: 2025-10-02 12:35:24.550 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.327 2 DEBUG nova.network.neutron [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Updating instance_info_cache with network_info: [{"id": "5e4c5bee-3b4f-4d75-bcce-f96469aea319", "address": "fa:16:3e:3c:96:05", "network": {"id": "6512ce78-9132-4dd4-88c2-d82efca10339", "bridge": "br-int", "label": "tempest-network-smoke--937181", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5e4c5bee-3b", "ovs_interfaceid": "5e4c5bee-3b4f-4d75-bcce-f96469aea319", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.350 2 DEBUG oslo_concurrency.lockutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Releasing lock "refresh_cache-68b1a0ef-5b1f-4d43-b759-e385618171ff" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.351 2 DEBUG nova.compute.manager [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Instance network_info: |[{"id": "5e4c5bee-3b4f-4d75-bcce-f96469aea319", "address": "fa:16:3e:3c:96:05", "network": {"id": "6512ce78-9132-4dd4-88c2-d82efca10339", "bridge": "br-int", "label": "tempest-network-smoke--937181", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5e4c5bee-3b", "ovs_interfaceid": "5e4c5bee-3b4f-4d75-bcce-f96469aea319", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.351 2 DEBUG oslo_concurrency.lockutils [req-d94a602d-cbc7-494c-9176-924261fb9d32 req-aadb2359-5fea-4410-9eda-dc737c5f4080 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-68b1a0ef-5b1f-4d43-b759-e385618171ff" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.351 2 DEBUG nova.network.neutron [req-d94a602d-cbc7-494c-9176-924261fb9d32 req-aadb2359-5fea-4410-9eda-dc737c5f4080 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Refreshing network info cache for port 5e4c5bee-3b4f-4d75-bcce-f96469aea319 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.354 2 DEBUG nova.virt.libvirt.driver [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Start _get_guest_xml network_info=[{"id": "5e4c5bee-3b4f-4d75-bcce-f96469aea319", "address": "fa:16:3e:3c:96:05", "network": {"id": "6512ce78-9132-4dd4-88c2-d82efca10339", "bridge": "br-int", "label": "tempest-network-smoke--937181", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5e4c5bee-3b", "ovs_interfaceid": "5e4c5bee-3b4f-4d75-bcce-f96469aea319", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.358 2 WARNING nova.virt.libvirt.driver [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.362 2 DEBUG nova.virt.libvirt.host [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.363 2 DEBUG nova.virt.libvirt.host [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.365 2 DEBUG nova.virt.libvirt.host [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.366 2 DEBUG nova.virt.libvirt.host [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.367 2 DEBUG nova.virt.libvirt.driver [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.367 2 DEBUG nova.virt.hardware [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.368 2 DEBUG nova.virt.hardware [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.368 2 DEBUG nova.virt.hardware [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.369 2 DEBUG nova.virt.hardware [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.369 2 DEBUG nova.virt.hardware [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.370 2 DEBUG nova.virt.hardware [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.370 2 DEBUG nova.virt.hardware [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.371 2 DEBUG nova.virt.hardware [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.371 2 DEBUG nova.virt.hardware [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.371 2 DEBUG nova.virt.hardware [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.372 2 DEBUG nova.virt.hardware [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.376 2 DEBUG nova.virt.libvirt.vif [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:35:21Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestNetworkBasicOps-server-1363839596',display_name='tempest-TestNetworkBasicOps-server-1363839596',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkbasicops-server-1363839596',id=157,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBNpTTykc7bRxNeMQb/AwyVeQct7kkLrzBlJQM6PMV4uOiLZ/1v6I03B2g34rnSFM0pG31Lc4x0PzTw6rmd6zZQ4f/vsliu/8ODOLDzE9wtG+5ceWwS5zK+amidA8m/9tCA==',key_name='tempest-TestNetworkBasicOps-1262607354',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='6e2a4899168a47618e377cb3ac85ddd2',ramdisk_id='',reservation_id='r-xu11k8se',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestNetworkBasicOps-1323893370',owner_user_name='tempest-TestNetworkBasicOps-1323893370-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:35:22Z,user_data=None,user_id='a1898fdf056c4a249c33590f26d4d845',uuid=68b1a0ef-5b1f-4d43-b759-e385618171ff,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "5e4c5bee-3b4f-4d75-bcce-f96469aea319", "address": "fa:16:3e:3c:96:05", "network": {"id": "6512ce78-9132-4dd4-88c2-d82efca10339", "bridge": "br-int", "label": "tempest-network-smoke--937181", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": 
false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5e4c5bee-3b", "ovs_interfaceid": "5e4c5bee-3b4f-4d75-bcce-f96469aea319", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.376 2 DEBUG nova.network.os_vif_util [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converting VIF {"id": "5e4c5bee-3b4f-4d75-bcce-f96469aea319", "address": "fa:16:3e:3c:96:05", "network": {"id": "6512ce78-9132-4dd4-88c2-d82efca10339", "bridge": "br-int", "label": "tempest-network-smoke--937181", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5e4c5bee-3b", "ovs_interfaceid": "5e4c5bee-3b4f-4d75-bcce-f96469aea319", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.377 2 DEBUG nova.network.os_vif_util [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:3c:96:05,bridge_name='br-int',has_traffic_filtering=True,id=5e4c5bee-3b4f-4d75-bcce-f96469aea319,network=Network(6512ce78-9132-4dd4-88c2-d82efca10339),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5e4c5bee-3b') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.378 2 DEBUG nova.objects.instance [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lazy-loading 'pci_devices' on Instance uuid 68b1a0ef-5b1f-4d43-b759-e385618171ff obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.392 2 DEBUG nova.virt.libvirt.driver [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:35:25 compute-0 nova_compute[192079]:   <uuid>68b1a0ef-5b1f-4d43-b759-e385618171ff</uuid>
Oct 02 12:35:25 compute-0 nova_compute[192079]:   <name>instance-0000009d</name>
Oct 02 12:35:25 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:35:25 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:35:25 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:35:25 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:       <nova:name>tempest-TestNetworkBasicOps-server-1363839596</nova:name>
Oct 02 12:35:25 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:35:25</nova:creationTime>
Oct 02 12:35:25 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:35:25 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:35:25 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:35:25 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:35:25 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:35:25 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:35:25 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:35:25 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:35:25 compute-0 nova_compute[192079]:         <nova:user uuid="a1898fdf056c4a249c33590f26d4d845">tempest-TestNetworkBasicOps-1323893370-project-member</nova:user>
Oct 02 12:35:25 compute-0 nova_compute[192079]:         <nova:project uuid="6e2a4899168a47618e377cb3ac85ddd2">tempest-TestNetworkBasicOps-1323893370</nova:project>
Oct 02 12:35:25 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:35:25 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:35:25 compute-0 nova_compute[192079]:         <nova:port uuid="5e4c5bee-3b4f-4d75-bcce-f96469aea319">
Oct 02 12:35:25 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.5" ipVersion="4"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:35:25 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:35:25 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:35:25 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <system>
Oct 02 12:35:25 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:35:25 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:35:25 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:35:25 compute-0 nova_compute[192079]:       <entry name="serial">68b1a0ef-5b1f-4d43-b759-e385618171ff</entry>
Oct 02 12:35:25 compute-0 nova_compute[192079]:       <entry name="uuid">68b1a0ef-5b1f-4d43-b759-e385618171ff</entry>
Oct 02 12:35:25 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     </system>
Oct 02 12:35:25 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:35:25 compute-0 nova_compute[192079]:   <os>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:   </os>
Oct 02 12:35:25 compute-0 nova_compute[192079]:   <features>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:   </features>
Oct 02 12:35:25 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:35:25 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:35:25 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:35:25 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/68b1a0ef-5b1f-4d43-b759-e385618171ff/disk"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:35:25 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/68b1a0ef-5b1f-4d43-b759-e385618171ff/disk.config"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:35:25 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:3c:96:05"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:       <target dev="tap5e4c5bee-3b"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:35:25 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/68b1a0ef-5b1f-4d43-b759-e385618171ff/console.log" append="off"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <video>
Oct 02 12:35:25 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     </video>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:35:25 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:35:25 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:35:25 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:35:25 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:35:25 compute-0 nova_compute[192079]: </domain>
Oct 02 12:35:25 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.393 2 DEBUG nova.compute.manager [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Preparing to wait for external event network-vif-plugged-5e4c5bee-3b4f-4d75-bcce-f96469aea319 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.393 2 DEBUG oslo_concurrency.lockutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "68b1a0ef-5b1f-4d43-b759-e385618171ff-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.394 2 DEBUG oslo_concurrency.lockutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "68b1a0ef-5b1f-4d43-b759-e385618171ff-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.394 2 DEBUG oslo_concurrency.lockutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "68b1a0ef-5b1f-4d43-b759-e385618171ff-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.394 2 DEBUG nova.virt.libvirt.vif [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:35:21Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestNetworkBasicOps-server-1363839596',display_name='tempest-TestNetworkBasicOps-server-1363839596',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkbasicops-server-1363839596',id=157,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBNpTTykc7bRxNeMQb/AwyVeQct7kkLrzBlJQM6PMV4uOiLZ/1v6I03B2g34rnSFM0pG31Lc4x0PzTw6rmd6zZQ4f/vsliu/8ODOLDzE9wtG+5ceWwS5zK+amidA8m/9tCA==',key_name='tempest-TestNetworkBasicOps-1262607354',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='6e2a4899168a47618e377cb3ac85ddd2',ramdisk_id='',reservation_id='r-xu11k8se',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestNetworkBasicOps-1323893370',owner_user_name='tempest-TestNetworkBasicOps-1323893370-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:35:22Z,user_data=None,user_id='a1898fdf056c4a249c33590f26d4d845',uuid=68b1a0ef-5b1f-4d43-b759-e385618171ff,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "5e4c5bee-3b4f-4d75-bcce-f96469aea319", "address": "fa:16:3e:3c:96:05", "network": {"id": "6512ce78-9132-4dd4-88c2-d82efca10339", "bridge": "br-int", "label": "tempest-network-smoke--937181", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": 
{"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5e4c5bee-3b", "ovs_interfaceid": "5e4c5bee-3b4f-4d75-bcce-f96469aea319", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.395 2 DEBUG nova.network.os_vif_util [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converting VIF {"id": "5e4c5bee-3b4f-4d75-bcce-f96469aea319", "address": "fa:16:3e:3c:96:05", "network": {"id": "6512ce78-9132-4dd4-88c2-d82efca10339", "bridge": "br-int", "label": "tempest-network-smoke--937181", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5e4c5bee-3b", "ovs_interfaceid": "5e4c5bee-3b4f-4d75-bcce-f96469aea319", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.395 2 DEBUG nova.network.os_vif_util [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:3c:96:05,bridge_name='br-int',has_traffic_filtering=True,id=5e4c5bee-3b4f-4d75-bcce-f96469aea319,network=Network(6512ce78-9132-4dd4-88c2-d82efca10339),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5e4c5bee-3b') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.396 2 DEBUG os_vif [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:3c:96:05,bridge_name='br-int',has_traffic_filtering=True,id=5e4c5bee-3b4f-4d75-bcce-f96469aea319,network=Network(6512ce78-9132-4dd4-88c2-d82efca10339),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5e4c5bee-3b') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.396 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.397 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.397 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.400 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.400 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap5e4c5bee-3b, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.401 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap5e4c5bee-3b, col_values=(('external_ids', {'iface-id': '5e4c5bee-3b4f-4d75-bcce-f96469aea319', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:3c:96:05', 'vm-uuid': '68b1a0ef-5b1f-4d43-b759-e385618171ff'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:35:25 compute-0 NetworkManager[51160]: <info>  [1759408525.4036] manager: (tap5e4c5bee-3b): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/296)
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.406 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.413 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.415 2 INFO os_vif [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:3c:96:05,bridge_name='br-int',has_traffic_filtering=True,id=5e4c5bee-3b4f-4d75-bcce-f96469aea319,network=Network(6512ce78-9132-4dd4-88c2-d82efca10339),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5e4c5bee-3b')
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.487 2 DEBUG nova.virt.libvirt.driver [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.488 2 DEBUG nova.virt.libvirt.driver [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.488 2 DEBUG nova.virt.libvirt.driver [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] No VIF found with MAC fa:16:3e:3c:96:05, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.488 2 INFO nova.virt.libvirt.driver [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Using config drive
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.962 2 INFO nova.virt.libvirt.driver [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Creating config drive at /var/lib/nova/instances/68b1a0ef-5b1f-4d43-b759-e385618171ff/disk.config
Oct 02 12:35:25 compute-0 nova_compute[192079]: 2025-10-02 12:35:25.967 2 DEBUG oslo_concurrency.processutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/68b1a0ef-5b1f-4d43-b759-e385618171ff/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpz4nkex9s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:35:26 compute-0 nova_compute[192079]: 2025-10-02 12:35:26.092 2 DEBUG oslo_concurrency.processutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/68b1a0ef-5b1f-4d43-b759-e385618171ff/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpz4nkex9s" returned: 0 in 0.125s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:35:26 compute-0 kernel: tap5e4c5bee-3b: entered promiscuous mode
Oct 02 12:35:26 compute-0 NetworkManager[51160]: <info>  [1759408526.1593] manager: (tap5e4c5bee-3b): new Tun device (/org/freedesktop/NetworkManager/Devices/297)
Oct 02 12:35:26 compute-0 nova_compute[192079]: 2025-10-02 12:35:26.160 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:26 compute-0 nova_compute[192079]: 2025-10-02 12:35:26.163 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:26 compute-0 nova_compute[192079]: 2025-10-02 12:35:26.168 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:26 compute-0 ovn_controller[94336]: 2025-10-02T12:35:26Z|00594|binding|INFO|Claiming lport 5e4c5bee-3b4f-4d75-bcce-f96469aea319 for this chassis.
Oct 02 12:35:26 compute-0 ovn_controller[94336]: 2025-10-02T12:35:26Z|00595|binding|INFO|5e4c5bee-3b4f-4d75-bcce-f96469aea319: Claiming fa:16:3e:3c:96:05 10.100.0.5
Oct 02 12:35:26 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:26.184 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:3c:96:05 10.100.0.5'], port_security=['fa:16:3e:3c:96:05 10.100.0.5'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.5/28', 'neutron:device_id': '68b1a0ef-5b1f-4d43-b759-e385618171ff', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-6512ce78-9132-4dd4-88c2-d82efca10339', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'neutron:revision_number': '2', 'neutron:security_group_ids': 'e33160c7-6eb4-4a93-93e3-98b50bc12b29', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=360bc7aa-2f85-45e8-93bc-76083b104e89, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=5e4c5bee-3b4f-4d75-bcce-f96469aea319) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:35:26 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:26.185 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 5e4c5bee-3b4f-4d75-bcce-f96469aea319 in datapath 6512ce78-9132-4dd4-88c2-d82efca10339 bound to our chassis
Oct 02 12:35:26 compute-0 ovn_controller[94336]: 2025-10-02T12:35:26Z|00596|binding|INFO|Setting lport 5e4c5bee-3b4f-4d75-bcce-f96469aea319 ovn-installed in OVS
Oct 02 12:35:26 compute-0 ovn_controller[94336]: 2025-10-02T12:35:26Z|00597|binding|INFO|Setting lport 5e4c5bee-3b4f-4d75-bcce-f96469aea319 up in Southbound
Oct 02 12:35:26 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:26.188 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 6512ce78-9132-4dd4-88c2-d82efca10339
Oct 02 12:35:26 compute-0 nova_compute[192079]: 2025-10-02 12:35:26.188 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:26 compute-0 systemd-udevd[246882]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:35:26 compute-0 systemd-machined[152150]: New machine qemu-75-instance-0000009d.
Oct 02 12:35:26 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:26.205 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ba679296-0946-470a-8274-00ebacd1ded3]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:35:26 compute-0 NetworkManager[51160]: <info>  [1759408526.2086] device (tap5e4c5bee-3b): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:35:26 compute-0 NetworkManager[51160]: <info>  [1759408526.2092] device (tap5e4c5bee-3b): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:35:26 compute-0 systemd[1]: Started Virtual Machine qemu-75-instance-0000009d.
Oct 02 12:35:26 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:26.241 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[9fd927c9-51a9-443c-8b05-445ca25fbd2f]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:35:26 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:26.245 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[42af62fa-3dcc-4936-a654-aeb013688aaa]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:35:26 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:26.276 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[2d13c0fa-4109-42c6-88e7-057efb776acd]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:35:26 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:26.293 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7e02db54-11fa-4401-ba60-612d11338eef]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap6512ce78-91'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:3c:04:0d'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 10, 'tx_packets': 6, 'rx_bytes': 916, 'tx_bytes': 440, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 10, 'tx_packets': 6, 'rx_bytes': 916, 'tx_bytes': 440, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 189], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 648824, 'reachable_time': 37638, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 8, 'inoctets': 720, 'indelivers': 1, 'outforwdatagrams': 0, 'outpkts': 4, 'outoctets': 300, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 8, 'outmcastpkts': 4, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 720, 'outmcastoctets': 300, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 8, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 1, 'inerrors': 0, 'outmsgs': 4, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 246896, 'error': None, 'target': 'ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:35:26 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:26.308 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c2f414e1-1e14-4016-86a9-762b6df33db0]: (4, ({'family': 2, 'prefixlen': 28, 'flags': 128, 'scope': 0, 'index': 2, 'attrs': [['IFA_ADDRESS', '10.100.0.2'], ['IFA_LOCAL', '10.100.0.2'], ['IFA_BROADCAST', '10.100.0.15'], ['IFA_LABEL', 'tap6512ce78-91'], ['IFA_FLAGS', 128], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 648839, 'tstamp': 648839}]], 'header': {'length': 96, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 246898, 'error': None, 'target': 'ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'}, {'family': 2, 'prefixlen': 32, 'flags': 128, 'scope': 0, 'index': 2, 'attrs': [['IFA_ADDRESS', '169.254.169.254'], ['IFA_LOCAL', '169.254.169.254'], ['IFA_BROADCAST', '169.254.169.254'], ['IFA_LABEL', 'tap6512ce78-91'], ['IFA_FLAGS', 128], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 648843, 'tstamp': 648843}]], 'header': {'length': 96, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 246898, 'error': None, 'target': 'ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'})) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:35:26 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:26.310 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap6512ce78-90, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:35:26 compute-0 nova_compute[192079]: 2025-10-02 12:35:26.312 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:26 compute-0 nova_compute[192079]: 2025-10-02 12:35:26.313 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:26 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:26.313 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap6512ce78-90, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:35:26 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:26.314 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:35:26 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:26.314 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap6512ce78-90, col_values=(('external_ids', {'iface-id': '23796c0f-f19b-4655-83fb-cbec481641fa'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:35:26 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:26.315 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:35:26 compute-0 nova_compute[192079]: 2025-10-02 12:35:26.838 2 DEBUG nova.network.neutron [req-d94a602d-cbc7-494c-9176-924261fb9d32 req-aadb2359-5fea-4410-9eda-dc737c5f4080 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Updated VIF entry in instance network info cache for port 5e4c5bee-3b4f-4d75-bcce-f96469aea319. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:35:26 compute-0 nova_compute[192079]: 2025-10-02 12:35:26.839 2 DEBUG nova.network.neutron [req-d94a602d-cbc7-494c-9176-924261fb9d32 req-aadb2359-5fea-4410-9eda-dc737c5f4080 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Updating instance_info_cache with network_info: [{"id": "5e4c5bee-3b4f-4d75-bcce-f96469aea319", "address": "fa:16:3e:3c:96:05", "network": {"id": "6512ce78-9132-4dd4-88c2-d82efca10339", "bridge": "br-int", "label": "tempest-network-smoke--937181", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5e4c5bee-3b", "ovs_interfaceid": "5e4c5bee-3b4f-4d75-bcce-f96469aea319", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:35:26 compute-0 nova_compute[192079]: 2025-10-02 12:35:26.858 2 DEBUG oslo_concurrency.lockutils [req-d94a602d-cbc7-494c-9176-924261fb9d32 req-aadb2359-5fea-4410-9eda-dc737c5f4080 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-68b1a0ef-5b1f-4d43-b759-e385618171ff" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.086 2 DEBUG nova.compute.manager [req-50299cac-7576-4bd1-a8fb-f8241eceebe2 req-fbbca7bd-9931-4e49-bca2-78abf585aee4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Received event network-vif-plugged-5e4c5bee-3b4f-4d75-bcce-f96469aea319 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.086 2 DEBUG oslo_concurrency.lockutils [req-50299cac-7576-4bd1-a8fb-f8241eceebe2 req-fbbca7bd-9931-4e49-bca2-78abf585aee4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "68b1a0ef-5b1f-4d43-b759-e385618171ff-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.089 2 DEBUG oslo_concurrency.lockutils [req-50299cac-7576-4bd1-a8fb-f8241eceebe2 req-fbbca7bd-9931-4e49-bca2-78abf585aee4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "68b1a0ef-5b1f-4d43-b759-e385618171ff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.003s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.090 2 DEBUG oslo_concurrency.lockutils [req-50299cac-7576-4bd1-a8fb-f8241eceebe2 req-fbbca7bd-9931-4e49-bca2-78abf585aee4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "68b1a0ef-5b1f-4d43-b759-e385618171ff-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.090 2 DEBUG nova.compute.manager [req-50299cac-7576-4bd1-a8fb-f8241eceebe2 req-fbbca7bd-9931-4e49-bca2-78abf585aee4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Processing event network-vif-plugged-5e4c5bee-3b4f-4d75-bcce-f96469aea319 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.090 2 DEBUG nova.compute.manager [req-50299cac-7576-4bd1-a8fb-f8241eceebe2 req-fbbca7bd-9931-4e49-bca2-78abf585aee4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Received event network-vif-plugged-5e4c5bee-3b4f-4d75-bcce-f96469aea319 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.091 2 DEBUG oslo_concurrency.lockutils [req-50299cac-7576-4bd1-a8fb-f8241eceebe2 req-fbbca7bd-9931-4e49-bca2-78abf585aee4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "68b1a0ef-5b1f-4d43-b759-e385618171ff-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.091 2 DEBUG oslo_concurrency.lockutils [req-50299cac-7576-4bd1-a8fb-f8241eceebe2 req-fbbca7bd-9931-4e49-bca2-78abf585aee4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "68b1a0ef-5b1f-4d43-b759-e385618171ff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.091 2 DEBUG oslo_concurrency.lockutils [req-50299cac-7576-4bd1-a8fb-f8241eceebe2 req-fbbca7bd-9931-4e49-bca2-78abf585aee4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "68b1a0ef-5b1f-4d43-b759-e385618171ff-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.091 2 DEBUG nova.compute.manager [req-50299cac-7576-4bd1-a8fb-f8241eceebe2 req-fbbca7bd-9931-4e49-bca2-78abf585aee4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] No waiting events found dispatching network-vif-plugged-5e4c5bee-3b4f-4d75-bcce-f96469aea319 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.092 2 WARNING nova.compute.manager [req-50299cac-7576-4bd1-a8fb-f8241eceebe2 req-fbbca7bd-9931-4e49-bca2-78abf585aee4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Received unexpected event network-vif-plugged-5e4c5bee-3b4f-4d75-bcce-f96469aea319 for instance with vm_state building and task_state spawning.
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.096 2 DEBUG nova.compute.manager [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.098 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408527.0964706, 68b1a0ef-5b1f-4d43-b759-e385618171ff => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.098 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] VM Started (Lifecycle Event)
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.102 2 DEBUG nova.virt.libvirt.driver [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.107 2 INFO nova.virt.libvirt.driver [-] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Instance spawned successfully.
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.108 2 DEBUG nova.virt.libvirt.driver [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.136 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.143 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.149 2 DEBUG nova.virt.libvirt.driver [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.150 2 DEBUG nova.virt.libvirt.driver [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.150 2 DEBUG nova.virt.libvirt.driver [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.151 2 DEBUG nova.virt.libvirt.driver [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.152 2 DEBUG nova.virt.libvirt.driver [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.152 2 DEBUG nova.virt.libvirt.driver [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.192 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.193 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408527.097422, 68b1a0ef-5b1f-4d43-b759-e385618171ff => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.193 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] VM Paused (Lifecycle Event)
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.216 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.221 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408527.100281, 68b1a0ef-5b1f-4d43-b759-e385618171ff => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.221 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] VM Resumed (Lifecycle Event)
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.238 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.244 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.251 2 INFO nova.compute.manager [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Took 4.57 seconds to spawn the instance on the hypervisor.
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.252 2 DEBUG nova.compute.manager [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.265 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.355 2 INFO nova.compute.manager [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Took 5.17 seconds to build instance.
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.370 2 DEBUG oslo_concurrency.lockutils [None req-0366f4c0-d09a-4137-8e30-406b14843f63 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "68b1a0ef-5b1f-4d43-b759-e385618171ff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 5.286s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:35:27 compute-0 nova_compute[192079]: 2025-10-02 12:35:27.486 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:29 compute-0 ovn_controller[94336]: 2025-10-02T12:35:29Z|00598|binding|INFO|Releasing lport 23796c0f-f19b-4655-83fb-cbec481641fa from this chassis (sb_readonly=0)
Oct 02 12:35:29 compute-0 nova_compute[192079]: 2025-10-02 12:35:29.026 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:29 compute-0 podman[246906]: 2025-10-02 12:35:29.202818372 +0000 UTC m=+0.055713557 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, com.redhat.component=ubi9-minimal-container, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., maintainer=Red Hat, Inc., io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, io.openshift.tags=minimal rhel9, managed_by=edpm_ansible, name=ubi9-minimal, release=1755695350, url=https://catalog.redhat.com/en/search?searchType=containers, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vendor=Red Hat, Inc., config_id=edpm, container_name=openstack_network_exporter, io.buildah.version=1.33.7, build-date=2025-08-20T13:12:41, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, vcs-type=git, distribution-scope=public, version=9.6, architecture=x86_64, io.openshift.expose-services=)
Oct 02 12:35:29 compute-0 podman[246907]: 2025-10-02 12:35:29.23066133 +0000 UTC m=+0.083512104 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, tcib_managed=true, managed_by=edpm_ansible, config_id=multipathd, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, container_name=multipathd, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:35:29 compute-0 ovn_controller[94336]: 2025-10-02T12:35:29Z|00599|binding|INFO|Releasing lport 23796c0f-f19b-4655-83fb-cbec481641fa from this chassis (sb_readonly=0)
Oct 02 12:35:29 compute-0 nova_compute[192079]: 2025-10-02 12:35:29.245 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:30 compute-0 nova_compute[192079]: 2025-10-02 12:35:30.403 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:31 compute-0 NetworkManager[51160]: <info>  [1759408531.2990] manager: (patch-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d-to-br-int): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/298)
Oct 02 12:35:31 compute-0 NetworkManager[51160]: <info>  [1759408531.3001] manager: (patch-br-int-to-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/299)
Oct 02 12:35:31 compute-0 nova_compute[192079]: 2025-10-02 12:35:31.307 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:31 compute-0 nova_compute[192079]: 2025-10-02 12:35:31.448 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:31 compute-0 ovn_controller[94336]: 2025-10-02T12:35:31Z|00600|binding|INFO|Releasing lport 23796c0f-f19b-4655-83fb-cbec481641fa from this chassis (sb_readonly=0)
Oct 02 12:35:31 compute-0 nova_compute[192079]: 2025-10-02 12:35:31.479 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:31 compute-0 nova_compute[192079]: 2025-10-02 12:35:31.619 2 DEBUG nova.compute.manager [req-d6498202-c6a7-4b1a-82f2-2f9a6eb40adf req-976f4a83-f8b4-4356-b544-c0765cd325ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Received event network-changed-5e4c5bee-3b4f-4d75-bcce-f96469aea319 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:35:31 compute-0 nova_compute[192079]: 2025-10-02 12:35:31.619 2 DEBUG nova.compute.manager [req-d6498202-c6a7-4b1a-82f2-2f9a6eb40adf req-976f4a83-f8b4-4356-b544-c0765cd325ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Refreshing instance network info cache due to event network-changed-5e4c5bee-3b4f-4d75-bcce-f96469aea319. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:35:31 compute-0 nova_compute[192079]: 2025-10-02 12:35:31.619 2 DEBUG oslo_concurrency.lockutils [req-d6498202-c6a7-4b1a-82f2-2f9a6eb40adf req-976f4a83-f8b4-4356-b544-c0765cd325ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-68b1a0ef-5b1f-4d43-b759-e385618171ff" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:35:31 compute-0 nova_compute[192079]: 2025-10-02 12:35:31.620 2 DEBUG oslo_concurrency.lockutils [req-d6498202-c6a7-4b1a-82f2-2f9a6eb40adf req-976f4a83-f8b4-4356-b544-c0765cd325ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-68b1a0ef-5b1f-4d43-b759-e385618171ff" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:35:31 compute-0 nova_compute[192079]: 2025-10-02 12:35:31.620 2 DEBUG nova.network.neutron [req-d6498202-c6a7-4b1a-82f2-2f9a6eb40adf req-976f4a83-f8b4-4356-b544-c0765cd325ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Refreshing network info cache for port 5e4c5bee-3b4f-4d75-bcce-f96469aea319 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:35:32 compute-0 nova_compute[192079]: 2025-10-02 12:35:32.528 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:34 compute-0 podman[246948]: 2025-10-02 12:35:34.155052741 +0000 UTC m=+0.059018068 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, org.label-schema.license=GPLv2, config_id=iscsid, container_name=iscsid)
Oct 02 12:35:34 compute-0 podman[246947]: 2025-10-02 12:35:34.180788801 +0000 UTC m=+0.081389066 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:35:34 compute-0 nova_compute[192079]: 2025-10-02 12:35:34.183 2 DEBUG nova.network.neutron [req-d6498202-c6a7-4b1a-82f2-2f9a6eb40adf req-976f4a83-f8b4-4356-b544-c0765cd325ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Updated VIF entry in instance network info cache for port 5e4c5bee-3b4f-4d75-bcce-f96469aea319. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:35:34 compute-0 nova_compute[192079]: 2025-10-02 12:35:34.184 2 DEBUG nova.network.neutron [req-d6498202-c6a7-4b1a-82f2-2f9a6eb40adf req-976f4a83-f8b4-4356-b544-c0765cd325ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Updating instance_info_cache with network_info: [{"id": "5e4c5bee-3b4f-4d75-bcce-f96469aea319", "address": "fa:16:3e:3c:96:05", "network": {"id": "6512ce78-9132-4dd4-88c2-d82efca10339", "bridge": "br-int", "label": "tempest-network-smoke--937181", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.233", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5e4c5bee-3b", "ovs_interfaceid": "5e4c5bee-3b4f-4d75-bcce-f96469aea319", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:35:34 compute-0 nova_compute[192079]: 2025-10-02 12:35:34.215 2 DEBUG oslo_concurrency.lockutils [req-d6498202-c6a7-4b1a-82f2-2f9a6eb40adf req-976f4a83-f8b4-4356-b544-c0765cd325ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-68b1a0ef-5b1f-4d43-b759-e385618171ff" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:35:34 compute-0 nova_compute[192079]: 2025-10-02 12:35:34.676 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:35:35 compute-0 nova_compute[192079]: 2025-10-02 12:35:35.405 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:35 compute-0 nova_compute[192079]: 2025-10-02 12:35:35.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:35:37 compute-0 nova_compute[192079]: 2025-10-02 12:35:37.544 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:38 compute-0 ovn_controller[94336]: 2025-10-02T12:35:38Z|00065|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:3c:96:05 10.100.0.5
Oct 02 12:35:38 compute-0 ovn_controller[94336]: 2025-10-02T12:35:38Z|00066|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:3c:96:05 10.100.0.5
Oct 02 12:35:39 compute-0 nova_compute[192079]: 2025-10-02 12:35:39.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:35:40 compute-0 nova_compute[192079]: 2025-10-02 12:35:40.408 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:40 compute-0 nova_compute[192079]: 2025-10-02 12:35:40.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:35:40 compute-0 nova_compute[192079]: 2025-10-02 12:35:40.721 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:35:40 compute-0 nova_compute[192079]: 2025-10-02 12:35:40.721 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:35:40 compute-0 nova_compute[192079]: 2025-10-02 12:35:40.721 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:35:40 compute-0 nova_compute[192079]: 2025-10-02 12:35:40.721 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:35:40 compute-0 nova_compute[192079]: 2025-10-02 12:35:40.812 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/29e46585-0d8d-450d-b3de-d6d103b90a58/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:35:40 compute-0 nova_compute[192079]: 2025-10-02 12:35:40.908 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/29e46585-0d8d-450d-b3de-d6d103b90a58/disk --force-share --output=json" returned: 0 in 0.095s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:35:40 compute-0 nova_compute[192079]: 2025-10-02 12:35:40.909 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/29e46585-0d8d-450d-b3de-d6d103b90a58/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:35:40 compute-0 nova_compute[192079]: 2025-10-02 12:35:40.981 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/29e46585-0d8d-450d-b3de-d6d103b90a58/disk --force-share --output=json" returned: 0 in 0.072s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:35:40 compute-0 nova_compute[192079]: 2025-10-02 12:35:40.988 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/68b1a0ef-5b1f-4d43-b759-e385618171ff/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:35:41 compute-0 nova_compute[192079]: 2025-10-02 12:35:41.060 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/68b1a0ef-5b1f-4d43-b759-e385618171ff/disk --force-share --output=json" returned: 0 in 0.073s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:35:41 compute-0 nova_compute[192079]: 2025-10-02 12:35:41.062 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/68b1a0ef-5b1f-4d43-b759-e385618171ff/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:35:41 compute-0 nova_compute[192079]: 2025-10-02 12:35:41.127 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/68b1a0ef-5b1f-4d43-b759-e385618171ff/disk --force-share --output=json" returned: 0 in 0.065s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:35:41 compute-0 nova_compute[192079]: 2025-10-02 12:35:41.277 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:35:41 compute-0 nova_compute[192079]: 2025-10-02 12:35:41.278 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5404MB free_disk=73.28173065185547GB free_vcpus=6 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:35:41 compute-0 nova_compute[192079]: 2025-10-02 12:35:41.279 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:35:41 compute-0 nova_compute[192079]: 2025-10-02 12:35:41.279 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:35:41 compute-0 nova_compute[192079]: 2025-10-02 12:35:41.411 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance 29e46585-0d8d-450d-b3de-d6d103b90a58 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:35:41 compute-0 nova_compute[192079]: 2025-10-02 12:35:41.412 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance 68b1a0ef-5b1f-4d43-b759-e385618171ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:35:41 compute-0 nova_compute[192079]: 2025-10-02 12:35:41.412 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 2 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:35:41 compute-0 nova_compute[192079]: 2025-10-02 12:35:41.412 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=768MB phys_disk=79GB used_disk=2GB total_vcpus=8 used_vcpus=2 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:35:42 compute-0 podman[247014]: 2025-10-02 12:35:42.140146376 +0000 UTC m=+0.049775456 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', 
'/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, tcib_managed=true, managed_by=edpm_ansible, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_metadata_agent)
Oct 02 12:35:42 compute-0 podman[247016]: 2025-10-02 12:35:42.145880763 +0000 UTC m=+0.048816611 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:35:42 compute-0 podman[247015]: 2025-10-02 12:35:42.209531525 +0000 UTC m=+0.116008249 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, tcib_managed=true, config_id=ovn_controller, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_controller, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:35:42 compute-0 nova_compute[192079]: 2025-10-02 12:35:42.212 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:35:42 compute-0 nova_compute[192079]: 2025-10-02 12:35:42.241 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:35:42 compute-0 nova_compute[192079]: 2025-10-02 12:35:42.301 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:35:42 compute-0 nova_compute[192079]: 2025-10-02 12:35:42.301 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.022s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:35:42 compute-0 nova_compute[192079]: 2025-10-02 12:35:42.546 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:44 compute-0 nova_compute[192079]: 2025-10-02 12:35:44.301 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:35:44 compute-0 nova_compute[192079]: 2025-10-02 12:35:44.302 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:35:44 compute-0 nova_compute[192079]: 2025-10-02 12:35:44.302 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:35:44 compute-0 nova_compute[192079]: 2025-10-02 12:35:44.428 2 INFO nova.compute.manager [None req-4733698f-bc96-4983-9e44-5d221855f51e a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Get console output
Oct 02 12:35:44 compute-0 nova_compute[192079]: 2025-10-02 12:35:44.433 55 INFO nova.privsep.libvirt [-] Ignored error while reading from instance console pty: can't concat NoneType to bytes
Oct 02 12:35:44 compute-0 nova_compute[192079]: 2025-10-02 12:35:44.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:35:44 compute-0 nova_compute[192079]: 2025-10-02 12:35:44.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:35:44 compute-0 nova_compute[192079]: 2025-10-02 12:35:44.919 2 DEBUG oslo_concurrency.lockutils [None req-9c122f30-6187-461b-b141-6c875a381915 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "68b1a0ef-5b1f-4d43-b759-e385618171ff" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:35:44 compute-0 nova_compute[192079]: 2025-10-02 12:35:44.919 2 DEBUG oslo_concurrency.lockutils [None req-9c122f30-6187-461b-b141-6c875a381915 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "68b1a0ef-5b1f-4d43-b759-e385618171ff" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:35:44 compute-0 nova_compute[192079]: 2025-10-02 12:35:44.920 2 DEBUG oslo_concurrency.lockutils [None req-9c122f30-6187-461b-b141-6c875a381915 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "68b1a0ef-5b1f-4d43-b759-e385618171ff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:35:44 compute-0 nova_compute[192079]: 2025-10-02 12:35:44.920 2 DEBUG oslo_concurrency.lockutils [None req-9c122f30-6187-461b-b141-6c875a381915 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "68b1a0ef-5b1f-4d43-b759-e385618171ff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:35:44 compute-0 nova_compute[192079]: 2025-10-02 12:35:44.920 2 DEBUG oslo_concurrency.lockutils [None req-9c122f30-6187-461b-b141-6c875a381915 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "68b1a0ef-5b1f-4d43-b759-e385618171ff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:35:44 compute-0 nova_compute[192079]: 2025-10-02 12:35:44.931 2 INFO nova.compute.manager [None req-9c122f30-6187-461b-b141-6c875a381915 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Terminating instance
Oct 02 12:35:44 compute-0 nova_compute[192079]: 2025-10-02 12:35:44.943 2 DEBUG nova.compute.manager [None req-9c122f30-6187-461b-b141-6c875a381915 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:35:44 compute-0 kernel: tap5e4c5bee-3b (unregistering): left promiscuous mode
Oct 02 12:35:44 compute-0 NetworkManager[51160]: <info>  [1759408544.9812] device (tap5e4c5bee-3b): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:35:44 compute-0 ovn_controller[94336]: 2025-10-02T12:35:44Z|00601|binding|INFO|Releasing lport 5e4c5bee-3b4f-4d75-bcce-f96469aea319 from this chassis (sb_readonly=0)
Oct 02 12:35:44 compute-0 nova_compute[192079]: 2025-10-02 12:35:44.989 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:44 compute-0 ovn_controller[94336]: 2025-10-02T12:35:44Z|00602|binding|INFO|Setting lport 5e4c5bee-3b4f-4d75-bcce-f96469aea319 down in Southbound
Oct 02 12:35:44 compute-0 ovn_controller[94336]: 2025-10-02T12:35:44Z|00603|binding|INFO|Removing iface tap5e4c5bee-3b ovn-installed in OVS
Oct 02 12:35:44 compute-0 nova_compute[192079]: 2025-10-02 12:35:44.994 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.004 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:45.044 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:3c:96:05 10.100.0.5'], port_security=['fa:16:3e:3c:96:05 10.100.0.5'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.5/28', 'neutron:device_id': '68b1a0ef-5b1f-4d43-b759-e385618171ff', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-6512ce78-9132-4dd4-88c2-d82efca10339', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'neutron:revision_number': '4', 'neutron:security_group_ids': 'e33160c7-6eb4-4a93-93e3-98b50bc12b29', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com', 'neutron:port_fip': '192.168.122.233'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=360bc7aa-2f85-45e8-93bc-76083b104e89, chassis=[], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=5e4c5bee-3b4f-4d75-bcce-f96469aea319) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:35:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:45.045 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 5e4c5bee-3b4f-4d75-bcce-f96469aea319 in datapath 6512ce78-9132-4dd4-88c2-d82efca10339 unbound from our chassis
Oct 02 12:35:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:45.047 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 6512ce78-9132-4dd4-88c2-d82efca10339
Oct 02 12:35:45 compute-0 systemd[1]: machine-qemu\x2d75\x2dinstance\x2d0000009d.scope: Deactivated successfully.
Oct 02 12:35:45 compute-0 systemd[1]: machine-qemu\x2d75\x2dinstance\x2d0000009d.scope: Consumed 12.731s CPU time.
Oct 02 12:35:45 compute-0 systemd-machined[152150]: Machine qemu-75-instance-0000009d terminated.
Oct 02 12:35:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:45.060 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[edfd1127-22b8-487c-b814-f4f11e911ab6]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:35:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:45.088 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[5b4613ba-2500-4b09-a313-e05023e5a3e3]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:35:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:45.091 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[a488011c-1f76-49bc-be26-411f1b0da347]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:35:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:45.115 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[02867ea4-a443-4f59-a986-c981334a316d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:35:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:45.131 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[dc02e2a4-f9aa-4113-9976-10cd2c96de69]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap6512ce78-91'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:3c:04:0d'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 11, 'tx_packets': 8, 'rx_bytes': 958, 'tx_bytes': 524, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 11, 'tx_packets': 8, 'rx_bytes': 958, 'tx_bytes': 524, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 189], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 648824, 'reachable_time': 37638, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 8, 'inoctets': 720, 'indelivers': 1, 'outforwdatagrams': 0, 'outpkts': 4, 'outoctets': 300, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 8, 'outmcastpkts': 4, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 720, 'outmcastoctets': 300, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 8, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 1, 'inerrors': 0, 'outmsgs': 4, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 247089, 'error': None, 'target': 'ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:35:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:45.148 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[91af3f61-a4e0-4f54-957d-cf1acc3bce2f]: (4, ({'family': 2, 'prefixlen': 28, 'flags': 128, 'scope': 0, 'index': 2, 'attrs': [['IFA_ADDRESS', '10.100.0.2'], ['IFA_LOCAL', '10.100.0.2'], ['IFA_BROADCAST', '10.100.0.15'], ['IFA_LABEL', 'tap6512ce78-91'], ['IFA_FLAGS', 128], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 648839, 'tstamp': 648839}]], 'header': {'length': 96, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 247090, 'error': None, 'target': 'ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'}, {'family': 2, 'prefixlen': 32, 'flags': 128, 'scope': 0, 'index': 2, 'attrs': [['IFA_ADDRESS', '169.254.169.254'], ['IFA_LOCAL', '169.254.169.254'], ['IFA_BROADCAST', '169.254.169.254'], ['IFA_LABEL', 'tap6512ce78-91'], ['IFA_FLAGS', 128], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 648843, 'tstamp': 648843}]], 'header': {'length': 96, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 247090, 'error': None, 'target': 'ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'})) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:35:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:45.150 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap6512ce78-90, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.151 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.155 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:45.155 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap6512ce78-90, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:35:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:45.156 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:35:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:45.157 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap6512ce78-90, col_values=(('external_ids', {'iface-id': '23796c0f-f19b-4655-83fb-cbec481641fa'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:35:45 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:45.157 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.163 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.166 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.198 2 INFO nova.virt.libvirt.driver [-] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Instance destroyed successfully.
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.199 2 DEBUG nova.objects.instance [None req-9c122f30-6187-461b-b141-6c875a381915 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lazy-loading 'resources' on Instance uuid 68b1a0ef-5b1f-4d43-b759-e385618171ff obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.224 2 DEBUG nova.virt.libvirt.vif [None req-9c122f30-6187-461b-b141-6c875a381915 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:35:21Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestNetworkBasicOps-server-1363839596',display_name='tempest-TestNetworkBasicOps-server-1363839596',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkbasicops-server-1363839596',id=157,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBNpTTykc7bRxNeMQb/AwyVeQct7kkLrzBlJQM6PMV4uOiLZ/1v6I03B2g34rnSFM0pG31Lc4x0PzTw6rmd6zZQ4f/vsliu/8ODOLDzE9wtG+5ceWwS5zK+amidA8m/9tCA==',key_name='tempest-TestNetworkBasicOps-1262607354',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:35:27Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='6e2a4899168a47618e377cb3ac85ddd2',ramdisk_id='',reservation_id='r-xu11k8se',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-TestNetworkBasicOps-1323893370',owner_user_name='tempest-TestNetworkBasicOps-1323893370-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:35:27Z,user_data=None,user_id='a1898fdf056c4a249c33590f26d4d845',uuid=68b1a0ef-5b1f-4d43-b759-e385618171ff,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "5e4c5bee-3b4f-4d75-bcce-f96469aea319", "address": "fa:16:3e:3c:96:05", "network": {"id": "6512ce78-9132-4dd4-88c2-d82efca10339", "bridge": "br-int", "label": "tempest-network-smoke--937181", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 
4, "meta": {}, "floating_ips": [{"address": "192.168.122.233", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5e4c5bee-3b", "ovs_interfaceid": "5e4c5bee-3b4f-4d75-bcce-f96469aea319", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.224 2 DEBUG nova.network.os_vif_util [None req-9c122f30-6187-461b-b141-6c875a381915 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converting VIF {"id": "5e4c5bee-3b4f-4d75-bcce-f96469aea319", "address": "fa:16:3e:3c:96:05", "network": {"id": "6512ce78-9132-4dd4-88c2-d82efca10339", "bridge": "br-int", "label": "tempest-network-smoke--937181", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.233", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap5e4c5bee-3b", "ovs_interfaceid": "5e4c5bee-3b4f-4d75-bcce-f96469aea319", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.225 2 DEBUG nova.network.os_vif_util [None req-9c122f30-6187-461b-b141-6c875a381915 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:3c:96:05,bridge_name='br-int',has_traffic_filtering=True,id=5e4c5bee-3b4f-4d75-bcce-f96469aea319,network=Network(6512ce78-9132-4dd4-88c2-d82efca10339),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5e4c5bee-3b') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.226 2 DEBUG os_vif [None req-9c122f30-6187-461b-b141-6c875a381915 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:3c:96:05,bridge_name='br-int',has_traffic_filtering=True,id=5e4c5bee-3b4f-4d75-bcce-f96469aea319,network=Network(6512ce78-9132-4dd4-88c2-d82efca10339),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5e4c5bee-3b') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.227 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.228 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap5e4c5bee-3b, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.229 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.231 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.233 2 INFO os_vif [None req-9c122f30-6187-461b-b141-6c875a381915 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:3c:96:05,bridge_name='br-int',has_traffic_filtering=True,id=5e4c5bee-3b4f-4d75-bcce-f96469aea319,network=Network(6512ce78-9132-4dd4-88c2-d82efca10339),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap5e4c5bee-3b')
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.234 2 INFO nova.virt.libvirt.driver [None req-9c122f30-6187-461b-b141-6c875a381915 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Deleting instance files /var/lib/nova/instances/68b1a0ef-5b1f-4d43-b759-e385618171ff_del
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.235 2 INFO nova.virt.libvirt.driver [None req-9c122f30-6187-461b-b141-6c875a381915 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Deletion of /var/lib/nova/instances/68b1a0ef-5b1f-4d43-b759-e385618171ff_del complete
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.347 2 INFO nova.compute.manager [None req-9c122f30-6187-461b-b141-6c875a381915 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Took 0.40 seconds to destroy the instance on the hypervisor.
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.349 2 DEBUG oslo.service.loopingcall [None req-9c122f30-6187-461b-b141-6c875a381915 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.350 2 DEBUG nova.compute.manager [-] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.350 2 DEBUG nova.network.neutron [-] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.702 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Skipping network cache update for instance because it is being deleted. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9875
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.841 2 DEBUG nova.compute.manager [req-be8546ed-bad5-40cf-8efe-665c62b2d9ce req-d2879ef4-b094-4e41-afac-ae53a60c457d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Received event network-vif-unplugged-5e4c5bee-3b4f-4d75-bcce-f96469aea319 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.841 2 DEBUG oslo_concurrency.lockutils [req-be8546ed-bad5-40cf-8efe-665c62b2d9ce req-d2879ef4-b094-4e41-afac-ae53a60c457d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "68b1a0ef-5b1f-4d43-b759-e385618171ff-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.842 2 DEBUG oslo_concurrency.lockutils [req-be8546ed-bad5-40cf-8efe-665c62b2d9ce req-d2879ef4-b094-4e41-afac-ae53a60c457d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "68b1a0ef-5b1f-4d43-b759-e385618171ff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.842 2 DEBUG oslo_concurrency.lockutils [req-be8546ed-bad5-40cf-8efe-665c62b2d9ce req-d2879ef4-b094-4e41-afac-ae53a60c457d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "68b1a0ef-5b1f-4d43-b759-e385618171ff-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.842 2 DEBUG nova.compute.manager [req-be8546ed-bad5-40cf-8efe-665c62b2d9ce req-d2879ef4-b094-4e41-afac-ae53a60c457d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] No waiting events found dispatching network-vif-unplugged-5e4c5bee-3b4f-4d75-bcce-f96469aea319 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:35:45 compute-0 nova_compute[192079]: 2025-10-02 12:35:45.843 2 DEBUG nova.compute.manager [req-be8546ed-bad5-40cf-8efe-665c62b2d9ce req-d2879ef4-b094-4e41-afac-ae53a60c457d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Received event network-vif-unplugged-5e4c5bee-3b4f-4d75-bcce-f96469aea319 for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:35:46 compute-0 nova_compute[192079]: 2025-10-02 12:35:46.033 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "refresh_cache-29e46585-0d8d-450d-b3de-d6d103b90a58" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:35:46 compute-0 nova_compute[192079]: 2025-10-02 12:35:46.033 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquired lock "refresh_cache-29e46585-0d8d-450d-b3de-d6d103b90a58" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:35:46 compute-0 nova_compute[192079]: 2025-10-02 12:35:46.033 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Forcefully refreshing network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2004
Oct 02 12:35:46 compute-0 nova_compute[192079]: 2025-10-02 12:35:46.033 2 DEBUG nova.objects.instance [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lazy-loading 'info_cache' on Instance uuid 29e46585-0d8d-450d-b3de-d6d103b90a58 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:35:47 compute-0 nova_compute[192079]: 2025-10-02 12:35:47.548 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:48 compute-0 nova_compute[192079]: 2025-10-02 12:35:48.153 2 DEBUG nova.compute.manager [req-71d09467-30c5-440b-b688-7094d277421d req-f35f363e-581c-4d34-9692-611c75152150 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Received event network-vif-plugged-5e4c5bee-3b4f-4d75-bcce-f96469aea319 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:35:48 compute-0 nova_compute[192079]: 2025-10-02 12:35:48.153 2 DEBUG oslo_concurrency.lockutils [req-71d09467-30c5-440b-b688-7094d277421d req-f35f363e-581c-4d34-9692-611c75152150 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "68b1a0ef-5b1f-4d43-b759-e385618171ff-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:35:48 compute-0 nova_compute[192079]: 2025-10-02 12:35:48.154 2 DEBUG oslo_concurrency.lockutils [req-71d09467-30c5-440b-b688-7094d277421d req-f35f363e-581c-4d34-9692-611c75152150 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "68b1a0ef-5b1f-4d43-b759-e385618171ff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:35:48 compute-0 nova_compute[192079]: 2025-10-02 12:35:48.154 2 DEBUG oslo_concurrency.lockutils [req-71d09467-30c5-440b-b688-7094d277421d req-f35f363e-581c-4d34-9692-611c75152150 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "68b1a0ef-5b1f-4d43-b759-e385618171ff-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:35:48 compute-0 nova_compute[192079]: 2025-10-02 12:35:48.154 2 DEBUG nova.compute.manager [req-71d09467-30c5-440b-b688-7094d277421d req-f35f363e-581c-4d34-9692-611c75152150 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] No waiting events found dispatching network-vif-plugged-5e4c5bee-3b4f-4d75-bcce-f96469aea319 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:35:48 compute-0 nova_compute[192079]: 2025-10-02 12:35:48.155 2 WARNING nova.compute.manager [req-71d09467-30c5-440b-b688-7094d277421d req-f35f363e-581c-4d34-9692-611c75152150 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Received unexpected event network-vif-plugged-5e4c5bee-3b4f-4d75-bcce-f96469aea319 for instance with vm_state active and task_state deleting.
Oct 02 12:35:48 compute-0 nova_compute[192079]: 2025-10-02 12:35:48.906 2 DEBUG nova.network.neutron [-] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:35:48 compute-0 nova_compute[192079]: 2025-10-02 12:35:48.942 2 INFO nova.compute.manager [-] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Took 3.59 seconds to deallocate network for instance.
Oct 02 12:35:49 compute-0 nova_compute[192079]: 2025-10-02 12:35:49.041 2 DEBUG oslo_concurrency.lockutils [None req-9c122f30-6187-461b-b141-6c875a381915 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:35:49 compute-0 nova_compute[192079]: 2025-10-02 12:35:49.041 2 DEBUG oslo_concurrency.lockutils [None req-9c122f30-6187-461b-b141-6c875a381915 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:35:49 compute-0 nova_compute[192079]: 2025-10-02 12:35:49.048 2 DEBUG nova.compute.manager [req-717de210-38a7-4ba2-bba5-7018dc90c7ef req-b5ef13f2-335f-48a2-8627-7804336a63ce 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Received event network-vif-deleted-5e4c5bee-3b4f-4d75-bcce-f96469aea319 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:35:49 compute-0 nova_compute[192079]: 2025-10-02 12:35:49.145 2 DEBUG nova.compute.provider_tree [None req-9c122f30-6187-461b-b141-6c875a381915 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:35:49 compute-0 nova_compute[192079]: 2025-10-02 12:35:49.164 2 DEBUG nova.scheduler.client.report [None req-9c122f30-6187-461b-b141-6c875a381915 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:35:49 compute-0 nova_compute[192079]: 2025-10-02 12:35:49.187 2 DEBUG oslo_concurrency.lockutils [None req-9c122f30-6187-461b-b141-6c875a381915 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.146s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:35:49 compute-0 nova_compute[192079]: 2025-10-02 12:35:49.237 2 INFO nova.scheduler.client.report [None req-9c122f30-6187-461b-b141-6c875a381915 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Deleted allocations for instance 68b1a0ef-5b1f-4d43-b759-e385618171ff
Oct 02 12:35:49 compute-0 nova_compute[192079]: 2025-10-02 12:35:49.276 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Updating instance_info_cache with network_info: [{"id": "a36441d3-2588-4fff-9190-68df21897dec", "address": "fa:16:3e:6e:e8:1d", "network": {"id": "6512ce78-9132-4dd4-88c2-d82efca10339", "bridge": "br-int", "label": "tempest-network-smoke--937181", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa36441d3-25", "ovs_interfaceid": "a36441d3-2588-4fff-9190-68df21897dec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:35:49 compute-0 nova_compute[192079]: 2025-10-02 12:35:49.309 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Releasing lock "refresh_cache-29e46585-0d8d-450d-b3de-d6d103b90a58" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:35:49 compute-0 nova_compute[192079]: 2025-10-02 12:35:49.310 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Updated the network info_cache for instance _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9929
Oct 02 12:35:49 compute-0 nova_compute[192079]: 2025-10-02 12:35:49.337 2 DEBUG oslo_concurrency.lockutils [None req-9c122f30-6187-461b-b141-6c875a381915 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "68b1a0ef-5b1f-4d43-b759-e385618171ff" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 4.418s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:35:49 compute-0 nova_compute[192079]: 2025-10-02 12:35:49.966 2 DEBUG oslo_concurrency.lockutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Acquiring lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:35:49 compute-0 nova_compute[192079]: 2025-10-02 12:35:49.967 2 DEBUG oslo_concurrency.lockutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:35:49 compute-0 nova_compute[192079]: 2025-10-02 12:35:49.999 2 DEBUG nova.compute.manager [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.138 2 DEBUG oslo_concurrency.lockutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.139 2 DEBUG oslo_concurrency.lockutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.143 2 DEBUG nova.virt.hardware [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.144 2 INFO nova.compute.claims [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.228 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.332 2 DEBUG nova.compute.provider_tree [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.350 2 DEBUG nova.scheduler.client.report [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.374 2 DEBUG oslo_concurrency.lockutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.235s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.375 2 DEBUG nova.compute.manager [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.441 2 DEBUG nova.compute.manager [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.441 2 DEBUG nova.network.neutron [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.466 2 INFO nova.virt.libvirt.driver [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.494 2 DEBUG nova.compute.manager [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.621 2 DEBUG nova.compute.manager [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.623 2 DEBUG nova.virt.libvirt.driver [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.623 2 INFO nova.virt.libvirt.driver [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Creating image(s)
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.624 2 DEBUG oslo_concurrency.lockutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Acquiring lock "/var/lib/nova/instances/0f53a1ce-fb3c-4d89-be52-05b2de65acba/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.624 2 DEBUG oslo_concurrency.lockutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Lock "/var/lib/nova/instances/0f53a1ce-fb3c-4d89-be52-05b2de65acba/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.625 2 DEBUG oslo_concurrency.lockutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Lock "/var/lib/nova/instances/0f53a1ce-fb3c-4d89-be52-05b2de65acba/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.637 2 DEBUG oslo_concurrency.processutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.692 2 DEBUG oslo_concurrency.processutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.693 2 DEBUG oslo_concurrency.lockutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.694 2 DEBUG oslo_concurrency.lockutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.705 2 DEBUG oslo_concurrency.processutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.773 2 DEBUG oslo_concurrency.processutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.068s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.774 2 DEBUG oslo_concurrency.processutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/0f53a1ce-fb3c-4d89-be52-05b2de65acba/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.815 2 DEBUG oslo_concurrency.processutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/0f53a1ce-fb3c-4d89-be52-05b2de65acba/disk 1073741824" returned: 0 in 0.041s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.816 2 DEBUG oslo_concurrency.lockutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.122s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.816 2 DEBUG oslo_concurrency.processutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.875 2 DEBUG oslo_concurrency.processutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.058s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.876 2 DEBUG nova.virt.disk.api [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Checking if we can resize image /var/lib/nova/instances/0f53a1ce-fb3c-4d89-be52-05b2de65acba/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.877 2 DEBUG oslo_concurrency.processutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/0f53a1ce-fb3c-4d89-be52-05b2de65acba/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.932 2 DEBUG oslo_concurrency.processutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/0f53a1ce-fb3c-4d89-be52-05b2de65acba/disk --force-share --output=json" returned: 0 in 0.055s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.933 2 DEBUG nova.virt.disk.api [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Cannot resize image /var/lib/nova/instances/0f53a1ce-fb3c-4d89-be52-05b2de65acba/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.933 2 DEBUG nova.objects.instance [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Lazy-loading 'migration_context' on Instance uuid 0f53a1ce-fb3c-4d89-be52-05b2de65acba obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.952 2 DEBUG nova.virt.libvirt.driver [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.952 2 DEBUG nova.virt.libvirt.driver [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Ensure instance console log exists: /var/lib/nova/instances/0f53a1ce-fb3c-4d89-be52-05b2de65acba/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.953 2 DEBUG oslo_concurrency.lockutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.953 2 DEBUG oslo_concurrency.lockutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:35:50 compute-0 nova_compute[192079]: 2025-10-02 12:35:50.954 2 DEBUG oslo_concurrency.lockutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:35:51 compute-0 nova_compute[192079]: 2025-10-02 12:35:51.027 2 DEBUG nova.policy [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bf14abcb3f75420e870a3997dfbedee4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6908d705b9b541669e2fe9a84c2cacd7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.021 2 DEBUG oslo_concurrency.lockutils [None req-34929e12-f705-4ddc-a097-8d5309f19c80 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "29e46585-0d8d-450d-b3de-d6d103b90a58" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.022 2 DEBUG oslo_concurrency.lockutils [None req-34929e12-f705-4ddc-a097-8d5309f19c80 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "29e46585-0d8d-450d-b3de-d6d103b90a58" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.022 2 DEBUG oslo_concurrency.lockutils [None req-34929e12-f705-4ddc-a097-8d5309f19c80 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "29e46585-0d8d-450d-b3de-d6d103b90a58-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.022 2 DEBUG oslo_concurrency.lockutils [None req-34929e12-f705-4ddc-a097-8d5309f19c80 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "29e46585-0d8d-450d-b3de-d6d103b90a58-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.022 2 DEBUG oslo_concurrency.lockutils [None req-34929e12-f705-4ddc-a097-8d5309f19c80 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "29e46585-0d8d-450d-b3de-d6d103b90a58-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.037 2 INFO nova.compute.manager [None req-34929e12-f705-4ddc-a097-8d5309f19c80 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Terminating instance
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.051 2 DEBUG nova.compute.manager [None req-34929e12-f705-4ddc-a097-8d5309f19c80 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:35:52 compute-0 kernel: tapa36441d3-25 (unregistering): left promiscuous mode
Oct 02 12:35:52 compute-0 NetworkManager[51160]: <info>  [1759408552.0766] device (tapa36441d3-25): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:35:52 compute-0 ovn_controller[94336]: 2025-10-02T12:35:52Z|00604|binding|INFO|Releasing lport a36441d3-2588-4fff-9190-68df21897dec from this chassis (sb_readonly=0)
Oct 02 12:35:52 compute-0 ovn_controller[94336]: 2025-10-02T12:35:52Z|00605|binding|INFO|Setting lport a36441d3-2588-4fff-9190-68df21897dec down in Southbound
Oct 02 12:35:52 compute-0 ovn_controller[94336]: 2025-10-02T12:35:52Z|00606|binding|INFO|Removing iface tapa36441d3-25 ovn-installed in OVS
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.298 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:52.306 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:6e:e8:1d 10.100.0.9'], port_security=['fa:16:3e:6e:e8:1d 10.100.0.9'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.9/28', 'neutron:device_id': '29e46585-0d8d-450d-b3de-d6d103b90a58', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-6512ce78-9132-4dd4-88c2-d82efca10339', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'neutron:revision_number': '4', 'neutron:security_group_ids': '36435018-4a6e-494b-8da7-cfcae8505cf6', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=360bc7aa-2f85-45e8-93bc-76083b104e89, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=a36441d3-2588-4fff-9190-68df21897dec) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:35:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:52.307 103294 INFO neutron.agent.ovn.metadata.agent [-] Port a36441d3-2588-4fff-9190-68df21897dec in datapath 6512ce78-9132-4dd4-88c2-d82efca10339 unbound from our chassis
Oct 02 12:35:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:52.309 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 6512ce78-9132-4dd4-88c2-d82efca10339, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:35:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:52.310 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[da07673d-dfc7-4994-932b-3408e1959296]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:35:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:52.311 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339 namespace which is not needed anymore
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.311 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:52 compute-0 systemd[1]: machine-qemu\x2d74\x2dinstance\x2d00000099.scope: Deactivated successfully.
Oct 02 12:35:52 compute-0 systemd[1]: machine-qemu\x2d74\x2dinstance\x2d00000099.scope: Consumed 15.540s CPU time.
Oct 02 12:35:52 compute-0 systemd-machined[152150]: Machine qemu-74-instance-00000099 terminated.
Oct 02 12:35:52 compute-0 neutron-haproxy-ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339[246630]: [NOTICE]   (246652) : haproxy version is 2.8.14-c23fe91
Oct 02 12:35:52 compute-0 neutron-haproxy-ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339[246630]: [NOTICE]   (246652) : path to executable is /usr/sbin/haproxy
Oct 02 12:35:52 compute-0 neutron-haproxy-ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339[246630]: [WARNING]  (246652) : Exiting Master process...
Oct 02 12:35:52 compute-0 neutron-haproxy-ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339[246630]: [WARNING]  (246652) : Exiting Master process...
Oct 02 12:35:52 compute-0 neutron-haproxy-ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339[246630]: [ALERT]    (246652) : Current worker (246655) exited with code 143 (Terminated)
Oct 02 12:35:52 compute-0 neutron-haproxy-ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339[246630]: [WARNING]  (246652) : All workers exited. Exiting... (0)
Oct 02 12:35:52 compute-0 systemd[1]: libpod-b4c37ab82e9b424e554c6b8d4c505775e75fec335a8fe5d1996a9397464f3805.scope: Deactivated successfully.
Oct 02 12:35:52 compute-0 podman[247151]: 2025-10-02 12:35:52.461626315 +0000 UTC m=+0.056511630 container died b4c37ab82e9b424e554c6b8d4c505775e75fec335a8fe5d1996a9397464f3805 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true)
Oct 02 12:35:52 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-b4c37ab82e9b424e554c6b8d4c505775e75fec335a8fe5d1996a9397464f3805-userdata-shm.mount: Deactivated successfully.
Oct 02 12:35:52 compute-0 systemd[1]: var-lib-containers-storage-overlay-9f8ec0911cce4a26d0406caf72028e05f1f30238b65d2ab43f66cb07a4e681ef-merged.mount: Deactivated successfully.
Oct 02 12:35:52 compute-0 podman[247143]: 2025-10-02 12:35:52.510890176 +0000 UTC m=+0.107584560 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, tcib_managed=true, container_name=ceilometer_agent_compute, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, config_id=edpm, io.buildah.version=1.41.3, 
tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0)
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.518 2 INFO nova.virt.libvirt.driver [-] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Instance destroyed successfully.
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.519 2 DEBUG nova.objects.instance [None req-34929e12-f705-4ddc-a097-8d5309f19c80 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lazy-loading 'resources' on Instance uuid 29e46585-0d8d-450d-b3de-d6d103b90a58 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:35:52 compute-0 podman[247151]: 2025-10-02 12:35:52.525032501 +0000 UTC m=+0.119917806 container cleanup b4c37ab82e9b424e554c6b8d4c505775e75fec335a8fe5d1996a9397464f3805 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0)
Oct 02 12:35:52 compute-0 systemd[1]: libpod-conmon-b4c37ab82e9b424e554c6b8d4c505775e75fec335a8fe5d1996a9397464f3805.scope: Deactivated successfully.
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.536 2 DEBUG nova.virt.libvirt.vif [None req-34929e12-f705-4ddc-a097-8d5309f19c80 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:34:41Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestNetworkBasicOps-server-1074026563',display_name='tempest-TestNetworkBasicOps-server-1074026563',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkbasicops-server-1074026563',id=153,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBIruC8NBbt0fiPM5vjUcW7I4aWOaOqn0nOX/moE5SBYvSbwgZuheNp45snU/Zu/Yc1PtgYRP83VhLxhPzxMwsidZgk5yFQ8uQoKwlTkw6XHGAZZ+9OGwmu29t+3aBvQGYQ==',key_name='tempest-TestNetworkBasicOps-1331898182',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:34:55Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='6e2a4899168a47618e377cb3ac85ddd2',ramdisk_id='',reservation_id='r-isys9n10',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-TestNetworkBasicOps-1323893370',owner_user_name='tempest-TestNetworkBasicOps-1323893370-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:34:55Z,user_data=None,user_id='a1898fdf056c4a249c33590f26d4d845',uuid=29e46585-0d8d-450d-b3de-d6d103b90a58,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "a36441d3-2588-4fff-9190-68df21897dec", "address": "fa:16:3e:6e:e8:1d", "network": {"id": "6512ce78-9132-4dd4-88c2-d82efca10339", "bridge": "br-int", "label": "tempest-network-smoke--937181", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 
4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa36441d3-25", "ovs_interfaceid": "a36441d3-2588-4fff-9190-68df21897dec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.536 2 DEBUG nova.network.os_vif_util [None req-34929e12-f705-4ddc-a097-8d5309f19c80 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converting VIF {"id": "a36441d3-2588-4fff-9190-68df21897dec", "address": "fa:16:3e:6e:e8:1d", "network": {"id": "6512ce78-9132-4dd4-88c2-d82efca10339", "bridge": "br-int", "label": "tempest-network-smoke--937181", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa36441d3-25", "ovs_interfaceid": "a36441d3-2588-4fff-9190-68df21897dec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.537 2 DEBUG nova.network.os_vif_util [None req-34929e12-f705-4ddc-a097-8d5309f19c80 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:6e:e8:1d,bridge_name='br-int',has_traffic_filtering=True,id=a36441d3-2588-4fff-9190-68df21897dec,network=Network(6512ce78-9132-4dd4-88c2-d82efca10339),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapa36441d3-25') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.538 2 DEBUG os_vif [None req-34929e12-f705-4ddc-a097-8d5309f19c80 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:6e:e8:1d,bridge_name='br-int',has_traffic_filtering=True,id=a36441d3-2588-4fff-9190-68df21897dec,network=Network(6512ce78-9132-4dd4-88c2-d82efca10339),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapa36441d3-25') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.539 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.540 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapa36441d3-25, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.541 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.542 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.544 2 INFO os_vif [None req-34929e12-f705-4ddc-a097-8d5309f19c80 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:6e:e8:1d,bridge_name='br-int',has_traffic_filtering=True,id=a36441d3-2588-4fff-9190-68df21897dec,network=Network(6512ce78-9132-4dd4-88c2-d82efca10339),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapa36441d3-25')
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.545 2 INFO nova.virt.libvirt.driver [None req-34929e12-f705-4ddc-a097-8d5309f19c80 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Deleting instance files /var/lib/nova/instances/29e46585-0d8d-450d-b3de-d6d103b90a58_del
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.545 2 INFO nova.virt.libvirt.driver [None req-34929e12-f705-4ddc-a097-8d5309f19c80 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Deletion of /var/lib/nova/instances/29e46585-0d8d-450d-b3de-d6d103b90a58_del complete
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.551 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:52 compute-0 podman[247215]: 2025-10-02 12:35:52.618424343 +0000 UTC m=+0.070773377 container remove b4c37ab82e9b424e554c6b8d4c505775e75fec335a8fe5d1996a9397464f3805 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3)
Oct 02 12:35:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:52.624 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3b2b100d-b6c8-457b-acc9-4aa471550243]: (4, ('Thu Oct  2 12:35:52 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339 (b4c37ab82e9b424e554c6b8d4c505775e75fec335a8fe5d1996a9397464f3805)\nb4c37ab82e9b424e554c6b8d4c505775e75fec335a8fe5d1996a9397464f3805\nThu Oct  2 12:35:52 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339 (b4c37ab82e9b424e554c6b8d4c505775e75fec335a8fe5d1996a9397464f3805)\nb4c37ab82e9b424e554c6b8d4c505775e75fec335a8fe5d1996a9397464f3805\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:35:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:52.625 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[12308e4e-8aa4-420e-96d0-20a968c35778]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:35:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:52.626 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap6512ce78-90, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.628 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:52 compute-0 kernel: tap6512ce78-90: left promiscuous mode
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.632 2 INFO nova.compute.manager [None req-34929e12-f705-4ddc-a097-8d5309f19c80 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Took 0.58 seconds to destroy the instance on the hypervisor.
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.633 2 DEBUG oslo.service.loopingcall [None req-34929e12-f705-4ddc-a097-8d5309f19c80 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.633 2 DEBUG nova.compute.manager [-] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.633 2 DEBUG nova.network.neutron [-] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.640 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:52.643 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[60aacf57-24ae-47f1-aed2-3e44ab74bae5]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:35:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:52.673 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e99cab3a-ac59-43a3-9a9e-1cb453b07e40]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:35:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:52.675 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3bb1db76-74de-4038-b032-c57ec8abd766]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:35:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:52.689 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[255e84fb-cf99-45aa-b210-f38aa0ca8b3b]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 648816, 'reachable_time': 39581, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 247230, 'error': None, 'target': 'ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:35:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:52.692 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-6512ce78-9132-4dd4-88c2-d82efca10339 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:35:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:35:52.692 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[6071245d-2542-46fa-8423-f6482bf22e20]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:35:52 compute-0 systemd[1]: run-netns-ovnmeta\x2d6512ce78\x2d9132\x2d4dd4\x2d88c2\x2dd82efca10339.mount: Deactivated successfully.
Oct 02 12:35:52 compute-0 nova_compute[192079]: 2025-10-02 12:35:52.963 2 DEBUG nova.network.neutron [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Successfully created port: 64dd0de7-fbd7-4c16-b867-61d61163f4ba _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:35:54 compute-0 nova_compute[192079]: 2025-10-02 12:35:54.498 2 DEBUG nova.network.neutron [-] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:35:54 compute-0 nova_compute[192079]: 2025-10-02 12:35:54.526 2 INFO nova.compute.manager [-] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Took 1.89 seconds to deallocate network for instance.
Oct 02 12:35:54 compute-0 nova_compute[192079]: 2025-10-02 12:35:54.546 2 DEBUG nova.compute.manager [req-91273b6a-0159-4bb2-a10f-8eaab903748f req-33f03189-ff69-4d8c-8d45-e05a9a0e9eb9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Received event network-vif-unplugged-a36441d3-2588-4fff-9190-68df21897dec external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:35:54 compute-0 nova_compute[192079]: 2025-10-02 12:35:54.547 2 DEBUG oslo_concurrency.lockutils [req-91273b6a-0159-4bb2-a10f-8eaab903748f req-33f03189-ff69-4d8c-8d45-e05a9a0e9eb9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "29e46585-0d8d-450d-b3de-d6d103b90a58-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:35:54 compute-0 nova_compute[192079]: 2025-10-02 12:35:54.547 2 DEBUG oslo_concurrency.lockutils [req-91273b6a-0159-4bb2-a10f-8eaab903748f req-33f03189-ff69-4d8c-8d45-e05a9a0e9eb9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "29e46585-0d8d-450d-b3de-d6d103b90a58-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:35:54 compute-0 nova_compute[192079]: 2025-10-02 12:35:54.548 2 DEBUG oslo_concurrency.lockutils [req-91273b6a-0159-4bb2-a10f-8eaab903748f req-33f03189-ff69-4d8c-8d45-e05a9a0e9eb9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "29e46585-0d8d-450d-b3de-d6d103b90a58-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:35:54 compute-0 nova_compute[192079]: 2025-10-02 12:35:54.549 2 DEBUG nova.compute.manager [req-91273b6a-0159-4bb2-a10f-8eaab903748f req-33f03189-ff69-4d8c-8d45-e05a9a0e9eb9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] No waiting events found dispatching network-vif-unplugged-a36441d3-2588-4fff-9190-68df21897dec pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:35:54 compute-0 nova_compute[192079]: 2025-10-02 12:35:54.549 2 DEBUG nova.compute.manager [req-91273b6a-0159-4bb2-a10f-8eaab903748f req-33f03189-ff69-4d8c-8d45-e05a9a0e9eb9 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Received event network-vif-unplugged-a36441d3-2588-4fff-9190-68df21897dec for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:35:54 compute-0 nova_compute[192079]: 2025-10-02 12:35:54.626 2 DEBUG nova.compute.manager [req-531bc764-6feb-4523-b675-db29dc8d83bf req-412160d8-3cbd-42e5-be6d-63a7ca92cccf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Received event network-vif-deleted-a36441d3-2588-4fff-9190-68df21897dec external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:35:54 compute-0 nova_compute[192079]: 2025-10-02 12:35:54.638 2 DEBUG oslo_concurrency.lockutils [None req-34929e12-f705-4ddc-a097-8d5309f19c80 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:35:54 compute-0 nova_compute[192079]: 2025-10-02 12:35:54.638 2 DEBUG oslo_concurrency.lockutils [None req-34929e12-f705-4ddc-a097-8d5309f19c80 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:35:54 compute-0 nova_compute[192079]: 2025-10-02 12:35:54.715 2 DEBUG nova.compute.provider_tree [None req-34929e12-f705-4ddc-a097-8d5309f19c80 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:35:54 compute-0 nova_compute[192079]: 2025-10-02 12:35:54.732 2 DEBUG nova.scheduler.client.report [None req-34929e12-f705-4ddc-a097-8d5309f19c80 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:35:54 compute-0 nova_compute[192079]: 2025-10-02 12:35:54.756 2 DEBUG oslo_concurrency.lockutils [None req-34929e12-f705-4ddc-a097-8d5309f19c80 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.118s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:35:54 compute-0 nova_compute[192079]: 2025-10-02 12:35:54.812 2 INFO nova.scheduler.client.report [None req-34929e12-f705-4ddc-a097-8d5309f19c80 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Deleted allocations for instance 29e46585-0d8d-450d-b3de-d6d103b90a58
Oct 02 12:35:54 compute-0 nova_compute[192079]: 2025-10-02 12:35:54.899 2 DEBUG oslo_concurrency.lockutils [None req-34929e12-f705-4ddc-a097-8d5309f19c80 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "29e46585-0d8d-450d-b3de-d6d103b90a58" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 2.878s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:35:55 compute-0 nova_compute[192079]: 2025-10-02 12:35:55.024 2 DEBUG nova.network.neutron [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Successfully updated port: 64dd0de7-fbd7-4c16-b867-61d61163f4ba _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:35:55 compute-0 nova_compute[192079]: 2025-10-02 12:35:55.057 2 DEBUG oslo_concurrency.lockutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Acquiring lock "refresh_cache-0f53a1ce-fb3c-4d89-be52-05b2de65acba" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:35:55 compute-0 nova_compute[192079]: 2025-10-02 12:35:55.057 2 DEBUG oslo_concurrency.lockutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Acquired lock "refresh_cache-0f53a1ce-fb3c-4d89-be52-05b2de65acba" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:35:55 compute-0 nova_compute[192079]: 2025-10-02 12:35:55.058 2 DEBUG nova.network.neutron [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:35:56 compute-0 nova_compute[192079]: 2025-10-02 12:35:56.001 2 DEBUG nova.network.neutron [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:35:56 compute-0 nova_compute[192079]: 2025-10-02 12:35:56.787 2 DEBUG nova.compute.manager [req-383cd623-aa2d-419b-b37e-edb84989e6e9 req-cffb2162-dbe4-487a-b839-0bd16792177d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Received event network-changed-64dd0de7-fbd7-4c16-b867-61d61163f4ba external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:35:56 compute-0 nova_compute[192079]: 2025-10-02 12:35:56.788 2 DEBUG nova.compute.manager [req-383cd623-aa2d-419b-b37e-edb84989e6e9 req-cffb2162-dbe4-487a-b839-0bd16792177d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Refreshing instance network info cache due to event network-changed-64dd0de7-fbd7-4c16-b867-61d61163f4ba. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:35:56 compute-0 nova_compute[192079]: 2025-10-02 12:35:56.789 2 DEBUG oslo_concurrency.lockutils [req-383cd623-aa2d-419b-b37e-edb84989e6e9 req-cffb2162-dbe4-487a-b839-0bd16792177d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-0f53a1ce-fb3c-4d89-be52-05b2de65acba" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:35:57 compute-0 nova_compute[192079]: 2025-10-02 12:35:57.157 2 DEBUG nova.compute.manager [req-8172a063-4d25-4fac-84f2-c4c8f590bae8 req-c5cf5564-32e8-4aee-8097-2c430b3e836a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Received event network-vif-plugged-a36441d3-2588-4fff-9190-68df21897dec external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:35:57 compute-0 nova_compute[192079]: 2025-10-02 12:35:57.158 2 DEBUG oslo_concurrency.lockutils [req-8172a063-4d25-4fac-84f2-c4c8f590bae8 req-c5cf5564-32e8-4aee-8097-2c430b3e836a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "29e46585-0d8d-450d-b3de-d6d103b90a58-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:35:57 compute-0 nova_compute[192079]: 2025-10-02 12:35:57.158 2 DEBUG oslo_concurrency.lockutils [req-8172a063-4d25-4fac-84f2-c4c8f590bae8 req-c5cf5564-32e8-4aee-8097-2c430b3e836a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "29e46585-0d8d-450d-b3de-d6d103b90a58-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:35:57 compute-0 nova_compute[192079]: 2025-10-02 12:35:57.158 2 DEBUG oslo_concurrency.lockutils [req-8172a063-4d25-4fac-84f2-c4c8f590bae8 req-c5cf5564-32e8-4aee-8097-2c430b3e836a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "29e46585-0d8d-450d-b3de-d6d103b90a58-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:35:57 compute-0 nova_compute[192079]: 2025-10-02 12:35:57.159 2 DEBUG nova.compute.manager [req-8172a063-4d25-4fac-84f2-c4c8f590bae8 req-c5cf5564-32e8-4aee-8097-2c430b3e836a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] No waiting events found dispatching network-vif-plugged-a36441d3-2588-4fff-9190-68df21897dec pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:35:57 compute-0 nova_compute[192079]: 2025-10-02 12:35:57.159 2 WARNING nova.compute.manager [req-8172a063-4d25-4fac-84f2-c4c8f590bae8 req-c5cf5564-32e8-4aee-8097-2c430b3e836a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Received unexpected event network-vif-plugged-a36441d3-2588-4fff-9190-68df21897dec for instance with vm_state deleted and task_state None.
Oct 02 12:35:57 compute-0 nova_compute[192079]: 2025-10-02 12:35:57.544 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:35:57 compute-0 nova_compute[192079]: 2025-10-02 12:35:57.552 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.028 2 DEBUG nova.network.neutron [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Updating instance_info_cache with network_info: [{"id": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "address": "fa:16:3e:e3:22:87", "network": {"id": "616214b7-6fa7-4c4a-92d4-ca6b283a5d5d", "bridge": "br-int", "label": "tempest-TestServerAdvancedOps-1235429694-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6908d705b9b541669e2fe9a84c2cacd7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap64dd0de7-fb", "ovs_interfaceid": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.109 2 DEBUG oslo_concurrency.lockutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Releasing lock "refresh_cache-0f53a1ce-fb3c-4d89-be52-05b2de65acba" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.109 2 DEBUG nova.compute.manager [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Instance network_info: |[{"id": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "address": "fa:16:3e:e3:22:87", "network": {"id": "616214b7-6fa7-4c4a-92d4-ca6b283a5d5d", "bridge": "br-int", "label": "tempest-TestServerAdvancedOps-1235429694-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6908d705b9b541669e2fe9a84c2cacd7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap64dd0de7-fb", "ovs_interfaceid": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.110 2 DEBUG oslo_concurrency.lockutils [req-383cd623-aa2d-419b-b37e-edb84989e6e9 req-cffb2162-dbe4-487a-b839-0bd16792177d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-0f53a1ce-fb3c-4d89-be52-05b2de65acba" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.110 2 DEBUG nova.network.neutron [req-383cd623-aa2d-419b-b37e-edb84989e6e9 req-cffb2162-dbe4-487a-b839-0bd16792177d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Refreshing network info cache for port 64dd0de7-fbd7-4c16-b867-61d61163f4ba _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.112 2 DEBUG nova.virt.libvirt.driver [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Start _get_guest_xml network_info=[{"id": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "address": "fa:16:3e:e3:22:87", "network": {"id": "616214b7-6fa7-4c4a-92d4-ca6b283a5d5d", "bridge": "br-int", "label": "tempest-TestServerAdvancedOps-1235429694-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6908d705b9b541669e2fe9a84c2cacd7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap64dd0de7-fb", "ovs_interfaceid": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.116 2 WARNING nova.virt.libvirt.driver [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.120 2 DEBUG nova.virt.libvirt.host [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.121 2 DEBUG nova.virt.libvirt.host [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.124 2 DEBUG nova.virt.libvirt.host [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.125 2 DEBUG nova.virt.libvirt.host [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.126 2 DEBUG nova.virt.libvirt.driver [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.126 2 DEBUG nova.virt.hardware [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.126 2 DEBUG nova.virt.hardware [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.126 2 DEBUG nova.virt.hardware [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.126 2 DEBUG nova.virt.hardware [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.127 2 DEBUG nova.virt.hardware [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.127 2 DEBUG nova.virt.hardware [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.127 2 DEBUG nova.virt.hardware [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.127 2 DEBUG nova.virt.hardware [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.127 2 DEBUG nova.virt.hardware [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.127 2 DEBUG nova.virt.hardware [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.128 2 DEBUG nova.virt.hardware [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.131 2 DEBUG nova.virt.libvirt.vif [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:35:49Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestServerAdvancedOps-server-16731796',display_name='tempest-TestServerAdvancedOps-server-16731796',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testserveradvancedops-server-16731796',id=159,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='6908d705b9b541669e2fe9a84c2cacd7',ramdisk_id='',reservation_id='r-089z0o6n',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestServerAdvancedOps-1292292677',owner_user_name='tempest-TestServerAdvancedOps-1292292677
-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:35:50Z,user_data=None,user_id='bf14abcb3f75420e870a3997dfbedee4',uuid=0f53a1ce-fb3c-4d89-be52-05b2de65acba,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "address": "fa:16:3e:e3:22:87", "network": {"id": "616214b7-6fa7-4c4a-92d4-ca6b283a5d5d", "bridge": "br-int", "label": "tempest-TestServerAdvancedOps-1235429694-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6908d705b9b541669e2fe9a84c2cacd7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap64dd0de7-fb", "ovs_interfaceid": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.132 2 DEBUG nova.network.os_vif_util [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Converting VIF {"id": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "address": "fa:16:3e:e3:22:87", "network": {"id": "616214b7-6fa7-4c4a-92d4-ca6b283a5d5d", "bridge": "br-int", "label": "tempest-TestServerAdvancedOps-1235429694-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6908d705b9b541669e2fe9a84c2cacd7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap64dd0de7-fb", "ovs_interfaceid": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.132 2 DEBUG nova.network.os_vif_util [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:e3:22:87,bridge_name='br-int',has_traffic_filtering=True,id=64dd0de7-fbd7-4c16-b867-61d61163f4ba,network=Network(616214b7-6fa7-4c4a-92d4-ca6b283a5d5d),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap64dd0de7-fb') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.133 2 DEBUG nova.objects.instance [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Lazy-loading 'pci_devices' on Instance uuid 0f53a1ce-fb3c-4d89-be52-05b2de65acba obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:36:00 compute-0 podman[247231]: 2025-10-02 12:36:00.142110956 +0000 UTC m=+0.052809009 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, io.buildah.version=1.33.7, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, maintainer=Red Hat, Inc., distribution-scope=public, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., build-date=2025-08-20T13:12:41, name=ubi9-minimal, version=9.6, architecture=x86_64, container_name=openstack_network_exporter, release=1755695350, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.tags=minimal rhel9, com.redhat.component=ubi9-minimal-container, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., url=https://catalog.redhat.com/en/search?searchType=containers, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, config_id=edpm, vendor=Red Hat, Inc., com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.openshift.expose-services=, vcs-type=git)
Oct 02 12:36:00 compute-0 podman[247232]: 2025-10-02 12:36:00.151044309 +0000 UTC m=+0.057150847 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, config_id=multipathd, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS)
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.160 2 DEBUG nova.virt.libvirt.driver [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:36:00 compute-0 nova_compute[192079]:   <uuid>0f53a1ce-fb3c-4d89-be52-05b2de65acba</uuid>
Oct 02 12:36:00 compute-0 nova_compute[192079]:   <name>instance-0000009f</name>
Oct 02 12:36:00 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:36:00 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:36:00 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:36:00 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:       <nova:name>tempest-TestServerAdvancedOps-server-16731796</nova:name>
Oct 02 12:36:00 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:36:00</nova:creationTime>
Oct 02 12:36:00 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:36:00 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:36:00 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:36:00 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:36:00 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:36:00 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:36:00 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:36:00 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:36:00 compute-0 nova_compute[192079]:         <nova:user uuid="bf14abcb3f75420e870a3997dfbedee4">tempest-TestServerAdvancedOps-1292292677-project-member</nova:user>
Oct 02 12:36:00 compute-0 nova_compute[192079]:         <nova:project uuid="6908d705b9b541669e2fe9a84c2cacd7">tempest-TestServerAdvancedOps-1292292677</nova:project>
Oct 02 12:36:00 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:36:00 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:36:00 compute-0 nova_compute[192079]:         <nova:port uuid="64dd0de7-fbd7-4c16-b867-61d61163f4ba">
Oct 02 12:36:00 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.11" ipVersion="4"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:36:00 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:36:00 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:36:00 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <system>
Oct 02 12:36:00 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:36:00 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:36:00 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:36:00 compute-0 nova_compute[192079]:       <entry name="serial">0f53a1ce-fb3c-4d89-be52-05b2de65acba</entry>
Oct 02 12:36:00 compute-0 nova_compute[192079]:       <entry name="uuid">0f53a1ce-fb3c-4d89-be52-05b2de65acba</entry>
Oct 02 12:36:00 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     </system>
Oct 02 12:36:00 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:36:00 compute-0 nova_compute[192079]:   <os>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:   </os>
Oct 02 12:36:00 compute-0 nova_compute[192079]:   <features>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:   </features>
Oct 02 12:36:00 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:36:00 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:36:00 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:36:00 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/0f53a1ce-fb3c-4d89-be52-05b2de65acba/disk"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:36:00 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/0f53a1ce-fb3c-4d89-be52-05b2de65acba/disk.config"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:36:00 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:e3:22:87"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:       <target dev="tap64dd0de7-fb"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:36:00 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/0f53a1ce-fb3c-4d89-be52-05b2de65acba/console.log" append="off"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <video>
Oct 02 12:36:00 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     </video>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:36:00 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:36:00 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:36:00 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:36:00 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:36:00 compute-0 nova_compute[192079]: </domain>
Oct 02 12:36:00 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.160 2 DEBUG nova.compute.manager [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Preparing to wait for external event network-vif-plugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.160 2 DEBUG oslo_concurrency.lockutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Acquiring lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.161 2 DEBUG oslo_concurrency.lockutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.161 2 DEBUG oslo_concurrency.lockutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.162 2 DEBUG nova.virt.libvirt.vif [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:35:49Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestServerAdvancedOps-server-16731796',display_name='tempest-TestServerAdvancedOps-server-16731796',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testserveradvancedops-server-16731796',id=159,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='6908d705b9b541669e2fe9a84c2cacd7',ramdisk_id='',reservation_id='r-089z0o6n',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestServerAdvancedOps-1292292677',owner_user_name='tempest-TestServerAdvancedOps-
1292292677-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:35:50Z,user_data=None,user_id='bf14abcb3f75420e870a3997dfbedee4',uuid=0f53a1ce-fb3c-4d89-be52-05b2de65acba,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "address": "fa:16:3e:e3:22:87", "network": {"id": "616214b7-6fa7-4c4a-92d4-ca6b283a5d5d", "bridge": "br-int", "label": "tempest-TestServerAdvancedOps-1235429694-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6908d705b9b541669e2fe9a84c2cacd7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap64dd0de7-fb", "ovs_interfaceid": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.162 2 DEBUG nova.network.os_vif_util [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Converting VIF {"id": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "address": "fa:16:3e:e3:22:87", "network": {"id": "616214b7-6fa7-4c4a-92d4-ca6b283a5d5d", "bridge": "br-int", "label": "tempest-TestServerAdvancedOps-1235429694-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6908d705b9b541669e2fe9a84c2cacd7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap64dd0de7-fb", "ovs_interfaceid": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.163 2 DEBUG nova.network.os_vif_util [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:e3:22:87,bridge_name='br-int',has_traffic_filtering=True,id=64dd0de7-fbd7-4c16-b867-61d61163f4ba,network=Network(616214b7-6fa7-4c4a-92d4-ca6b283a5d5d),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap64dd0de7-fb') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.163 2 DEBUG os_vif [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:e3:22:87,bridge_name='br-int',has_traffic_filtering=True,id=64dd0de7-fbd7-4c16-b867-61d61163f4ba,network=Network(616214b7-6fa7-4c4a-92d4-ca6b283a5d5d),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap64dd0de7-fb') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.164 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.164 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.165 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.167 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.167 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap64dd0de7-fb, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.168 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap64dd0de7-fb, col_values=(('external_ids', {'iface-id': '64dd0de7-fbd7-4c16-b867-61d61163f4ba', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:e3:22:87', 'vm-uuid': '0f53a1ce-fb3c-4d89-be52-05b2de65acba'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.169 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:00 compute-0 NetworkManager[51160]: <info>  [1759408560.1709] manager: (tap64dd0de7-fb): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/300)
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.172 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.175 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.175 2 INFO os_vif [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:e3:22:87,bridge_name='br-int',has_traffic_filtering=True,id=64dd0de7-fbd7-4c16-b867-61d61163f4ba,network=Network(616214b7-6fa7-4c4a-92d4-ca6b283a5d5d),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap64dd0de7-fb')
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.198 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759408545.197052, 68b1a0ef-5b1f-4d43-b759-e385618171ff => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.198 2 INFO nova.compute.manager [-] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] VM Stopped (Lifecycle Event)
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.330 2 DEBUG nova.compute.manager [None req-bed2b2b0-a479-433b-b994-d909c736bbda - - - - - -] [instance: 68b1a0ef-5b1f-4d43-b759-e385618171ff] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.397 2 DEBUG nova.virt.libvirt.driver [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.397 2 DEBUG nova.virt.libvirt.driver [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.397 2 DEBUG nova.virt.libvirt.driver [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] No VIF found with MAC fa:16:3e:e3:22:87, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.398 2 INFO nova.virt.libvirt.driver [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Using config drive
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.905 2 INFO nova.virt.libvirt.driver [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Creating config drive at /var/lib/nova/instances/0f53a1ce-fb3c-4d89-be52-05b2de65acba/disk.config
Oct 02 12:36:00 compute-0 nova_compute[192079]: 2025-10-02 12:36:00.911 2 DEBUG oslo_concurrency.processutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/0f53a1ce-fb3c-4d89-be52-05b2de65acba/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpd7j5ajfp execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:36:01 compute-0 nova_compute[192079]: 2025-10-02 12:36:01.036 2 DEBUG oslo_concurrency.processutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/0f53a1ce-fb3c-4d89-be52-05b2de65acba/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpd7j5ajfp" returned: 0 in 0.126s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:36:01 compute-0 kernel: tap64dd0de7-fb: entered promiscuous mode
Oct 02 12:36:01 compute-0 NetworkManager[51160]: <info>  [1759408561.1195] manager: (tap64dd0de7-fb): new Tun device (/org/freedesktop/NetworkManager/Devices/301)
Oct 02 12:36:01 compute-0 ovn_controller[94336]: 2025-10-02T12:36:01Z|00607|binding|INFO|Claiming lport 64dd0de7-fbd7-4c16-b867-61d61163f4ba for this chassis.
Oct 02 12:36:01 compute-0 nova_compute[192079]: 2025-10-02 12:36:01.180 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:01 compute-0 ovn_controller[94336]: 2025-10-02T12:36:01Z|00608|binding|INFO|64dd0de7-fbd7-4c16-b867-61d61163f4ba: Claiming fa:16:3e:e3:22:87 10.100.0.11
Oct 02 12:36:01 compute-0 nova_compute[192079]: 2025-10-02 12:36:01.182 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:01 compute-0 systemd-udevd[247289]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:36:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:01.193 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:e3:22:87 10.100.0.11'], port_security=['fa:16:3e:e3:22:87 10.100.0.11'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.11/28', 'neutron:device_id': '0f53a1ce-fb3c-4d89-be52-05b2de65acba', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-616214b7-6fa7-4c4a-92d4-ca6b283a5d5d', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '6908d705b9b541669e2fe9a84c2cacd7', 'neutron:revision_number': '2', 'neutron:security_group_ids': '8a40f8ee-7113-4592-bfa9-35ec9f6a67c6', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=ee25e8ff-f81b-4b66-a313-71c3093cd990, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=2, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=64dd0de7-fbd7-4c16-b867-61d61163f4ba) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:36:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:01.195 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 64dd0de7-fbd7-4c16-b867-61d61163f4ba in datapath 616214b7-6fa7-4c4a-92d4-ca6b283a5d5d bound to our chassis
Oct 02 12:36:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:01.196 103294 DEBUG neutron.agent.ovn.metadata.agent [-] There is no metadata port for network 616214b7-6fa7-4c4a-92d4-ca6b283a5d5d or it has no MAC or IP addresses configured, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:599
Oct 02 12:36:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:01.198 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[de57df4a-c6bc-43ed-8437-690c88005cde]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:36:01 compute-0 NetworkManager[51160]: <info>  [1759408561.2004] device (tap64dd0de7-fb): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:36:01 compute-0 NetworkManager[51160]: <info>  [1759408561.2016] device (tap64dd0de7-fb): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:36:01 compute-0 systemd-machined[152150]: New machine qemu-76-instance-0000009f.
Oct 02 12:36:01 compute-0 nova_compute[192079]: 2025-10-02 12:36:01.218 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:01 compute-0 ovn_controller[94336]: 2025-10-02T12:36:01Z|00609|binding|INFO|Setting lport 64dd0de7-fbd7-4c16-b867-61d61163f4ba ovn-installed in OVS
Oct 02 12:36:01 compute-0 ovn_controller[94336]: 2025-10-02T12:36:01Z|00610|binding|INFO|Setting lport 64dd0de7-fbd7-4c16-b867-61d61163f4ba up in Southbound
Oct 02 12:36:01 compute-0 nova_compute[192079]: 2025-10-02 12:36:01.221 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:01 compute-0 systemd[1]: Started Virtual Machine qemu-76-instance-0000009f.
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.036 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408562.0356834, 0f53a1ce-fb3c-4d89-be52-05b2de65acba => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.037 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] VM Started (Lifecycle Event)
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.072 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.076 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408562.036645, 0f53a1ce-fb3c-4d89-be52-05b2de65acba => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.076 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] VM Paused (Lifecycle Event)
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.104 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.107 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.136 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:36:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:02.239 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:36:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:02.239 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:36:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:02.239 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.554 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.582 2 DEBUG nova.compute.manager [req-81ab0131-4595-469d-a00b-b1d6dbe79145 req-93f098ba-0357-4784-a46a-752e677fa131 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Received event network-vif-plugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.583 2 DEBUG oslo_concurrency.lockutils [req-81ab0131-4595-469d-a00b-b1d6dbe79145 req-93f098ba-0357-4784-a46a-752e677fa131 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.583 2 DEBUG oslo_concurrency.lockutils [req-81ab0131-4595-469d-a00b-b1d6dbe79145 req-93f098ba-0357-4784-a46a-752e677fa131 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.583 2 DEBUG oslo_concurrency.lockutils [req-81ab0131-4595-469d-a00b-b1d6dbe79145 req-93f098ba-0357-4784-a46a-752e677fa131 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.583 2 DEBUG nova.compute.manager [req-81ab0131-4595-469d-a00b-b1d6dbe79145 req-93f098ba-0357-4784-a46a-752e677fa131 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Processing event network-vif-plugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.584 2 DEBUG nova.compute.manager [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.587 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408562.5869505, 0f53a1ce-fb3c-4d89-be52-05b2de65acba => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.587 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] VM Resumed (Lifecycle Event)
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.588 2 DEBUG nova.virt.libvirt.driver [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.591 2 INFO nova.virt.libvirt.driver [-] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Instance spawned successfully.
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.591 2 DEBUG nova.virt.libvirt.driver [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.618 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.620 2 DEBUG nova.virt.libvirt.driver [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.621 2 DEBUG nova.virt.libvirt.driver [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.621 2 DEBUG nova.virt.libvirt.driver [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.622 2 DEBUG nova.virt.libvirt.driver [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.622 2 DEBUG nova.virt.libvirt.driver [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.623 2 DEBUG nova.virt.libvirt.driver [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.627 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.653 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.705 2 INFO nova.compute.manager [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Took 12.08 seconds to spawn the instance on the hypervisor.
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.705 2 DEBUG nova.compute.manager [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.793 2 DEBUG nova.network.neutron [req-383cd623-aa2d-419b-b37e-edb84989e6e9 req-cffb2162-dbe4-487a-b839-0bd16792177d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Updated VIF entry in instance network info cache for port 64dd0de7-fbd7-4c16-b867-61d61163f4ba. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.794 2 DEBUG nova.network.neutron [req-383cd623-aa2d-419b-b37e-edb84989e6e9 req-cffb2162-dbe4-487a-b839-0bd16792177d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Updating instance_info_cache with network_info: [{"id": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "address": "fa:16:3e:e3:22:87", "network": {"id": "616214b7-6fa7-4c4a-92d4-ca6b283a5d5d", "bridge": "br-int", "label": "tempest-TestServerAdvancedOps-1235429694-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6908d705b9b541669e2fe9a84c2cacd7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap64dd0de7-fb", "ovs_interfaceid": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.816 2 DEBUG oslo_concurrency.lockutils [req-383cd623-aa2d-419b-b37e-edb84989e6e9 req-cffb2162-dbe4-487a-b839-0bd16792177d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-0f53a1ce-fb3c-4d89-be52-05b2de65acba" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.827 2 INFO nova.compute.manager [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Took 12.76 seconds to build instance.
Oct 02 12:36:02 compute-0 nova_compute[192079]: 2025-10-02 12:36:02.865 2 DEBUG oslo_concurrency.lockutils [None req-3600b005-06ea-4d76-b5b6-10bc175fc971 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 12.898s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:36:04 compute-0 nova_compute[192079]: 2025-10-02 12:36:04.170 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:04 compute-0 sshd-session[247308]: Invalid user sol from 45.148.10.240 port 48164
Oct 02 12:36:04 compute-0 nova_compute[192079]: 2025-10-02 12:36:04.388 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:04 compute-0 podman[247310]: 2025-10-02 12:36:04.44403272 +0000 UTC m=+0.061921806 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:36:04 compute-0 podman[247312]: 2025-10-02 12:36:04.4443914 +0000 UTC m=+0.062037640 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=iscsid, org.label-schema.license=GPLv2, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, org.label-schema.schema-version=1.0, config_id=iscsid)
Oct 02 12:36:04 compute-0 sshd-session[247308]: pam_unix(sshd:auth): check pass; user unknown
Oct 02 12:36:04 compute-0 sshd-session[247308]: pam_unix(sshd:auth): authentication failure; logname= uid=0 euid=0 tty=ssh ruser= rhost=45.148.10.240
Oct 02 12:36:04 compute-0 nova_compute[192079]: 2025-10-02 12:36:04.748 2 DEBUG nova.compute.manager [req-53c658f5-505f-4bf0-9d70-6de19624d5bf req-59b99a3a-7f6e-4267-b31f-4785a6a77ee1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Received event network-vif-plugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:36:04 compute-0 nova_compute[192079]: 2025-10-02 12:36:04.749 2 DEBUG oslo_concurrency.lockutils [req-53c658f5-505f-4bf0-9d70-6de19624d5bf req-59b99a3a-7f6e-4267-b31f-4785a6a77ee1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:36:04 compute-0 nova_compute[192079]: 2025-10-02 12:36:04.749 2 DEBUG oslo_concurrency.lockutils [req-53c658f5-505f-4bf0-9d70-6de19624d5bf req-59b99a3a-7f6e-4267-b31f-4785a6a77ee1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:36:04 compute-0 nova_compute[192079]: 2025-10-02 12:36:04.749 2 DEBUG oslo_concurrency.lockutils [req-53c658f5-505f-4bf0-9d70-6de19624d5bf req-59b99a3a-7f6e-4267-b31f-4785a6a77ee1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:36:04 compute-0 nova_compute[192079]: 2025-10-02 12:36:04.749 2 DEBUG nova.compute.manager [req-53c658f5-505f-4bf0-9d70-6de19624d5bf req-59b99a3a-7f6e-4267-b31f-4785a6a77ee1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] No waiting events found dispatching network-vif-plugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:36:04 compute-0 nova_compute[192079]: 2025-10-02 12:36:04.750 2 WARNING nova.compute.manager [req-53c658f5-505f-4bf0-9d70-6de19624d5bf req-59b99a3a-7f6e-4267-b31f-4785a6a77ee1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Received unexpected event network-vif-plugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba for instance with vm_state active and task_state None.
Oct 02 12:36:05 compute-0 nova_compute[192079]: 2025-10-02 12:36:05.098 2 DEBUG nova.objects.instance [None req-c24e0203-9623-4287-acdd-95cae4849d7f bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Lazy-loading 'pci_devices' on Instance uuid 0f53a1ce-fb3c-4d89-be52-05b2de65acba obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:36:05 compute-0 nova_compute[192079]: 2025-10-02 12:36:05.122 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408565.1222486, 0f53a1ce-fb3c-4d89-be52-05b2de65acba => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:36:05 compute-0 nova_compute[192079]: 2025-10-02 12:36:05.123 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] VM Paused (Lifecycle Event)
Oct 02 12:36:05 compute-0 nova_compute[192079]: 2025-10-02 12:36:05.147 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:36:05 compute-0 nova_compute[192079]: 2025-10-02 12:36:05.150 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: active, current task_state: suspending, current DB power_state: 1, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:36:05 compute-0 nova_compute[192079]: 2025-10-02 12:36:05.171 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:05 compute-0 nova_compute[192079]: 2025-10-02 12:36:05.175 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] During sync_power_state the instance has a pending task (suspending). Skip.
Oct 02 12:36:05 compute-0 kernel: tap64dd0de7-fb (unregistering): left promiscuous mode
Oct 02 12:36:05 compute-0 NetworkManager[51160]: <info>  [1759408565.5394] device (tap64dd0de7-fb): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:36:05 compute-0 ovn_controller[94336]: 2025-10-02T12:36:05Z|00611|binding|INFO|Releasing lport 64dd0de7-fbd7-4c16-b867-61d61163f4ba from this chassis (sb_readonly=0)
Oct 02 12:36:05 compute-0 nova_compute[192079]: 2025-10-02 12:36:05.590 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:05 compute-0 ovn_controller[94336]: 2025-10-02T12:36:05Z|00612|binding|INFO|Setting lport 64dd0de7-fbd7-4c16-b867-61d61163f4ba down in Southbound
Oct 02 12:36:05 compute-0 ovn_controller[94336]: 2025-10-02T12:36:05Z|00613|binding|INFO|Removing iface tap64dd0de7-fb ovn-installed in OVS
Oct 02 12:36:05 compute-0 nova_compute[192079]: 2025-10-02 12:36:05.594 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:05 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:05.615 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:e3:22:87 10.100.0.11'], port_security=['fa:16:3e:e3:22:87 10.100.0.11'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.11/28', 'neutron:device_id': '0f53a1ce-fb3c-4d89-be52-05b2de65acba', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-616214b7-6fa7-4c4a-92d4-ca6b283a5d5d', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '6908d705b9b541669e2fe9a84c2cacd7', 'neutron:revision_number': '4', 'neutron:security_group_ids': '8a40f8ee-7113-4592-bfa9-35ec9f6a67c6', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=ee25e8ff-f81b-4b66-a313-71c3093cd990, chassis=[], tunnel_key=2, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=64dd0de7-fbd7-4c16-b867-61d61163f4ba) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:36:05 compute-0 nova_compute[192079]: 2025-10-02 12:36:05.616 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:05 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:05.618 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 64dd0de7-fbd7-4c16-b867-61d61163f4ba in datapath 616214b7-6fa7-4c4a-92d4-ca6b283a5d5d unbound from our chassis
Oct 02 12:36:05 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:05.620 103294 DEBUG neutron.agent.ovn.metadata.agent [-] There is no metadata port for network 616214b7-6fa7-4c4a-92d4-ca6b283a5d5d or it has no MAC or IP addresses configured, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:599
Oct 02 12:36:05 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:05.621 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f9975e37-de1f-4615-9958-1b5eb4ea7337]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:36:05 compute-0 systemd[1]: machine-qemu\x2d76\x2dinstance\x2d0000009f.scope: Deactivated successfully.
Oct 02 12:36:05 compute-0 systemd[1]: machine-qemu\x2d76\x2dinstance\x2d0000009f.scope: Consumed 3.489s CPU time.
Oct 02 12:36:05 compute-0 systemd-machined[152150]: Machine qemu-76-instance-0000009f terminated.
Oct 02 12:36:05 compute-0 nova_compute[192079]: 2025-10-02 12:36:05.793 2 DEBUG nova.compute.manager [None req-c24e0203-9623-4287-acdd-95cae4849d7f bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:36:06 compute-0 sshd-session[247308]: Failed password for invalid user sol from 45.148.10.240 port 48164 ssh2
Oct 02 12:36:06 compute-0 nova_compute[192079]: 2025-10-02 12:36:06.872 2 DEBUG nova.compute.manager [req-76b02e11-9e0e-4805-a983-22da7f231920 req-fc0a24b6-fbe3-42ff-bd17-c1c7bc9032e2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Received event network-vif-unplugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:36:06 compute-0 nova_compute[192079]: 2025-10-02 12:36:06.872 2 DEBUG oslo_concurrency.lockutils [req-76b02e11-9e0e-4805-a983-22da7f231920 req-fc0a24b6-fbe3-42ff-bd17-c1c7bc9032e2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:36:06 compute-0 nova_compute[192079]: 2025-10-02 12:36:06.872 2 DEBUG oslo_concurrency.lockutils [req-76b02e11-9e0e-4805-a983-22da7f231920 req-fc0a24b6-fbe3-42ff-bd17-c1c7bc9032e2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:36:06 compute-0 nova_compute[192079]: 2025-10-02 12:36:06.873 2 DEBUG oslo_concurrency.lockutils [req-76b02e11-9e0e-4805-a983-22da7f231920 req-fc0a24b6-fbe3-42ff-bd17-c1c7bc9032e2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:36:06 compute-0 nova_compute[192079]: 2025-10-02 12:36:06.873 2 DEBUG nova.compute.manager [req-76b02e11-9e0e-4805-a983-22da7f231920 req-fc0a24b6-fbe3-42ff-bd17-c1c7bc9032e2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] No waiting events found dispatching network-vif-unplugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:36:06 compute-0 nova_compute[192079]: 2025-10-02 12:36:06.873 2 WARNING nova.compute.manager [req-76b02e11-9e0e-4805-a983-22da7f231920 req-fc0a24b6-fbe3-42ff-bd17-c1c7bc9032e2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Received unexpected event network-vif-unplugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba for instance with vm_state suspended and task_state None.
Oct 02 12:36:06 compute-0 nova_compute[192079]: 2025-10-02 12:36:06.874 2 DEBUG nova.compute.manager [req-76b02e11-9e0e-4805-a983-22da7f231920 req-fc0a24b6-fbe3-42ff-bd17-c1c7bc9032e2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Received event network-vif-plugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:36:06 compute-0 nova_compute[192079]: 2025-10-02 12:36:06.874 2 DEBUG oslo_concurrency.lockutils [req-76b02e11-9e0e-4805-a983-22da7f231920 req-fc0a24b6-fbe3-42ff-bd17-c1c7bc9032e2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:36:06 compute-0 nova_compute[192079]: 2025-10-02 12:36:06.874 2 DEBUG oslo_concurrency.lockutils [req-76b02e11-9e0e-4805-a983-22da7f231920 req-fc0a24b6-fbe3-42ff-bd17-c1c7bc9032e2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:36:06 compute-0 nova_compute[192079]: 2025-10-02 12:36:06.875 2 DEBUG oslo_concurrency.lockutils [req-76b02e11-9e0e-4805-a983-22da7f231920 req-fc0a24b6-fbe3-42ff-bd17-c1c7bc9032e2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:36:06 compute-0 nova_compute[192079]: 2025-10-02 12:36:06.875 2 DEBUG nova.compute.manager [req-76b02e11-9e0e-4805-a983-22da7f231920 req-fc0a24b6-fbe3-42ff-bd17-c1c7bc9032e2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] No waiting events found dispatching network-vif-plugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:36:06 compute-0 nova_compute[192079]: 2025-10-02 12:36:06.875 2 WARNING nova.compute.manager [req-76b02e11-9e0e-4805-a983-22da7f231920 req-fc0a24b6-fbe3-42ff-bd17-c1c7bc9032e2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Received unexpected event network-vif-plugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba for instance with vm_state suspended and task_state None.
Oct 02 12:36:07 compute-0 sshd-session[247308]: Connection closed by invalid user sol 45.148.10.240 port 48164 [preauth]
Oct 02 12:36:07 compute-0 nova_compute[192079]: 2025-10-02 12:36:07.516 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759408552.5148766, 29e46585-0d8d-450d-b3de-d6d103b90a58 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:36:07 compute-0 nova_compute[192079]: 2025-10-02 12:36:07.517 2 INFO nova.compute.manager [-] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] VM Stopped (Lifecycle Event)
Oct 02 12:36:07 compute-0 nova_compute[192079]: 2025-10-02 12:36:07.532 2 INFO nova.compute.manager [None req-c63fc8d9-0797-4a19-900a-8b9368d472ea bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Resuming
Oct 02 12:36:07 compute-0 nova_compute[192079]: 2025-10-02 12:36:07.533 2 DEBUG nova.objects.instance [None req-c63fc8d9-0797-4a19-900a-8b9368d472ea bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Lazy-loading 'flavor' on Instance uuid 0f53a1ce-fb3c-4d89-be52-05b2de65acba obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:36:07 compute-0 nova_compute[192079]: 2025-10-02 12:36:07.534 2 DEBUG nova.compute.manager [None req-e69cb8f4-7b8a-4197-9cc1-b7cd5448d096 - - - - - -] [instance: 29e46585-0d8d-450d-b3de-d6d103b90a58] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:36:07 compute-0 nova_compute[192079]: 2025-10-02 12:36:07.556 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:07 compute-0 nova_compute[192079]: 2025-10-02 12:36:07.597 2 DEBUG oslo_concurrency.lockutils [None req-c63fc8d9-0797-4a19-900a-8b9368d472ea bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Acquiring lock "refresh_cache-0f53a1ce-fb3c-4d89-be52-05b2de65acba" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:36:07 compute-0 nova_compute[192079]: 2025-10-02 12:36:07.598 2 DEBUG oslo_concurrency.lockutils [None req-c63fc8d9-0797-4a19-900a-8b9368d472ea bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Acquired lock "refresh_cache-0f53a1ce-fb3c-4d89-be52-05b2de65acba" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:36:07 compute-0 nova_compute[192079]: 2025-10-02 12:36:07.598 2 DEBUG nova.network.neutron [None req-c63fc8d9-0797-4a19-900a-8b9368d472ea bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:36:09 compute-0 nova_compute[192079]: 2025-10-02 12:36:09.975 2 DEBUG nova.network.neutron [None req-c63fc8d9-0797-4a19-900a-8b9368d472ea bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Updating instance_info_cache with network_info: [{"id": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "address": "fa:16:3e:e3:22:87", "network": {"id": "616214b7-6fa7-4c4a-92d4-ca6b283a5d5d", "bridge": "br-int", "label": "tempest-TestServerAdvancedOps-1235429694-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6908d705b9b541669e2fe9a84c2cacd7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap64dd0de7-fb", "ovs_interfaceid": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:36:10 compute-0 nova_compute[192079]: 2025-10-02 12:36:10.008 2 DEBUG oslo_concurrency.lockutils [None req-c63fc8d9-0797-4a19-900a-8b9368d472ea bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Releasing lock "refresh_cache-0f53a1ce-fb3c-4d89-be52-05b2de65acba" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:36:10 compute-0 nova_compute[192079]: 2025-10-02 12:36:10.014 2 DEBUG nova.virt.libvirt.vif [None req-c63fc8d9-0797-4a19-900a-8b9368d472ea bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:35:49Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestServerAdvancedOps-server-16731796',display_name='tempest-TestServerAdvancedOps-server-16731796',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testserveradvancedops-server-16731796',id=159,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:36:02Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=<?>,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=4,progress=0,project_id='6908d705b9b541669e2fe9a84c2cacd7',ramdisk_id='',reservation_id='r-089z0o6n',resources=<?>,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',im
age_min_ram='0',old_vm_state='active',owner_project_name='tempest-TestServerAdvancedOps-1292292677',owner_user_name='tempest-TestServerAdvancedOps-1292292677-project-member'},tags=<?>,task_state='resuming',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:36:05Z,user_data=None,user_id='bf14abcb3f75420e870a3997dfbedee4',uuid=0f53a1ce-fb3c-4d89-be52-05b2de65acba,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='suspended') vif={"id": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "address": "fa:16:3e:e3:22:87", "network": {"id": "616214b7-6fa7-4c4a-92d4-ca6b283a5d5d", "bridge": "br-int", "label": "tempest-TestServerAdvancedOps-1235429694-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6908d705b9b541669e2fe9a84c2cacd7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap64dd0de7-fb", "ovs_interfaceid": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:36:10 compute-0 nova_compute[192079]: 2025-10-02 12:36:10.014 2 DEBUG nova.network.os_vif_util [None req-c63fc8d9-0797-4a19-900a-8b9368d472ea bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Converting VIF {"id": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "address": "fa:16:3e:e3:22:87", "network": {"id": "616214b7-6fa7-4c4a-92d4-ca6b283a5d5d", "bridge": "br-int", "label": "tempest-TestServerAdvancedOps-1235429694-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6908d705b9b541669e2fe9a84c2cacd7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap64dd0de7-fb", "ovs_interfaceid": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:36:10 compute-0 nova_compute[192079]: 2025-10-02 12:36:10.015 2 DEBUG nova.network.os_vif_util [None req-c63fc8d9-0797-4a19-900a-8b9368d472ea bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:e3:22:87,bridge_name='br-int',has_traffic_filtering=True,id=64dd0de7-fbd7-4c16-b867-61d61163f4ba,network=Network(616214b7-6fa7-4c4a-92d4-ca6b283a5d5d),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap64dd0de7-fb') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:36:10 compute-0 nova_compute[192079]: 2025-10-02 12:36:10.015 2 DEBUG os_vif [None req-c63fc8d9-0797-4a19-900a-8b9368d472ea bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:e3:22:87,bridge_name='br-int',has_traffic_filtering=True,id=64dd0de7-fbd7-4c16-b867-61d61163f4ba,network=Network(616214b7-6fa7-4c4a-92d4-ca6b283a5d5d),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap64dd0de7-fb') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:36:10 compute-0 nova_compute[192079]: 2025-10-02 12:36:10.016 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:10 compute-0 nova_compute[192079]: 2025-10-02 12:36:10.017 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:36:10 compute-0 nova_compute[192079]: 2025-10-02 12:36:10.017 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:36:10 compute-0 nova_compute[192079]: 2025-10-02 12:36:10.019 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:10 compute-0 nova_compute[192079]: 2025-10-02 12:36:10.020 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap64dd0de7-fb, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:36:10 compute-0 nova_compute[192079]: 2025-10-02 12:36:10.020 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap64dd0de7-fb, col_values=(('external_ids', {'iface-id': '64dd0de7-fbd7-4c16-b867-61d61163f4ba', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:e3:22:87', 'vm-uuid': '0f53a1ce-fb3c-4d89-be52-05b2de65acba'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:36:10 compute-0 nova_compute[192079]: 2025-10-02 12:36:10.020 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:36:10 compute-0 nova_compute[192079]: 2025-10-02 12:36:10.021 2 INFO os_vif [None req-c63fc8d9-0797-4a19-900a-8b9368d472ea bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:e3:22:87,bridge_name='br-int',has_traffic_filtering=True,id=64dd0de7-fbd7-4c16-b867-61d61163f4ba,network=Network(616214b7-6fa7-4c4a-92d4-ca6b283a5d5d),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap64dd0de7-fb')
Oct 02 12:36:10 compute-0 nova_compute[192079]: 2025-10-02 12:36:10.038 2 DEBUG nova.objects.instance [None req-c63fc8d9-0797-4a19-900a-8b9368d472ea bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Lazy-loading 'numa_topology' on Instance uuid 0f53a1ce-fb3c-4d89-be52-05b2de65acba obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:36:10 compute-0 kernel: tap64dd0de7-fb: entered promiscuous mode
Oct 02 12:36:10 compute-0 NetworkManager[51160]: <info>  [1759408570.1136] manager: (tap64dd0de7-fb): new Tun device (/org/freedesktop/NetworkManager/Devices/302)
Oct 02 12:36:10 compute-0 ovn_controller[94336]: 2025-10-02T12:36:10Z|00614|binding|INFO|Claiming lport 64dd0de7-fbd7-4c16-b867-61d61163f4ba for this chassis.
Oct 02 12:36:10 compute-0 ovn_controller[94336]: 2025-10-02T12:36:10Z|00615|binding|INFO|64dd0de7-fbd7-4c16-b867-61d61163f4ba: Claiming fa:16:3e:e3:22:87 10.100.0.11
Oct 02 12:36:10 compute-0 nova_compute[192079]: 2025-10-02 12:36:10.115 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:10 compute-0 ovn_controller[94336]: 2025-10-02T12:36:10Z|00616|binding|INFO|Setting lport 64dd0de7-fbd7-4c16-b867-61d61163f4ba ovn-installed in OVS
Oct 02 12:36:10 compute-0 nova_compute[192079]: 2025-10-02 12:36:10.128 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:10 compute-0 nova_compute[192079]: 2025-10-02 12:36:10.131 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:10 compute-0 systemd-udevd[247391]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:36:10 compute-0 NetworkManager[51160]: <info>  [1759408570.1501] device (tap64dd0de7-fb): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:36:10 compute-0 NetworkManager[51160]: <info>  [1759408570.1509] device (tap64dd0de7-fb): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:36:10 compute-0 ovn_controller[94336]: 2025-10-02T12:36:10Z|00617|binding|INFO|Setting lport 64dd0de7-fbd7-4c16-b867-61d61163f4ba up in Southbound
Oct 02 12:36:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:10.165 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:e3:22:87 10.100.0.11'], port_security=['fa:16:3e:e3:22:87 10.100.0.11'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.11/28', 'neutron:device_id': '0f53a1ce-fb3c-4d89-be52-05b2de65acba', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-616214b7-6fa7-4c4a-92d4-ca6b283a5d5d', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '6908d705b9b541669e2fe9a84c2cacd7', 'neutron:revision_number': '5', 'neutron:security_group_ids': '8a40f8ee-7113-4592-bfa9-35ec9f6a67c6', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=ee25e8ff-f81b-4b66-a313-71c3093cd990, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=2, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=64dd0de7-fbd7-4c16-b867-61d61163f4ba) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:36:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:10.166 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 64dd0de7-fbd7-4c16-b867-61d61163f4ba in datapath 616214b7-6fa7-4c4a-92d4-ca6b283a5d5d bound to our chassis
Oct 02 12:36:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:10.167 103294 DEBUG neutron.agent.ovn.metadata.agent [-] There is no metadata port for network 616214b7-6fa7-4c4a-92d4-ca6b283a5d5d or it has no MAC or IP addresses configured, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:599
Oct 02 12:36:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:10.168 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[fecc2c99-1295-4c91-9555-f3ce240dc76d]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:36:10 compute-0 systemd-machined[152150]: New machine qemu-77-instance-0000009f.
Oct 02 12:36:10 compute-0 nova_compute[192079]: 2025-10-02 12:36:10.173 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:10 compute-0 systemd[1]: Started Virtual Machine qemu-77-instance-0000009f.
Oct 02 12:36:11 compute-0 nova_compute[192079]: 2025-10-02 12:36:11.191 2 DEBUG nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Removed pending event for 0f53a1ce-fb3c-4d89-be52-05b2de65acba due to event _event_emit_delayed /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:438
Oct 02 12:36:11 compute-0 nova_compute[192079]: 2025-10-02 12:36:11.192 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408571.19119, 0f53a1ce-fb3c-4d89-be52-05b2de65acba => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:36:11 compute-0 nova_compute[192079]: 2025-10-02 12:36:11.192 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] VM Started (Lifecycle Event)
Oct 02 12:36:11 compute-0 nova_compute[192079]: 2025-10-02 12:36:11.215 2 DEBUG nova.compute.manager [None req-c63fc8d9-0797-4a19-900a-8b9368d472ea bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:36:11 compute-0 nova_compute[192079]: 2025-10-02 12:36:11.216 2 DEBUG nova.objects.instance [None req-c63fc8d9-0797-4a19-900a-8b9368d472ea bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Lazy-loading 'pci_devices' on Instance uuid 0f53a1ce-fb3c-4d89-be52-05b2de65acba obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:36:11 compute-0 nova_compute[192079]: 2025-10-02 12:36:11.218 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:36:11 compute-0 nova_compute[192079]: 2025-10-02 12:36:11.221 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Synchronizing instance power state after lifecycle event "Started"; current vm_state: suspended, current task_state: resuming, current DB power_state: 4, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:36:11 compute-0 nova_compute[192079]: 2025-10-02 12:36:11.245 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] During sync_power_state the instance has a pending task (resuming). Skip.
Oct 02 12:36:11 compute-0 nova_compute[192079]: 2025-10-02 12:36:11.245 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408571.1986978, 0f53a1ce-fb3c-4d89-be52-05b2de65acba => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:36:11 compute-0 nova_compute[192079]: 2025-10-02 12:36:11.246 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] VM Resumed (Lifecycle Event)
Oct 02 12:36:11 compute-0 nova_compute[192079]: 2025-10-02 12:36:11.249 2 INFO nova.virt.libvirt.driver [-] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Instance running successfully.
Oct 02 12:36:11 compute-0 virtqemud[191807]: argument unsupported: QEMU guest agent is not configured
Oct 02 12:36:11 compute-0 nova_compute[192079]: 2025-10-02 12:36:11.251 2 DEBUG nova.virt.libvirt.guest [None req-c63fc8d9-0797-4a19-900a-8b9368d472ea bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Failed to set time: agent not configured sync_guest_time /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:200
Oct 02 12:36:11 compute-0 nova_compute[192079]: 2025-10-02 12:36:11.252 2 DEBUG nova.compute.manager [None req-c63fc8d9-0797-4a19-900a-8b9368d472ea bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:36:11 compute-0 nova_compute[192079]: 2025-10-02 12:36:11.279 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:36:11 compute-0 nova_compute[192079]: 2025-10-02 12:36:11.282 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: suspended, current task_state: resuming, current DB power_state: 4, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:36:11 compute-0 nova_compute[192079]: 2025-10-02 12:36:11.310 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] During sync_power_state the instance has a pending task (resuming). Skip.
Oct 02 12:36:12 compute-0 nova_compute[192079]: 2025-10-02 12:36:12.476 2 DEBUG nova.compute.manager [req-85884724-49e9-4870-b503-3316b1dfa563 req-06e71713-beb2-4933-ba22-4f7132478d04 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Received event network-vif-plugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:36:12 compute-0 nova_compute[192079]: 2025-10-02 12:36:12.477 2 DEBUG oslo_concurrency.lockutils [req-85884724-49e9-4870-b503-3316b1dfa563 req-06e71713-beb2-4933-ba22-4f7132478d04 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:36:12 compute-0 nova_compute[192079]: 2025-10-02 12:36:12.477 2 DEBUG oslo_concurrency.lockutils [req-85884724-49e9-4870-b503-3316b1dfa563 req-06e71713-beb2-4933-ba22-4f7132478d04 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:36:12 compute-0 nova_compute[192079]: 2025-10-02 12:36:12.477 2 DEBUG oslo_concurrency.lockutils [req-85884724-49e9-4870-b503-3316b1dfa563 req-06e71713-beb2-4933-ba22-4f7132478d04 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:36:12 compute-0 nova_compute[192079]: 2025-10-02 12:36:12.478 2 DEBUG nova.compute.manager [req-85884724-49e9-4870-b503-3316b1dfa563 req-06e71713-beb2-4933-ba22-4f7132478d04 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] No waiting events found dispatching network-vif-plugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:36:12 compute-0 nova_compute[192079]: 2025-10-02 12:36:12.478 2 WARNING nova.compute.manager [req-85884724-49e9-4870-b503-3316b1dfa563 req-06e71713-beb2-4933-ba22-4f7132478d04 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Received unexpected event network-vif-plugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba for instance with vm_state active and task_state None.
Oct 02 12:36:12 compute-0 nova_compute[192079]: 2025-10-02 12:36:12.557 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:13 compute-0 podman[247410]: 2025-10-02 12:36:13.161743099 +0000 UTC m=+0.060682342 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=ovn_metadata_agent, container_name=ovn_metadata_agent, org.label-schema.license=GPLv2, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001)
Oct 02 12:36:13 compute-0 podman[247412]: 2025-10-02 12:36:13.169312436 +0000 UTC m=+0.060383846 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 12:36:13 compute-0 podman[247411]: 2025-10-02 12:36:13.228375753 +0000 UTC m=+0.123500583 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, container_name=ovn_controller, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:36:14 compute-0 nova_compute[192079]: 2025-10-02 12:36:14.230 2 DEBUG nova.objects.instance [None req-0f2be3d7-e951-4bc6-8eaf-756c0c0bd392 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Lazy-loading 'pci_devices' on Instance uuid 0f53a1ce-fb3c-4d89-be52-05b2de65acba obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:36:14 compute-0 nova_compute[192079]: 2025-10-02 12:36:14.251 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408574.2509534, 0f53a1ce-fb3c-4d89-be52-05b2de65acba => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:36:14 compute-0 nova_compute[192079]: 2025-10-02 12:36:14.251 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] VM Paused (Lifecycle Event)
Oct 02 12:36:14 compute-0 nova_compute[192079]: 2025-10-02 12:36:14.272 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:36:14 compute-0 nova_compute[192079]: 2025-10-02 12:36:14.274 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: active, current task_state: suspending, current DB power_state: 1, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:36:14 compute-0 nova_compute[192079]: 2025-10-02 12:36:14.290 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] During sync_power_state the instance has a pending task (suspending). Skip.
Oct 02 12:36:14 compute-0 nova_compute[192079]: 2025-10-02 12:36:14.588 2 DEBUG nova.compute.manager [req-e8f1a77d-2cf6-43b6-a938-8345a687dd91 req-b812365a-1553-4b4c-bedf-878b4dd3381e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Received event network-vif-plugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:36:14 compute-0 nova_compute[192079]: 2025-10-02 12:36:14.589 2 DEBUG oslo_concurrency.lockutils [req-e8f1a77d-2cf6-43b6-a938-8345a687dd91 req-b812365a-1553-4b4c-bedf-878b4dd3381e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:36:14 compute-0 nova_compute[192079]: 2025-10-02 12:36:14.590 2 DEBUG oslo_concurrency.lockutils [req-e8f1a77d-2cf6-43b6-a938-8345a687dd91 req-b812365a-1553-4b4c-bedf-878b4dd3381e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:36:14 compute-0 nova_compute[192079]: 2025-10-02 12:36:14.590 2 DEBUG oslo_concurrency.lockutils [req-e8f1a77d-2cf6-43b6-a938-8345a687dd91 req-b812365a-1553-4b4c-bedf-878b4dd3381e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:36:14 compute-0 nova_compute[192079]: 2025-10-02 12:36:14.590 2 DEBUG nova.compute.manager [req-e8f1a77d-2cf6-43b6-a938-8345a687dd91 req-b812365a-1553-4b4c-bedf-878b4dd3381e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] No waiting events found dispatching network-vif-plugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:36:14 compute-0 nova_compute[192079]: 2025-10-02 12:36:14.591 2 WARNING nova.compute.manager [req-e8f1a77d-2cf6-43b6-a938-8345a687dd91 req-b812365a-1553-4b4c-bedf-878b4dd3381e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Received unexpected event network-vif-plugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba for instance with vm_state active and task_state suspending.
Oct 02 12:36:14 compute-0 kernel: tap64dd0de7-fb (unregistering): left promiscuous mode
Oct 02 12:36:14 compute-0 NetworkManager[51160]: <info>  [1759408574.8955] device (tap64dd0de7-fb): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:36:14 compute-0 ovn_controller[94336]: 2025-10-02T12:36:14Z|00618|binding|INFO|Releasing lport 64dd0de7-fbd7-4c16-b867-61d61163f4ba from this chassis (sb_readonly=0)
Oct 02 12:36:14 compute-0 nova_compute[192079]: 2025-10-02 12:36:14.903 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:14 compute-0 ovn_controller[94336]: 2025-10-02T12:36:14Z|00619|binding|INFO|Setting lport 64dd0de7-fbd7-4c16-b867-61d61163f4ba down in Southbound
Oct 02 12:36:14 compute-0 ovn_controller[94336]: 2025-10-02T12:36:14Z|00620|binding|INFO|Removing iface tap64dd0de7-fb ovn-installed in OVS
Oct 02 12:36:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:14.910 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:e3:22:87 10.100.0.11'], port_security=['fa:16:3e:e3:22:87 10.100.0.11'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.11/28', 'neutron:device_id': '0f53a1ce-fb3c-4d89-be52-05b2de65acba', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-616214b7-6fa7-4c4a-92d4-ca6b283a5d5d', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '6908d705b9b541669e2fe9a84c2cacd7', 'neutron:revision_number': '6', 'neutron:security_group_ids': '8a40f8ee-7113-4592-bfa9-35ec9f6a67c6', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=ee25e8ff-f81b-4b66-a313-71c3093cd990, chassis=[], tunnel_key=2, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=64dd0de7-fbd7-4c16-b867-61d61163f4ba) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:36:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:14.911 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 64dd0de7-fbd7-4c16-b867-61d61163f4ba in datapath 616214b7-6fa7-4c4a-92d4-ca6b283a5d5d unbound from our chassis
Oct 02 12:36:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:14.912 103294 DEBUG neutron.agent.ovn.metadata.agent [-] There is no metadata port for network 616214b7-6fa7-4c4a-92d4-ca6b283a5d5d or it has no MAC or IP addresses configured, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:599
Oct 02 12:36:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:14.913 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a5a151b3-7e0e-4797-bd92-ea953b292112]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:36:14 compute-0 nova_compute[192079]: 2025-10-02 12:36:14.918 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:14 compute-0 systemd[1]: machine-qemu\x2d77\x2dinstance\x2d0000009f.scope: Deactivated successfully.
Oct 02 12:36:14 compute-0 systemd[1]: machine-qemu\x2d77\x2dinstance\x2d0000009f.scope: Consumed 4.101s CPU time.
Oct 02 12:36:14 compute-0 systemd-machined[152150]: Machine qemu-77-instance-0000009f terminated.
Oct 02 12:36:15 compute-0 nova_compute[192079]: 2025-10-02 12:36:15.084 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:15 compute-0 nova_compute[192079]: 2025-10-02 12:36:15.089 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:15 compute-0 nova_compute[192079]: 2025-10-02 12:36:15.127 2 DEBUG nova.compute.manager [None req-0f2be3d7-e951-4bc6-8eaf-756c0c0bd392 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:36:15 compute-0 nova_compute[192079]: 2025-10-02 12:36:15.173 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:16 compute-0 nova_compute[192079]: 2025-10-02 12:36:16.334 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:16.334 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=40, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=39) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:36:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:16.337 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 2 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:36:16 compute-0 nova_compute[192079]: 2025-10-02 12:36:16.438 2 INFO nova.compute.manager [None req-2a87bf30-561c-4972-ac5c-bea04bb273b2 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Resuming
Oct 02 12:36:16 compute-0 nova_compute[192079]: 2025-10-02 12:36:16.439 2 DEBUG nova.objects.instance [None req-2a87bf30-561c-4972-ac5c-bea04bb273b2 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Lazy-loading 'flavor' on Instance uuid 0f53a1ce-fb3c-4d89-be52-05b2de65acba obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:36:16 compute-0 nova_compute[192079]: 2025-10-02 12:36:16.495 2 DEBUG oslo_concurrency.lockutils [None req-2a87bf30-561c-4972-ac5c-bea04bb273b2 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Acquiring lock "refresh_cache-0f53a1ce-fb3c-4d89-be52-05b2de65acba" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:36:16 compute-0 nova_compute[192079]: 2025-10-02 12:36:16.495 2 DEBUG oslo_concurrency.lockutils [None req-2a87bf30-561c-4972-ac5c-bea04bb273b2 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Acquired lock "refresh_cache-0f53a1ce-fb3c-4d89-be52-05b2de65acba" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:36:16 compute-0 nova_compute[192079]: 2025-10-02 12:36:16.495 2 DEBUG nova.network.neutron [None req-2a87bf30-561c-4972-ac5c-bea04bb273b2 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:36:16 compute-0 nova_compute[192079]: 2025-10-02 12:36:16.682 2 DEBUG nova.compute.manager [req-0e93aa7b-2daa-4086-b03a-7ae11a6a5761 req-6bbcfb7f-6e01-404e-9554-ae2c17f79200 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Received event network-vif-unplugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:36:16 compute-0 nova_compute[192079]: 2025-10-02 12:36:16.683 2 DEBUG oslo_concurrency.lockutils [req-0e93aa7b-2daa-4086-b03a-7ae11a6a5761 req-6bbcfb7f-6e01-404e-9554-ae2c17f79200 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:36:16 compute-0 nova_compute[192079]: 2025-10-02 12:36:16.683 2 DEBUG oslo_concurrency.lockutils [req-0e93aa7b-2daa-4086-b03a-7ae11a6a5761 req-6bbcfb7f-6e01-404e-9554-ae2c17f79200 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:36:16 compute-0 nova_compute[192079]: 2025-10-02 12:36:16.683 2 DEBUG oslo_concurrency.lockutils [req-0e93aa7b-2daa-4086-b03a-7ae11a6a5761 req-6bbcfb7f-6e01-404e-9554-ae2c17f79200 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:36:16 compute-0 nova_compute[192079]: 2025-10-02 12:36:16.684 2 DEBUG nova.compute.manager [req-0e93aa7b-2daa-4086-b03a-7ae11a6a5761 req-6bbcfb7f-6e01-404e-9554-ae2c17f79200 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] No waiting events found dispatching network-vif-unplugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:36:16 compute-0 nova_compute[192079]: 2025-10-02 12:36:16.684 2 WARNING nova.compute.manager [req-0e93aa7b-2daa-4086-b03a-7ae11a6a5761 req-6bbcfb7f-6e01-404e-9554-ae2c17f79200 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Received unexpected event network-vif-unplugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba for instance with vm_state suspended and task_state resuming.
Oct 02 12:36:16 compute-0 nova_compute[192079]: 2025-10-02 12:36:16.684 2 DEBUG nova.compute.manager [req-0e93aa7b-2daa-4086-b03a-7ae11a6a5761 req-6bbcfb7f-6e01-404e-9554-ae2c17f79200 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Received event network-vif-plugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:36:16 compute-0 nova_compute[192079]: 2025-10-02 12:36:16.684 2 DEBUG oslo_concurrency.lockutils [req-0e93aa7b-2daa-4086-b03a-7ae11a6a5761 req-6bbcfb7f-6e01-404e-9554-ae2c17f79200 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:36:16 compute-0 nova_compute[192079]: 2025-10-02 12:36:16.685 2 DEBUG oslo_concurrency.lockutils [req-0e93aa7b-2daa-4086-b03a-7ae11a6a5761 req-6bbcfb7f-6e01-404e-9554-ae2c17f79200 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:36:16 compute-0 nova_compute[192079]: 2025-10-02 12:36:16.685 2 DEBUG oslo_concurrency.lockutils [req-0e93aa7b-2daa-4086-b03a-7ae11a6a5761 req-6bbcfb7f-6e01-404e-9554-ae2c17f79200 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:36:16 compute-0 nova_compute[192079]: 2025-10-02 12:36:16.685 2 DEBUG nova.compute.manager [req-0e93aa7b-2daa-4086-b03a-7ae11a6a5761 req-6bbcfb7f-6e01-404e-9554-ae2c17f79200 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] No waiting events found dispatching network-vif-plugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:36:16 compute-0 nova_compute[192079]: 2025-10-02 12:36:16.685 2 WARNING nova.compute.manager [req-0e93aa7b-2daa-4086-b03a-7ae11a6a5761 req-6bbcfb7f-6e01-404e-9554-ae2c17f79200 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Received unexpected event network-vif-plugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba for instance with vm_state suspended and task_state resuming.
Oct 02 12:36:17 compute-0 nova_compute[192079]: 2025-10-02 12:36:17.559 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:17 compute-0 nova_compute[192079]: 2025-10-02 12:36:17.838 2 DEBUG nova.network.neutron [None req-2a87bf30-561c-4972-ac5c-bea04bb273b2 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Updating instance_info_cache with network_info: [{"id": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "address": "fa:16:3e:e3:22:87", "network": {"id": "616214b7-6fa7-4c4a-92d4-ca6b283a5d5d", "bridge": "br-int", "label": "tempest-TestServerAdvancedOps-1235429694-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6908d705b9b541669e2fe9a84c2cacd7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap64dd0de7-fb", "ovs_interfaceid": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:36:17 compute-0 nova_compute[192079]: 2025-10-02 12:36:17.865 2 DEBUG oslo_concurrency.lockutils [None req-2a87bf30-561c-4972-ac5c-bea04bb273b2 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Releasing lock "refresh_cache-0f53a1ce-fb3c-4d89-be52-05b2de65acba" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:36:17 compute-0 nova_compute[192079]: 2025-10-02 12:36:17.870 2 DEBUG nova.virt.libvirt.vif [None req-2a87bf30-561c-4972-ac5c-bea04bb273b2 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:35:49Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestServerAdvancedOps-server-16731796',display_name='tempest-TestServerAdvancedOps-server-16731796',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testserveradvancedops-server-16731796',id=159,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:36:02Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=<?>,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=4,progress=0,project_id='6908d705b9b541669e2fe9a84c2cacd7',ramdisk_id='',reservation_id='r-089z0o6n',resources=<?>,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',im
age_min_ram='0',old_vm_state='active',owner_project_name='tempest-TestServerAdvancedOps-1292292677',owner_user_name='tempest-TestServerAdvancedOps-1292292677-project-member'},tags=<?>,task_state='resuming',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:36:15Z,user_data=None,user_id='bf14abcb3f75420e870a3997dfbedee4',uuid=0f53a1ce-fb3c-4d89-be52-05b2de65acba,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='suspended') vif={"id": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "address": "fa:16:3e:e3:22:87", "network": {"id": "616214b7-6fa7-4c4a-92d4-ca6b283a5d5d", "bridge": "br-int", "label": "tempest-TestServerAdvancedOps-1235429694-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6908d705b9b541669e2fe9a84c2cacd7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap64dd0de7-fb", "ovs_interfaceid": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:36:17 compute-0 nova_compute[192079]: 2025-10-02 12:36:17.870 2 DEBUG nova.network.os_vif_util [None req-2a87bf30-561c-4972-ac5c-bea04bb273b2 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Converting VIF {"id": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "address": "fa:16:3e:e3:22:87", "network": {"id": "616214b7-6fa7-4c4a-92d4-ca6b283a5d5d", "bridge": "br-int", "label": "tempest-TestServerAdvancedOps-1235429694-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6908d705b9b541669e2fe9a84c2cacd7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap64dd0de7-fb", "ovs_interfaceid": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:36:17 compute-0 nova_compute[192079]: 2025-10-02 12:36:17.871 2 DEBUG nova.network.os_vif_util [None req-2a87bf30-561c-4972-ac5c-bea04bb273b2 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:e3:22:87,bridge_name='br-int',has_traffic_filtering=True,id=64dd0de7-fbd7-4c16-b867-61d61163f4ba,network=Network(616214b7-6fa7-4c4a-92d4-ca6b283a5d5d),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap64dd0de7-fb') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:36:17 compute-0 nova_compute[192079]: 2025-10-02 12:36:17.872 2 DEBUG os_vif [None req-2a87bf30-561c-4972-ac5c-bea04bb273b2 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:e3:22:87,bridge_name='br-int',has_traffic_filtering=True,id=64dd0de7-fbd7-4c16-b867-61d61163f4ba,network=Network(616214b7-6fa7-4c4a-92d4-ca6b283a5d5d),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap64dd0de7-fb') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:36:17 compute-0 nova_compute[192079]: 2025-10-02 12:36:17.872 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:17 compute-0 nova_compute[192079]: 2025-10-02 12:36:17.872 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:36:17 compute-0 nova_compute[192079]: 2025-10-02 12:36:17.873 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:36:17 compute-0 nova_compute[192079]: 2025-10-02 12:36:17.876 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:17 compute-0 nova_compute[192079]: 2025-10-02 12:36:17.876 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap64dd0de7-fb, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:36:17 compute-0 nova_compute[192079]: 2025-10-02 12:36:17.876 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap64dd0de7-fb, col_values=(('external_ids', {'iface-id': '64dd0de7-fbd7-4c16-b867-61d61163f4ba', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:e3:22:87', 'vm-uuid': '0f53a1ce-fb3c-4d89-be52-05b2de65acba'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:36:17 compute-0 nova_compute[192079]: 2025-10-02 12:36:17.877 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:36:17 compute-0 nova_compute[192079]: 2025-10-02 12:36:17.877 2 INFO os_vif [None req-2a87bf30-561c-4972-ac5c-bea04bb273b2 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:e3:22:87,bridge_name='br-int',has_traffic_filtering=True,id=64dd0de7-fbd7-4c16-b867-61d61163f4ba,network=Network(616214b7-6fa7-4c4a-92d4-ca6b283a5d5d),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap64dd0de7-fb')
Oct 02 12:36:17 compute-0 nova_compute[192079]: 2025-10-02 12:36:17.902 2 DEBUG nova.objects.instance [None req-2a87bf30-561c-4972-ac5c-bea04bb273b2 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Lazy-loading 'numa_topology' on Instance uuid 0f53a1ce-fb3c-4d89-be52-05b2de65acba obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:36:17 compute-0 kernel: tap64dd0de7-fb: entered promiscuous mode
Oct 02 12:36:17 compute-0 NetworkManager[51160]: <info>  [1759408577.9750] manager: (tap64dd0de7-fb): new Tun device (/org/freedesktop/NetworkManager/Devices/303)
Oct 02 12:36:17 compute-0 nova_compute[192079]: 2025-10-02 12:36:17.976 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:17 compute-0 ovn_controller[94336]: 2025-10-02T12:36:17Z|00621|binding|INFO|Claiming lport 64dd0de7-fbd7-4c16-b867-61d61163f4ba for this chassis.
Oct 02 12:36:17 compute-0 ovn_controller[94336]: 2025-10-02T12:36:17Z|00622|binding|INFO|64dd0de7-fbd7-4c16-b867-61d61163f4ba: Claiming fa:16:3e:e3:22:87 10.100.0.11
Oct 02 12:36:17 compute-0 ovn_controller[94336]: 2025-10-02T12:36:17Z|00623|binding|INFO|Setting lport 64dd0de7-fbd7-4c16-b867-61d61163f4ba ovn-installed in OVS
Oct 02 12:36:17 compute-0 nova_compute[192079]: 2025-10-02 12:36:17.995 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:17 compute-0 nova_compute[192079]: 2025-10-02 12:36:17.998 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:18 compute-0 systemd-udevd[247516]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:36:18 compute-0 ovn_controller[94336]: 2025-10-02T12:36:18Z|00624|binding|INFO|Setting lport 64dd0de7-fbd7-4c16-b867-61d61163f4ba up in Southbound
Oct 02 12:36:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:18.015 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:e3:22:87 10.100.0.11'], port_security=['fa:16:3e:e3:22:87 10.100.0.11'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.11/28', 'neutron:device_id': '0f53a1ce-fb3c-4d89-be52-05b2de65acba', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-616214b7-6fa7-4c4a-92d4-ca6b283a5d5d', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '6908d705b9b541669e2fe9a84c2cacd7', 'neutron:revision_number': '7', 'neutron:security_group_ids': '8a40f8ee-7113-4592-bfa9-35ec9f6a67c6', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=ee25e8ff-f81b-4b66-a313-71c3093cd990, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=2, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=64dd0de7-fbd7-4c16-b867-61d61163f4ba) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:36:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:18.016 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 64dd0de7-fbd7-4c16-b867-61d61163f4ba in datapath 616214b7-6fa7-4c4a-92d4-ca6b283a5d5d bound to our chassis
Oct 02 12:36:18 compute-0 NetworkManager[51160]: <info>  [1759408578.0172] device (tap64dd0de7-fb): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:36:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:18.017 103294 DEBUG neutron.agent.ovn.metadata.agent [-] There is no metadata port for network 616214b7-6fa7-4c4a-92d4-ca6b283a5d5d or it has no MAC or IP addresses configured, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:599
Oct 02 12:36:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:18.018 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[769d2742-2c92-4a96-bdc4-2fa619e31bb0]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:36:18 compute-0 NetworkManager[51160]: <info>  [1759408578.0190] device (tap64dd0de7-fb): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:36:18 compute-0 systemd-machined[152150]: New machine qemu-78-instance-0000009f.
Oct 02 12:36:18 compute-0 systemd[1]: Started Virtual Machine qemu-78-instance-0000009f.
Oct 02 12:36:18 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:18.339 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '40'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:36:19 compute-0 nova_compute[192079]: 2025-10-02 12:36:19.644 2 DEBUG nova.virt.libvirt.host [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Removed pending event for 0f53a1ce-fb3c-4d89-be52-05b2de65acba due to event _event_emit_delayed /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:438
Oct 02 12:36:19 compute-0 nova_compute[192079]: 2025-10-02 12:36:19.645 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408579.644268, 0f53a1ce-fb3c-4d89-be52-05b2de65acba => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:36:19 compute-0 nova_compute[192079]: 2025-10-02 12:36:19.645 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] VM Started (Lifecycle Event)
Oct 02 12:36:19 compute-0 nova_compute[192079]: 2025-10-02 12:36:19.672 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:36:19 compute-0 nova_compute[192079]: 2025-10-02 12:36:19.673 2 DEBUG nova.compute.manager [None req-2a87bf30-561c-4972-ac5c-bea04bb273b2 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Instance event wait completed in 0 seconds for  wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:36:19 compute-0 nova_compute[192079]: 2025-10-02 12:36:19.674 2 DEBUG nova.objects.instance [None req-2a87bf30-561c-4972-ac5c-bea04bb273b2 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Lazy-loading 'pci_devices' on Instance uuid 0f53a1ce-fb3c-4d89-be52-05b2de65acba obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:36:19 compute-0 nova_compute[192079]: 2025-10-02 12:36:19.678 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Synchronizing instance power state after lifecycle event "Started"; current vm_state: suspended, current task_state: resuming, current DB power_state: 4, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:36:19 compute-0 nova_compute[192079]: 2025-10-02 12:36:19.708 2 INFO nova.virt.libvirt.driver [-] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Instance running successfully.
Oct 02 12:36:19 compute-0 virtqemud[191807]: argument unsupported: QEMU guest agent is not configured
Oct 02 12:36:19 compute-0 nova_compute[192079]: 2025-10-02 12:36:19.709 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] During sync_power_state the instance has a pending task (resuming). Skip.
Oct 02 12:36:19 compute-0 nova_compute[192079]: 2025-10-02 12:36:19.710 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408579.656651, 0f53a1ce-fb3c-4d89-be52-05b2de65acba => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:36:19 compute-0 nova_compute[192079]: 2025-10-02 12:36:19.710 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] VM Resumed (Lifecycle Event)
Oct 02 12:36:19 compute-0 nova_compute[192079]: 2025-10-02 12:36:19.712 2 DEBUG nova.virt.libvirt.guest [None req-2a87bf30-561c-4972-ac5c-bea04bb273b2 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Failed to set time: agent not configured sync_guest_time /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:200
Oct 02 12:36:19 compute-0 nova_compute[192079]: 2025-10-02 12:36:19.713 2 DEBUG nova.compute.manager [None req-2a87bf30-561c-4972-ac5c-bea04bb273b2 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:36:19 compute-0 nova_compute[192079]: 2025-10-02 12:36:19.764 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:36:19 compute-0 nova_compute[192079]: 2025-10-02 12:36:19.767 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: suspended, current task_state: resuming, current DB power_state: 4, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:36:19 compute-0 nova_compute[192079]: 2025-10-02 12:36:19.802 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] During sync_power_state the instance has a pending task (resuming). Skip.
Oct 02 12:36:20 compute-0 nova_compute[192079]: 2025-10-02 12:36:20.099 2 DEBUG nova.compute.manager [req-db5bda60-6527-4c49-b6db-eabb19c2c42f req-6728fd9a-76b1-490b-8739-18578de85fbe 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Received event network-vif-plugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:36:20 compute-0 nova_compute[192079]: 2025-10-02 12:36:20.100 2 DEBUG oslo_concurrency.lockutils [req-db5bda60-6527-4c49-b6db-eabb19c2c42f req-6728fd9a-76b1-490b-8739-18578de85fbe 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:36:20 compute-0 nova_compute[192079]: 2025-10-02 12:36:20.100 2 DEBUG oslo_concurrency.lockutils [req-db5bda60-6527-4c49-b6db-eabb19c2c42f req-6728fd9a-76b1-490b-8739-18578de85fbe 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:36:20 compute-0 nova_compute[192079]: 2025-10-02 12:36:20.100 2 DEBUG oslo_concurrency.lockutils [req-db5bda60-6527-4c49-b6db-eabb19c2c42f req-6728fd9a-76b1-490b-8739-18578de85fbe 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:36:20 compute-0 nova_compute[192079]: 2025-10-02 12:36:20.100 2 DEBUG nova.compute.manager [req-db5bda60-6527-4c49-b6db-eabb19c2c42f req-6728fd9a-76b1-490b-8739-18578de85fbe 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] No waiting events found dispatching network-vif-plugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:36:20 compute-0 nova_compute[192079]: 2025-10-02 12:36:20.101 2 WARNING nova.compute.manager [req-db5bda60-6527-4c49-b6db-eabb19c2c42f req-6728fd9a-76b1-490b-8739-18578de85fbe 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Received unexpected event network-vif-plugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba for instance with vm_state active and task_state None.
Oct 02 12:36:20 compute-0 nova_compute[192079]: 2025-10-02 12:36:20.176 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:22 compute-0 nova_compute[192079]: 2025-10-02 12:36:22.342 2 DEBUG nova.compute.manager [req-34ebfe39-e69e-4a89-8f01-178a5fafa92e req-7516be0a-5cca-4831-8ec3-bc8af2de2bb8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Received event network-vif-plugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:36:22 compute-0 nova_compute[192079]: 2025-10-02 12:36:22.343 2 DEBUG oslo_concurrency.lockutils [req-34ebfe39-e69e-4a89-8f01-178a5fafa92e req-7516be0a-5cca-4831-8ec3-bc8af2de2bb8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:36:22 compute-0 nova_compute[192079]: 2025-10-02 12:36:22.343 2 DEBUG oslo_concurrency.lockutils [req-34ebfe39-e69e-4a89-8f01-178a5fafa92e req-7516be0a-5cca-4831-8ec3-bc8af2de2bb8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:36:22 compute-0 nova_compute[192079]: 2025-10-02 12:36:22.344 2 DEBUG oslo_concurrency.lockutils [req-34ebfe39-e69e-4a89-8f01-178a5fafa92e req-7516be0a-5cca-4831-8ec3-bc8af2de2bb8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:36:22 compute-0 nova_compute[192079]: 2025-10-02 12:36:22.344 2 DEBUG nova.compute.manager [req-34ebfe39-e69e-4a89-8f01-178a5fafa92e req-7516be0a-5cca-4831-8ec3-bc8af2de2bb8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] No waiting events found dispatching network-vif-plugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:36:22 compute-0 nova_compute[192079]: 2025-10-02 12:36:22.344 2 WARNING nova.compute.manager [req-34ebfe39-e69e-4a89-8f01-178a5fafa92e req-7516be0a-5cca-4831-8ec3-bc8af2de2bb8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Received unexpected event network-vif-plugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba for instance with vm_state active and task_state None.
Oct 02 12:36:22 compute-0 nova_compute[192079]: 2025-10-02 12:36:22.562 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:23 compute-0 podman[247535]: 2025-10-02 12:36:23.186594795 +0000 UTC m=+0.092801647 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, container_name=ceilometer_agent_compute, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, org.label-schema.license=GPLv2, managed_by=edpm_ansible, 
org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:36:23 compute-0 nova_compute[192079]: 2025-10-02 12:36:23.321 2 DEBUG oslo_concurrency.lockutils [None req-90dcfab1-2d0d-449a-9262-fa239f68ba14 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Acquiring lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:36:23 compute-0 nova_compute[192079]: 2025-10-02 12:36:23.321 2 DEBUG oslo_concurrency.lockutils [None req-90dcfab1-2d0d-449a-9262-fa239f68ba14 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:36:23 compute-0 nova_compute[192079]: 2025-10-02 12:36:23.321 2 DEBUG oslo_concurrency.lockutils [None req-90dcfab1-2d0d-449a-9262-fa239f68ba14 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Acquiring lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:36:23 compute-0 nova_compute[192079]: 2025-10-02 12:36:23.322 2 DEBUG oslo_concurrency.lockutils [None req-90dcfab1-2d0d-449a-9262-fa239f68ba14 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:36:23 compute-0 nova_compute[192079]: 2025-10-02 12:36:23.322 2 DEBUG oslo_concurrency.lockutils [None req-90dcfab1-2d0d-449a-9262-fa239f68ba14 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:36:23 compute-0 nova_compute[192079]: 2025-10-02 12:36:23.336 2 INFO nova.compute.manager [None req-90dcfab1-2d0d-449a-9262-fa239f68ba14 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Terminating instance
Oct 02 12:36:23 compute-0 nova_compute[192079]: 2025-10-02 12:36:23.351 2 DEBUG nova.compute.manager [None req-90dcfab1-2d0d-449a-9262-fa239f68ba14 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:36:23 compute-0 kernel: tap64dd0de7-fb (unregistering): left promiscuous mode
Oct 02 12:36:23 compute-0 NetworkManager[51160]: <info>  [1759408583.3765] device (tap64dd0de7-fb): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:36:23 compute-0 ovn_controller[94336]: 2025-10-02T12:36:23Z|00625|binding|INFO|Releasing lport 64dd0de7-fbd7-4c16-b867-61d61163f4ba from this chassis (sb_readonly=0)
Oct 02 12:36:23 compute-0 ovn_controller[94336]: 2025-10-02T12:36:23Z|00626|binding|INFO|Setting lport 64dd0de7-fbd7-4c16-b867-61d61163f4ba down in Southbound
Oct 02 12:36:23 compute-0 ovn_controller[94336]: 2025-10-02T12:36:23Z|00627|binding|INFO|Removing iface tap64dd0de7-fb ovn-installed in OVS
Oct 02 12:36:23 compute-0 nova_compute[192079]: 2025-10-02 12:36:23.392 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:23 compute-0 nova_compute[192079]: 2025-10-02 12:36:23.394 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:23.400 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:e3:22:87 10.100.0.11'], port_security=['fa:16:3e:e3:22:87 10.100.0.11'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.11/28', 'neutron:device_id': '0f53a1ce-fb3c-4d89-be52-05b2de65acba', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-616214b7-6fa7-4c4a-92d4-ca6b283a5d5d', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '6908d705b9b541669e2fe9a84c2cacd7', 'neutron:revision_number': '8', 'neutron:security_group_ids': '8a40f8ee-7113-4592-bfa9-35ec9f6a67c6', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=ee25e8ff-f81b-4b66-a313-71c3093cd990, chassis=[], tunnel_key=2, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=64dd0de7-fbd7-4c16-b867-61d61163f4ba) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:36:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:23.402 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 64dd0de7-fbd7-4c16-b867-61d61163f4ba in datapath 616214b7-6fa7-4c4a-92d4-ca6b283a5d5d unbound from our chassis
Oct 02 12:36:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:23.404 103294 DEBUG neutron.agent.ovn.metadata.agent [-] There is no metadata port for network 616214b7-6fa7-4c4a-92d4-ca6b283a5d5d or it has no MAC or IP addresses configured, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:599
Oct 02 12:36:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:36:23.406 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a9455033-4dfa-472a-b293-ba58eb0053fc]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:36:23 compute-0 nova_compute[192079]: 2025-10-02 12:36:23.407 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:23 compute-0 systemd[1]: machine-qemu\x2d78\x2dinstance\x2d0000009f.scope: Deactivated successfully.
Oct 02 12:36:23 compute-0 systemd[1]: machine-qemu\x2d78\x2dinstance\x2d0000009f.scope: Consumed 5.244s CPU time.
Oct 02 12:36:23 compute-0 systemd-machined[152150]: Machine qemu-78-instance-0000009f terminated.
Oct 02 12:36:23 compute-0 nova_compute[192079]: 2025-10-02 12:36:23.580 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:23 compute-0 nova_compute[192079]: 2025-10-02 12:36:23.587 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:23 compute-0 nova_compute[192079]: 2025-10-02 12:36:23.625 2 INFO nova.virt.libvirt.driver [-] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Instance destroyed successfully.
Oct 02 12:36:23 compute-0 nova_compute[192079]: 2025-10-02 12:36:23.626 2 DEBUG nova.objects.instance [None req-90dcfab1-2d0d-449a-9262-fa239f68ba14 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Lazy-loading 'resources' on Instance uuid 0f53a1ce-fb3c-4d89-be52-05b2de65acba obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:36:23 compute-0 nova_compute[192079]: 2025-10-02 12:36:23.653 2 DEBUG nova.virt.libvirt.vif [None req-90dcfab1-2d0d-449a-9262-fa239f68ba14 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:35:49Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestServerAdvancedOps-server-16731796',display_name='tempest-TestServerAdvancedOps-server-16731796',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testserveradvancedops-server-16731796',id=159,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:36:02Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='6908d705b9b541669e2fe9a84c2cacd7',ramdisk_id='',reservation_id='r-089z0o6n',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',
image_min_ram='0',owner_project_name='tempest-TestServerAdvancedOps-1292292677',owner_user_name='tempest-TestServerAdvancedOps-1292292677-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:36:19Z,user_data=None,user_id='bf14abcb3f75420e870a3997dfbedee4',uuid=0f53a1ce-fb3c-4d89-be52-05b2de65acba,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "address": "fa:16:3e:e3:22:87", "network": {"id": "616214b7-6fa7-4c4a-92d4-ca6b283a5d5d", "bridge": "br-int", "label": "tempest-TestServerAdvancedOps-1235429694-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6908d705b9b541669e2fe9a84c2cacd7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap64dd0de7-fb", "ovs_interfaceid": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:36:23 compute-0 nova_compute[192079]: 2025-10-02 12:36:23.655 2 DEBUG nova.network.os_vif_util [None req-90dcfab1-2d0d-449a-9262-fa239f68ba14 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Converting VIF {"id": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "address": "fa:16:3e:e3:22:87", "network": {"id": "616214b7-6fa7-4c4a-92d4-ca6b283a5d5d", "bridge": "br-int", "label": "tempest-TestServerAdvancedOps-1235429694-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6908d705b9b541669e2fe9a84c2cacd7", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap64dd0de7-fb", "ovs_interfaceid": "64dd0de7-fbd7-4c16-b867-61d61163f4ba", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:36:23 compute-0 nova_compute[192079]: 2025-10-02 12:36:23.656 2 DEBUG nova.network.os_vif_util [None req-90dcfab1-2d0d-449a-9262-fa239f68ba14 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:e3:22:87,bridge_name='br-int',has_traffic_filtering=True,id=64dd0de7-fbd7-4c16-b867-61d61163f4ba,network=Network(616214b7-6fa7-4c4a-92d4-ca6b283a5d5d),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap64dd0de7-fb') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:36:23 compute-0 nova_compute[192079]: 2025-10-02 12:36:23.656 2 DEBUG os_vif [None req-90dcfab1-2d0d-449a-9262-fa239f68ba14 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:e3:22:87,bridge_name='br-int',has_traffic_filtering=True,id=64dd0de7-fbd7-4c16-b867-61d61163f4ba,network=Network(616214b7-6fa7-4c4a-92d4-ca6b283a5d5d),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap64dd0de7-fb') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:36:23 compute-0 nova_compute[192079]: 2025-10-02 12:36:23.660 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:23 compute-0 nova_compute[192079]: 2025-10-02 12:36:23.661 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap64dd0de7-fb, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:36:23 compute-0 nova_compute[192079]: 2025-10-02 12:36:23.663 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:23 compute-0 nova_compute[192079]: 2025-10-02 12:36:23.665 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:23 compute-0 nova_compute[192079]: 2025-10-02 12:36:23.670 2 INFO os_vif [None req-90dcfab1-2d0d-449a-9262-fa239f68ba14 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:e3:22:87,bridge_name='br-int',has_traffic_filtering=True,id=64dd0de7-fbd7-4c16-b867-61d61163f4ba,network=Network(616214b7-6fa7-4c4a-92d4-ca6b283a5d5d),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap64dd0de7-fb')
Oct 02 12:36:23 compute-0 nova_compute[192079]: 2025-10-02 12:36:23.670 2 INFO nova.virt.libvirt.driver [None req-90dcfab1-2d0d-449a-9262-fa239f68ba14 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Deleting instance files /var/lib/nova/instances/0f53a1ce-fb3c-4d89-be52-05b2de65acba_del
Oct 02 12:36:23 compute-0 nova_compute[192079]: 2025-10-02 12:36:23.671 2 INFO nova.virt.libvirt.driver [None req-90dcfab1-2d0d-449a-9262-fa239f68ba14 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Deletion of /var/lib/nova/instances/0f53a1ce-fb3c-4d89-be52-05b2de65acba_del complete
Oct 02 12:36:23 compute-0 nova_compute[192079]: 2025-10-02 12:36:23.755 2 INFO nova.compute.manager [None req-90dcfab1-2d0d-449a-9262-fa239f68ba14 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Took 0.40 seconds to destroy the instance on the hypervisor.
Oct 02 12:36:23 compute-0 nova_compute[192079]: 2025-10-02 12:36:23.756 2 DEBUG oslo.service.loopingcall [None req-90dcfab1-2d0d-449a-9262-fa239f68ba14 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:36:23 compute-0 nova_compute[192079]: 2025-10-02 12:36:23.756 2 DEBUG nova.compute.manager [-] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:36:23 compute-0 nova_compute[192079]: 2025-10-02 12:36:23.756 2 DEBUG nova.network.neutron [-] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:36:24 compute-0 nova_compute[192079]: 2025-10-02 12:36:24.411 2 DEBUG nova.network.neutron [-] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:36:24 compute-0 nova_compute[192079]: 2025-10-02 12:36:24.430 2 INFO nova.compute.manager [-] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Took 0.67 seconds to deallocate network for instance.
Oct 02 12:36:24 compute-0 nova_compute[192079]: 2025-10-02 12:36:24.448 2 DEBUG nova.compute.manager [req-45054e76-b68f-4d06-a802-3dce6906d173 req-449d54bd-ba3e-4dbc-8f4d-fbe05d629e4f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Received event network-vif-unplugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:36:24 compute-0 nova_compute[192079]: 2025-10-02 12:36:24.449 2 DEBUG oslo_concurrency.lockutils [req-45054e76-b68f-4d06-a802-3dce6906d173 req-449d54bd-ba3e-4dbc-8f4d-fbe05d629e4f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:36:24 compute-0 nova_compute[192079]: 2025-10-02 12:36:24.449 2 DEBUG oslo_concurrency.lockutils [req-45054e76-b68f-4d06-a802-3dce6906d173 req-449d54bd-ba3e-4dbc-8f4d-fbe05d629e4f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:36:24 compute-0 nova_compute[192079]: 2025-10-02 12:36:24.450 2 DEBUG oslo_concurrency.lockutils [req-45054e76-b68f-4d06-a802-3dce6906d173 req-449d54bd-ba3e-4dbc-8f4d-fbe05d629e4f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:36:24 compute-0 nova_compute[192079]: 2025-10-02 12:36:24.450 2 DEBUG nova.compute.manager [req-45054e76-b68f-4d06-a802-3dce6906d173 req-449d54bd-ba3e-4dbc-8f4d-fbe05d629e4f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] No waiting events found dispatching network-vif-unplugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:36:24 compute-0 nova_compute[192079]: 2025-10-02 12:36:24.450 2 DEBUG nova.compute.manager [req-45054e76-b68f-4d06-a802-3dce6906d173 req-449d54bd-ba3e-4dbc-8f4d-fbe05d629e4f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Received event network-vif-unplugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:36:24 compute-0 nova_compute[192079]: 2025-10-02 12:36:24.450 2 DEBUG nova.compute.manager [req-45054e76-b68f-4d06-a802-3dce6906d173 req-449d54bd-ba3e-4dbc-8f4d-fbe05d629e4f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Received event network-vif-plugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:36:24 compute-0 nova_compute[192079]: 2025-10-02 12:36:24.451 2 DEBUG oslo_concurrency.lockutils [req-45054e76-b68f-4d06-a802-3dce6906d173 req-449d54bd-ba3e-4dbc-8f4d-fbe05d629e4f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:36:24 compute-0 nova_compute[192079]: 2025-10-02 12:36:24.451 2 DEBUG oslo_concurrency.lockutils [req-45054e76-b68f-4d06-a802-3dce6906d173 req-449d54bd-ba3e-4dbc-8f4d-fbe05d629e4f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:36:24 compute-0 nova_compute[192079]: 2025-10-02 12:36:24.451 2 DEBUG oslo_concurrency.lockutils [req-45054e76-b68f-4d06-a802-3dce6906d173 req-449d54bd-ba3e-4dbc-8f4d-fbe05d629e4f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:36:24 compute-0 nova_compute[192079]: 2025-10-02 12:36:24.451 2 DEBUG nova.compute.manager [req-45054e76-b68f-4d06-a802-3dce6906d173 req-449d54bd-ba3e-4dbc-8f4d-fbe05d629e4f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] No waiting events found dispatching network-vif-plugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:36:24 compute-0 nova_compute[192079]: 2025-10-02 12:36:24.452 2 WARNING nova.compute.manager [req-45054e76-b68f-4d06-a802-3dce6906d173 req-449d54bd-ba3e-4dbc-8f4d-fbe05d629e4f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Received unexpected event network-vif-plugged-64dd0de7-fbd7-4c16-b867-61d61163f4ba for instance with vm_state active and task_state deleting.
Oct 02 12:36:24 compute-0 nova_compute[192079]: 2025-10-02 12:36:24.541 2 DEBUG oslo_concurrency.lockutils [None req-90dcfab1-2d0d-449a-9262-fa239f68ba14 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:36:24 compute-0 nova_compute[192079]: 2025-10-02 12:36:24.542 2 DEBUG oslo_concurrency.lockutils [None req-90dcfab1-2d0d-449a-9262-fa239f68ba14 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:36:24 compute-0 nova_compute[192079]: 2025-10-02 12:36:24.614 2 DEBUG nova.compute.provider_tree [None req-90dcfab1-2d0d-449a-9262-fa239f68ba14 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:36:24 compute-0 nova_compute[192079]: 2025-10-02 12:36:24.639 2 DEBUG nova.scheduler.client.report [None req-90dcfab1-2d0d-449a-9262-fa239f68ba14 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:36:24 compute-0 nova_compute[192079]: 2025-10-02 12:36:24.661 2 DEBUG oslo_concurrency.lockutils [None req-90dcfab1-2d0d-449a-9262-fa239f68ba14 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.119s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:36:24 compute-0 nova_compute[192079]: 2025-10-02 12:36:24.697 2 INFO nova.scheduler.client.report [None req-90dcfab1-2d0d-449a-9262-fa239f68ba14 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Deleted allocations for instance 0f53a1ce-fb3c-4d89-be52-05b2de65acba
Oct 02 12:36:24 compute-0 nova_compute[192079]: 2025-10-02 12:36:24.793 2 DEBUG oslo_concurrency.lockutils [None req-90dcfab1-2d0d-449a-9262-fa239f68ba14 bf14abcb3f75420e870a3997dfbedee4 6908d705b9b541669e2fe9a84c2cacd7 - - default default] Lock "0f53a1ce-fb3c-4d89-be52-05b2de65acba" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.472s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:36:25 compute-0 nova_compute[192079]: 2025-10-02 12:36:25.845 2 DEBUG nova.compute.manager [req-e8f05f1b-7384-4be0-98ac-5a3e39466d3b req-e953cd3b-cd5f-4dd8-b4b9-de6b699a523f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Received event network-vif-deleted-64dd0de7-fbd7-4c16-b867-61d61163f4ba external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:36:26 compute-0 nova_compute[192079]: 2025-10-02 12:36:26.104 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:27 compute-0 nova_compute[192079]: 2025-10-02 12:36:27.563 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:28 compute-0 nova_compute[192079]: 2025-10-02 12:36:28.665 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:31 compute-0 podman[247577]: 2025-10-02 12:36:31.171899094 +0000 UTC m=+0.077064478 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, architecture=x86_64, vendor=Red Hat, Inc., version=9.6, build-date=2025-08-20T13:12:41, managed_by=edpm_ansible, maintainer=Red Hat, Inc., url=https://catalog.redhat.com/en/search?searchType=containers, name=ubi9-minimal, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., release=1755695350, distribution-scope=public, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, config_id=edpm, io.buildah.version=1.33.7, io.openshift.expose-services=, com.redhat.component=ubi9-minimal-container, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, io.openshift.tags=minimal rhel9, container_name=openstack_network_exporter, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vcs-type=git)
Oct 02 12:36:31 compute-0 podman[247578]: 2025-10-02 12:36:31.172319595 +0000 UTC m=+0.077499980 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, config_id=multipathd, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2)
Oct 02 12:36:32 compute-0 nova_compute[192079]: 2025-10-02 12:36:32.564 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:33 compute-0 nova_compute[192079]: 2025-10-02 12:36:33.669 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:35 compute-0 podman[247612]: 2025-10-02 12:36:35.130973935 +0000 UTC m=+0.045666564 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter)
Oct 02 12:36:35 compute-0 podman[247613]: 2025-10-02 12:36:35.142758546 +0000 UTC m=+0.052675185 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, container_name=iscsid, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_id=iscsid, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:36:37 compute-0 nova_compute[192079]: 2025-10-02 12:36:37.565 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:37 compute-0 nova_compute[192079]: 2025-10-02 12:36:37.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:36:37 compute-0 nova_compute[192079]: 2025-10-02 12:36:37.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:36:38 compute-0 nova_compute[192079]: 2025-10-02 12:36:38.623 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759408583.6218865, 0f53a1ce-fb3c-4d89-be52-05b2de65acba => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:36:38 compute-0 nova_compute[192079]: 2025-10-02 12:36:38.623 2 INFO nova.compute.manager [-] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] VM Stopped (Lifecycle Event)
Oct 02 12:36:38 compute-0 nova_compute[192079]: 2025-10-02 12:36:38.646 2 DEBUG nova.compute.manager [None req-1b4d1705-fada-4075-80fc-e65f143452f2 - - - - - -] [instance: 0f53a1ce-fb3c-4d89-be52-05b2de65acba] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:36:38 compute-0 nova_compute[192079]: 2025-10-02 12:36:38.669 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:41 compute-0 nova_compute[192079]: 2025-10-02 12:36:41.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:36:41 compute-0 nova_compute[192079]: 2025-10-02 12:36:41.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:36:42 compute-0 nova_compute[192079]: 2025-10-02 12:36:42.497 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:36:42 compute-0 nova_compute[192079]: 2025-10-02 12:36:42.498 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:36:42 compute-0 nova_compute[192079]: 2025-10-02 12:36:42.498 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:36:42 compute-0 nova_compute[192079]: 2025-10-02 12:36:42.498 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:36:42 compute-0 nova_compute[192079]: 2025-10-02 12:36:42.566 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:42 compute-0 nova_compute[192079]: 2025-10-02 12:36:42.660 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:36:42 compute-0 nova_compute[192079]: 2025-10-02 12:36:42.661 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5685MB free_disk=73.33988952636719GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:36:42 compute-0 nova_compute[192079]: 2025-10-02 12:36:42.662 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:36:42 compute-0 nova_compute[192079]: 2025-10-02 12:36:42.662 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:36:42 compute-0 nova_compute[192079]: 2025-10-02 12:36:42.754 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:36:42 compute-0 nova_compute[192079]: 2025-10-02 12:36:42.755 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:36:42 compute-0 nova_compute[192079]: 2025-10-02 12:36:42.773 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing inventories for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708 _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:804
Oct 02 12:36:43 compute-0 nova_compute[192079]: 2025-10-02 12:36:43.089 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Updating ProviderTree inventory for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 from _refresh_and_get_inventory using data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} _refresh_and_get_inventory /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:768
Oct 02 12:36:43 compute-0 nova_compute[192079]: 2025-10-02 12:36:43.090 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Updating inventory in ProviderTree for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 with inventory: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:176
Oct 02 12:36:43 compute-0 nova_compute[192079]: 2025-10-02 12:36:43.102 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing aggregate associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, aggregates: None _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:813
Oct 02 12:36:43 compute-0 nova_compute[192079]: 2025-10-02 12:36:43.129 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing trait associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, traits: COMPUTE_SECURITY_UEFI_SECURE_BOOT,COMPUTE_VIOMMU_MODEL_VIRTIO,COMPUTE_VIOMMU_MODEL_AUTO,COMPUTE_IMAGE_TYPE_AKI,COMPUTE_GRAPHICS_MODEL_VIRTIO,COMPUTE_NET_VIF_MODEL_PCNET,HW_CPU_X86_SSE42,COMPUTE_RESCUE_BFV,COMPUTE_VOLUME_EXTEND,COMPUTE_IMAGE_TYPE_QCOW2,COMPUTE_TRUSTED_CERTS,COMPUTE_SOCKET_PCI_NUMA_AFFINITY,COMPUTE_GRAPHICS_MODEL_CIRRUS,HW_CPU_X86_MMX,COMPUTE_STORAGE_BUS_VIRTIO,COMPUTE_NET_ATTACH_INTERFACE_WITH_TAG,COMPUTE_STORAGE_BUS_FDC,COMPUTE_STORAGE_BUS_USB,COMPUTE_NODE,HW_CPU_X86_SSSE3,HW_CPU_X86_SSE2,COMPUTE_GRAPHICS_MODEL_BOCHS,COMPUTE_NET_VIF_MODEL_E1000E,COMPUTE_IMAGE_TYPE_RAW,COMPUTE_NET_VIF_MODEL_NE2K_PCI,COMPUTE_IMAGE_TYPE_AMI,COMPUTE_VIOMMU_MODEL_INTEL,COMPUTE_SECURITY_TPM_2_0,COMPUTE_STORAGE_BUS_SCSI,COMPUTE_IMAGE_TYPE_ARI,COMPUTE_NET_VIF_MODEL_VMXNET3,COMPUTE_SECURITY_TPM_1_2,COMPUTE_NET_VIF_MODEL_E1000,HW_CPU_X86_SSE,COMPUTE_VOLUME_MULTI_ATTACH,COMPUTE_STORAGE_BUS_IDE,COMPUTE_GRAPHICS_MODEL_NONE,COMPUTE_VOLUME_ATTACH_WITH_TAG,COMPUTE_NET_VIF_MODEL_VIRTIO,HW_CPU_X86_SSE41,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_DEVICE_TAGGING,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_ACCELERATORS,COMPUTE_NET_VIF_MODEL_RTL8139,COMPUTE_GRAPHICS_MODEL_VGA,COMPUTE_STORAGE_BUS_SATA,COMPUTE_NET_VIF_MODEL_SPAPR_VLAN _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:825
Oct 02 12:36:43 compute-0 nova_compute[192079]: 2025-10-02 12:36:43.180 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:36:43 compute-0 nova_compute[192079]: 2025-10-02 12:36:43.213 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:36:43 compute-0 nova_compute[192079]: 2025-10-02 12:36:43.244 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:36:43 compute-0 nova_compute[192079]: 2025-10-02 12:36:43.245 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.583s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:36:43 compute-0 nova_compute[192079]: 2025-10-02 12:36:43.671 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:44 compute-0 podman[247657]: 2025-10-02 12:36:44.137125505 +0000 UTC m=+0.050465195 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_id=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:36:44 compute-0 podman[247659]: 2025-10-02 12:36:44.144213769 +0000 UTC m=+0.050592759 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>)
Oct 02 12:36:44 compute-0 podman[247658]: 2025-10-02 12:36:44.1806369 +0000 UTC m=+0.087699218 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, config_id=ovn_controller, container_name=ovn_controller, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_managed=true)
Oct 02 12:36:45 compute-0 nova_compute[192079]: 2025-10-02 12:36:45.245 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:36:45 compute-0 nova_compute[192079]: 2025-10-02 12:36:45.245 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:36:45 compute-0 nova_compute[192079]: 2025-10-02 12:36:45.246 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:36:45 compute-0 nova_compute[192079]: 2025-10-02 12:36:45.246 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:36:45 compute-0 nova_compute[192079]: 2025-10-02 12:36:45.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:36:47 compute-0 nova_compute[192079]: 2025-10-02 12:36:47.568 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:47 compute-0 nova_compute[192079]: 2025-10-02 12:36:47.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:36:47 compute-0 nova_compute[192079]: 2025-10-02 12:36:47.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:36:47 compute-0 nova_compute[192079]: 2025-10-02 12:36:47.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:36:47 compute-0 nova_compute[192079]: 2025-10-02 12:36:47.701 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:36:48 compute-0 nova_compute[192079]: 2025-10-02 12:36:48.673 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:52 compute-0 nova_compute[192079]: 2025-10-02 12:36:52.608 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:52 compute-0 nova_compute[192079]: 2025-10-02 12:36:52.697 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:36:53 compute-0 nova_compute[192079]: 2025-10-02 12:36:53.723 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:54 compute-0 podman[247723]: 2025-10-02 12:36:54.165154737 +0000 UTC m=+0.074354015 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, config_id=edpm, 
org.label-schema.build-date=20251001, container_name=ceilometer_agent_compute, managed_by=edpm_ansible)
Oct 02 12:36:57 compute-0 nova_compute[192079]: 2025-10-02 12:36:57.610 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:36:58 compute-0 nova_compute[192079]: 2025-10-02 12:36:58.727 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:02 compute-0 podman[247743]: 2025-10-02 12:37:02.164249783 +0000 UTC m=+0.069629556 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, distribution-scope=public, vendor=Red Hat, Inc., io.buildah.version=1.33.7, config_id=edpm, io.openshift.tags=minimal rhel9, managed_by=edpm_ansible, maintainer=Red Hat, Inc., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, com.redhat.component=ubi9-minimal-container, container_name=openstack_network_exporter, vcs-type=git, architecture=x86_64, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, build-date=2025-08-20T13:12:41, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, release=1755695350, io.openshift.expose-services=, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, url=https://catalog.redhat.com/en/search?searchType=containers, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., name=ubi9-minimal, version=9.6)
Oct 02 12:37:02 compute-0 podman[247744]: 2025-10-02 12:37:02.179247702 +0000 UTC m=+0.079061183 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, config_id=multipathd, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, container_name=multipathd, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:37:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:02.239 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:37:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:02.239 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:37:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:02.240 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:37:02 compute-0 nova_compute[192079]: 2025-10-02 12:37:02.641 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:03 compute-0 nova_compute[192079]: 2025-10-02 12:37:03.776 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:06 compute-0 podman[247781]: 2025-10-02 12:37:06.168027091 +0000 UTC m=+0.071397605 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:37:06 compute-0 podman[247782]: 2025-10-02 12:37:06.177050936 +0000 UTC m=+0.077470400 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, container_name=iscsid, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=iscsid, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0)
Oct 02 12:37:07 compute-0 nova_compute[192079]: 2025-10-02 12:37:07.643 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:08 compute-0 nova_compute[192079]: 2025-10-02 12:37:08.779 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:12 compute-0 nova_compute[192079]: 2025-10-02 12:37:12.644 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:13 compute-0 nova_compute[192079]: 2025-10-02 12:37:13.782 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:14.523 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:fb:62:4a 10.100.0.2 2001:db8::f816:3eff:fefb:624a'], port_security=[], type=localport, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': ''}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.2/28 2001:db8::f816:3eff:fefb:624a/64', 'neutron:device_id': 'ovnmeta-1d7388dd-d8ef-404d-8bb8-6f3d3ab763b6', 'neutron:device_owner': 'network:distributed', 'neutron:mtu': '', 'neutron:network_name': 'neutron-1d7388dd-d8ef-404d-8bb8-6f3d3ab763b6', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'neutron:revision_number': '3', 'neutron:security_group_ids': '', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=546080ca-391c-439c-be48-88bb942119c9, chassis=[], tunnel_key=1, gateway_chassis=[], requested_chassis=[], logical_port=e78bd1c4-7546-4ebe-a71b-a49e8c78f36c) old=Port_Binding(mac=['fa:16:3e:fb:62:4a 10.100.0.2'], external_ids={'neutron:cidrs': '10.100.0.2/28', 'neutron:device_id': 'ovnmeta-1d7388dd-d8ef-404d-8bb8-6f3d3ab763b6', 'neutron:device_owner': 'network:distributed', 'neutron:mtu': '', 'neutron:network_name': 'neutron-1d7388dd-d8ef-404d-8bb8-6f3d3ab763b6', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'neutron:revision_number': '2', 'neutron:security_group_ids': '', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}) matches 
/usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:37:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:14.524 103294 INFO neutron.agent.ovn.metadata.agent [-] Metadata Port e78bd1c4-7546-4ebe-a71b-a49e8c78f36c in datapath 1d7388dd-d8ef-404d-8bb8-6f3d3ab763b6 updated
Oct 02 12:37:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:14.525 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 1d7388dd-d8ef-404d-8bb8-6f3d3ab763b6, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:37:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:14.526 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[86e53763-6dc5-429f-8246-7e7e7942748d]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:15 compute-0 podman[247825]: 2025-10-02 12:37:15.131204951 +0000 UTC m=+0.043263279 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:37:15 compute-0 podman[247823]: 2025-10-02 12:37:15.142330934 +0000 UTC m=+0.055673677 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_id=ovn_metadata_agent, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']})
Oct 02 12:37:15 compute-0 podman[247824]: 2025-10-02 12:37:15.192496819 +0000 UTC m=+0.095029078 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller)
Oct 02 12:37:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:16.565 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=41, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=40) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:37:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:16.566 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 3 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:37:16 compute-0 nova_compute[192079]: 2025-10-02 12:37:16.618 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:16 compute-0 nova_compute[192079]: 2025-10-02 12:37:16.889 2 DEBUG oslo_concurrency.lockutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "dbb54a17-c5e3-491f-bca4-54ddde9b72fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:37:16 compute-0 nova_compute[192079]: 2025-10-02 12:37:16.890 2 DEBUG oslo_concurrency.lockutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "dbb54a17-c5e3-491f-bca4-54ddde9b72fa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:37:16 compute-0 nova_compute[192079]: 2025-10-02 12:37:16.913 2 DEBUG nova.compute.manager [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.043 2 DEBUG oslo_concurrency.lockutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.043 2 DEBUG oslo_concurrency.lockutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.050 2 DEBUG nova.virt.hardware [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.050 2 INFO nova.compute.claims [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:37:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:37:17.111 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:37:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:37:17.112 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:37:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:37:17.112 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:37:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:37:17.112 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:37:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:37:17.113 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.iops, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:37:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:37:17.113 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:37:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:37:17.113 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.allocation, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:37:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:37:17.113 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:37:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:37:17.113 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:37:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:37:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.capacity, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:37:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:37:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:37:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:37:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:37:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:37:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:37:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:37:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:37:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:37:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:37:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:37:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster cpu, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:37:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:37:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:37:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:37:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:37:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:37:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:37:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:37:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:37:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:37:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:37:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:37:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster memory.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:37:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:37:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:37:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:37:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:37:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:37:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.224 2 DEBUG nova.compute.provider_tree [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.243 2 DEBUG nova.scheduler.client.report [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.267 2 DEBUG oslo_concurrency.lockutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.223s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.267 2 DEBUG nova.compute.manager [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.360 2 DEBUG nova.compute.manager [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.360 2 DEBUG nova.network.neutron [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.386 2 INFO nova.virt.libvirt.driver [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.408 2 DEBUG nova.compute.manager [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.557 2 DEBUG nova.compute.manager [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.558 2 DEBUG nova.virt.libvirt.driver [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.559 2 INFO nova.virt.libvirt.driver [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Creating image(s)
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.559 2 DEBUG oslo_concurrency.lockutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "/var/lib/nova/instances/dbb54a17-c5e3-491f-bca4-54ddde9b72fa/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.560 2 DEBUG oslo_concurrency.lockutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "/var/lib/nova/instances/dbb54a17-c5e3-491f-bca4-54ddde9b72fa/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.561 2 DEBUG oslo_concurrency.lockutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "/var/lib/nova/instances/dbb54a17-c5e3-491f-bca4-54ddde9b72fa/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.576 2 DEBUG oslo_concurrency.processutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.657 2 DEBUG nova.policy [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.667 2 DEBUG oslo_concurrency.processutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.091s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.667 2 DEBUG oslo_concurrency.lockutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.668 2 DEBUG oslo_concurrency.lockutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.685 2 DEBUG oslo_concurrency.processutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.707 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.755 2 DEBUG oslo_concurrency.processutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.070s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.756 2 DEBUG oslo_concurrency.processutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/dbb54a17-c5e3-491f-bca4-54ddde9b72fa/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.797 2 DEBUG oslo_concurrency.processutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/dbb54a17-c5e3-491f-bca4-54ddde9b72fa/disk 1073741824" returned: 0 in 0.041s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.798 2 DEBUG oslo_concurrency.lockutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.131s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.799 2 DEBUG oslo_concurrency.processutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.860 2 DEBUG oslo_concurrency.processutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.061s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.862 2 DEBUG nova.virt.disk.api [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Checking if we can resize image /var/lib/nova/instances/dbb54a17-c5e3-491f-bca4-54ddde9b72fa/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.863 2 DEBUG oslo_concurrency.processutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/dbb54a17-c5e3-491f-bca4-54ddde9b72fa/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.955 2 DEBUG oslo_concurrency.processutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/dbb54a17-c5e3-491f-bca4-54ddde9b72fa/disk --force-share --output=json" returned: 0 in 0.092s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.956 2 DEBUG nova.virt.disk.api [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Cannot resize image /var/lib/nova/instances/dbb54a17-c5e3-491f-bca4-54ddde9b72fa/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.957 2 DEBUG nova.objects.instance [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lazy-loading 'migration_context' on Instance uuid dbb54a17-c5e3-491f-bca4-54ddde9b72fa obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.970 2 DEBUG nova.virt.libvirt.driver [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.970 2 DEBUG nova.virt.libvirt.driver [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Ensure instance console log exists: /var/lib/nova/instances/dbb54a17-c5e3-491f-bca4-54ddde9b72fa/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.971 2 DEBUG oslo_concurrency.lockutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.971 2 DEBUG oslo_concurrency.lockutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:37:17 compute-0 nova_compute[192079]: 2025-10-02 12:37:17.971 2 DEBUG oslo_concurrency.lockutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:37:18 compute-0 nova_compute[192079]: 2025-10-02 12:37:18.817 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:19 compute-0 nova_compute[192079]: 2025-10-02 12:37:19.520 2 DEBUG nova.network.neutron [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Successfully created port: 07a62b49-14ca-420c-8b61-b7f06d28df05 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:37:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:19.568 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '41'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:37:20 compute-0 nova_compute[192079]: 2025-10-02 12:37:20.668 2 DEBUG nova.network.neutron [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Successfully updated port: 07a62b49-14ca-420c-8b61-b7f06d28df05 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:37:20 compute-0 nova_compute[192079]: 2025-10-02 12:37:20.719 2 DEBUG oslo_concurrency.lockutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "refresh_cache-dbb54a17-c5e3-491f-bca4-54ddde9b72fa" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:37:20 compute-0 nova_compute[192079]: 2025-10-02 12:37:20.720 2 DEBUG oslo_concurrency.lockutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquired lock "refresh_cache-dbb54a17-c5e3-491f-bca4-54ddde9b72fa" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:37:20 compute-0 nova_compute[192079]: 2025-10-02 12:37:20.720 2 DEBUG nova.network.neutron [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:37:20 compute-0 nova_compute[192079]: 2025-10-02 12:37:20.855 2 DEBUG nova.compute.manager [req-41206480-a9d1-4b3b-87fe-1eb19a939088 req-7b5bfd35-1de8-44cd-a6af-4c2f29470633 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Received event network-changed-07a62b49-14ca-420c-8b61-b7f06d28df05 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:37:20 compute-0 nova_compute[192079]: 2025-10-02 12:37:20.855 2 DEBUG nova.compute.manager [req-41206480-a9d1-4b3b-87fe-1eb19a939088 req-7b5bfd35-1de8-44cd-a6af-4c2f29470633 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Refreshing instance network info cache due to event network-changed-07a62b49-14ca-420c-8b61-b7f06d28df05. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:37:20 compute-0 nova_compute[192079]: 2025-10-02 12:37:20.855 2 DEBUG oslo_concurrency.lockutils [req-41206480-a9d1-4b3b-87fe-1eb19a939088 req-7b5bfd35-1de8-44cd-a6af-4c2f29470633 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-dbb54a17-c5e3-491f-bca4-54ddde9b72fa" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:37:21 compute-0 nova_compute[192079]: 2025-10-02 12:37:21.080 2 DEBUG nova.network.neutron [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:37:21 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:21.249 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:fb:62:4a 10.100.0.2 2001:db8:0:1:f816:3eff:fefb:624a 2001:db8::f816:3eff:fefb:624a'], port_security=[], type=localport, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': ''}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.2/28 2001:db8:0:1:f816:3eff:fefb:624a/64 2001:db8::f816:3eff:fefb:624a/64', 'neutron:device_id': 'ovnmeta-1d7388dd-d8ef-404d-8bb8-6f3d3ab763b6', 'neutron:device_owner': 'network:distributed', 'neutron:mtu': '', 'neutron:network_name': 'neutron-1d7388dd-d8ef-404d-8bb8-6f3d3ab763b6', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'neutron:revision_number': '4', 'neutron:security_group_ids': '', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=546080ca-391c-439c-be48-88bb942119c9, chassis=[], tunnel_key=1, gateway_chassis=[], requested_chassis=[], logical_port=e78bd1c4-7546-4ebe-a71b-a49e8c78f36c) old=Port_Binding(mac=['fa:16:3e:fb:62:4a 10.100.0.2 2001:db8::f816:3eff:fefb:624a'], external_ids={'neutron:cidrs': '10.100.0.2/28 2001:db8::f816:3eff:fefb:624a/64', 'neutron:device_id': 'ovnmeta-1d7388dd-d8ef-404d-8bb8-6f3d3ab763b6', 'neutron:device_owner': 'network:distributed', 'neutron:mtu': '', 'neutron:network_name': 'neutron-1d7388dd-d8ef-404d-8bb8-6f3d3ab763b6', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'neutron:revision_number': '3', 'neutron:security_group_ids': '', 
'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:37:21 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:21.250 103294 INFO neutron.agent.ovn.metadata.agent [-] Metadata Port e78bd1c4-7546-4ebe-a71b-a49e8c78f36c in datapath 1d7388dd-d8ef-404d-8bb8-6f3d3ab763b6 updated
Oct 02 12:37:21 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:21.252 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 1d7388dd-d8ef-404d-8bb8-6f3d3ab763b6, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:37:21 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:21.252 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[911c2976-1006-4d9c-bb62-5f75d17b9576]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.059 2 DEBUG nova.network.neutron [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Updating instance_info_cache with network_info: [{"id": "07a62b49-14ca-420c-8b61-b7f06d28df05", "address": "fa:16:3e:aa:3d:10", "network": {"id": "a970b3c6-2fc3-4025-868b-2e9af396991a", "bridge": "br-int", "label": "tempest-network-smoke--441167180", "subnets": [{"cidr": "10.100.0.16/28", "dns": [], "gateway": {"address": null, "type": "gateway", "version": null, "meta": {}}, "ips": [{"address": "10.100.0.19", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap07a62b49-14", "ovs_interfaceid": "07a62b49-14ca-420c-8b61-b7f06d28df05", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.082 2 DEBUG oslo_concurrency.lockutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Releasing lock "refresh_cache-dbb54a17-c5e3-491f-bca4-54ddde9b72fa" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.083 2 DEBUG nova.compute.manager [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Instance network_info: |[{"id": "07a62b49-14ca-420c-8b61-b7f06d28df05", "address": "fa:16:3e:aa:3d:10", "network": {"id": "a970b3c6-2fc3-4025-868b-2e9af396991a", "bridge": "br-int", "label": "tempest-network-smoke--441167180", "subnets": [{"cidr": "10.100.0.16/28", "dns": [], "gateway": {"address": null, "type": "gateway", "version": null, "meta": {}}, "ips": [{"address": "10.100.0.19", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap07a62b49-14", "ovs_interfaceid": "07a62b49-14ca-420c-8b61-b7f06d28df05", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.083 2 DEBUG oslo_concurrency.lockutils [req-41206480-a9d1-4b3b-87fe-1eb19a939088 req-7b5bfd35-1de8-44cd-a6af-4c2f29470633 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-dbb54a17-c5e3-491f-bca4-54ddde9b72fa" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.083 2 DEBUG nova.network.neutron [req-41206480-a9d1-4b3b-87fe-1eb19a939088 req-7b5bfd35-1de8-44cd-a6af-4c2f29470633 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Refreshing network info cache for port 07a62b49-14ca-420c-8b61-b7f06d28df05 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.086 2 DEBUG nova.virt.libvirt.driver [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Start _get_guest_xml network_info=[{"id": "07a62b49-14ca-420c-8b61-b7f06d28df05", "address": "fa:16:3e:aa:3d:10", "network": {"id": "a970b3c6-2fc3-4025-868b-2e9af396991a", "bridge": "br-int", "label": "tempest-network-smoke--441167180", "subnets": [{"cidr": "10.100.0.16/28", "dns": [], "gateway": {"address": null, "type": "gateway", "version": null, "meta": {}}, "ips": [{"address": "10.100.0.19", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap07a62b49-14", "ovs_interfaceid": "07a62b49-14ca-420c-8b61-b7f06d28df05", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.090 2 WARNING nova.virt.libvirt.driver [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.101 2 DEBUG nova.virt.libvirt.host [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.101 2 DEBUG nova.virt.libvirt.host [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.196 2 DEBUG nova.virt.libvirt.host [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.197 2 DEBUG nova.virt.libvirt.host [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.198 2 DEBUG nova.virt.libvirt.driver [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.199 2 DEBUG nova.virt.hardware [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.199 2 DEBUG nova.virt.hardware [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.199 2 DEBUG nova.virt.hardware [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.199 2 DEBUG nova.virt.hardware [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.200 2 DEBUG nova.virt.hardware [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.200 2 DEBUG nova.virt.hardware [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.200 2 DEBUG nova.virt.hardware [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.200 2 DEBUG nova.virt.hardware [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.200 2 DEBUG nova.virt.hardware [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.201 2 DEBUG nova.virt.hardware [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.201 2 DEBUG nova.virt.hardware [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.204 2 DEBUG nova.virt.libvirt.vif [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:37:16Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestNetworkBasicOps-server-309185011',display_name='tempest-TestNetworkBasicOps-server-309185011',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkbasicops-server-309185011',id=162,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBJO+wO/+GTOFf51kWLpwn3zBwiC/mnr0GgssprPZoW4336Xkqf5/GxM2/nY8bXLRzxB9iu8WMqvFe2Azj1RGGYQmk7SnHozPvlLaKUXaimsfpZCFxDle3QzaWsuBECiebg==',key_name='tempest-TestNetworkBasicOps-1746924842',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='6e2a4899168a47618e377cb3ac85ddd2',ramdisk_id='',reservation_id='r-g9slemmv',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestNetworkBasicOps-1323893370',owner_user_name='tempest-TestNetworkBasicOps-1323893370-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:37:17Z,user_data=None,user_id='a1898fdf056c4a249c33590f26d4d845',uuid=dbb54a17-c5e3-491f-bca4-54ddde9b72fa,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "07a62b49-14ca-420c-8b61-b7f06d28df05", "address": "fa:16:3e:aa:3d:10", "network": {"id": "a970b3c6-2fc3-4025-868b-2e9af396991a", "bridge": "br-int", "label": "tempest-network-smoke--441167180", "subnets": [{"cidr": "10.100.0.16/28", "dns": [], "gateway": {"address": null, "type": "gateway", "version": null, "meta": {}}, "ips": [{"address": "10.100.0.19", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": 
false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap07a62b49-14", "ovs_interfaceid": "07a62b49-14ca-420c-8b61-b7f06d28df05", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.204 2 DEBUG nova.network.os_vif_util [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converting VIF {"id": "07a62b49-14ca-420c-8b61-b7f06d28df05", "address": "fa:16:3e:aa:3d:10", "network": {"id": "a970b3c6-2fc3-4025-868b-2e9af396991a", "bridge": "br-int", "label": "tempest-network-smoke--441167180", "subnets": [{"cidr": "10.100.0.16/28", "dns": [], "gateway": {"address": null, "type": "gateway", "version": null, "meta": {}}, "ips": [{"address": "10.100.0.19", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap07a62b49-14", "ovs_interfaceid": "07a62b49-14ca-420c-8b61-b7f06d28df05", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.205 2 DEBUG nova.network.os_vif_util [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:aa:3d:10,bridge_name='br-int',has_traffic_filtering=True,id=07a62b49-14ca-420c-8b61-b7f06d28df05,network=Network(a970b3c6-2fc3-4025-868b-2e9af396991a),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap07a62b49-14') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.206 2 DEBUG nova.objects.instance [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lazy-loading 'pci_devices' on Instance uuid dbb54a17-c5e3-491f-bca4-54ddde9b72fa obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.228 2 DEBUG nova.virt.libvirt.driver [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:37:22 compute-0 nova_compute[192079]:   <uuid>dbb54a17-c5e3-491f-bca4-54ddde9b72fa</uuid>
Oct 02 12:37:22 compute-0 nova_compute[192079]:   <name>instance-000000a2</name>
Oct 02 12:37:22 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:37:22 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:37:22 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:37:22 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:       <nova:name>tempest-TestNetworkBasicOps-server-309185011</nova:name>
Oct 02 12:37:22 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:37:22</nova:creationTime>
Oct 02 12:37:22 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:37:22 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:37:22 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:37:22 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:37:22 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:37:22 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:37:22 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:37:22 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:37:22 compute-0 nova_compute[192079]:         <nova:user uuid="a1898fdf056c4a249c33590f26d4d845">tempest-TestNetworkBasicOps-1323893370-project-member</nova:user>
Oct 02 12:37:22 compute-0 nova_compute[192079]:         <nova:project uuid="6e2a4899168a47618e377cb3ac85ddd2">tempest-TestNetworkBasicOps-1323893370</nova:project>
Oct 02 12:37:22 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:37:22 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:37:22 compute-0 nova_compute[192079]:         <nova:port uuid="07a62b49-14ca-420c-8b61-b7f06d28df05">
Oct 02 12:37:22 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.19" ipVersion="4"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:37:22 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:37:22 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:37:22 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <system>
Oct 02 12:37:22 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:37:22 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:37:22 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:37:22 compute-0 nova_compute[192079]:       <entry name="serial">dbb54a17-c5e3-491f-bca4-54ddde9b72fa</entry>
Oct 02 12:37:22 compute-0 nova_compute[192079]:       <entry name="uuid">dbb54a17-c5e3-491f-bca4-54ddde9b72fa</entry>
Oct 02 12:37:22 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     </system>
Oct 02 12:37:22 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:37:22 compute-0 nova_compute[192079]:   <os>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:   </os>
Oct 02 12:37:22 compute-0 nova_compute[192079]:   <features>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:   </features>
Oct 02 12:37:22 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:37:22 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:37:22 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:37:22 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/dbb54a17-c5e3-491f-bca4-54ddde9b72fa/disk"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:37:22 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/dbb54a17-c5e3-491f-bca4-54ddde9b72fa/disk.config"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:37:22 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:aa:3d:10"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:       <target dev="tap07a62b49-14"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:37:22 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/dbb54a17-c5e3-491f-bca4-54ddde9b72fa/console.log" append="off"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <video>
Oct 02 12:37:22 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     </video>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:37:22 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:37:22 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:37:22 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:37:22 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:37:22 compute-0 nova_compute[192079]: </domain>
Oct 02 12:37:22 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.229 2 DEBUG nova.compute.manager [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Preparing to wait for external event network-vif-plugged-07a62b49-14ca-420c-8b61-b7f06d28df05 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.229 2 DEBUG oslo_concurrency.lockutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "dbb54a17-c5e3-491f-bca4-54ddde9b72fa-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.230 2 DEBUG oslo_concurrency.lockutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "dbb54a17-c5e3-491f-bca4-54ddde9b72fa-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.230 2 DEBUG oslo_concurrency.lockutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "dbb54a17-c5e3-491f-bca4-54ddde9b72fa-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.230 2 DEBUG nova.virt.libvirt.vif [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:37:16Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestNetworkBasicOps-server-309185011',display_name='tempest-TestNetworkBasicOps-server-309185011',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkbasicops-server-309185011',id=162,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBJO+wO/+GTOFf51kWLpwn3zBwiC/mnr0GgssprPZoW4336Xkqf5/GxM2/nY8bXLRzxB9iu8WMqvFe2Azj1RGGYQmk7SnHozPvlLaKUXaimsfpZCFxDle3QzaWsuBECiebg==',key_name='tempest-TestNetworkBasicOps-1746924842',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='6e2a4899168a47618e377cb3ac85ddd2',ramdisk_id='',reservation_id='r-g9slemmv',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestNetworkBasicOps-1323893370',owner_user_name='tempest-TestNetworkBasicOps-1323893370-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:37:17Z,user_data=None,user_id='a1898fdf056c4a249c33590f26d4d845',uuid=dbb54a17-c5e3-491f-bca4-54ddde9b72fa,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "07a62b49-14ca-420c-8b61-b7f06d28df05", "address": "fa:16:3e:aa:3d:10", "network": {"id": "a970b3c6-2fc3-4025-868b-2e9af396991a", "bridge": "br-int", "label": "tempest-network-smoke--441167180", "subnets": [{"cidr": "10.100.0.16/28", "dns": [], "gateway": {"address": null, "type": "gateway", "version": null, "meta": {}}, "ips": [{"address": "10.100.0.19", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": 
{"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap07a62b49-14", "ovs_interfaceid": "07a62b49-14ca-420c-8b61-b7f06d28df05", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.231 2 DEBUG nova.network.os_vif_util [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converting VIF {"id": "07a62b49-14ca-420c-8b61-b7f06d28df05", "address": "fa:16:3e:aa:3d:10", "network": {"id": "a970b3c6-2fc3-4025-868b-2e9af396991a", "bridge": "br-int", "label": "tempest-network-smoke--441167180", "subnets": [{"cidr": "10.100.0.16/28", "dns": [], "gateway": {"address": null, "type": "gateway", "version": null, "meta": {}}, "ips": [{"address": "10.100.0.19", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap07a62b49-14", "ovs_interfaceid": "07a62b49-14ca-420c-8b61-b7f06d28df05", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.231 2 DEBUG nova.network.os_vif_util [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:aa:3d:10,bridge_name='br-int',has_traffic_filtering=True,id=07a62b49-14ca-420c-8b61-b7f06d28df05,network=Network(a970b3c6-2fc3-4025-868b-2e9af396991a),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap07a62b49-14') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.232 2 DEBUG os_vif [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:aa:3d:10,bridge_name='br-int',has_traffic_filtering=True,id=07a62b49-14ca-420c-8b61-b7f06d28df05,network=Network(a970b3c6-2fc3-4025-868b-2e9af396991a),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap07a62b49-14') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.232 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.232 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.233 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.235 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.235 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap07a62b49-14, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.236 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap07a62b49-14, col_values=(('external_ids', {'iface-id': '07a62b49-14ca-420c-8b61-b7f06d28df05', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:aa:3d:10', 'vm-uuid': 'dbb54a17-c5e3-491f-bca4-54ddde9b72fa'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.267 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:22 compute-0 NetworkManager[51160]: <info>  [1759408642.2680] manager: (tap07a62b49-14): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/304)
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.270 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.274 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.275 2 INFO os_vif [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:aa:3d:10,bridge_name='br-int',has_traffic_filtering=True,id=07a62b49-14ca-420c-8b61-b7f06d28df05,network=Network(a970b3c6-2fc3-4025-868b-2e9af396991a),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap07a62b49-14')
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.357 2 DEBUG nova.virt.libvirt.driver [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.358 2 DEBUG nova.virt.libvirt.driver [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.358 2 DEBUG nova.virt.libvirt.driver [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] No VIF found with MAC fa:16:3e:aa:3d:10, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.358 2 INFO nova.virt.libvirt.driver [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Using config drive
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.681 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.772 2 INFO nova.virt.libvirt.driver [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Creating config drive at /var/lib/nova/instances/dbb54a17-c5e3-491f-bca4-54ddde9b72fa/disk.config
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.781 2 DEBUG oslo_concurrency.processutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/dbb54a17-c5e3-491f-bca4-54ddde9b72fa/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpyow5qlkw execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:37:22 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.916 2 DEBUG oslo_concurrency.processutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/dbb54a17-c5e3-491f-bca4-54ddde9b72fa/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpyow5qlkw" returned: 0 in 0.135s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:37:22 compute-0 NetworkManager[51160]: <info>  [1759408642.9924] manager: (tap07a62b49-14): new Tun device (/org/freedesktop/NetworkManager/Devices/305)
Oct 02 12:37:22 compute-0 kernel: tap07a62b49-14: entered promiscuous mode
Oct 02 12:37:23 compute-0 nova_compute[192079]: 2025-10-02 12:37:22.998 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:23 compute-0 ovn_controller[94336]: 2025-10-02T12:37:22Z|00628|binding|INFO|Claiming lport 07a62b49-14ca-420c-8b61-b7f06d28df05 for this chassis.
Oct 02 12:37:23 compute-0 ovn_controller[94336]: 2025-10-02T12:37:23Z|00629|binding|INFO|07a62b49-14ca-420c-8b61-b7f06d28df05: Claiming fa:16:3e:aa:3d:10 10.100.0.19
Oct 02 12:37:23 compute-0 nova_compute[192079]: 2025-10-02 12:37:23.007 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:23.015 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:aa:3d:10 10.100.0.19'], port_security=['fa:16:3e:aa:3d:10 10.100.0.19'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.19/28', 'neutron:device_id': 'dbb54a17-c5e3-491f-bca4-54ddde9b72fa', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-a970b3c6-2fc3-4025-868b-2e9af396991a', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'neutron:revision_number': '2', 'neutron:security_group_ids': '6f1ca3b6-c25a-4924-86f4-2b179dfa50a2', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=cf407807-38c2-4b6a-825d-3f40edf483e2, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=07a62b49-14ca-420c-8b61-b7f06d28df05) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:23.016 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 07a62b49-14ca-420c-8b61-b7f06d28df05 in datapath a970b3c6-2fc3-4025-868b-2e9af396991a bound to our chassis
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:23.018 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network a970b3c6-2fc3-4025-868b-2e9af396991a
Oct 02 12:37:23 compute-0 systemd-udevd[247926]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:23.032 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d904454f-82c9-4c20-8009-bddad1c33c70]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:23.033 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tapa970b3c6-21 in ovnmeta-a970b3c6-2fc3-4025-868b-2e9af396991a namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:23.035 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tapa970b3c6-20 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:23.035 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8f53111e-4034-4d65-b1dd-7926d5c13352]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:23 compute-0 systemd-machined[152150]: New machine qemu-79-instance-000000a2.
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:23.036 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[72cdbd10-9ca5-489b-891b-8ad8ac83faf5]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:23 compute-0 ovn_controller[94336]: 2025-10-02T12:37:23Z|00630|binding|INFO|Setting lport 07a62b49-14ca-420c-8b61-b7f06d28df05 ovn-installed in OVS
Oct 02 12:37:23 compute-0 ovn_controller[94336]: 2025-10-02T12:37:23Z|00631|binding|INFO|Setting lport 07a62b49-14ca-420c-8b61-b7f06d28df05 up in Southbound
Oct 02 12:37:23 compute-0 nova_compute[192079]: 2025-10-02 12:37:23.040 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:23 compute-0 NetworkManager[51160]: <info>  [1759408643.0487] device (tap07a62b49-14): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:37:23 compute-0 NetworkManager[51160]: <info>  [1759408643.0499] device (tap07a62b49-14): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:23.050 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[f95b7bbc-78f1-4e51-a3dd-31b2148ceda6]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:23 compute-0 systemd[1]: Started Virtual Machine qemu-79-instance-000000a2.
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:23.074 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[18aab51f-55d9-4f67-9e4a-86106fd06845]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:23.108 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[f72f9557-632b-4c6f-97ae-8a8b1e95584c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:23.113 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4f77dd04-d1a3-488d-9ac7-a5cc1d529b37]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:23 compute-0 NetworkManager[51160]: <info>  [1759408643.1155] manager: (tapa970b3c6-20): new Veth device (/org/freedesktop/NetworkManager/Devices/306)
Oct 02 12:37:23 compute-0 systemd-udevd[247929]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:23.157 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[c174480f-cd9f-4dd0-826d-26c8f2e964b5]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:23.162 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[e4a448a2-25d5-43db-9585-7e7b89270dd6]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:23 compute-0 NetworkManager[51160]: <info>  [1759408643.1963] device (tapa970b3c6-20): carrier: link connected
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:23.205 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[efb61173-865e-4ecb-b4e7-d97970fde3e7]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:23.224 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5da7904c-9749-4103-b1d3-1d585ca17417]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapa970b3c6-21'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:d4:4e:6e'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 200], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 664082, 'reachable_time': 16221, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 247958, 'error': None, 'target': 'ovnmeta-a970b3c6-2fc3-4025-868b-2e9af396991a', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:23.239 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[08c366c6-90b3-48fa-b73e-9050bb52a1a1]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fed4:4e6e'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 664082, 'tstamp': 664082}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 247959, 'error': None, 'target': 'ovnmeta-a970b3c6-2fc3-4025-868b-2e9af396991a', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:23.260 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[90062427-4447-44df-a10e-04b3330bec1f]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapa970b3c6-21'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:d4:4e:6e'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 200], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 664082, 'reachable_time': 16221, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 247960, 'error': None, 'target': 'ovnmeta-a970b3c6-2fc3-4025-868b-2e9af396991a', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:23.298 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[04464fc8-26f7-4908-8c06-7d234336a9d3]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:23.382 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[cc7aebd9-d0c0-4d30-a413-56b2c2476d3b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:23.384 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapa970b3c6-20, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:23.384 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:23.385 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapa970b3c6-20, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:37:23 compute-0 NetworkManager[51160]: <info>  [1759408643.4028] manager: (tapa970b3c6-20): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/307)
Oct 02 12:37:23 compute-0 kernel: tapa970b3c6-20: entered promiscuous mode
Oct 02 12:37:23 compute-0 nova_compute[192079]: 2025-10-02 12:37:23.402 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:23 compute-0 nova_compute[192079]: 2025-10-02 12:37:23.404 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:23.405 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tapa970b3c6-20, col_values=(('external_ids', {'iface-id': '6aa346c6-3e0c-4887-be68-d585d409cf95'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:37:23 compute-0 ovn_controller[94336]: 2025-10-02T12:37:23Z|00632|binding|INFO|Releasing lport 6aa346c6-3e0c-4887-be68-d585d409cf95 from this chassis (sb_readonly=0)
Oct 02 12:37:23 compute-0 nova_compute[192079]: 2025-10-02 12:37:23.406 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:23 compute-0 nova_compute[192079]: 2025-10-02 12:37:23.422 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:23.423 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/a970b3c6-2fc3-4025-868b-2e9af396991a.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/a970b3c6-2fc3-4025-868b-2e9af396991a.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:23.424 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9acd9b62-538a-4972-b73a-e8e59fbb94ce]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:23.424 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-a970b3c6-2fc3-4025-868b-2e9af396991a
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/a970b3c6-2fc3-4025-868b-2e9af396991a.pid.haproxy
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID a970b3c6-2fc3-4025-868b-2e9af396991a
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:37:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:23.425 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-a970b3c6-2fc3-4025-868b-2e9af396991a', 'env', 'PROCESS_TAG=haproxy-a970b3c6-2fc3-4025-868b-2e9af396991a', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/a970b3c6-2fc3-4025-868b-2e9af396991a.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:37:23 compute-0 nova_compute[192079]: 2025-10-02 12:37:23.799 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408643.799296, dbb54a17-c5e3-491f-bca4-54ddde9b72fa => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:37:23 compute-0 nova_compute[192079]: 2025-10-02 12:37:23.801 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] VM Started (Lifecycle Event)
Oct 02 12:37:23 compute-0 podman[247999]: 2025-10-02 12:37:23.80444215 +0000 UTC m=+0.057540617 container create 4d526a6f61c0d0e4bcfeee1417772191019cb2effffe78423d1f9aa5f70d0a3b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a970b3c6-2fc3-4025-868b-2e9af396991a, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2)
Oct 02 12:37:23 compute-0 nova_compute[192079]: 2025-10-02 12:37:23.823 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:37:23 compute-0 nova_compute[192079]: 2025-10-02 12:37:23.827 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408643.8003013, dbb54a17-c5e3-491f-bca4-54ddde9b72fa => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:37:23 compute-0 nova_compute[192079]: 2025-10-02 12:37:23.827 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] VM Paused (Lifecycle Event)
Oct 02 12:37:23 compute-0 systemd[1]: Started libpod-conmon-4d526a6f61c0d0e4bcfeee1417772191019cb2effffe78423d1f9aa5f70d0a3b.scope.
Oct 02 12:37:23 compute-0 nova_compute[192079]: 2025-10-02 12:37:23.861 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:37:23 compute-0 nova_compute[192079]: 2025-10-02 12:37:23.864 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:37:23 compute-0 podman[247999]: 2025-10-02 12:37:23.769578951 +0000 UTC m=+0.022677439 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:37:23 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:37:23 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/1b45655dbb226ba9c61b714c96de1c92cdba1b3b72b471e274b24d596bde251b/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:37:23 compute-0 podman[247999]: 2025-10-02 12:37:23.882397193 +0000 UTC m=+0.135495680 container init 4d526a6f61c0d0e4bcfeee1417772191019cb2effffe78423d1f9aa5f70d0a3b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a970b3c6-2fc3-4025-868b-2e9af396991a, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:37:23 compute-0 podman[247999]: 2025-10-02 12:37:23.887349638 +0000 UTC m=+0.140448105 container start 4d526a6f61c0d0e4bcfeee1417772191019cb2effffe78423d1f9aa5f70d0a3b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a970b3c6-2fc3-4025-868b-2e9af396991a, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:37:23 compute-0 nova_compute[192079]: 2025-10-02 12:37:23.895 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:37:23 compute-0 neutron-haproxy-ovnmeta-a970b3c6-2fc3-4025-868b-2e9af396991a[248014]: [NOTICE]   (248018) : New worker (248020) forked
Oct 02 12:37:23 compute-0 neutron-haproxy-ovnmeta-a970b3c6-2fc3-4025-868b-2e9af396991a[248014]: [NOTICE]   (248018) : Loading success.
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.094 2 DEBUG nova.network.neutron [req-41206480-a9d1-4b3b-87fe-1eb19a939088 req-7b5bfd35-1de8-44cd-a6af-4c2f29470633 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Updated VIF entry in instance network info cache for port 07a62b49-14ca-420c-8b61-b7f06d28df05. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.095 2 DEBUG nova.network.neutron [req-41206480-a9d1-4b3b-87fe-1eb19a939088 req-7b5bfd35-1de8-44cd-a6af-4c2f29470633 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Updating instance_info_cache with network_info: [{"id": "07a62b49-14ca-420c-8b61-b7f06d28df05", "address": "fa:16:3e:aa:3d:10", "network": {"id": "a970b3c6-2fc3-4025-868b-2e9af396991a", "bridge": "br-int", "label": "tempest-network-smoke--441167180", "subnets": [{"cidr": "10.100.0.16/28", "dns": [], "gateway": {"address": null, "type": "gateway", "version": null, "meta": {}}, "ips": [{"address": "10.100.0.19", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap07a62b49-14", "ovs_interfaceid": "07a62b49-14ca-420c-8b61-b7f06d28df05", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.112 2 DEBUG oslo_concurrency.lockutils [req-41206480-a9d1-4b3b-87fe-1eb19a939088 req-7b5bfd35-1de8-44cd-a6af-4c2f29470633 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-dbb54a17-c5e3-491f-bca4-54ddde9b72fa" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.718 2 DEBUG nova.compute.manager [req-ccdb7b12-6be9-4fb4-a414-fe4623d09449 req-1372c7ff-0d87-41e5-b510-e2cd8f624b31 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Received event network-vif-plugged-07a62b49-14ca-420c-8b61-b7f06d28df05 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.719 2 DEBUG oslo_concurrency.lockutils [req-ccdb7b12-6be9-4fb4-a414-fe4623d09449 req-1372c7ff-0d87-41e5-b510-e2cd8f624b31 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "dbb54a17-c5e3-491f-bca4-54ddde9b72fa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.719 2 DEBUG oslo_concurrency.lockutils [req-ccdb7b12-6be9-4fb4-a414-fe4623d09449 req-1372c7ff-0d87-41e5-b510-e2cd8f624b31 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "dbb54a17-c5e3-491f-bca4-54ddde9b72fa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.719 2 DEBUG oslo_concurrency.lockutils [req-ccdb7b12-6be9-4fb4-a414-fe4623d09449 req-1372c7ff-0d87-41e5-b510-e2cd8f624b31 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "dbb54a17-c5e3-491f-bca4-54ddde9b72fa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.719 2 DEBUG nova.compute.manager [req-ccdb7b12-6be9-4fb4-a414-fe4623d09449 req-1372c7ff-0d87-41e5-b510-e2cd8f624b31 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Processing event network-vif-plugged-07a62b49-14ca-420c-8b61-b7f06d28df05 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.720 2 DEBUG nova.compute.manager [req-ccdb7b12-6be9-4fb4-a414-fe4623d09449 req-1372c7ff-0d87-41e5-b510-e2cd8f624b31 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Received event network-vif-plugged-07a62b49-14ca-420c-8b61-b7f06d28df05 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.720 2 DEBUG oslo_concurrency.lockutils [req-ccdb7b12-6be9-4fb4-a414-fe4623d09449 req-1372c7ff-0d87-41e5-b510-e2cd8f624b31 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "dbb54a17-c5e3-491f-bca4-54ddde9b72fa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.720 2 DEBUG oslo_concurrency.lockutils [req-ccdb7b12-6be9-4fb4-a414-fe4623d09449 req-1372c7ff-0d87-41e5-b510-e2cd8f624b31 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "dbb54a17-c5e3-491f-bca4-54ddde9b72fa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.720 2 DEBUG oslo_concurrency.lockutils [req-ccdb7b12-6be9-4fb4-a414-fe4623d09449 req-1372c7ff-0d87-41e5-b510-e2cd8f624b31 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "dbb54a17-c5e3-491f-bca4-54ddde9b72fa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.720 2 DEBUG nova.compute.manager [req-ccdb7b12-6be9-4fb4-a414-fe4623d09449 req-1372c7ff-0d87-41e5-b510-e2cd8f624b31 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] No waiting events found dispatching network-vif-plugged-07a62b49-14ca-420c-8b61-b7f06d28df05 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.721 2 WARNING nova.compute.manager [req-ccdb7b12-6be9-4fb4-a414-fe4623d09449 req-1372c7ff-0d87-41e5-b510-e2cd8f624b31 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Received unexpected event network-vif-plugged-07a62b49-14ca-420c-8b61-b7f06d28df05 for instance with vm_state building and task_state spawning.
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.721 2 DEBUG nova.compute.manager [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.724 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408644.724693, dbb54a17-c5e3-491f-bca4-54ddde9b72fa => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.725 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] VM Resumed (Lifecycle Event)
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.726 2 DEBUG nova.virt.libvirt.driver [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.729 2 INFO nova.virt.libvirt.driver [-] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Instance spawned successfully.
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.729 2 DEBUG nova.virt.libvirt.driver [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.757 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.765 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.768 2 DEBUG nova.virt.libvirt.driver [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.769 2 DEBUG nova.virt.libvirt.driver [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.769 2 DEBUG nova.virt.libvirt.driver [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.770 2 DEBUG nova.virt.libvirt.driver [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.770 2 DEBUG nova.virt.libvirt.driver [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.771 2 DEBUG nova.virt.libvirt.driver [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.806 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.867 2 INFO nova.compute.manager [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Took 7.31 seconds to spawn the instance on the hypervisor.
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.868 2 DEBUG nova.compute.manager [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.959 2 INFO nova.compute.manager [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Took 7.97 seconds to build instance.
Oct 02 12:37:24 compute-0 nova_compute[192079]: 2025-10-02 12:37:24.978 2 DEBUG oslo_concurrency.lockutils [None req-a4dd6cdd-1f6a-40cd-a9a4-963dc03c4e47 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "dbb54a17-c5e3-491f-bca4-54ddde9b72fa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 8.089s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:37:25 compute-0 podman[248029]: 2025-10-02 12:37:25.157830564 +0000 UTC m=+0.070223102 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=ceilometer_agent_compute, org.label-schema.license=GPLv2, config_id=edpm, 
org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:37:27 compute-0 nova_compute[192079]: 2025-10-02 12:37:27.267 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:27 compute-0 nova_compute[192079]: 2025-10-02 12:37:27.683 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:32 compute-0 nova_compute[192079]: 2025-10-02 12:37:32.271 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:32 compute-0 nova_compute[192079]: 2025-10-02 12:37:32.724 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:33 compute-0 podman[248048]: 2025-10-02 12:37:33.179751752 +0000 UTC m=+0.079322830 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=multipathd, io.buildah.version=1.41.3, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2)
Oct 02 12:37:33 compute-0 podman[248047]: 2025-10-02 12:37:33.190793753 +0000 UTC m=+0.089636222 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.33.7, io.openshift.tags=minimal rhel9, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, vendor=Red Hat, Inc., summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., build-date=2025-08-20T13:12:41, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, config_id=edpm, io.openshift.expose-services=, 
url=https://catalog.redhat.com/en/search?searchType=containers, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, maintainer=Red Hat, Inc., io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., managed_by=edpm_ansible, release=1755695350, vcs-type=git, version=9.6, distribution-scope=public, container_name=openstack_network_exporter, name=ubi9-minimal, architecture=x86_64, com.redhat.component=ubi9-minimal-container)
Oct 02 12:37:36 compute-0 ovn_controller[94336]: 2025-10-02T12:37:36Z|00067|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:aa:3d:10 10.100.0.19
Oct 02 12:37:36 compute-0 ovn_controller[94336]: 2025-10-02T12:37:36Z|00068|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:aa:3d:10 10.100.0.19
Oct 02 12:37:36 compute-0 nova_compute[192079]: 2025-10-02 12:37:36.681 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:37:37 compute-0 podman[248101]: 2025-10-02 12:37:37.144883188 +0000 UTC m=+0.055710298 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, config_id=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, container_name=iscsid, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_managed=true)
Oct 02 12:37:37 compute-0 podman[248100]: 2025-10-02 12:37:37.14753038 +0000 UTC m=+0.060801396 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:37:37 compute-0 nova_compute[192079]: 2025-10-02 12:37:37.321 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:37 compute-0 nova_compute[192079]: 2025-10-02 12:37:37.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:37:37 compute-0 nova_compute[192079]: 2025-10-02 12:37:37.730 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:41 compute-0 nova_compute[192079]: 2025-10-02 12:37:41.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:37:41 compute-0 nova_compute[192079]: 2025-10-02 12:37:41.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:37:41 compute-0 nova_compute[192079]: 2025-10-02 12:37:41.736 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:37:41 compute-0 nova_compute[192079]: 2025-10-02 12:37:41.737 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:37:41 compute-0 nova_compute[192079]: 2025-10-02 12:37:41.737 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:37:41 compute-0 nova_compute[192079]: 2025-10-02 12:37:41.737 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:37:41 compute-0 nova_compute[192079]: 2025-10-02 12:37:41.896 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/dbb54a17-c5e3-491f-bca4-54ddde9b72fa/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:37:41 compute-0 nova_compute[192079]: 2025-10-02 12:37:41.972 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/dbb54a17-c5e3-491f-bca4-54ddde9b72fa/disk --force-share --output=json" returned: 0 in 0.076s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:37:41 compute-0 nova_compute[192079]: 2025-10-02 12:37:41.973 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/dbb54a17-c5e3-491f-bca4-54ddde9b72fa/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:37:42 compute-0 nova_compute[192079]: 2025-10-02 12:37:42.052 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/dbb54a17-c5e3-491f-bca4-54ddde9b72fa/disk --force-share --output=json" returned: 0 in 0.079s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:37:42 compute-0 nova_compute[192079]: 2025-10-02 12:37:42.249 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:37:42 compute-0 nova_compute[192079]: 2025-10-02 12:37:42.251 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5512MB free_disk=73.31109237670898GB free_vcpus=7 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:37:42 compute-0 nova_compute[192079]: 2025-10-02 12:37:42.251 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:37:42 compute-0 nova_compute[192079]: 2025-10-02 12:37:42.252 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:37:42 compute-0 nova_compute[192079]: 2025-10-02 12:37:42.324 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:42 compute-0 nova_compute[192079]: 2025-10-02 12:37:42.342 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance dbb54a17-c5e3-491f-bca4-54ddde9b72fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:37:42 compute-0 nova_compute[192079]: 2025-10-02 12:37:42.343 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 1 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:37:42 compute-0 nova_compute[192079]: 2025-10-02 12:37:42.343 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=640MB phys_disk=79GB used_disk=1GB total_vcpus=8 used_vcpus=1 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:37:42 compute-0 nova_compute[192079]: 2025-10-02 12:37:42.400 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:37:42 compute-0 nova_compute[192079]: 2025-10-02 12:37:42.418 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:37:42 compute-0 nova_compute[192079]: 2025-10-02 12:37:42.454 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:37:42 compute-0 nova_compute[192079]: 2025-10-02 12:37:42.455 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.203s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:37:42 compute-0 nova_compute[192079]: 2025-10-02 12:37:42.730 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:45 compute-0 nova_compute[192079]: 2025-10-02 12:37:45.454 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:37:45 compute-0 nova_compute[192079]: 2025-10-02 12:37:45.455 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:37:45 compute-0 nova_compute[192079]: 2025-10-02 12:37:45.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:37:46 compute-0 podman[248152]: 2025-10-02 12:37:46.217221093 +0000 UTC m=+0.118862027 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS 
Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_metadata_agent, tcib_managed=true, config_id=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS)
Oct 02 12:37:46 compute-0 podman[248154]: 2025-10-02 12:37:46.217258114 +0000 UTC m=+0.110199901 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 12:37:46 compute-0 podman[248153]: 2025-10-02 12:37:46.231154082 +0000 UTC m=+0.127568804 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, config_id=ovn_controller, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, container_name=ovn_controller, org.label-schema.build-date=20251001)
Oct 02 12:37:46 compute-0 nova_compute[192079]: 2025-10-02 12:37:46.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:37:46 compute-0 nova_compute[192079]: 2025-10-02 12:37:46.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:37:47 compute-0 nova_compute[192079]: 2025-10-02 12:37:47.364 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:47 compute-0 nova_compute[192079]: 2025-10-02 12:37:47.732 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:49 compute-0 nova_compute[192079]: 2025-10-02 12:37:49.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:37:49 compute-0 nova_compute[192079]: 2025-10-02 12:37:49.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:37:49 compute-0 nova_compute[192079]: 2025-10-02 12:37:49.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:37:49 compute-0 nova_compute[192079]: 2025-10-02 12:37:49.926 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "refresh_cache-dbb54a17-c5e3-491f-bca4-54ddde9b72fa" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:37:49 compute-0 nova_compute[192079]: 2025-10-02 12:37:49.927 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquired lock "refresh_cache-dbb54a17-c5e3-491f-bca4-54ddde9b72fa" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:37:49 compute-0 nova_compute[192079]: 2025-10-02 12:37:49.927 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Forcefully refreshing network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2004
Oct 02 12:37:49 compute-0 nova_compute[192079]: 2025-10-02 12:37:49.927 2 DEBUG nova.objects.instance [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lazy-loading 'info_cache' on Instance uuid dbb54a17-c5e3-491f-bca4-54ddde9b72fa obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:37:51 compute-0 nova_compute[192079]: 2025-10-02 12:37:51.417 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Updating instance_info_cache with network_info: [{"id": "07a62b49-14ca-420c-8b61-b7f06d28df05", "address": "fa:16:3e:aa:3d:10", "network": {"id": "a970b3c6-2fc3-4025-868b-2e9af396991a", "bridge": "br-int", "label": "tempest-network-smoke--441167180", "subnets": [{"cidr": "10.100.0.16/28", "dns": [], "gateway": {"address": null, "type": "gateway", "version": null, "meta": {}}, "ips": [{"address": "10.100.0.19", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap07a62b49-14", "ovs_interfaceid": "07a62b49-14ca-420c-8b61-b7f06d28df05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:37:51 compute-0 nova_compute[192079]: 2025-10-02 12:37:51.433 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Releasing lock "refresh_cache-dbb54a17-c5e3-491f-bca4-54ddde9b72fa" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:37:51 compute-0 nova_compute[192079]: 2025-10-02 12:37:51.434 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Updated the network info_cache for instance _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9929
Oct 02 12:37:51 compute-0 nova_compute[192079]: 2025-10-02 12:37:51.534 2 DEBUG oslo_concurrency.lockutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Acquiring lock "2f4dba21-eb3b-48e5-b17a-724f9ab6459e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:37:51 compute-0 nova_compute[192079]: 2025-10-02 12:37:51.535 2 DEBUG oslo_concurrency.lockutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Lock "2f4dba21-eb3b-48e5-b17a-724f9ab6459e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:37:51 compute-0 nova_compute[192079]: 2025-10-02 12:37:51.555 2 DEBUG nova.compute.manager [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:37:51 compute-0 nova_compute[192079]: 2025-10-02 12:37:51.666 2 DEBUG oslo_concurrency.lockutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:37:51 compute-0 nova_compute[192079]: 2025-10-02 12:37:51.667 2 DEBUG oslo_concurrency.lockutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:37:51 compute-0 nova_compute[192079]: 2025-10-02 12:37:51.672 2 DEBUG nova.virt.hardware [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:37:51 compute-0 nova_compute[192079]: 2025-10-02 12:37:51.673 2 INFO nova.compute.claims [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:37:51 compute-0 nova_compute[192079]: 2025-10-02 12:37:51.787 2 DEBUG nova.compute.provider_tree [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:37:51 compute-0 nova_compute[192079]: 2025-10-02 12:37:51.799 2 DEBUG nova.scheduler.client.report [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:37:51 compute-0 nova_compute[192079]: 2025-10-02 12:37:51.821 2 DEBUG oslo_concurrency.lockutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.155s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:37:51 compute-0 nova_compute[192079]: 2025-10-02 12:37:51.822 2 DEBUG nova.compute.manager [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:37:51 compute-0 nova_compute[192079]: 2025-10-02 12:37:51.875 2 DEBUG nova.compute.manager [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:37:51 compute-0 nova_compute[192079]: 2025-10-02 12:37:51.875 2 DEBUG nova.network.neutron [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:37:51 compute-0 nova_compute[192079]: 2025-10-02 12:37:51.892 2 INFO nova.virt.libvirt.driver [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:37:51 compute-0 nova_compute[192079]: 2025-10-02 12:37:51.908 2 DEBUG nova.compute.manager [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:37:52 compute-0 nova_compute[192079]: 2025-10-02 12:37:52.019 2 DEBUG nova.compute.manager [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:37:52 compute-0 nova_compute[192079]: 2025-10-02 12:37:52.020 2 DEBUG nova.virt.libvirt.driver [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:37:52 compute-0 nova_compute[192079]: 2025-10-02 12:37:52.020 2 INFO nova.virt.libvirt.driver [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Creating image(s)
Oct 02 12:37:52 compute-0 nova_compute[192079]: 2025-10-02 12:37:52.021 2 DEBUG oslo_concurrency.lockutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Acquiring lock "/var/lib/nova/instances/2f4dba21-eb3b-48e5-b17a-724f9ab6459e/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:37:52 compute-0 nova_compute[192079]: 2025-10-02 12:37:52.021 2 DEBUG oslo_concurrency.lockutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Lock "/var/lib/nova/instances/2f4dba21-eb3b-48e5-b17a-724f9ab6459e/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:37:52 compute-0 nova_compute[192079]: 2025-10-02 12:37:52.022 2 DEBUG oslo_concurrency.lockutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Lock "/var/lib/nova/instances/2f4dba21-eb3b-48e5-b17a-724f9ab6459e/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:37:52 compute-0 nova_compute[192079]: 2025-10-02 12:37:52.022 2 DEBUG oslo_concurrency.lockutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Acquiring lock "6226a5cf11b26d104556719508e058e93aa7073d" by "nova.virt.libvirt.imagebackend.Image.cache.<locals>.fetch_func_sync" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:37:52 compute-0 nova_compute[192079]: 2025-10-02 12:37:52.023 2 DEBUG oslo_concurrency.lockutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Lock "6226a5cf11b26d104556719508e058e93aa7073d" acquired by "nova.virt.libvirt.imagebackend.Image.cache.<locals>.fetch_func_sync" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:37:52 compute-0 nova_compute[192079]: 2025-10-02 12:37:52.068 2 DEBUG nova.policy [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6d07868c23de4edc9018d8964b43d954', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8f7d693b90ba447196796435b74590f6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:37:52 compute-0 nova_compute[192079]: 2025-10-02 12:37:52.369 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:52 compute-0 nova_compute[192079]: 2025-10-02 12:37:52.734 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:53 compute-0 nova_compute[192079]: 2025-10-02 12:37:53.341 2 DEBUG nova.network.neutron [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Successfully created port: 4c827308-ad8f-4ea0-ac23-feff2dac22eb _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:37:53 compute-0 nova_compute[192079]: 2025-10-02 12:37:53.625 2 DEBUG oslo_concurrency.processutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/6226a5cf11b26d104556719508e058e93aa7073d.part --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:37:53 compute-0 nova_compute[192079]: 2025-10-02 12:37:53.678 2 DEBUG oslo_concurrency.processutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/6226a5cf11b26d104556719508e058e93aa7073d.part --force-share --output=json" returned: 0 in 0.053s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:37:53 compute-0 nova_compute[192079]: 2025-10-02 12:37:53.679 2 DEBUG nova.virt.images [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] 800f3823-995d-405f-a6ba-5aa2a313bb4a was qcow2, converting to raw fetch_to_raw /usr/lib/python3.9/site-packages/nova/virt/images.py:242
Oct 02 12:37:53 compute-0 nova_compute[192079]: 2025-10-02 12:37:53.680 2 DEBUG nova.privsep.utils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Path '/var/lib/nova/instances' supports direct I/O supports_direct_io /usr/lib/python3.9/site-packages/nova/privsep/utils.py:63
Oct 02 12:37:53 compute-0 nova_compute[192079]: 2025-10-02 12:37:53.681 2 DEBUG oslo_concurrency.processutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Running cmd (subprocess): qemu-img convert -t none -O raw -f qcow2 /var/lib/nova/instances/_base/6226a5cf11b26d104556719508e058e93aa7073d.part /var/lib/nova/instances/_base/6226a5cf11b26d104556719508e058e93aa7073d.converted execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:37:53 compute-0 nova_compute[192079]: 2025-10-02 12:37:53.897 2 DEBUG oslo_concurrency.processutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] CMD "qemu-img convert -t none -O raw -f qcow2 /var/lib/nova/instances/_base/6226a5cf11b26d104556719508e058e93aa7073d.part /var/lib/nova/instances/_base/6226a5cf11b26d104556719508e058e93aa7073d.converted" returned: 0 in 0.216s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:37:53 compute-0 nova_compute[192079]: 2025-10-02 12:37:53.906 2 DEBUG oslo_concurrency.processutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/6226a5cf11b26d104556719508e058e93aa7073d.converted --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:37:53 compute-0 nova_compute[192079]: 2025-10-02 12:37:53.960 2 DEBUG oslo_concurrency.processutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/6226a5cf11b26d104556719508e058e93aa7073d.converted --force-share --output=json" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:37:53 compute-0 nova_compute[192079]: 2025-10-02 12:37:53.961 2 DEBUG oslo_concurrency.lockutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Lock "6226a5cf11b26d104556719508e058e93aa7073d" "released" by "nova.virt.libvirt.imagebackend.Image.cache.<locals>.fetch_func_sync" :: held 1.939s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:37:53 compute-0 nova_compute[192079]: 2025-10-02 12:37:53.974 2 DEBUG oslo_concurrency.processutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/6226a5cf11b26d104556719508e058e93aa7073d --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:37:54 compute-0 nova_compute[192079]: 2025-10-02 12:37:54.037 2 DEBUG oslo_concurrency.processutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/6226a5cf11b26d104556719508e058e93aa7073d --force-share --output=json" returned: 0 in 0.063s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:37:54 compute-0 nova_compute[192079]: 2025-10-02 12:37:54.038 2 DEBUG oslo_concurrency.lockutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Acquiring lock "6226a5cf11b26d104556719508e058e93aa7073d" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:37:54 compute-0 nova_compute[192079]: 2025-10-02 12:37:54.038 2 DEBUG oslo_concurrency.lockutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Lock "6226a5cf11b26d104556719508e058e93aa7073d" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:37:54 compute-0 nova_compute[192079]: 2025-10-02 12:37:54.049 2 DEBUG oslo_concurrency.processutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/6226a5cf11b26d104556719508e058e93aa7073d --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:37:54 compute-0 nova_compute[192079]: 2025-10-02 12:37:54.092 2 DEBUG nova.network.neutron [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Successfully updated port: 4c827308-ad8f-4ea0-ac23-feff2dac22eb _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:37:54 compute-0 nova_compute[192079]: 2025-10-02 12:37:54.101 2 DEBUG oslo_concurrency.processutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/6226a5cf11b26d104556719508e058e93aa7073d --force-share --output=json" returned: 0 in 0.053s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:37:54 compute-0 nova_compute[192079]: 2025-10-02 12:37:54.102 2 DEBUG oslo_concurrency.processutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/6226a5cf11b26d104556719508e058e93aa7073d,backing_fmt=raw /var/lib/nova/instances/2f4dba21-eb3b-48e5-b17a-724f9ab6459e/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:37:54 compute-0 nova_compute[192079]: 2025-10-02 12:37:54.119 2 DEBUG oslo_concurrency.lockutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Acquiring lock "refresh_cache-2f4dba21-eb3b-48e5-b17a-724f9ab6459e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:37:54 compute-0 nova_compute[192079]: 2025-10-02 12:37:54.120 2 DEBUG oslo_concurrency.lockutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Acquired lock "refresh_cache-2f4dba21-eb3b-48e5-b17a-724f9ab6459e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:37:54 compute-0 nova_compute[192079]: 2025-10-02 12:37:54.120 2 DEBUG nova.network.neutron [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:37:54 compute-0 nova_compute[192079]: 2025-10-02 12:37:54.137 2 DEBUG oslo_concurrency.processutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/6226a5cf11b26d104556719508e058e93aa7073d,backing_fmt=raw /var/lib/nova/instances/2f4dba21-eb3b-48e5-b17a-724f9ab6459e/disk 1073741824" returned: 0 in 0.035s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:37:54 compute-0 nova_compute[192079]: 2025-10-02 12:37:54.138 2 DEBUG oslo_concurrency.lockutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Lock "6226a5cf11b26d104556719508e058e93aa7073d" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.099s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:37:54 compute-0 nova_compute[192079]: 2025-10-02 12:37:54.138 2 DEBUG oslo_concurrency.processutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/6226a5cf11b26d104556719508e058e93aa7073d --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:37:54 compute-0 nova_compute[192079]: 2025-10-02 12:37:54.190 2 DEBUG oslo_concurrency.processutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/6226a5cf11b26d104556719508e058e93aa7073d --force-share --output=json" returned: 0 in 0.052s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:37:54 compute-0 nova_compute[192079]: 2025-10-02 12:37:54.191 2 DEBUG nova.objects.instance [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Lazy-loading 'migration_context' on Instance uuid 2f4dba21-eb3b-48e5-b17a-724f9ab6459e obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:37:54 compute-0 nova_compute[192079]: 2025-10-02 12:37:54.204 2 DEBUG nova.virt.libvirt.driver [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:37:54 compute-0 nova_compute[192079]: 2025-10-02 12:37:54.204 2 DEBUG nova.virt.libvirt.driver [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Ensure instance console log exists: /var/lib/nova/instances/2f4dba21-eb3b-48e5-b17a-724f9ab6459e/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:37:54 compute-0 nova_compute[192079]: 2025-10-02 12:37:54.205 2 DEBUG oslo_concurrency.lockutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:37:54 compute-0 nova_compute[192079]: 2025-10-02 12:37:54.205 2 DEBUG oslo_concurrency.lockutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:37:54 compute-0 nova_compute[192079]: 2025-10-02 12:37:54.205 2 DEBUG oslo_concurrency.lockutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:37:54 compute-0 nova_compute[192079]: 2025-10-02 12:37:54.213 2 DEBUG nova.compute.manager [req-fdaa614f-e360-4a23-9f96-486c3c19222c req-4ff076de-d62c-474a-93ef-5e936dbea559 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Received event network-changed-4c827308-ad8f-4ea0-ac23-feff2dac22eb external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:37:54 compute-0 nova_compute[192079]: 2025-10-02 12:37:54.213 2 DEBUG nova.compute.manager [req-fdaa614f-e360-4a23-9f96-486c3c19222c req-4ff076de-d62c-474a-93ef-5e936dbea559 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Refreshing instance network info cache due to event network-changed-4c827308-ad8f-4ea0-ac23-feff2dac22eb. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:37:54 compute-0 nova_compute[192079]: 2025-10-02 12:37:54.213 2 DEBUG oslo_concurrency.lockutils [req-fdaa614f-e360-4a23-9f96-486c3c19222c req-4ff076de-d62c-474a-93ef-5e936dbea559 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-2f4dba21-eb3b-48e5-b17a-724f9ab6459e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:37:54 compute-0 nova_compute[192079]: 2025-10-02 12:37:54.319 2 DEBUG nova.network.neutron [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.392 2 DEBUG nova.network.neutron [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Updating instance_info_cache with network_info: [{"id": "4c827308-ad8f-4ea0-ac23-feff2dac22eb", "address": "fa:16:3e:bd:4c:e1", "network": {"id": "95da58c1-265e-4dd9-ba00-692853005e46", "bridge": "br-int", "label": "tempest-TestSnapshotPattern-603762842-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "8f7d693b90ba447196796435b74590f6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap4c827308-ad", "ovs_interfaceid": "4c827308-ad8f-4ea0-ac23-feff2dac22eb", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.426 2 DEBUG oslo_concurrency.lockutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Releasing lock "refresh_cache-2f4dba21-eb3b-48e5-b17a-724f9ab6459e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.427 2 DEBUG nova.compute.manager [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Instance network_info: |[{"id": "4c827308-ad8f-4ea0-ac23-feff2dac22eb", "address": "fa:16:3e:bd:4c:e1", "network": {"id": "95da58c1-265e-4dd9-ba00-692853005e46", "bridge": "br-int", "label": "tempest-TestSnapshotPattern-603762842-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "8f7d693b90ba447196796435b74590f6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap4c827308-ad", "ovs_interfaceid": "4c827308-ad8f-4ea0-ac23-feff2dac22eb", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.429 2 DEBUG oslo_concurrency.lockutils [req-fdaa614f-e360-4a23-9f96-486c3c19222c req-4ff076de-d62c-474a-93ef-5e936dbea559 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-2f4dba21-eb3b-48e5-b17a-724f9ab6459e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.430 2 DEBUG nova.network.neutron [req-fdaa614f-e360-4a23-9f96-486c3c19222c req-4ff076de-d62c-474a-93ef-5e936dbea559 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Refreshing network info cache for port 4c827308-ad8f-4ea0-ac23-feff2dac22eb _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.436 2 DEBUG nova.virt.libvirt.driver [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Start _get_guest_xml network_info=[{"id": "4c827308-ad8f-4ea0-ac23-feff2dac22eb", "address": "fa:16:3e:bd:4c:e1", "network": {"id": "95da58c1-265e-4dd9-ba00-692853005e46", "bridge": "br-int", "label": "tempest-TestSnapshotPattern-603762842-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "8f7d693b90ba447196796435b74590f6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap4c827308-ad", "ovs_interfaceid": "4c827308-ad8f-4ea0-ac23-feff2dac22eb", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='2c81b56cf5e043d147041aef370b5041',container_format='bare',created_at=2025-10-02T12:37:41Z,direct_url=<?>,disk_format='qcow2',id=800f3823-995d-405f-a6ba-5aa2a313bb4a,min_disk=1,min_ram=0,name='tempest-TestSnapshotPatternsnapshot-319802408',owner='8f7d693b90ba447196796435b74590f6',properties=ImageMetaProps,protected=<?>,size=52297728,status='active',tags=<?>,updated_at=2025-10-02T12:37:47Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': '800f3823-995d-405f-a6ba-5aa2a313bb4a'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.446 2 WARNING nova.virt.libvirt.driver [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.454 2 DEBUG nova.virt.libvirt.host [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.455 2 DEBUG nova.virt.libvirt.host [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.465 2 DEBUG nova.virt.libvirt.host [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.466 2 DEBUG nova.virt.libvirt.host [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.469 2 DEBUG nova.virt.libvirt.driver [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.469 2 DEBUG nova.virt.hardware [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='2c81b56cf5e043d147041aef370b5041',container_format='bare',created_at=2025-10-02T12:37:41Z,direct_url=<?>,disk_format='qcow2',id=800f3823-995d-405f-a6ba-5aa2a313bb4a,min_disk=1,min_ram=0,name='tempest-TestSnapshotPatternsnapshot-319802408',owner='8f7d693b90ba447196796435b74590f6',properties=ImageMetaProps,protected=<?>,size=52297728,status='active',tags=<?>,updated_at=2025-10-02T12:37:47Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.470 2 DEBUG nova.virt.hardware [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.470 2 DEBUG nova.virt.hardware [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.471 2 DEBUG nova.virt.hardware [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.471 2 DEBUG nova.virt.hardware [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.472 2 DEBUG nova.virt.hardware [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.472 2 DEBUG nova.virt.hardware [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.473 2 DEBUG nova.virt.hardware [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.473 2 DEBUG nova.virt.hardware [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.474 2 DEBUG nova.virt.hardware [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.474 2 DEBUG nova.virt.hardware [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.477 2 DEBUG nova.virt.libvirt.vif [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:37:50Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestSnapshotPattern-server-5496910',display_name='tempest-TestSnapshotPattern-server-5496910',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testsnapshotpattern-server-5496910',id=164,image_ref='800f3823-995d-405f-a6ba-5aa2a313bb4a',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBFzJGGGUE+Xks9+aY5SjFk2n2DGAnXfOBhkbeNeuAVWQ/dQZsUYNFa4aU04DL6V5Ahv7YBoVwhzJt5xloq0NtgboR41kXTeWdHADR0n2ucoHL3yxU4d4gs2dS5flZPM85w==',key_name='tempest-TestSnapshotPattern-331136498',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='8f7d693b90ba447196796435b74590f6',ramdisk_id='',reservation_id='r-0ubse6sn',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_boot_roles='reader,member',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_image_location='snapshot',image_image_state='available',image_image_type='snapshot',image_instance_uuid='a489cbb2-1400-41b4-9345-18186b74b900',image_min_disk='1',image_min_ram='0',image_owner_id='8f7d693b90ba447196796435b74590f6',image_owner_project_name='tempest-TestSnapshotPattern-1950942920',image_owner_user_name='tempest-TestSnapshotPattern-1950942920-project-member',image_user_id='6d07868c23de4edc9018d8964b43d954',image_version='8.0',network_allocated='True',owner_project_name='tempest-TestSnapshotPattern-1950942920',owner_user_name='tempest-TestSnapshotPattern-1950942920-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:37:51Z,user_data=None,user_id='6d07868c23de4edc9018d8964b43d954',uuid=2f4dba
21-eb3b-48e5-b17a-724f9ab6459e,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "4c827308-ad8f-4ea0-ac23-feff2dac22eb", "address": "fa:16:3e:bd:4c:e1", "network": {"id": "95da58c1-265e-4dd9-ba00-692853005e46", "bridge": "br-int", "label": "tempest-TestSnapshotPattern-603762842-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "8f7d693b90ba447196796435b74590f6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap4c827308-ad", "ovs_interfaceid": "4c827308-ad8f-4ea0-ac23-feff2dac22eb", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.478 2 DEBUG nova.network.os_vif_util [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Converting VIF {"id": "4c827308-ad8f-4ea0-ac23-feff2dac22eb", "address": "fa:16:3e:bd:4c:e1", "network": {"id": "95da58c1-265e-4dd9-ba00-692853005e46", "bridge": "br-int", "label": "tempest-TestSnapshotPattern-603762842-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "8f7d693b90ba447196796435b74590f6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap4c827308-ad", "ovs_interfaceid": "4c827308-ad8f-4ea0-ac23-feff2dac22eb", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.478 2 DEBUG nova.network.os_vif_util [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:bd:4c:e1,bridge_name='br-int',has_traffic_filtering=True,id=4c827308-ad8f-4ea0-ac23-feff2dac22eb,network=Network(95da58c1-265e-4dd9-ba00-692853005e46),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap4c827308-ad') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.479 2 DEBUG nova.objects.instance [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Lazy-loading 'pci_devices' on Instance uuid 2f4dba21-eb3b-48e5-b17a-724f9ab6459e obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.493 2 DEBUG nova.virt.libvirt.driver [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:37:55 compute-0 nova_compute[192079]:   <uuid>2f4dba21-eb3b-48e5-b17a-724f9ab6459e</uuid>
Oct 02 12:37:55 compute-0 nova_compute[192079]:   <name>instance-000000a4</name>
Oct 02 12:37:55 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:37:55 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:37:55 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:37:55 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:       <nova:name>tempest-TestSnapshotPattern-server-5496910</nova:name>
Oct 02 12:37:55 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:37:55</nova:creationTime>
Oct 02 12:37:55 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:37:55 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:37:55 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:37:55 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:37:55 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:37:55 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:37:55 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:37:55 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:37:55 compute-0 nova_compute[192079]:         <nova:user uuid="6d07868c23de4edc9018d8964b43d954">tempest-TestSnapshotPattern-1950942920-project-member</nova:user>
Oct 02 12:37:55 compute-0 nova_compute[192079]:         <nova:project uuid="8f7d693b90ba447196796435b74590f6">tempest-TestSnapshotPattern-1950942920</nova:project>
Oct 02 12:37:55 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:37:55 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="800f3823-995d-405f-a6ba-5aa2a313bb4a"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:37:55 compute-0 nova_compute[192079]:         <nova:port uuid="4c827308-ad8f-4ea0-ac23-feff2dac22eb">
Oct 02 12:37:55 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.14" ipVersion="4"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:37:55 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:37:55 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:37:55 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <system>
Oct 02 12:37:55 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:37:55 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:37:55 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:37:55 compute-0 nova_compute[192079]:       <entry name="serial">2f4dba21-eb3b-48e5-b17a-724f9ab6459e</entry>
Oct 02 12:37:55 compute-0 nova_compute[192079]:       <entry name="uuid">2f4dba21-eb3b-48e5-b17a-724f9ab6459e</entry>
Oct 02 12:37:55 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     </system>
Oct 02 12:37:55 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:37:55 compute-0 nova_compute[192079]:   <os>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:   </os>
Oct 02 12:37:55 compute-0 nova_compute[192079]:   <features>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:   </features>
Oct 02 12:37:55 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:37:55 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:37:55 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:37:55 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/2f4dba21-eb3b-48e5-b17a-724f9ab6459e/disk"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:37:55 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/2f4dba21-eb3b-48e5-b17a-724f9ab6459e/disk.config"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:37:55 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:bd:4c:e1"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:       <target dev="tap4c827308-ad"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:37:55 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/2f4dba21-eb3b-48e5-b17a-724f9ab6459e/console.log" append="off"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <video>
Oct 02 12:37:55 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     </video>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <input type="keyboard" bus="usb"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:37:55 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:37:55 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:37:55 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:37:55 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:37:55 compute-0 nova_compute[192079]: </domain>
Oct 02 12:37:55 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.493 2 DEBUG nova.compute.manager [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Preparing to wait for external event network-vif-plugged-4c827308-ad8f-4ea0-ac23-feff2dac22eb prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.493 2 DEBUG oslo_concurrency.lockutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Acquiring lock "2f4dba21-eb3b-48e5-b17a-724f9ab6459e-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.494 2 DEBUG oslo_concurrency.lockutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Lock "2f4dba21-eb3b-48e5-b17a-724f9ab6459e-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.494 2 DEBUG oslo_concurrency.lockutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Lock "2f4dba21-eb3b-48e5-b17a-724f9ab6459e-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.494 2 DEBUG nova.virt.libvirt.vif [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:37:50Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestSnapshotPattern-server-5496910',display_name='tempest-TestSnapshotPattern-server-5496910',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testsnapshotpattern-server-5496910',id=164,image_ref='800f3823-995d-405f-a6ba-5aa2a313bb4a',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBFzJGGGUE+Xks9+aY5SjFk2n2DGAnXfOBhkbeNeuAVWQ/dQZsUYNFa4aU04DL6V5Ahv7YBoVwhzJt5xloq0NtgboR41kXTeWdHADR0n2ucoHL3yxU4d4gs2dS5flZPM85w==',key_name='tempest-TestSnapshotPattern-331136498',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='8f7d693b90ba447196796435b74590f6',ramdisk_id='',reservation_id='r-0ubse6sn',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_boot_roles='reader,member',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_image_location='snapshot',image_image_state='available',image_image_type='snapshot',image_instance_uuid='a489cbb2-1400-41b4-9345-18186b74b900',image_min_disk='1',image_min_ram='0',image_owner_id='8f7d693b90ba447196796435b74590f6',image_owner_project_name='tempest-TestSnapshotPattern-1950942920',image_owner_user_name='tempest-TestSnapshotPattern-1950942920-project-member',image_user_id='6d07868c23de4edc9018d8964b43d954',image_version='8.0',network_allocated='True',owner_project_name='tempest-TestSnapshotPattern-1950942920',owner_user_name='tempest-TestSnapshotPattern-1950942920-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:37:51Z,user_data=None,user_id='6d07868c23de4edc9018d8964b43d954',u
uid=2f4dba21-eb3b-48e5-b17a-724f9ab6459e,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "4c827308-ad8f-4ea0-ac23-feff2dac22eb", "address": "fa:16:3e:bd:4c:e1", "network": {"id": "95da58c1-265e-4dd9-ba00-692853005e46", "bridge": "br-int", "label": "tempest-TestSnapshotPattern-603762842-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "8f7d693b90ba447196796435b74590f6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap4c827308-ad", "ovs_interfaceid": "4c827308-ad8f-4ea0-ac23-feff2dac22eb", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.495 2 DEBUG nova.network.os_vif_util [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Converting VIF {"id": "4c827308-ad8f-4ea0-ac23-feff2dac22eb", "address": "fa:16:3e:bd:4c:e1", "network": {"id": "95da58c1-265e-4dd9-ba00-692853005e46", "bridge": "br-int", "label": "tempest-TestSnapshotPattern-603762842-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "8f7d693b90ba447196796435b74590f6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap4c827308-ad", "ovs_interfaceid": "4c827308-ad8f-4ea0-ac23-feff2dac22eb", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.495 2 DEBUG nova.network.os_vif_util [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:bd:4c:e1,bridge_name='br-int',has_traffic_filtering=True,id=4c827308-ad8f-4ea0-ac23-feff2dac22eb,network=Network(95da58c1-265e-4dd9-ba00-692853005e46),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap4c827308-ad') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.495 2 DEBUG os_vif [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:bd:4c:e1,bridge_name='br-int',has_traffic_filtering=True,id=4c827308-ad8f-4ea0-ac23-feff2dac22eb,network=Network(95da58c1-265e-4dd9-ba00-692853005e46),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap4c827308-ad') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.496 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.496 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.497 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.500 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.501 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap4c827308-ad, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.501 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap4c827308-ad, col_values=(('external_ids', {'iface-id': '4c827308-ad8f-4ea0-ac23-feff2dac22eb', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:bd:4c:e1', 'vm-uuid': '2f4dba21-eb3b-48e5-b17a-724f9ab6459e'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.503 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:55 compute-0 NetworkManager[51160]: <info>  [1759408675.5049] manager: (tap4c827308-ad): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/308)
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.505 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.513 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.514 2 INFO os_vif [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:bd:4c:e1,bridge_name='br-int',has_traffic_filtering=True,id=4c827308-ad8f-4ea0-ac23-feff2dac22eb,network=Network(95da58c1-265e-4dd9-ba00-692853005e46),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap4c827308-ad')
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.559 2 DEBUG nova.virt.libvirt.driver [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.559 2 DEBUG nova.virt.libvirt.driver [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.559 2 DEBUG nova.virt.libvirt.driver [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] No VIF found with MAC fa:16:3e:bd:4c:e1, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:37:55 compute-0 nova_compute[192079]: 2025-10-02 12:37:55.560 2 INFO nova.virt.libvirt.driver [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Using config drive
Oct 02 12:37:56 compute-0 podman[248250]: 2025-10-02 12:37:56.157751403 +0000 UTC m=+0.066012758 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, container_name=ceilometer_agent_compute, org.label-schema.vendor=CentOS, tcib_managed=true, config_id=edpm, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, 
managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:37:56 compute-0 nova_compute[192079]: 2025-10-02 12:37:56.226 2 INFO nova.virt.libvirt.driver [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Creating config drive at /var/lib/nova/instances/2f4dba21-eb3b-48e5-b17a-724f9ab6459e/disk.config
Oct 02 12:37:56 compute-0 nova_compute[192079]: 2025-10-02 12:37:56.230 2 DEBUG oslo_concurrency.processutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/2f4dba21-eb3b-48e5-b17a-724f9ab6459e/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpu5we5xi8 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:37:56 compute-0 nova_compute[192079]: 2025-10-02 12:37:56.369 2 DEBUG oslo_concurrency.processutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/2f4dba21-eb3b-48e5-b17a-724f9ab6459e/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpu5we5xi8" returned: 0 in 0.139s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:37:56 compute-0 kernel: tap4c827308-ad: entered promiscuous mode
Oct 02 12:37:56 compute-0 NetworkManager[51160]: <info>  [1759408676.4523] manager: (tap4c827308-ad): new Tun device (/org/freedesktop/NetworkManager/Devices/309)
Oct 02 12:37:56 compute-0 ovn_controller[94336]: 2025-10-02T12:37:56Z|00633|binding|INFO|Claiming lport 4c827308-ad8f-4ea0-ac23-feff2dac22eb for this chassis.
Oct 02 12:37:56 compute-0 ovn_controller[94336]: 2025-10-02T12:37:56Z|00634|binding|INFO|4c827308-ad8f-4ea0-ac23-feff2dac22eb: Claiming fa:16:3e:bd:4c:e1 10.100.0.14
Oct 02 12:37:56 compute-0 nova_compute[192079]: 2025-10-02 12:37:56.456 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:56 compute-0 nova_compute[192079]: 2025-10-02 12:37:56.477 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:56 compute-0 nova_compute[192079]: 2025-10-02 12:37:56.492 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:56 compute-0 NetworkManager[51160]: <info>  [1759408676.4946] manager: (patch-br-int-to-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/310)
Oct 02 12:37:56 compute-0 NetworkManager[51160]: <info>  [1759408676.4953] manager: (patch-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d-to-br-int): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/311)
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:56.495 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:bd:4c:e1 10.100.0.14'], port_security=['fa:16:3e:bd:4c:e1 10.100.0.14'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.14/28', 'neutron:device_id': '2f4dba21-eb3b-48e5-b17a-724f9ab6459e', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-95da58c1-265e-4dd9-ba00-692853005e46', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '8f7d693b90ba447196796435b74590f6', 'neutron:revision_number': '2', 'neutron:security_group_ids': '35c2ff63-16f8-4b9e-8320-2301129fdf30', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=2037b9ce-d2e9-4c7b-b130-56e2abc95360, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=4c827308-ad8f-4ea0-ac23-feff2dac22eb) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:56.496 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 4c827308-ad8f-4ea0-ac23-feff2dac22eb in datapath 95da58c1-265e-4dd9-ba00-692853005e46 bound to our chassis
Oct 02 12:37:56 compute-0 systemd-machined[152150]: New machine qemu-80-instance-000000a4.
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:56.498 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 95da58c1-265e-4dd9-ba00-692853005e46
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:56.512 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b81f78a8-bea1-472a-884c-641654589dab]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:56.513 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap95da58c1-21 in ovnmeta-95da58c1-265e-4dd9-ba00-692853005e46 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:56.515 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap95da58c1-20 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:56.515 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6088bc44-6d43-4e2b-bc12-38f5446c20bc]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:56.516 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ac760939-efb9-465f-a3b6-9f42f4fd1edf]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:56.530 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[f7430ff1-6b23-45f9-8b41-7bd98a33b508]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:56.566 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[53bcf6c3-cbef-4af0-8443-3796f9cc22cc]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:56 compute-0 systemd[1]: Started Virtual Machine qemu-80-instance-000000a4.
Oct 02 12:37:56 compute-0 systemd-udevd[248292]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:37:56 compute-0 NetworkManager[51160]: <info>  [1759408676.6137] device (tap4c827308-ad): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:56.612 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[cd1e5f9c-f979-4481-81a4-142e98b04332]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:56 compute-0 NetworkManager[51160]: <info>  [1759408676.6158] device (tap4c827308-ad): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:37:56 compute-0 NetworkManager[51160]: <info>  [1759408676.6421] manager: (tap95da58c1-20): new Veth device (/org/freedesktop/NetworkManager/Devices/312)
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:56.640 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[dda3e0da-10d6-4046-82a5-6f6be2cf1e65]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:56 compute-0 nova_compute[192079]: 2025-10-02 12:37:56.665 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:56 compute-0 ovn_controller[94336]: 2025-10-02T12:37:56Z|00635|binding|INFO|Releasing lport 6aa346c6-3e0c-4887-be68-d585d409cf95 from this chassis (sb_readonly=0)
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:56.685 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[e9a22be5-d18e-461a-9778-efcab7bd1d68]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:56.688 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[2e5964ee-2051-4136-ad1d-49cd019bb34d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:56 compute-0 nova_compute[192079]: 2025-10-02 12:37:56.693 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:56 compute-0 ovn_controller[94336]: 2025-10-02T12:37:56Z|00636|binding|INFO|Setting lport 4c827308-ad8f-4ea0-ac23-feff2dac22eb ovn-installed in OVS
Oct 02 12:37:56 compute-0 ovn_controller[94336]: 2025-10-02T12:37:56Z|00637|binding|INFO|Setting lport 4c827308-ad8f-4ea0-ac23-feff2dac22eb up in Southbound
Oct 02 12:37:56 compute-0 nova_compute[192079]: 2025-10-02 12:37:56.703 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:56 compute-0 NetworkManager[51160]: <info>  [1759408676.7141] device (tap95da58c1-20): carrier: link connected
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:56.720 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[d9b29422-bc0a-495d-88dc-d87eaa8288e8]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:56.739 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[45bc0890-04f3-457c-be0d-4858690db19d]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap95da58c1-21'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:75:f5:95'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 202], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 667434, 'reachable_time': 36924, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 248320, 'error': None, 'target': 'ovnmeta-95da58c1-265e-4dd9-ba00-692853005e46', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:56.758 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[57a59e45-81e8-46fa-b53e-6cad0426f397]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe75:f595'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 667434, 'tstamp': 667434}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 248321, 'error': None, 'target': 'ovnmeta-95da58c1-265e-4dd9-ba00-692853005e46', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:56.777 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e4f94330-efac-4213-b19b-7a1bca1a0005]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap95da58c1-21'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:75:f5:95'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 202], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 667434, 'reachable_time': 36924, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 248322, 'error': None, 'target': 'ovnmeta-95da58c1-265e-4dd9-ba00-692853005e46', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:56.822 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[85fa30aa-d276-4e8f-a7cf-62bbcded4d13]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:56.894 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b9f71d82-0ee2-40c4-836a-aa83608a7e5d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:56.896 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap95da58c1-20, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:56.896 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:56.897 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap95da58c1-20, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:37:56 compute-0 nova_compute[192079]: 2025-10-02 12:37:56.898 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:56 compute-0 kernel: tap95da58c1-20: entered promiscuous mode
Oct 02 12:37:56 compute-0 NetworkManager[51160]: <info>  [1759408676.9006] manager: (tap95da58c1-20): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/313)
Oct 02 12:37:56 compute-0 nova_compute[192079]: 2025-10-02 12:37:56.901 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:56.902 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap95da58c1-20, col_values=(('external_ids', {'iface-id': 'ad4e7082-9510-41a9-bc81-de2c66402e98'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:37:56 compute-0 nova_compute[192079]: 2025-10-02 12:37:56.904 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:56 compute-0 ovn_controller[94336]: 2025-10-02T12:37:56Z|00638|binding|INFO|Releasing lport ad4e7082-9510-41a9-bc81-de2c66402e98 from this chassis (sb_readonly=0)
Oct 02 12:37:56 compute-0 nova_compute[192079]: 2025-10-02 12:37:56.919 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:56 compute-0 nova_compute[192079]: 2025-10-02 12:37:56.920 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:56.921 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/95da58c1-265e-4dd9-ba00-692853005e46.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/95da58c1-265e-4dd9-ba00-692853005e46.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:56.922 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[57fb5e1d-1886-4de5-b660-eaa72a6c12fe]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:56.923 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-95da58c1-265e-4dd9-ba00-692853005e46
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/95da58c1-265e-4dd9-ba00-692853005e46.pid.haproxy
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 95da58c1-265e-4dd9-ba00-692853005e46
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:37:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:56.924 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-95da58c1-265e-4dd9-ba00-692853005e46', 'env', 'PROCESS_TAG=haproxy-95da58c1-265e-4dd9-ba00-692853005e46', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/95da58c1-265e-4dd9-ba00-692853005e46.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:37:57 compute-0 nova_compute[192079]: 2025-10-02 12:37:57.139 2 DEBUG nova.network.neutron [req-fdaa614f-e360-4a23-9f96-486c3c19222c req-4ff076de-d62c-474a-93ef-5e936dbea559 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Updated VIF entry in instance network info cache for port 4c827308-ad8f-4ea0-ac23-feff2dac22eb. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:37:57 compute-0 nova_compute[192079]: 2025-10-02 12:37:57.140 2 DEBUG nova.network.neutron [req-fdaa614f-e360-4a23-9f96-486c3c19222c req-4ff076de-d62c-474a-93ef-5e936dbea559 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Updating instance_info_cache with network_info: [{"id": "4c827308-ad8f-4ea0-ac23-feff2dac22eb", "address": "fa:16:3e:bd:4c:e1", "network": {"id": "95da58c1-265e-4dd9-ba00-692853005e46", "bridge": "br-int", "label": "tempest-TestSnapshotPattern-603762842-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "8f7d693b90ba447196796435b74590f6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap4c827308-ad", "ovs_interfaceid": "4c827308-ad8f-4ea0-ac23-feff2dac22eb", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:37:57 compute-0 nova_compute[192079]: 2025-10-02 12:37:57.160 2 DEBUG oslo_concurrency.lockutils [req-fdaa614f-e360-4a23-9f96-486c3c19222c req-4ff076de-d62c-474a-93ef-5e936dbea559 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-2f4dba21-eb3b-48e5-b17a-724f9ab6459e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:37:57 compute-0 podman[248361]: 2025-10-02 12:37:57.302836057 +0000 UTC m=+0.045451358 container create 7516803da79ea6db13e45f66b868c2d67abf966e0073020f239f7c05f5cc0d0b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-95da58c1-265e-4dd9-ba00-692853005e46, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:37:57 compute-0 nova_compute[192079]: 2025-10-02 12:37:57.306 2 DEBUG nova.compute.manager [req-19fb90d9-02f7-417e-8878-e352ee77e0db req-d6869c60-ff9a-4857-9b3a-572f805a2fdf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Received event network-vif-plugged-4c827308-ad8f-4ea0-ac23-feff2dac22eb external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:37:57 compute-0 nova_compute[192079]: 2025-10-02 12:37:57.307 2 DEBUG oslo_concurrency.lockutils [req-19fb90d9-02f7-417e-8878-e352ee77e0db req-d6869c60-ff9a-4857-9b3a-572f805a2fdf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "2f4dba21-eb3b-48e5-b17a-724f9ab6459e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:37:57 compute-0 nova_compute[192079]: 2025-10-02 12:37:57.307 2 DEBUG oslo_concurrency.lockutils [req-19fb90d9-02f7-417e-8878-e352ee77e0db req-d6869c60-ff9a-4857-9b3a-572f805a2fdf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2f4dba21-eb3b-48e5-b17a-724f9ab6459e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:37:57 compute-0 nova_compute[192079]: 2025-10-02 12:37:57.307 2 DEBUG oslo_concurrency.lockutils [req-19fb90d9-02f7-417e-8878-e352ee77e0db req-d6869c60-ff9a-4857-9b3a-572f805a2fdf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2f4dba21-eb3b-48e5-b17a-724f9ab6459e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:37:57 compute-0 nova_compute[192079]: 2025-10-02 12:37:57.308 2 DEBUG nova.compute.manager [req-19fb90d9-02f7-417e-8878-e352ee77e0db req-d6869c60-ff9a-4857-9b3a-572f805a2fdf 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Processing event network-vif-plugged-4c827308-ad8f-4ea0-ac23-feff2dac22eb _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:37:57 compute-0 systemd[1]: Started libpod-conmon-7516803da79ea6db13e45f66b868c2d67abf966e0073020f239f7c05f5cc0d0b.scope.
Oct 02 12:37:57 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:37:57 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/4af8bab3aac0810ed853100f059b5b010714d93fdf99f080f233a54569bc5245/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:37:57 compute-0 podman[248361]: 2025-10-02 12:37:57.277803865 +0000 UTC m=+0.020419186 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:37:57 compute-0 podman[248361]: 2025-10-02 12:37:57.374977231 +0000 UTC m=+0.117592552 container init 7516803da79ea6db13e45f66b868c2d67abf966e0073020f239f7c05f5cc0d0b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-95da58c1-265e-4dd9-ba00-692853005e46, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001)
Oct 02 12:37:57 compute-0 podman[248361]: 2025-10-02 12:37:57.382704352 +0000 UTC m=+0.125319653 container start 7516803da79ea6db13e45f66b868c2d67abf966e0073020f239f7c05f5cc0d0b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-95da58c1-265e-4dd9-ba00-692853005e46, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2)
Oct 02 12:37:57 compute-0 neutron-haproxy-ovnmeta-95da58c1-265e-4dd9-ba00-692853005e46[248376]: [NOTICE]   (248380) : New worker (248382) forked
Oct 02 12:37:57 compute-0 neutron-haproxy-ovnmeta-95da58c1-265e-4dd9-ba00-692853005e46[248376]: [NOTICE]   (248380) : Loading success.
Oct 02 12:37:57 compute-0 nova_compute[192079]: 2025-10-02 12:37:57.467 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408677.466795, 2f4dba21-eb3b-48e5-b17a-724f9ab6459e => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:37:57 compute-0 nova_compute[192079]: 2025-10-02 12:37:57.467 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] VM Started (Lifecycle Event)
Oct 02 12:37:57 compute-0 nova_compute[192079]: 2025-10-02 12:37:57.469 2 DEBUG nova.compute.manager [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:37:57 compute-0 nova_compute[192079]: 2025-10-02 12:37:57.471 2 DEBUG nova.virt.libvirt.driver [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:37:57 compute-0 nova_compute[192079]: 2025-10-02 12:37:57.474 2 INFO nova.virt.libvirt.driver [-] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Instance spawned successfully.
Oct 02 12:37:57 compute-0 nova_compute[192079]: 2025-10-02 12:37:57.474 2 INFO nova.compute.manager [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Took 5.46 seconds to spawn the instance on the hypervisor.
Oct 02 12:37:57 compute-0 nova_compute[192079]: 2025-10-02 12:37:57.475 2 DEBUG nova.compute.manager [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:37:57 compute-0 nova_compute[192079]: 2025-10-02 12:37:57.511 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:37:57 compute-0 nova_compute[192079]: 2025-10-02 12:37:57.515 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:37:57 compute-0 nova_compute[192079]: 2025-10-02 12:37:57.548 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408677.466959, 2f4dba21-eb3b-48e5-b17a-724f9ab6459e => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:37:57 compute-0 nova_compute[192079]: 2025-10-02 12:37:57.548 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] VM Paused (Lifecycle Event)
Oct 02 12:37:57 compute-0 nova_compute[192079]: 2025-10-02 12:37:57.567 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:37:57 compute-0 nova_compute[192079]: 2025-10-02 12:37:57.577 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408677.471144, 2f4dba21-eb3b-48e5-b17a-724f9ab6459e => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:37:57 compute-0 nova_compute[192079]: 2025-10-02 12:37:57.578 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] VM Resumed (Lifecycle Event)
Oct 02 12:37:57 compute-0 nova_compute[192079]: 2025-10-02 12:37:57.580 2 INFO nova.compute.manager [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Took 5.95 seconds to build instance.
Oct 02 12:37:57 compute-0 nova_compute[192079]: 2025-10-02 12:37:57.612 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:37:57 compute-0 nova_compute[192079]: 2025-10-02 12:37:57.617 2 DEBUG oslo_concurrency.lockutils [None req-29b94743-b23d-4b14-be89-810e329f4824 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Lock "2f4dba21-eb3b-48e5-b17a-724f9ab6459e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 6.082s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:37:57 compute-0 nova_compute[192079]: 2025-10-02 12:37:57.618 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: active, current task_state: None, current DB power_state: 1, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:37:57 compute-0 nova_compute[192079]: 2025-10-02 12:37:57.736 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:59 compute-0 nova_compute[192079]: 2025-10-02 12:37:59.588 2 DEBUG nova.compute.manager [req-c9711a66-20e8-4d38-98d0-e2f83c55f27f req-e2b3b4d9-dd2e-4c02-b3cd-24c6d2c4d909 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Received event network-vif-plugged-4c827308-ad8f-4ea0-ac23-feff2dac22eb external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:37:59 compute-0 nova_compute[192079]: 2025-10-02 12:37:59.589 2 DEBUG oslo_concurrency.lockutils [req-c9711a66-20e8-4d38-98d0-e2f83c55f27f req-e2b3b4d9-dd2e-4c02-b3cd-24c6d2c4d909 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "2f4dba21-eb3b-48e5-b17a-724f9ab6459e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:37:59 compute-0 nova_compute[192079]: 2025-10-02 12:37:59.589 2 DEBUG oslo_concurrency.lockutils [req-c9711a66-20e8-4d38-98d0-e2f83c55f27f req-e2b3b4d9-dd2e-4c02-b3cd-24c6d2c4d909 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2f4dba21-eb3b-48e5-b17a-724f9ab6459e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:37:59 compute-0 nova_compute[192079]: 2025-10-02 12:37:59.589 2 DEBUG oslo_concurrency.lockutils [req-c9711a66-20e8-4d38-98d0-e2f83c55f27f req-e2b3b4d9-dd2e-4c02-b3cd-24c6d2c4d909 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2f4dba21-eb3b-48e5-b17a-724f9ab6459e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:37:59 compute-0 nova_compute[192079]: 2025-10-02 12:37:59.589 2 DEBUG nova.compute.manager [req-c9711a66-20e8-4d38-98d0-e2f83c55f27f req-e2b3b4d9-dd2e-4c02-b3cd-24c6d2c4d909 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] No waiting events found dispatching network-vif-plugged-4c827308-ad8f-4ea0-ac23-feff2dac22eb pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:37:59 compute-0 nova_compute[192079]: 2025-10-02 12:37:59.590 2 WARNING nova.compute.manager [req-c9711a66-20e8-4d38-98d0-e2f83c55f27f req-e2b3b4d9-dd2e-4c02-b3cd-24c6d2c4d909 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Received unexpected event network-vif-plugged-4c827308-ad8f-4ea0-ac23-feff2dac22eb for instance with vm_state active and task_state None.
Oct 02 12:37:59 compute-0 nova_compute[192079]: 2025-10-02 12:37:59.930 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:37:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:59.931 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=42, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=41) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:37:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:37:59.933 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 5 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:38:00 compute-0 nova_compute[192079]: 2025-10-02 12:38:00.503 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:00 compute-0 nova_compute[192079]: 2025-10-02 12:38:00.777 2 DEBUG nova.compute.manager [req-f1f7c560-2974-4dc7-8158-cd8dbad3928e req-6270fed8-a5bb-4276-aed8-17c910a517ff 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Received event network-changed-4c827308-ad8f-4ea0-ac23-feff2dac22eb external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:38:00 compute-0 nova_compute[192079]: 2025-10-02 12:38:00.777 2 DEBUG nova.compute.manager [req-f1f7c560-2974-4dc7-8158-cd8dbad3928e req-6270fed8-a5bb-4276-aed8-17c910a517ff 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Refreshing instance network info cache due to event network-changed-4c827308-ad8f-4ea0-ac23-feff2dac22eb. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:38:00 compute-0 nova_compute[192079]: 2025-10-02 12:38:00.778 2 DEBUG oslo_concurrency.lockutils [req-f1f7c560-2974-4dc7-8158-cd8dbad3928e req-6270fed8-a5bb-4276-aed8-17c910a517ff 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-2f4dba21-eb3b-48e5-b17a-724f9ab6459e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:38:00 compute-0 nova_compute[192079]: 2025-10-02 12:38:00.778 2 DEBUG oslo_concurrency.lockutils [req-f1f7c560-2974-4dc7-8158-cd8dbad3928e req-6270fed8-a5bb-4276-aed8-17c910a517ff 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-2f4dba21-eb3b-48e5-b17a-724f9ab6459e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:38:00 compute-0 nova_compute[192079]: 2025-10-02 12:38:00.778 2 DEBUG nova.network.neutron [req-f1f7c560-2974-4dc7-8158-cd8dbad3928e req-6270fed8-a5bb-4276-aed8-17c910a517ff 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Refreshing network info cache for port 4c827308-ad8f-4ea0-ac23-feff2dac22eb _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:38:01 compute-0 nova_compute[192079]: 2025-10-02 12:38:01.917 2 DEBUG nova.network.neutron [req-f1f7c560-2974-4dc7-8158-cd8dbad3928e req-6270fed8-a5bb-4276-aed8-17c910a517ff 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Updated VIF entry in instance network info cache for port 4c827308-ad8f-4ea0-ac23-feff2dac22eb. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:38:01 compute-0 nova_compute[192079]: 2025-10-02 12:38:01.918 2 DEBUG nova.network.neutron [req-f1f7c560-2974-4dc7-8158-cd8dbad3928e req-6270fed8-a5bb-4276-aed8-17c910a517ff 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Updating instance_info_cache with network_info: [{"id": "4c827308-ad8f-4ea0-ac23-feff2dac22eb", "address": "fa:16:3e:bd:4c:e1", "network": {"id": "95da58c1-265e-4dd9-ba00-692853005e46", "bridge": "br-int", "label": "tempest-TestSnapshotPattern-603762842-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.186", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "8f7d693b90ba447196796435b74590f6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap4c827308-ad", "ovs_interfaceid": "4c827308-ad8f-4ea0-ac23-feff2dac22eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:38:01 compute-0 nova_compute[192079]: 2025-10-02 12:38:01.936 2 DEBUG oslo_concurrency.lockutils [req-f1f7c560-2974-4dc7-8158-cd8dbad3928e req-6270fed8-a5bb-4276-aed8-17c910a517ff 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-2f4dba21-eb3b-48e5-b17a-724f9ab6459e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:38:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:02.240 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:38:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:02.242 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:38:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:02.242 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:38:02 compute-0 nova_compute[192079]: 2025-10-02 12:38:02.739 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:04 compute-0 podman[248392]: 2025-10-02 12:38:04.160105958 +0000 UTC m=+0.063788178 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, container_name=multipathd, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, managed_by=edpm_ansible)
Oct 02 12:38:04 compute-0 podman[248391]: 2025-10-02 12:38:04.17080239 +0000 UTC m=+0.076665449 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, maintainer=Red Hat, Inc., com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, version=9.6, container_name=openstack_network_exporter, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vendor=Red Hat, Inc., config_id=edpm, io.buildah.version=1.33.7, managed_by=edpm_ansible, distribution-scope=public, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., name=ubi9-minimal, architecture=x86_64, com.redhat.component=ubi9-minimal-container, io.openshift.expose-services=, io.openshift.tags=minimal rhel9, vcs-type=git, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, build-date=2025-08-20T13:12:41, url=https://catalog.redhat.com/en/search?searchType=containers, release=1755695350, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly.)
Oct 02 12:38:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:04.936 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '42'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:38:05 compute-0 nova_compute[192079]: 2025-10-02 12:38:05.504 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:07 compute-0 nova_compute[192079]: 2025-10-02 12:38:07.741 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:08 compute-0 podman[248432]: 2025-10-02 12:38:08.150785579 +0000 UTC m=+0.051074081 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=iscsid, container_name=iscsid, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']})
Oct 02 12:38:08 compute-0 podman[248431]: 2025-10-02 12:38:08.178434452 +0000 UTC m=+0.089203849 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter)
Oct 02 12:38:10 compute-0 nova_compute[192079]: 2025-10-02 12:38:10.510 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:11 compute-0 ovn_controller[94336]: 2025-10-02T12:38:11Z|00069|pinctrl(ovn_pinctrl0)|WARN|DHCPREQUEST requested IP 10.100.0.8 does not match offer 10.100.0.14
Oct 02 12:38:11 compute-0 ovn_controller[94336]: 2025-10-02T12:38:11Z|00070|pinctrl(ovn_pinctrl0)|INFO|DHCPNAK fa:16:3e:bd:4c:e1 10.100.0.14
Oct 02 12:38:12 compute-0 nova_compute[192079]: 2025-10-02 12:38:12.742 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:14 compute-0 ovn_controller[94336]: 2025-10-02T12:38:14Z|00071|pinctrl(ovn_pinctrl0)|WARN|DHCPREQUEST requested IP 10.100.0.8 does not match offer 10.100.0.14
Oct 02 12:38:14 compute-0 ovn_controller[94336]: 2025-10-02T12:38:14Z|00072|pinctrl(ovn_pinctrl0)|INFO|DHCPNAK fa:16:3e:bd:4c:e1 10.100.0.14
Oct 02 12:38:15 compute-0 nova_compute[192079]: 2025-10-02 12:38:15.512 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:16 compute-0 ovn_controller[94336]: 2025-10-02T12:38:16Z|00073|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:bd:4c:e1 10.100.0.14
Oct 02 12:38:16 compute-0 ovn_controller[94336]: 2025-10-02T12:38:16Z|00074|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:bd:4c:e1 10.100.0.14
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.438 2 DEBUG oslo_concurrency.lockutils [None req-77f10a63-f51c-4f08-8adc-a70182d72107 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "dbb54a17-c5e3-491f-bca4-54ddde9b72fa" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.439 2 DEBUG oslo_concurrency.lockutils [None req-77f10a63-f51c-4f08-8adc-a70182d72107 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "dbb54a17-c5e3-491f-bca4-54ddde9b72fa" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.439 2 DEBUG oslo_concurrency.lockutils [None req-77f10a63-f51c-4f08-8adc-a70182d72107 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "dbb54a17-c5e3-491f-bca4-54ddde9b72fa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.440 2 DEBUG oslo_concurrency.lockutils [None req-77f10a63-f51c-4f08-8adc-a70182d72107 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "dbb54a17-c5e3-491f-bca4-54ddde9b72fa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.440 2 DEBUG oslo_concurrency.lockutils [None req-77f10a63-f51c-4f08-8adc-a70182d72107 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "dbb54a17-c5e3-491f-bca4-54ddde9b72fa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.454 2 INFO nova.compute.manager [None req-77f10a63-f51c-4f08-8adc-a70182d72107 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Terminating instance
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.469 2 DEBUG nova.compute.manager [None req-77f10a63-f51c-4f08-8adc-a70182d72107 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:38:16 compute-0 kernel: tap07a62b49-14 (unregistering): left promiscuous mode
Oct 02 12:38:16 compute-0 NetworkManager[51160]: <info>  [1759408696.5028] device (tap07a62b49-14): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:38:16 compute-0 ovn_controller[94336]: 2025-10-02T12:38:16Z|00639|binding|INFO|Releasing lport 07a62b49-14ca-420c-8b61-b7f06d28df05 from this chassis (sb_readonly=0)
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.513 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:16 compute-0 ovn_controller[94336]: 2025-10-02T12:38:16Z|00640|binding|INFO|Setting lport 07a62b49-14ca-420c-8b61-b7f06d28df05 down in Southbound
Oct 02 12:38:16 compute-0 ovn_controller[94336]: 2025-10-02T12:38:16Z|00641|binding|INFO|Removing iface tap07a62b49-14 ovn-installed in OVS
Oct 02 12:38:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:16.522 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:aa:3d:10 10.100.0.19'], port_security=['fa:16:3e:aa:3d:10 10.100.0.19'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.19/28', 'neutron:device_id': 'dbb54a17-c5e3-491f-bca4-54ddde9b72fa', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-a970b3c6-2fc3-4025-868b-2e9af396991a', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'neutron:revision_number': '4', 'neutron:security_group_ids': '6f1ca3b6-c25a-4924-86f4-2b179dfa50a2', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=cf407807-38c2-4b6a-825d-3f40edf483e2, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=07a62b49-14ca-420c-8b61-b7f06d28df05) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.523 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:16.523 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 07a62b49-14ca-420c-8b61-b7f06d28df05 in datapath a970b3c6-2fc3-4025-868b-2e9af396991a unbound from our chassis
Oct 02 12:38:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:16.524 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network a970b3c6-2fc3-4025-868b-2e9af396991a, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:38:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:16.526 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[581d59bc-76d4-49c7-8483-23fd9edfc35a]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:16.526 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-a970b3c6-2fc3-4025-868b-2e9af396991a namespace which is not needed anymore
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.530 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:16 compute-0 systemd[1]: machine-qemu\x2d79\x2dinstance\x2d000000a2.scope: Deactivated successfully.
Oct 02 12:38:16 compute-0 systemd[1]: machine-qemu\x2d79\x2dinstance\x2d000000a2.scope: Consumed 14.220s CPU time.
Oct 02 12:38:16 compute-0 systemd-machined[152150]: Machine qemu-79-instance-000000a2 terminated.
Oct 02 12:38:16 compute-0 podman[248486]: 2025-10-02 12:38:16.621780422 +0000 UTC m=+0.082007093 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']})
Oct 02 12:38:16 compute-0 podman[248482]: 2025-10-02 12:38:16.623173961 +0000 UTC m=+0.094731791 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent)
Oct 02 12:38:16 compute-0 podman[248485]: 2025-10-02 12:38:16.641303404 +0000 UTC m=+0.095973324 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, container_name=ovn_controller, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_managed=true, config_id=ovn_controller, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:38:16 compute-0 neutron-haproxy-ovnmeta-a970b3c6-2fc3-4025-868b-2e9af396991a[248014]: [NOTICE]   (248018) : haproxy version is 2.8.14-c23fe91
Oct 02 12:38:16 compute-0 neutron-haproxy-ovnmeta-a970b3c6-2fc3-4025-868b-2e9af396991a[248014]: [NOTICE]   (248018) : path to executable is /usr/sbin/haproxy
Oct 02 12:38:16 compute-0 neutron-haproxy-ovnmeta-a970b3c6-2fc3-4025-868b-2e9af396991a[248014]: [WARNING]  (248018) : Exiting Master process...
Oct 02 12:38:16 compute-0 neutron-haproxy-ovnmeta-a970b3c6-2fc3-4025-868b-2e9af396991a[248014]: [WARNING]  (248018) : Exiting Master process...
Oct 02 12:38:16 compute-0 neutron-haproxy-ovnmeta-a970b3c6-2fc3-4025-868b-2e9af396991a[248014]: [ALERT]    (248018) : Current worker (248020) exited with code 143 (Terminated)
Oct 02 12:38:16 compute-0 neutron-haproxy-ovnmeta-a970b3c6-2fc3-4025-868b-2e9af396991a[248014]: [WARNING]  (248018) : All workers exited. Exiting... (0)
Oct 02 12:38:16 compute-0 systemd[1]: libpod-4d526a6f61c0d0e4bcfeee1417772191019cb2effffe78423d1f9aa5f70d0a3b.scope: Deactivated successfully.
Oct 02 12:38:16 compute-0 podman[248567]: 2025-10-02 12:38:16.664132105 +0000 UTC m=+0.044707778 container died 4d526a6f61c0d0e4bcfeee1417772191019cb2effffe78423d1f9aa5f70d0a3b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a970b3c6-2fc3-4025-868b-2e9af396991a, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_managed=true, io.buildah.version=1.41.3)
Oct 02 12:38:16 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-4d526a6f61c0d0e4bcfeee1417772191019cb2effffe78423d1f9aa5f70d0a3b-userdata-shm.mount: Deactivated successfully.
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.696 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:16 compute-0 systemd[1]: var-lib-containers-storage-overlay-1b45655dbb226ba9c61b714c96de1c92cdba1b3b72b471e274b24d596bde251b-merged.mount: Deactivated successfully.
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.699 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:16 compute-0 podman[248567]: 2025-10-02 12:38:16.715153035 +0000 UTC m=+0.095728698 container cleanup 4d526a6f61c0d0e4bcfeee1417772191019cb2effffe78423d1f9aa5f70d0a3b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a970b3c6-2fc3-4025-868b-2e9af396991a, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:38:16 compute-0 systemd[1]: libpod-conmon-4d526a6f61c0d0e4bcfeee1417772191019cb2effffe78423d1f9aa5f70d0a3b.scope: Deactivated successfully.
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.738 2 INFO nova.virt.libvirt.driver [-] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Instance destroyed successfully.
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.739 2 DEBUG nova.objects.instance [None req-77f10a63-f51c-4f08-8adc-a70182d72107 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lazy-loading 'resources' on Instance uuid dbb54a17-c5e3-491f-bca4-54ddde9b72fa obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.752 2 DEBUG nova.virt.libvirt.vif [None req-77f10a63-f51c-4f08-8adc-a70182d72107 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:37:16Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestNetworkBasicOps-server-309185011',display_name='tempest-TestNetworkBasicOps-server-309185011',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkbasicops-server-309185011',id=162,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBJO+wO/+GTOFf51kWLpwn3zBwiC/mnr0GgssprPZoW4336Xkqf5/GxM2/nY8bXLRzxB9iu8WMqvFe2Azj1RGGYQmk7SnHozPvlLaKUXaimsfpZCFxDle3QzaWsuBECiebg==',key_name='tempest-TestNetworkBasicOps-1746924842',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:37:24Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='6e2a4899168a47618e377cb3ac85ddd2',ramdisk_id='',reservation_id='r-g9slemmv',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-TestNetworkBasicOps-1323893370',owner_user_name='tempest-TestNetworkBasicOps-1323893370-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:37:24Z,user_data=None,user_id='a1898fdf056c4a249c33590f26d4d845',uuid=dbb54a17-c5e3-491f-bca4-54ddde9b72fa,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "07a62b49-14ca-420c-8b61-b7f06d28df05", "address": "fa:16:3e:aa:3d:10", "network": {"id": "a970b3c6-2fc3-4025-868b-2e9af396991a", "bridge": "br-int", "label": "tempest-network-smoke--441167180", "subnets": [{"cidr": "10.100.0.16/28", "dns": [], "gateway": {"address": null, "type": "gateway", "version": null, "meta": {}}, "ips": [{"address": "10.100.0.19", "type": "fixed", "version": 
4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap07a62b49-14", "ovs_interfaceid": "07a62b49-14ca-420c-8b61-b7f06d28df05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.752 2 DEBUG nova.network.os_vif_util [None req-77f10a63-f51c-4f08-8adc-a70182d72107 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converting VIF {"id": "07a62b49-14ca-420c-8b61-b7f06d28df05", "address": "fa:16:3e:aa:3d:10", "network": {"id": "a970b3c6-2fc3-4025-868b-2e9af396991a", "bridge": "br-int", "label": "tempest-network-smoke--441167180", "subnets": [{"cidr": "10.100.0.16/28", "dns": [], "gateway": {"address": null, "type": "gateway", "version": null, "meta": {}}, "ips": [{"address": "10.100.0.19", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap07a62b49-14", "ovs_interfaceid": "07a62b49-14ca-420c-8b61-b7f06d28df05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.753 2 DEBUG nova.network.os_vif_util [None req-77f10a63-f51c-4f08-8adc-a70182d72107 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:aa:3d:10,bridge_name='br-int',has_traffic_filtering=True,id=07a62b49-14ca-420c-8b61-b7f06d28df05,network=Network(a970b3c6-2fc3-4025-868b-2e9af396991a),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap07a62b49-14') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.753 2 DEBUG os_vif [None req-77f10a63-f51c-4f08-8adc-a70182d72107 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:aa:3d:10,bridge_name='br-int',has_traffic_filtering=True,id=07a62b49-14ca-420c-8b61-b7f06d28df05,network=Network(a970b3c6-2fc3-4025-868b-2e9af396991a),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap07a62b49-14') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.756 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.756 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap07a62b49-14, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.759 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.761 2 INFO os_vif [None req-77f10a63-f51c-4f08-8adc-a70182d72107 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:aa:3d:10,bridge_name='br-int',has_traffic_filtering=True,id=07a62b49-14ca-420c-8b61-b7f06d28df05,network=Network(a970b3c6-2fc3-4025-868b-2e9af396991a),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap07a62b49-14')
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.762 2 INFO nova.virt.libvirt.driver [None req-77f10a63-f51c-4f08-8adc-a70182d72107 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Deleting instance files /var/lib/nova/instances/dbb54a17-c5e3-491f-bca4-54ddde9b72fa_del
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.763 2 INFO nova.virt.libvirt.driver [None req-77f10a63-f51c-4f08-8adc-a70182d72107 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Deletion of /var/lib/nova/instances/dbb54a17-c5e3-491f-bca4-54ddde9b72fa_del complete
Oct 02 12:38:16 compute-0 podman[248613]: 2025-10-02 12:38:16.781934212 +0000 UTC m=+0.045472299 container remove 4d526a6f61c0d0e4bcfeee1417772191019cb2effffe78423d1f9aa5f70d0a3b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-a970b3c6-2fc3-4025-868b-2e9af396991a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS)
Oct 02 12:38:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:16.786 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[15c7a0ec-7275-4565-9959-e9cdaf940b62]: (4, ('Thu Oct  2 12:38:16 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-a970b3c6-2fc3-4025-868b-2e9af396991a (4d526a6f61c0d0e4bcfeee1417772191019cb2effffe78423d1f9aa5f70d0a3b)\n4d526a6f61c0d0e4bcfeee1417772191019cb2effffe78423d1f9aa5f70d0a3b\nThu Oct  2 12:38:16 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-a970b3c6-2fc3-4025-868b-2e9af396991a (4d526a6f61c0d0e4bcfeee1417772191019cb2effffe78423d1f9aa5f70d0a3b)\n4d526a6f61c0d0e4bcfeee1417772191019cb2effffe78423d1f9aa5f70d0a3b\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:16.787 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5495966f-bc41-473a-8014-85bf9b2f2e12]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:16.787 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapa970b3c6-20, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.789 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:16 compute-0 kernel: tapa970b3c6-20: left promiscuous mode
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.800 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:16.803 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0ded244b-9698-410d-bfa7-6c25a5659805]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.822 2 INFO nova.compute.manager [None req-77f10a63-f51c-4f08-8adc-a70182d72107 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Took 0.35 seconds to destroy the instance on the hypervisor.
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.822 2 DEBUG oslo.service.loopingcall [None req-77f10a63-f51c-4f08-8adc-a70182d72107 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.822 2 DEBUG nova.compute.manager [-] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:38:16 compute-0 nova_compute[192079]: 2025-10-02 12:38:16.822 2 DEBUG nova.network.neutron [-] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:38:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:16.827 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0be1db21-4375-416b-b1f8-017b581ac6d5]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:16.828 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[daf559b9-3bc6-4d29-91ea-4b8456e2e798]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:16.841 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c33c8a97-b631-4547-a854-86c4d98128b7]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 664073, 'reachable_time': 43784, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 248631, 'error': None, 'target': 'ovnmeta-a970b3c6-2fc3-4025-868b-2e9af396991a', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:16.843 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-a970b3c6-2fc3-4025-868b-2e9af396991a deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:38:16 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:16.844 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[276d63e9-ce2b-4ff4-b2a4-9891abc41dee]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:16 compute-0 systemd[1]: run-netns-ovnmeta\x2da970b3c6\x2d2fc3\x2d4025\x2d868b\x2d2e9af396991a.mount: Deactivated successfully.
Oct 02 12:38:17 compute-0 nova_compute[192079]: 2025-10-02 12:38:17.463 2 DEBUG nova.compute.manager [req-b7a15892-431d-406f-ace4-2d0a10230f33 req-3d9599b2-ca16-4196-bb7e-f72bba0226ec 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Received event network-vif-unplugged-07a62b49-14ca-420c-8b61-b7f06d28df05 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:38:17 compute-0 nova_compute[192079]: 2025-10-02 12:38:17.463 2 DEBUG oslo_concurrency.lockutils [req-b7a15892-431d-406f-ace4-2d0a10230f33 req-3d9599b2-ca16-4196-bb7e-f72bba0226ec 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "dbb54a17-c5e3-491f-bca4-54ddde9b72fa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:38:17 compute-0 nova_compute[192079]: 2025-10-02 12:38:17.463 2 DEBUG oslo_concurrency.lockutils [req-b7a15892-431d-406f-ace4-2d0a10230f33 req-3d9599b2-ca16-4196-bb7e-f72bba0226ec 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "dbb54a17-c5e3-491f-bca4-54ddde9b72fa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:38:17 compute-0 nova_compute[192079]: 2025-10-02 12:38:17.463 2 DEBUG oslo_concurrency.lockutils [req-b7a15892-431d-406f-ace4-2d0a10230f33 req-3d9599b2-ca16-4196-bb7e-f72bba0226ec 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "dbb54a17-c5e3-491f-bca4-54ddde9b72fa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:38:17 compute-0 nova_compute[192079]: 2025-10-02 12:38:17.463 2 DEBUG nova.compute.manager [req-b7a15892-431d-406f-ace4-2d0a10230f33 req-3d9599b2-ca16-4196-bb7e-f72bba0226ec 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] No waiting events found dispatching network-vif-unplugged-07a62b49-14ca-420c-8b61-b7f06d28df05 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:38:17 compute-0 nova_compute[192079]: 2025-10-02 12:38:17.464 2 DEBUG nova.compute.manager [req-b7a15892-431d-406f-ace4-2d0a10230f33 req-3d9599b2-ca16-4196-bb7e-f72bba0226ec 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Received event network-vif-unplugged-07a62b49-14ca-420c-8b61-b7f06d28df05 for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:38:17 compute-0 nova_compute[192079]: 2025-10-02 12:38:17.749 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:17 compute-0 nova_compute[192079]: 2025-10-02 12:38:17.760 2 DEBUG nova.network.neutron [-] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:38:17 compute-0 nova_compute[192079]: 2025-10-02 12:38:17.786 2 INFO nova.compute.manager [-] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Took 0.96 seconds to deallocate network for instance.
Oct 02 12:38:17 compute-0 nova_compute[192079]: 2025-10-02 12:38:17.857 2 DEBUG oslo_concurrency.lockutils [None req-77f10a63-f51c-4f08-8adc-a70182d72107 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:38:17 compute-0 nova_compute[192079]: 2025-10-02 12:38:17.857 2 DEBUG oslo_concurrency.lockutils [None req-77f10a63-f51c-4f08-8adc-a70182d72107 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:38:17 compute-0 nova_compute[192079]: 2025-10-02 12:38:17.928 2 DEBUG nova.compute.provider_tree [None req-77f10a63-f51c-4f08-8adc-a70182d72107 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:38:17 compute-0 nova_compute[192079]: 2025-10-02 12:38:17.942 2 DEBUG nova.scheduler.client.report [None req-77f10a63-f51c-4f08-8adc-a70182d72107 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:38:17 compute-0 nova_compute[192079]: 2025-10-02 12:38:17.960 2 DEBUG oslo_concurrency.lockutils [None req-77f10a63-f51c-4f08-8adc-a70182d72107 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.103s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:38:18 compute-0 nova_compute[192079]: 2025-10-02 12:38:18.000 2 INFO nova.scheduler.client.report [None req-77f10a63-f51c-4f08-8adc-a70182d72107 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Deleted allocations for instance dbb54a17-c5e3-491f-bca4-54ddde9b72fa
Oct 02 12:38:18 compute-0 nova_compute[192079]: 2025-10-02 12:38:18.067 2 DEBUG oslo_concurrency.lockutils [None req-77f10a63-f51c-4f08-8adc-a70182d72107 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "dbb54a17-c5e3-491f-bca4-54ddde9b72fa" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.629s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:38:19 compute-0 nova_compute[192079]: 2025-10-02 12:38:19.554 2 DEBUG nova.compute.manager [req-3e878388-217c-41a8-83f7-6e77d7112f42 req-05ab123b-1879-430e-867c-1877c531ec61 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Received event network-vif-plugged-07a62b49-14ca-420c-8b61-b7f06d28df05 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:38:19 compute-0 nova_compute[192079]: 2025-10-02 12:38:19.555 2 DEBUG oslo_concurrency.lockutils [req-3e878388-217c-41a8-83f7-6e77d7112f42 req-05ab123b-1879-430e-867c-1877c531ec61 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "dbb54a17-c5e3-491f-bca4-54ddde9b72fa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:38:19 compute-0 nova_compute[192079]: 2025-10-02 12:38:19.556 2 DEBUG oslo_concurrency.lockutils [req-3e878388-217c-41a8-83f7-6e77d7112f42 req-05ab123b-1879-430e-867c-1877c531ec61 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "dbb54a17-c5e3-491f-bca4-54ddde9b72fa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:38:19 compute-0 nova_compute[192079]: 2025-10-02 12:38:19.556 2 DEBUG oslo_concurrency.lockutils [req-3e878388-217c-41a8-83f7-6e77d7112f42 req-05ab123b-1879-430e-867c-1877c531ec61 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "dbb54a17-c5e3-491f-bca4-54ddde9b72fa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:38:19 compute-0 nova_compute[192079]: 2025-10-02 12:38:19.557 2 DEBUG nova.compute.manager [req-3e878388-217c-41a8-83f7-6e77d7112f42 req-05ab123b-1879-430e-867c-1877c531ec61 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] No waiting events found dispatching network-vif-plugged-07a62b49-14ca-420c-8b61-b7f06d28df05 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:38:19 compute-0 nova_compute[192079]: 2025-10-02 12:38:19.557 2 WARNING nova.compute.manager [req-3e878388-217c-41a8-83f7-6e77d7112f42 req-05ab123b-1879-430e-867c-1877c531ec61 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Received unexpected event network-vif-plugged-07a62b49-14ca-420c-8b61-b7f06d28df05 for instance with vm_state deleted and task_state None.
Oct 02 12:38:19 compute-0 nova_compute[192079]: 2025-10-02 12:38:19.557 2 DEBUG nova.compute.manager [req-3e878388-217c-41a8-83f7-6e77d7112f42 req-05ab123b-1879-430e-867c-1877c531ec61 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Received event network-vif-deleted-07a62b49-14ca-420c-8b61-b7f06d28df05 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:38:21 compute-0 nova_compute[192079]: 2025-10-02 12:38:21.760 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:21 compute-0 ovn_controller[94336]: 2025-10-02T12:38:21Z|00642|binding|INFO|Releasing lport ad4e7082-9510-41a9-bc81-de2c66402e98 from this chassis (sb_readonly=0)
Oct 02 12:38:21 compute-0 nova_compute[192079]: 2025-10-02 12:38:21.910 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:22 compute-0 nova_compute[192079]: 2025-10-02 12:38:22.752 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:26 compute-0 nova_compute[192079]: 2025-10-02 12:38:26.763 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:27 compute-0 podman[248632]: 2025-10-02 12:38:27.17885136 +0000 UTC m=+0.084768909 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, config_id=edpm, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, 
container_name=ceilometer_agent_compute, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:38:27 compute-0 nova_compute[192079]: 2025-10-02 12:38:27.755 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:30 compute-0 ovn_controller[94336]: 2025-10-02T12:38:30Z|00643|binding|INFO|Releasing lport ad4e7082-9510-41a9-bc81-de2c66402e98 from this chassis (sb_readonly=0)
Oct 02 12:38:30 compute-0 nova_compute[192079]: 2025-10-02 12:38:30.509 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:31 compute-0 nova_compute[192079]: 2025-10-02 12:38:31.738 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759408696.7362752, dbb54a17-c5e3-491f-bca4-54ddde9b72fa => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:38:31 compute-0 nova_compute[192079]: 2025-10-02 12:38:31.738 2 INFO nova.compute.manager [-] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] VM Stopped (Lifecycle Event)
Oct 02 12:38:31 compute-0 nova_compute[192079]: 2025-10-02 12:38:31.756 2 DEBUG nova.compute.manager [None req-63c6bf13-baef-4d67-bc61-2a16426ad2ad - - - - - -] [instance: dbb54a17-c5e3-491f-bca4-54ddde9b72fa] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:38:31 compute-0 nova_compute[192079]: 2025-10-02 12:38:31.766 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:32 compute-0 nova_compute[192079]: 2025-10-02 12:38:32.757 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:33 compute-0 nova_compute[192079]: 2025-10-02 12:38:33.447 2 DEBUG nova.compute.manager [None req-96b534b9-278c-4374-8abd-9107412d8049 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:38:33 compute-0 nova_compute[192079]: 2025-10-02 12:38:33.556 2 INFO nova.compute.manager [None req-96b534b9-278c-4374-8abd-9107412d8049 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] instance snapshotting
Oct 02 12:38:33 compute-0 nova_compute[192079]: 2025-10-02 12:38:33.773 2 INFO nova.virt.libvirt.driver [None req-96b534b9-278c-4374-8abd-9107412d8049 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Beginning live snapshot process
Oct 02 12:38:33 compute-0 virtqemud[191807]: invalid argument: disk vda does not have an active block job
Oct 02 12:38:33 compute-0 nova_compute[192079]: 2025-10-02 12:38:33.934 2 DEBUG oslo_concurrency.processutils [None req-96b534b9-278c-4374-8abd-9107412d8049 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2f4dba21-eb3b-48e5-b17a-724f9ab6459e/disk --force-share --output=json -f qcow2 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:38:34 compute-0 nova_compute[192079]: 2025-10-02 12:38:34.009 2 DEBUG oslo_concurrency.processutils [None req-96b534b9-278c-4374-8abd-9107412d8049 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2f4dba21-eb3b-48e5-b17a-724f9ab6459e/disk --force-share --output=json -f qcow2" returned: 0 in 0.075s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:38:34 compute-0 nova_compute[192079]: 2025-10-02 12:38:34.011 2 DEBUG oslo_concurrency.processutils [None req-96b534b9-278c-4374-8abd-9107412d8049 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2f4dba21-eb3b-48e5-b17a-724f9ab6459e/disk --force-share --output=json -f qcow2 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:38:34 compute-0 nova_compute[192079]: 2025-10-02 12:38:34.103 2 DEBUG oslo_concurrency.processutils [None req-96b534b9-278c-4374-8abd-9107412d8049 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/2f4dba21-eb3b-48e5-b17a-724f9ab6459e/disk --force-share --output=json -f qcow2" returned: 0 in 0.092s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:38:34 compute-0 nova_compute[192079]: 2025-10-02 12:38:34.133 2 DEBUG oslo_concurrency.processutils [None req-96b534b9-278c-4374-8abd-9107412d8049 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/6226a5cf11b26d104556719508e058e93aa7073d --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:38:34 compute-0 nova_compute[192079]: 2025-10-02 12:38:34.238 2 DEBUG oslo_concurrency.processutils [None req-96b534b9-278c-4374-8abd-9107412d8049 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/6226a5cf11b26d104556719508e058e93aa7073d --force-share --output=json" returned: 0 in 0.105s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:38:34 compute-0 nova_compute[192079]: 2025-10-02 12:38:34.240 2 DEBUG oslo_concurrency.processutils [None req-96b534b9-278c-4374-8abd-9107412d8049 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/6226a5cf11b26d104556719508e058e93aa7073d,backing_fmt=raw /var/lib/nova/instances/snapshots/tmp_fmjwxxr/8eb7cab1df614d3886e3ec7a1a6b575a.delta 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:38:34 compute-0 nova_compute[192079]: 2025-10-02 12:38:34.277 2 DEBUG oslo_concurrency.processutils [None req-96b534b9-278c-4374-8abd-9107412d8049 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/6226a5cf11b26d104556719508e058e93aa7073d,backing_fmt=raw /var/lib/nova/instances/snapshots/tmp_fmjwxxr/8eb7cab1df614d3886e3ec7a1a6b575a.delta 1073741824" returned: 0 in 0.037s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:38:34 compute-0 nova_compute[192079]: 2025-10-02 12:38:34.279 2 INFO nova.virt.libvirt.driver [None req-96b534b9-278c-4374-8abd-9107412d8049 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Quiescing instance not available: QEMU guest agent is not enabled.
Oct 02 12:38:34 compute-0 nova_compute[192079]: 2025-10-02 12:38:34.372 2 DEBUG nova.virt.libvirt.guest [None req-96b534b9-278c-4374-8abd-9107412d8049 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] COPY block job progress, current cursor: 0 final cursor: 1048576 is_job_complete /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:846
Oct 02 12:38:34 compute-0 podman[248667]: 2025-10-02 12:38:34.384205148 +0000 UTC m=+0.058446882 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, release=1755695350, build-date=2025-08-20T13:12:41, managed_by=edpm_ansible, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, com.redhat.component=ubi9-minimal-container, architecture=x86_64, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.expose-services=, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, description=The Universal Base Image Minimal is a stripped down image that 
uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.tags=minimal rhel9, url=https://catalog.redhat.com/en/search?searchType=containers, container_name=openstack_network_exporter, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, maintainer=Red Hat, Inc., version=9.6, config_id=edpm, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vendor=Red Hat, Inc., distribution-scope=public, vcs-type=git, io.buildah.version=1.33.7, name=ubi9-minimal)
Oct 02 12:38:34 compute-0 podman[248668]: 2025-10-02 12:38:34.396894343 +0000 UTC m=+0.070603553 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, container_name=multipathd, org.label-schema.license=GPLv2)
Oct 02 12:38:34 compute-0 nova_compute[192079]: 2025-10-02 12:38:34.876 2 DEBUG nova.virt.libvirt.guest [None req-96b534b9-278c-4374-8abd-9107412d8049 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] COPY block job progress, current cursor: 1048576 final cursor: 1048576 is_job_complete /usr/lib/python3.9/site-packages/nova/virt/libvirt/guest.py:846
Oct 02 12:38:34 compute-0 nova_compute[192079]: 2025-10-02 12:38:34.880 2 INFO nova.virt.libvirt.driver [None req-96b534b9-278c-4374-8abd-9107412d8049 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Skipping quiescing instance: QEMU guest agent is not enabled.
Oct 02 12:38:34 compute-0 nova_compute[192079]: 2025-10-02 12:38:34.941 2 DEBUG nova.privsep.utils [None req-96b534b9-278c-4374-8abd-9107412d8049 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Path '/var/lib/nova/instances' supports direct I/O supports_direct_io /usr/lib/python3.9/site-packages/nova/privsep/utils.py:63
Oct 02 12:38:34 compute-0 nova_compute[192079]: 2025-10-02 12:38:34.942 2 DEBUG oslo_concurrency.processutils [None req-96b534b9-278c-4374-8abd-9107412d8049 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Running cmd (subprocess): qemu-img convert -t none -O qcow2 -f qcow2 /var/lib/nova/instances/snapshots/tmp_fmjwxxr/8eb7cab1df614d3886e3ec7a1a6b575a.delta /var/lib/nova/instances/snapshots/tmp_fmjwxxr/8eb7cab1df614d3886e3ec7a1a6b575a execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:38:35 compute-0 nova_compute[192079]: 2025-10-02 12:38:35.310 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:35 compute-0 nova_compute[192079]: 2025-10-02 12:38:35.331 2 DEBUG oslo_concurrency.processutils [None req-96b534b9-278c-4374-8abd-9107412d8049 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] CMD "qemu-img convert -t none -O qcow2 -f qcow2 /var/lib/nova/instances/snapshots/tmp_fmjwxxr/8eb7cab1df614d3886e3ec7a1a6b575a.delta /var/lib/nova/instances/snapshots/tmp_fmjwxxr/8eb7cab1df614d3886e3ec7a1a6b575a" returned: 0 in 0.390s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:38:35 compute-0 nova_compute[192079]: 2025-10-02 12:38:35.333 2 INFO nova.virt.libvirt.driver [None req-96b534b9-278c-4374-8abd-9107412d8049 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Snapshot extracted, beginning image upload
Oct 02 12:38:36 compute-0 nova_compute[192079]: 2025-10-02 12:38:36.768 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:37 compute-0 nova_compute[192079]: 2025-10-02 12:38:37.645 2 INFO nova.virt.libvirt.driver [None req-96b534b9-278c-4374-8abd-9107412d8049 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Snapshot image upload complete
Oct 02 12:38:37 compute-0 nova_compute[192079]: 2025-10-02 12:38:37.646 2 INFO nova.compute.manager [None req-96b534b9-278c-4374-8abd-9107412d8049 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Took 4.07 seconds to snapshot the instance on the hypervisor.
Oct 02 12:38:37 compute-0 nova_compute[192079]: 2025-10-02 12:38:37.760 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.111 2 DEBUG nova.compute.manager [req-f4866728-6a55-436d-9031-32c45d50c3b0 req-c89026a2-8ebf-41b6-bfcb-c6b32e4b1a45 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Received event network-changed-4c827308-ad8f-4ea0-ac23-feff2dac22eb external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.112 2 DEBUG nova.compute.manager [req-f4866728-6a55-436d-9031-32c45d50c3b0 req-c89026a2-8ebf-41b6-bfcb-c6b32e4b1a45 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Refreshing instance network info cache due to event network-changed-4c827308-ad8f-4ea0-ac23-feff2dac22eb. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.112 2 DEBUG oslo_concurrency.lockutils [req-f4866728-6a55-436d-9031-32c45d50c3b0 req-c89026a2-8ebf-41b6-bfcb-c6b32e4b1a45 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-2f4dba21-eb3b-48e5-b17a-724f9ab6459e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.112 2 DEBUG oslo_concurrency.lockutils [req-f4866728-6a55-436d-9031-32c45d50c3b0 req-c89026a2-8ebf-41b6-bfcb-c6b32e4b1a45 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-2f4dba21-eb3b-48e5-b17a-724f9ab6459e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.112 2 DEBUG nova.network.neutron [req-f4866728-6a55-436d-9031-32c45d50c3b0 req-c89026a2-8ebf-41b6-bfcb-c6b32e4b1a45 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Refreshing network info cache for port 4c827308-ad8f-4ea0-ac23-feff2dac22eb _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:38:39 compute-0 podman[248721]: 2025-10-02 12:38:39.129714221 +0000 UTC m=+0.043525217 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']})
Oct 02 12:38:39 compute-0 podman[248722]: 2025-10-02 12:38:39.142117808 +0000 UTC m=+0.053054126 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_id=iscsid, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid)
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.189 2 DEBUG oslo_concurrency.lockutils [None req-e0d21856-856c-4637-9639-257736b8819a 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Acquiring lock "2f4dba21-eb3b-48e5-b17a-724f9ab6459e" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.190 2 DEBUG oslo_concurrency.lockutils [None req-e0d21856-856c-4637-9639-257736b8819a 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Lock "2f4dba21-eb3b-48e5-b17a-724f9ab6459e" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.190 2 DEBUG oslo_concurrency.lockutils [None req-e0d21856-856c-4637-9639-257736b8819a 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Acquiring lock "2f4dba21-eb3b-48e5-b17a-724f9ab6459e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.190 2 DEBUG oslo_concurrency.lockutils [None req-e0d21856-856c-4637-9639-257736b8819a 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Lock "2f4dba21-eb3b-48e5-b17a-724f9ab6459e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.190 2 DEBUG oslo_concurrency.lockutils [None req-e0d21856-856c-4637-9639-257736b8819a 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Lock "2f4dba21-eb3b-48e5-b17a-724f9ab6459e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.199 2 INFO nova.compute.manager [None req-e0d21856-856c-4637-9639-257736b8819a 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Terminating instance
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.207 2 DEBUG nova.compute.manager [None req-e0d21856-856c-4637-9639-257736b8819a 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:38:39 compute-0 kernel: tap4c827308-ad (unregistering): left promiscuous mode
Oct 02 12:38:39 compute-0 NetworkManager[51160]: <info>  [1759408719.2350] device (tap4c827308-ad): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:38:39 compute-0 ovn_controller[94336]: 2025-10-02T12:38:39Z|00644|binding|INFO|Releasing lport 4c827308-ad8f-4ea0-ac23-feff2dac22eb from this chassis (sb_readonly=0)
Oct 02 12:38:39 compute-0 ovn_controller[94336]: 2025-10-02T12:38:39Z|00645|binding|INFO|Setting lport 4c827308-ad8f-4ea0-ac23-feff2dac22eb down in Southbound
Oct 02 12:38:39 compute-0 ovn_controller[94336]: 2025-10-02T12:38:39Z|00646|binding|INFO|Removing iface tap4c827308-ad ovn-installed in OVS
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.291 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.299 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:39.302 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:bd:4c:e1 10.100.0.14'], port_security=['fa:16:3e:bd:4c:e1 10.100.0.14'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.14/28', 'neutron:device_id': '2f4dba21-eb3b-48e5-b17a-724f9ab6459e', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-95da58c1-265e-4dd9-ba00-692853005e46', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '8f7d693b90ba447196796435b74590f6', 'neutron:revision_number': '4', 'neutron:security_group_ids': '35c2ff63-16f8-4b9e-8320-2301129fdf30', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=2037b9ce-d2e9-4c7b-b130-56e2abc95360, chassis=[], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=4c827308-ad8f-4ea0-ac23-feff2dac22eb) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:38:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:39.303 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 4c827308-ad8f-4ea0-ac23-feff2dac22eb in datapath 95da58c1-265e-4dd9-ba00-692853005e46 unbound from our chassis
Oct 02 12:38:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:39.305 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 95da58c1-265e-4dd9-ba00-692853005e46, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:38:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:39.306 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ef2186ab-0704-4de8-b196-de9b87c8c650]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:39.306 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-95da58c1-265e-4dd9-ba00-692853005e46 namespace which is not needed anymore
Oct 02 12:38:39 compute-0 systemd[1]: machine-qemu\x2d80\x2dinstance\x2d000000a4.scope: Deactivated successfully.
Oct 02 12:38:39 compute-0 systemd[1]: machine-qemu\x2d80\x2dinstance\x2d000000a4.scope: Consumed 14.590s CPU time.
Oct 02 12:38:39 compute-0 systemd-machined[152150]: Machine qemu-80-instance-000000a4 terminated.
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.431 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:38:39 compute-0 neutron-haproxy-ovnmeta-95da58c1-265e-4dd9-ba00-692853005e46[248376]: [NOTICE]   (248380) : haproxy version is 2.8.14-c23fe91
Oct 02 12:38:39 compute-0 neutron-haproxy-ovnmeta-95da58c1-265e-4dd9-ba00-692853005e46[248376]: [NOTICE]   (248380) : path to executable is /usr/sbin/haproxy
Oct 02 12:38:39 compute-0 neutron-haproxy-ovnmeta-95da58c1-265e-4dd9-ba00-692853005e46[248376]: [WARNING]  (248380) : Exiting Master process...
Oct 02 12:38:39 compute-0 neutron-haproxy-ovnmeta-95da58c1-265e-4dd9-ba00-692853005e46[248376]: [WARNING]  (248380) : Exiting Master process...
Oct 02 12:38:39 compute-0 neutron-haproxy-ovnmeta-95da58c1-265e-4dd9-ba00-692853005e46[248376]: [ALERT]    (248380) : Current worker (248382) exited with code 143 (Terminated)
Oct 02 12:38:39 compute-0 neutron-haproxy-ovnmeta-95da58c1-265e-4dd9-ba00-692853005e46[248376]: [WARNING]  (248380) : All workers exited. Exiting... (0)
Oct 02 12:38:39 compute-0 systemd[1]: libpod-7516803da79ea6db13e45f66b868c2d67abf966e0073020f239f7c05f5cc0d0b.scope: Deactivated successfully.
Oct 02 12:38:39 compute-0 podman[248792]: 2025-10-02 12:38:39.453157425 +0000 UTC m=+0.054302339 container died 7516803da79ea6db13e45f66b868c2d67abf966e0073020f239f7c05f5cc0d0b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-95da58c1-265e-4dd9-ba00-692853005e46, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS)
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.473 2 INFO nova.virt.libvirt.driver [-] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Instance destroyed successfully.
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.473 2 DEBUG nova.objects.instance [None req-e0d21856-856c-4637-9639-257736b8819a 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Lazy-loading 'resources' on Instance uuid 2f4dba21-eb3b-48e5-b17a-724f9ab6459e obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:38:39 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-7516803da79ea6db13e45f66b868c2d67abf966e0073020f239f7c05f5cc0d0b-userdata-shm.mount: Deactivated successfully.
Oct 02 12:38:39 compute-0 systemd[1]: var-lib-containers-storage-overlay-4af8bab3aac0810ed853100f059b5b010714d93fdf99f080f233a54569bc5245-merged.mount: Deactivated successfully.
Oct 02 12:38:39 compute-0 podman[248792]: 2025-10-02 12:38:39.488530599 +0000 UTC m=+0.089675532 container cleanup 7516803da79ea6db13e45f66b868c2d67abf966e0073020f239f7c05f5cc0d0b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-95da58c1-265e-4dd9-ba00-692853005e46, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.490 2 DEBUG nova.virt.libvirt.vif [None req-e0d21856-856c-4637-9639-257736b8819a 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:37:50Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestSnapshotPattern-server-5496910',display_name='tempest-TestSnapshotPattern-server-5496910',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testsnapshotpattern-server-5496910',id=164,image_ref='800f3823-995d-405f-a6ba-5aa2a313bb4a',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBFzJGGGUE+Xks9+aY5SjFk2n2DGAnXfOBhkbeNeuAVWQ/dQZsUYNFa4aU04DL6V5Ahv7YBoVwhzJt5xloq0NtgboR41kXTeWdHADR0n2ucoHL3yxU4d4gs2dS5flZPM85w==',key_name='tempest-TestSnapshotPattern-331136498',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:37:57Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='8f7d693b90ba447196796435b74590f6',ramdisk_id='',reservation_id='r-0ubse6sn',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_boot_roles='reader,member',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_image_location='snapshot',image_image_state='available',image_image_type='snapshot',image_instance_uuid='a489cbb2-1400-41b4-9345-18186b74b900',image_min_disk='1',image_min_ram='0',image_owner_id='8f7d693b90ba447196796435b74590f6',image_owner_project_name='tempest-TestSnapshotPattern-1950942920',image_owner_user_name='tempest-TestSnapshotPattern-1950942920-project-member',image_user_id='6d07868c23de4edc9018d8964b43d954',image_version='8.0',owner_project_name='tempest-TestSnapshotPattern-1950942920',owner_user_name='tempest-TestSnapshotPattern-1950942920-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:38:37Z,user_data=None,user_id='6d07868c23de4edc9018d8964b43d954',uuid=2f4dba21-eb3b-48e5-b17a-724f9ab6459e,vcpu_mod
el=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "4c827308-ad8f-4ea0-ac23-feff2dac22eb", "address": "fa:16:3e:bd:4c:e1", "network": {"id": "95da58c1-265e-4dd9-ba00-692853005e46", "bridge": "br-int", "label": "tempest-TestSnapshotPattern-603762842-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.186", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "8f7d693b90ba447196796435b74590f6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap4c827308-ad", "ovs_interfaceid": "4c827308-ad8f-4ea0-ac23-feff2dac22eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.490 2 DEBUG nova.network.os_vif_util [None req-e0d21856-856c-4637-9639-257736b8819a 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Converting VIF {"id": "4c827308-ad8f-4ea0-ac23-feff2dac22eb", "address": "fa:16:3e:bd:4c:e1", "network": {"id": "95da58c1-265e-4dd9-ba00-692853005e46", "bridge": "br-int", "label": "tempest-TestSnapshotPattern-603762842-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.186", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "8f7d693b90ba447196796435b74590f6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap4c827308-ad", "ovs_interfaceid": "4c827308-ad8f-4ea0-ac23-feff2dac22eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.491 2 DEBUG nova.network.os_vif_util [None req-e0d21856-856c-4637-9639-257736b8819a 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:bd:4c:e1,bridge_name='br-int',has_traffic_filtering=True,id=4c827308-ad8f-4ea0-ac23-feff2dac22eb,network=Network(95da58c1-265e-4dd9-ba00-692853005e46),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap4c827308-ad') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.491 2 DEBUG os_vif [None req-e0d21856-856c-4637-9639-257736b8819a 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:bd:4c:e1,bridge_name='br-int',has_traffic_filtering=True,id=4c827308-ad8f-4ea0-ac23-feff2dac22eb,network=Network(95da58c1-265e-4dd9-ba00-692853005e46),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap4c827308-ad') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.493 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.493 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap4c827308-ad, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.495 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.496 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.498 2 INFO os_vif [None req-e0d21856-856c-4637-9639-257736b8819a 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:bd:4c:e1,bridge_name='br-int',has_traffic_filtering=True,id=4c827308-ad8f-4ea0-ac23-feff2dac22eb,network=Network(95da58c1-265e-4dd9-ba00-692853005e46),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap4c827308-ad')
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.499 2 INFO nova.virt.libvirt.driver [None req-e0d21856-856c-4637-9639-257736b8819a 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Deleting instance files /var/lib/nova/instances/2f4dba21-eb3b-48e5-b17a-724f9ab6459e_del
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.500 2 INFO nova.virt.libvirt.driver [None req-e0d21856-856c-4637-9639-257736b8819a 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Deletion of /var/lib/nova/instances/2f4dba21-eb3b-48e5-b17a-724f9ab6459e_del complete
Oct 02 12:38:39 compute-0 systemd[1]: libpod-conmon-7516803da79ea6db13e45f66b868c2d67abf966e0073020f239f7c05f5cc0d0b.scope: Deactivated successfully.
Oct 02 12:38:39 compute-0 podman[248835]: 2025-10-02 12:38:39.555686497 +0000 UTC m=+0.042864107 container remove 7516803da79ea6db13e45f66b868c2d67abf966e0073020f239f7c05f5cc0d0b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-95da58c1-265e-4dd9-ba00-692853005e46, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:38:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:39.562 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[240f0830-a6a6-4ad3-bbc6-5ed5efb030bb]: (4, ('Thu Oct  2 12:38:39 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-95da58c1-265e-4dd9-ba00-692853005e46 (7516803da79ea6db13e45f66b868c2d67abf966e0073020f239f7c05f5cc0d0b)\n7516803da79ea6db13e45f66b868c2d67abf966e0073020f239f7c05f5cc0d0b\nThu Oct  2 12:38:39 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-95da58c1-265e-4dd9-ba00-692853005e46 (7516803da79ea6db13e45f66b868c2d67abf966e0073020f239f7c05f5cc0d0b)\n7516803da79ea6db13e45f66b868c2d67abf966e0073020f239f7c05f5cc0d0b\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:39.563 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6cc7b4fb-5de6-4a2b-9bc4-30d63dce16e3]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:39.565 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap95da58c1-20, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.566 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:39 compute-0 kernel: tap95da58c1-20: left promiscuous mode
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.569 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:39.573 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a147d9e0-7482-436e-b8c8-baeec68d30b2]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.580 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.589 2 INFO nova.compute.manager [None req-e0d21856-856c-4637-9639-257736b8819a 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Took 0.38 seconds to destroy the instance on the hypervisor.
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.590 2 DEBUG oslo.service.loopingcall [None req-e0d21856-856c-4637-9639-257736b8819a 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.590 2 DEBUG nova.compute.manager [-] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.591 2 DEBUG nova.network.neutron [-] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:38:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:39.601 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3cca4d55-d1ed-4df2-8218-d4e6d841665a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:39.603 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9ca3d389-833d-46b9-80d5-05e6af06b938]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:39.617 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ff7b5fc4-18ee-4165-af5b-a30001bf68d9]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 667423, 'reachable_time': 18012, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 248850, 'error': None, 'target': 'ovnmeta-95da58c1-265e-4dd9-ba00-692853005e46', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:39.620 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-95da58c1-265e-4dd9-ba00-692853005e46 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:38:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:39.620 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[3df77d24-1cb3-4625-811e-1ec40238d233]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:39 compute-0 systemd[1]: run-netns-ovnmeta\x2d95da58c1\x2d265e\x2d4dd9\x2dba00\x2d692853005e46.mount: Deactivated successfully.
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.667 2 DEBUG nova.compute.manager [req-6bc0ee58-4ea7-40ae-9bd9-96ee2ebb03b1 req-30b56567-55d4-4cb1-ac9f-085914fbe606 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Received event network-vif-unplugged-4c827308-ad8f-4ea0-ac23-feff2dac22eb external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.667 2 DEBUG oslo_concurrency.lockutils [req-6bc0ee58-4ea7-40ae-9bd9-96ee2ebb03b1 req-30b56567-55d4-4cb1-ac9f-085914fbe606 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "2f4dba21-eb3b-48e5-b17a-724f9ab6459e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.668 2 DEBUG oslo_concurrency.lockutils [req-6bc0ee58-4ea7-40ae-9bd9-96ee2ebb03b1 req-30b56567-55d4-4cb1-ac9f-085914fbe606 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2f4dba21-eb3b-48e5-b17a-724f9ab6459e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.668 2 DEBUG oslo_concurrency.lockutils [req-6bc0ee58-4ea7-40ae-9bd9-96ee2ebb03b1 req-30b56567-55d4-4cb1-ac9f-085914fbe606 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2f4dba21-eb3b-48e5-b17a-724f9ab6459e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.668 2 DEBUG nova.compute.manager [req-6bc0ee58-4ea7-40ae-9bd9-96ee2ebb03b1 req-30b56567-55d4-4cb1-ac9f-085914fbe606 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] No waiting events found dispatching network-vif-unplugged-4c827308-ad8f-4ea0-ac23-feff2dac22eb pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:38:39 compute-0 nova_compute[192079]: 2025-10-02 12:38:39.668 2 DEBUG nova.compute.manager [req-6bc0ee58-4ea7-40ae-9bd9-96ee2ebb03b1 req-30b56567-55d4-4cb1-ac9f-085914fbe606 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Received event network-vif-unplugged-4c827308-ad8f-4ea0-ac23-feff2dac22eb for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:38:40 compute-0 nova_compute[192079]: 2025-10-02 12:38:40.200 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:41 compute-0 nova_compute[192079]: 2025-10-02 12:38:41.125 2 DEBUG nova.network.neutron [-] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:38:41 compute-0 nova_compute[192079]: 2025-10-02 12:38:41.182 2 DEBUG nova.network.neutron [req-f4866728-6a55-436d-9031-32c45d50c3b0 req-c89026a2-8ebf-41b6-bfcb-c6b32e4b1a45 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Updated VIF entry in instance network info cache for port 4c827308-ad8f-4ea0-ac23-feff2dac22eb. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:38:41 compute-0 nova_compute[192079]: 2025-10-02 12:38:41.183 2 DEBUG nova.network.neutron [req-f4866728-6a55-436d-9031-32c45d50c3b0 req-c89026a2-8ebf-41b6-bfcb-c6b32e4b1a45 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Updating instance_info_cache with network_info: [{"id": "4c827308-ad8f-4ea0-ac23-feff2dac22eb", "address": "fa:16:3e:bd:4c:e1", "network": {"id": "95da58c1-265e-4dd9-ba00-692853005e46", "bridge": "br-int", "label": "tempest-TestSnapshotPattern-603762842-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "8f7d693b90ba447196796435b74590f6", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap4c827308-ad", "ovs_interfaceid": "4c827308-ad8f-4ea0-ac23-feff2dac22eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:38:41 compute-0 nova_compute[192079]: 2025-10-02 12:38:41.192 2 INFO nova.compute.manager [-] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Took 1.60 seconds to deallocate network for instance.
Oct 02 12:38:41 compute-0 nova_compute[192079]: 2025-10-02 12:38:41.202 2 DEBUG nova.compute.manager [req-6cc29324-371e-4c97-8df8-29bebdf62aae req-49355ed3-a6ac-48b0-b004-fd59714ba9a3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Received event network-vif-deleted-4c827308-ad8f-4ea0-ac23-feff2dac22eb external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:38:41 compute-0 nova_compute[192079]: 2025-10-02 12:38:41.203 2 INFO nova.compute.manager [req-6cc29324-371e-4c97-8df8-29bebdf62aae req-49355ed3-a6ac-48b0-b004-fd59714ba9a3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Neutron deleted interface 4c827308-ad8f-4ea0-ac23-feff2dac22eb; detaching it from the instance and deleting it from the info cache
Oct 02 12:38:41 compute-0 nova_compute[192079]: 2025-10-02 12:38:41.203 2 DEBUG nova.network.neutron [req-6cc29324-371e-4c97-8df8-29bebdf62aae req-49355ed3-a6ac-48b0-b004-fd59714ba9a3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:38:41 compute-0 nova_compute[192079]: 2025-10-02 12:38:41.210 2 DEBUG oslo_concurrency.lockutils [req-f4866728-6a55-436d-9031-32c45d50c3b0 req-c89026a2-8ebf-41b6-bfcb-c6b32e4b1a45 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-2f4dba21-eb3b-48e5-b17a-724f9ab6459e" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:38:41 compute-0 nova_compute[192079]: 2025-10-02 12:38:41.235 2 DEBUG nova.compute.manager [req-6cc29324-371e-4c97-8df8-29bebdf62aae req-49355ed3-a6ac-48b0-b004-fd59714ba9a3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Detach interface failed, port_id=4c827308-ad8f-4ea0-ac23-feff2dac22eb, reason: Instance 2f4dba21-eb3b-48e5-b17a-724f9ab6459e could not be found. _process_instance_vif_deleted_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10882
Oct 02 12:38:41 compute-0 nova_compute[192079]: 2025-10-02 12:38:41.293 2 DEBUG oslo_concurrency.lockutils [None req-e0d21856-856c-4637-9639-257736b8819a 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:38:41 compute-0 nova_compute[192079]: 2025-10-02 12:38:41.294 2 DEBUG oslo_concurrency.lockutils [None req-e0d21856-856c-4637-9639-257736b8819a 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:38:41 compute-0 nova_compute[192079]: 2025-10-02 12:38:41.381 2 DEBUG nova.compute.provider_tree [None req-e0d21856-856c-4637-9639-257736b8819a 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:38:41 compute-0 nova_compute[192079]: 2025-10-02 12:38:41.394 2 DEBUG nova.scheduler.client.report [None req-e0d21856-856c-4637-9639-257736b8819a 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:38:41 compute-0 nova_compute[192079]: 2025-10-02 12:38:41.414 2 DEBUG oslo_concurrency.lockutils [None req-e0d21856-856c-4637-9639-257736b8819a 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.120s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:38:41 compute-0 nova_compute[192079]: 2025-10-02 12:38:41.434 2 INFO nova.scheduler.client.report [None req-e0d21856-856c-4637-9639-257736b8819a 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Deleted allocations for instance 2f4dba21-eb3b-48e5-b17a-724f9ab6459e
Oct 02 12:38:41 compute-0 nova_compute[192079]: 2025-10-02 12:38:41.516 2 DEBUG oslo_concurrency.lockutils [None req-e0d21856-856c-4637-9639-257736b8819a 6d07868c23de4edc9018d8964b43d954 8f7d693b90ba447196796435b74590f6 - - default default] Lock "2f4dba21-eb3b-48e5-b17a-724f9ab6459e" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 2.326s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:38:41 compute-0 nova_compute[192079]: 2025-10-02 12:38:41.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:38:41 compute-0 nova_compute[192079]: 2025-10-02 12:38:41.765 2 DEBUG nova.compute.manager [req-16a8e5ba-492d-4152-a447-0199f8b7a5c8 req-5c0efac3-988e-477c-aee9-1ce5e8c7d3a3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Received event network-vif-plugged-4c827308-ad8f-4ea0-ac23-feff2dac22eb external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:38:41 compute-0 nova_compute[192079]: 2025-10-02 12:38:41.766 2 DEBUG oslo_concurrency.lockutils [req-16a8e5ba-492d-4152-a447-0199f8b7a5c8 req-5c0efac3-988e-477c-aee9-1ce5e8c7d3a3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "2f4dba21-eb3b-48e5-b17a-724f9ab6459e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:38:41 compute-0 nova_compute[192079]: 2025-10-02 12:38:41.766 2 DEBUG oslo_concurrency.lockutils [req-16a8e5ba-492d-4152-a447-0199f8b7a5c8 req-5c0efac3-988e-477c-aee9-1ce5e8c7d3a3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2f4dba21-eb3b-48e5-b17a-724f9ab6459e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:38:41 compute-0 nova_compute[192079]: 2025-10-02 12:38:41.767 2 DEBUG oslo_concurrency.lockutils [req-16a8e5ba-492d-4152-a447-0199f8b7a5c8 req-5c0efac3-988e-477c-aee9-1ce5e8c7d3a3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "2f4dba21-eb3b-48e5-b17a-724f9ab6459e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:38:41 compute-0 nova_compute[192079]: 2025-10-02 12:38:41.767 2 DEBUG nova.compute.manager [req-16a8e5ba-492d-4152-a447-0199f8b7a5c8 req-5c0efac3-988e-477c-aee9-1ce5e8c7d3a3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] No waiting events found dispatching network-vif-plugged-4c827308-ad8f-4ea0-ac23-feff2dac22eb pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:38:41 compute-0 nova_compute[192079]: 2025-10-02 12:38:41.767 2 WARNING nova.compute.manager [req-16a8e5ba-492d-4152-a447-0199f8b7a5c8 req-5c0efac3-988e-477c-aee9-1ce5e8c7d3a3 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Received unexpected event network-vif-plugged-4c827308-ad8f-4ea0-ac23-feff2dac22eb for instance with vm_state deleted and task_state None.
Oct 02 12:38:42 compute-0 nova_compute[192079]: 2025-10-02 12:38:42.661 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:42 compute-0 nova_compute[192079]: 2025-10-02 12:38:42.830 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:42 compute-0 nova_compute[192079]: 2025-10-02 12:38:42.841 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:43 compute-0 nova_compute[192079]: 2025-10-02 12:38:43.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:38:43 compute-0 nova_compute[192079]: 2025-10-02 12:38:43.684 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:38:43 compute-0 nova_compute[192079]: 2025-10-02 12:38:43.685 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:38:43 compute-0 nova_compute[192079]: 2025-10-02 12:38:43.685 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:38:43 compute-0 nova_compute[192079]: 2025-10-02 12:38:43.685 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:38:43 compute-0 nova_compute[192079]: 2025-10-02 12:38:43.886 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:38:43 compute-0 nova_compute[192079]: 2025-10-02 12:38:43.888 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5657MB free_disk=73.27243041992188GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:38:43 compute-0 nova_compute[192079]: 2025-10-02 12:38:43.889 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:38:43 compute-0 nova_compute[192079]: 2025-10-02 12:38:43.889 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:38:43 compute-0 nova_compute[192079]: 2025-10-02 12:38:43.951 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:38:43 compute-0 nova_compute[192079]: 2025-10-02 12:38:43.952 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:38:43 compute-0 nova_compute[192079]: 2025-10-02 12:38:43.969 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:38:43 compute-0 nova_compute[192079]: 2025-10-02 12:38:43.989 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:38:44 compute-0 nova_compute[192079]: 2025-10-02 12:38:44.010 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:38:44 compute-0 nova_compute[192079]: 2025-10-02 12:38:44.011 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.122s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:38:44 compute-0 nova_compute[192079]: 2025-10-02 12:38:44.499 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:44.551 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=43, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=42) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:38:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:44.552 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 6 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:38:44 compute-0 nova_compute[192079]: 2025-10-02 12:38:44.552 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:45 compute-0 nova_compute[192079]: 2025-10-02 12:38:45.013 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:38:45 compute-0 nova_compute[192079]: 2025-10-02 12:38:45.013 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:38:47 compute-0 podman[248853]: 2025-10-02 12:38:47.146256603 +0000 UTC m=+0.052143701 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_id=ovn_metadata_agent, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', 
'/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, container_name=ovn_metadata_agent, managed_by=edpm_ansible)
Oct 02 12:38:47 compute-0 podman[248855]: 2025-10-02 12:38:47.152989506 +0000 UTC m=+0.053615871 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 12:38:47 compute-0 podman[248854]: 2025-10-02 12:38:47.181874062 +0000 UTC m=+0.081144941 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, container_name=ovn_controller, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:38:47 compute-0 nova_compute[192079]: 2025-10-02 12:38:47.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:38:47 compute-0 nova_compute[192079]: 2025-10-02 12:38:47.832 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:48 compute-0 nova_compute[192079]: 2025-10-02 12:38:48.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:38:48 compute-0 nova_compute[192079]: 2025-10-02 12:38:48.667 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:38:48 compute-0 nova_compute[192079]: 2025-10-02 12:38:48.717 2 DEBUG oslo_concurrency.lockutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "6171764a-638c-4d3c-9f01-830314a0687a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:38:48 compute-0 nova_compute[192079]: 2025-10-02 12:38:48.718 2 DEBUG oslo_concurrency.lockutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "6171764a-638c-4d3c-9f01-830314a0687a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:38:48 compute-0 nova_compute[192079]: 2025-10-02 12:38:48.743 2 DEBUG nova.compute.manager [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:38:48 compute-0 nova_compute[192079]: 2025-10-02 12:38:48.836 2 DEBUG oslo_concurrency.lockutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:38:48 compute-0 nova_compute[192079]: 2025-10-02 12:38:48.837 2 DEBUG oslo_concurrency.lockutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:38:48 compute-0 nova_compute[192079]: 2025-10-02 12:38:48.846 2 DEBUG nova.virt.hardware [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:38:48 compute-0 nova_compute[192079]: 2025-10-02 12:38:48.847 2 INFO nova.compute.claims [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:38:48 compute-0 nova_compute[192079]: 2025-10-02 12:38:48.977 2 DEBUG nova.compute.provider_tree [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:38:48 compute-0 nova_compute[192079]: 2025-10-02 12:38:48.991 2 DEBUG nova.scheduler.client.report [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.012 2 DEBUG oslo_concurrency.lockutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.176s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.013 2 DEBUG nova.compute.manager [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.069 2 DEBUG nova.compute.manager [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.069 2 DEBUG nova.network.neutron [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.097 2 INFO nova.virt.libvirt.driver [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.120 2 DEBUG nova.compute.manager [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.222 2 DEBUG nova.compute.manager [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.223 2 DEBUG nova.virt.libvirt.driver [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.224 2 INFO nova.virt.libvirt.driver [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Creating image(s)
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.224 2 DEBUG oslo_concurrency.lockutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "/var/lib/nova/instances/6171764a-638c-4d3c-9f01-830314a0687a/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.225 2 DEBUG oslo_concurrency.lockutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "/var/lib/nova/instances/6171764a-638c-4d3c-9f01-830314a0687a/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.227 2 DEBUG oslo_concurrency.lockutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "/var/lib/nova/instances/6171764a-638c-4d3c-9f01-830314a0687a/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.258 2 DEBUG oslo_concurrency.processutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.317 2 DEBUG oslo_concurrency.processutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.059s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.318 2 DEBUG oslo_concurrency.lockutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.319 2 DEBUG oslo_concurrency.lockutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.336 2 DEBUG oslo_concurrency.processutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.392 2 DEBUG oslo_concurrency.processutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.394 2 DEBUG oslo_concurrency.processutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/6171764a-638c-4d3c-9f01-830314a0687a/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.442 2 DEBUG nova.policy [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.447 2 DEBUG oslo_concurrency.processutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/6171764a-638c-4d3c-9f01-830314a0687a/disk 1073741824" returned: 0 in 0.053s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.448 2 DEBUG oslo_concurrency.lockutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.129s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.449 2 DEBUG oslo_concurrency.processutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.503 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.524 2 DEBUG oslo_concurrency.processutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.075s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.525 2 DEBUG nova.virt.disk.api [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Checking if we can resize image /var/lib/nova/instances/6171764a-638c-4d3c-9f01-830314a0687a/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.525 2 DEBUG oslo_concurrency.processutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/6171764a-638c-4d3c-9f01-830314a0687a/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.583 2 DEBUG oslo_concurrency.processutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/6171764a-638c-4d3c-9f01-830314a0687a/disk --force-share --output=json" returned: 0 in 0.058s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.585 2 DEBUG nova.virt.disk.api [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Cannot resize image /var/lib/nova/instances/6171764a-638c-4d3c-9f01-830314a0687a/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.585 2 DEBUG nova.objects.instance [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lazy-loading 'migration_context' on Instance uuid 6171764a-638c-4d3c-9f01-830314a0687a obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.600 2 DEBUG nova.virt.libvirt.driver [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.600 2 DEBUG nova.virt.libvirt.driver [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Ensure instance console log exists: /var/lib/nova/instances/6171764a-638c-4d3c-9f01-830314a0687a/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.601 2 DEBUG oslo_concurrency.lockutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.602 2 DEBUG oslo_concurrency.lockutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.602 2 DEBUG oslo_concurrency.lockutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.667 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.667 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.686 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Skipping network cache update for instance because it is Building. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9871
Oct 02 12:38:49 compute-0 nova_compute[192079]: 2025-10-02 12:38:49.687 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:38:50 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:50.553 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '43'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:38:51 compute-0 nova_compute[192079]: 2025-10-02 12:38:51.111 2 DEBUG nova.network.neutron [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Successfully updated port: b2d256d9-6788-41ed-a218-ab6139d999cb _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:38:51 compute-0 nova_compute[192079]: 2025-10-02 12:38:51.128 2 DEBUG oslo_concurrency.lockutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "refresh_cache-6171764a-638c-4d3c-9f01-830314a0687a" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:38:51 compute-0 nova_compute[192079]: 2025-10-02 12:38:51.129 2 DEBUG oslo_concurrency.lockutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquired lock "refresh_cache-6171764a-638c-4d3c-9f01-830314a0687a" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:38:51 compute-0 nova_compute[192079]: 2025-10-02 12:38:51.129 2 DEBUG nova.network.neutron [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:38:51 compute-0 nova_compute[192079]: 2025-10-02 12:38:51.236 2 DEBUG nova.compute.manager [req-d4ed7b8a-8c87-4153-bc5d-51525eedff2c req-45090cee-9ee4-4a77-97a6-e4ae155f7c14 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Received event network-changed-b2d256d9-6788-41ed-a218-ab6139d999cb external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:38:51 compute-0 nova_compute[192079]: 2025-10-02 12:38:51.237 2 DEBUG nova.compute.manager [req-d4ed7b8a-8c87-4153-bc5d-51525eedff2c req-45090cee-9ee4-4a77-97a6-e4ae155f7c14 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Refreshing instance network info cache due to event network-changed-b2d256d9-6788-41ed-a218-ab6139d999cb. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:38:51 compute-0 nova_compute[192079]: 2025-10-02 12:38:51.237 2 DEBUG oslo_concurrency.lockutils [req-d4ed7b8a-8c87-4153-bc5d-51525eedff2c req-45090cee-9ee4-4a77-97a6-e4ae155f7c14 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-6171764a-638c-4d3c-9f01-830314a0687a" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:38:51 compute-0 nova_compute[192079]: 2025-10-02 12:38:51.320 2 DEBUG nova.network.neutron [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.834 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.842 2 DEBUG nova.network.neutron [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Updating instance_info_cache with network_info: [{"id": "b2d256d9-6788-41ed-a218-ab6139d999cb", "address": "fa:16:3e:02:80:1f", "network": {"id": "670889c7-549b-45d0-be10-992f080979ef", "bridge": "br-int", "label": "tempest-network-smoke--1745189972", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb2d256d9-67", "ovs_interfaceid": "b2d256d9-6788-41ed-a218-ab6139d999cb", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.866 2 DEBUG oslo_concurrency.lockutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Releasing lock "refresh_cache-6171764a-638c-4d3c-9f01-830314a0687a" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.866 2 DEBUG nova.compute.manager [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Instance network_info: |[{"id": "b2d256d9-6788-41ed-a218-ab6139d999cb", "address": "fa:16:3e:02:80:1f", "network": {"id": "670889c7-549b-45d0-be10-992f080979ef", "bridge": "br-int", "label": "tempest-network-smoke--1745189972", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb2d256d9-67", "ovs_interfaceid": "b2d256d9-6788-41ed-a218-ab6139d999cb", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.867 2 DEBUG oslo_concurrency.lockutils [req-d4ed7b8a-8c87-4153-bc5d-51525eedff2c req-45090cee-9ee4-4a77-97a6-e4ae155f7c14 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-6171764a-638c-4d3c-9f01-830314a0687a" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.867 2 DEBUG nova.network.neutron [req-d4ed7b8a-8c87-4153-bc5d-51525eedff2c req-45090cee-9ee4-4a77-97a6-e4ae155f7c14 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Refreshing network info cache for port b2d256d9-6788-41ed-a218-ab6139d999cb _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.869 2 DEBUG nova.virt.libvirt.driver [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Start _get_guest_xml network_info=[{"id": "b2d256d9-6788-41ed-a218-ab6139d999cb", "address": "fa:16:3e:02:80:1f", "network": {"id": "670889c7-549b-45d0-be10-992f080979ef", "bridge": "br-int", "label": "tempest-network-smoke--1745189972", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb2d256d9-67", "ovs_interfaceid": "b2d256d9-6788-41ed-a218-ab6139d999cb", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.873 2 WARNING nova.virt.libvirt.driver [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.878 2 DEBUG nova.virt.libvirt.host [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.879 2 DEBUG nova.virt.libvirt.host [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.883 2 DEBUG nova.virt.libvirt.host [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.884 2 DEBUG nova.virt.libvirt.host [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.886 2 DEBUG nova.virt.libvirt.driver [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.886 2 DEBUG nova.virt.hardware [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.887 2 DEBUG nova.virt.hardware [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.888 2 DEBUG nova.virt.hardware [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.888 2 DEBUG nova.virt.hardware [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.889 2 DEBUG nova.virt.hardware [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.889 2 DEBUG nova.virt.hardware [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.889 2 DEBUG nova.virt.hardware [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.890 2 DEBUG nova.virt.hardware [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.890 2 DEBUG nova.virt.hardware [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.891 2 DEBUG nova.virt.hardware [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.891 2 DEBUG nova.virt.hardware [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.897 2 DEBUG nova.virt.libvirt.vif [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:38:47Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestNetworkBasicOps-server-1254201559',display_name='tempest-TestNetworkBasicOps-server-1254201559',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkbasicops-server-1254201559',id=166,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBIMqMDbom+PK4c/Jv5XtvQfFH1fkW+FSRUAxnWfxRcw1UDYrrEG6aps2VuNnmMnWWvdKNfhliXR0Zwf+47iOmuQsbF0874Je9gYU9lBaFNGs4KfbyyC0JVuX6gQe774tNQ==',key_name='tempest-TestNetworkBasicOps-203379559',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='6e2a4899168a47618e377cb3ac85ddd2',ramdisk_id='',reservation_id='r-dpi192j5',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestNetworkBasicOps-1323893370',owner_user_name='tempest-TestNetworkBasicOps-1323893370-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:38:49Z,user_data=None,user_id='a1898fdf056c4a249c33590f26d4d845',uuid=6171764a-638c-4d3c-9f01-830314a0687a,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "b2d256d9-6788-41ed-a218-ab6139d999cb", "address": "fa:16:3e:02:80:1f", "network": {"id": "670889c7-549b-45d0-be10-992f080979ef", "bridge": "br-int", "label": "tempest-network-smoke--1745189972", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": 
{"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb2d256d9-67", "ovs_interfaceid": "b2d256d9-6788-41ed-a218-ab6139d999cb", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.897 2 DEBUG nova.network.os_vif_util [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converting VIF {"id": "b2d256d9-6788-41ed-a218-ab6139d999cb", "address": "fa:16:3e:02:80:1f", "network": {"id": "670889c7-549b-45d0-be10-992f080979ef", "bridge": "br-int", "label": "tempest-network-smoke--1745189972", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb2d256d9-67", "ovs_interfaceid": "b2d256d9-6788-41ed-a218-ab6139d999cb", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.898 2 DEBUG nova.network.os_vif_util [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:02:80:1f,bridge_name='br-int',has_traffic_filtering=True,id=b2d256d9-6788-41ed-a218-ab6139d999cb,network=Network(670889c7-549b-45d0-be10-992f080979ef),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=True,vif_name='tapb2d256d9-67') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.900 2 DEBUG nova.objects.instance [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lazy-loading 'pci_devices' on Instance uuid 6171764a-638c-4d3c-9f01-830314a0687a obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.927 2 DEBUG nova.virt.libvirt.driver [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:38:52 compute-0 nova_compute[192079]:   <uuid>6171764a-638c-4d3c-9f01-830314a0687a</uuid>
Oct 02 12:38:52 compute-0 nova_compute[192079]:   <name>instance-000000a6</name>
Oct 02 12:38:52 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:38:52 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:38:52 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:38:52 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:       <nova:name>tempest-TestNetworkBasicOps-server-1254201559</nova:name>
Oct 02 12:38:52 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:38:52</nova:creationTime>
Oct 02 12:38:52 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:38:52 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:38:52 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:38:52 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:38:52 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:38:52 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:38:52 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:38:52 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:38:52 compute-0 nova_compute[192079]:         <nova:user uuid="a1898fdf056c4a249c33590f26d4d845">tempest-TestNetworkBasicOps-1323893370-project-member</nova:user>
Oct 02 12:38:52 compute-0 nova_compute[192079]:         <nova:project uuid="6e2a4899168a47618e377cb3ac85ddd2">tempest-TestNetworkBasicOps-1323893370</nova:project>
Oct 02 12:38:52 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:38:52 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:38:52 compute-0 nova_compute[192079]:         <nova:port uuid="b2d256d9-6788-41ed-a218-ab6139d999cb">
Oct 02 12:38:52 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.13" ipVersion="4"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:38:52 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:38:52 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:38:52 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <system>
Oct 02 12:38:52 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:38:52 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:38:52 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:38:52 compute-0 nova_compute[192079]:       <entry name="serial">6171764a-638c-4d3c-9f01-830314a0687a</entry>
Oct 02 12:38:52 compute-0 nova_compute[192079]:       <entry name="uuid">6171764a-638c-4d3c-9f01-830314a0687a</entry>
Oct 02 12:38:52 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     </system>
Oct 02 12:38:52 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:38:52 compute-0 nova_compute[192079]:   <os>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:   </os>
Oct 02 12:38:52 compute-0 nova_compute[192079]:   <features>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:   </features>
Oct 02 12:38:52 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:38:52 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:38:52 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:38:52 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/6171764a-638c-4d3c-9f01-830314a0687a/disk"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:38:52 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/6171764a-638c-4d3c-9f01-830314a0687a/disk.config"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:38:52 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:02:80:1f"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:       <target dev="tapb2d256d9-67"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:38:52 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/6171764a-638c-4d3c-9f01-830314a0687a/console.log" append="off"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <video>
Oct 02 12:38:52 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     </video>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:38:52 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:38:52 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:38:52 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:38:52 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:38:52 compute-0 nova_compute[192079]: </domain>
Oct 02 12:38:52 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.929 2 DEBUG nova.compute.manager [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Preparing to wait for external event network-vif-plugged-b2d256d9-6788-41ed-a218-ab6139d999cb prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.929 2 DEBUG oslo_concurrency.lockutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "6171764a-638c-4d3c-9f01-830314a0687a-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.929 2 DEBUG oslo_concurrency.lockutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "6171764a-638c-4d3c-9f01-830314a0687a-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.929 2 DEBUG oslo_concurrency.lockutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "6171764a-638c-4d3c-9f01-830314a0687a-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.930 2 DEBUG nova.virt.libvirt.vif [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:38:47Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestNetworkBasicOps-server-1254201559',display_name='tempest-TestNetworkBasicOps-server-1254201559',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkbasicops-server-1254201559',id=166,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBIMqMDbom+PK4c/Jv5XtvQfFH1fkW+FSRUAxnWfxRcw1UDYrrEG6aps2VuNnmMnWWvdKNfhliXR0Zwf+47iOmuQsbF0874Je9gYU9lBaFNGs4KfbyyC0JVuX6gQe774tNQ==',key_name='tempest-TestNetworkBasicOps-203379559',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='6e2a4899168a47618e377cb3ac85ddd2',ramdisk_id='',reservation_id='r-dpi192j5',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestNetworkBasicOps-1323893370',owner_user_name='tempest-TestNetworkBasicOps-1323893370-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:38:49Z,user_data=None,user_id='a1898fdf056c4a249c33590f26d4d845',uuid=6171764a-638c-4d3c-9f01-830314a0687a,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "b2d256d9-6788-41ed-a218-ab6139d999cb", "address": "fa:16:3e:02:80:1f", "network": {"id": "670889c7-549b-45d0-be10-992f080979ef", "bridge": "br-int", "label": "tempest-network-smoke--1745189972", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": 
{"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb2d256d9-67", "ovs_interfaceid": "b2d256d9-6788-41ed-a218-ab6139d999cb", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.931 2 DEBUG nova.network.os_vif_util [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converting VIF {"id": "b2d256d9-6788-41ed-a218-ab6139d999cb", "address": "fa:16:3e:02:80:1f", "network": {"id": "670889c7-549b-45d0-be10-992f080979ef", "bridge": "br-int", "label": "tempest-network-smoke--1745189972", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb2d256d9-67", "ovs_interfaceid": "b2d256d9-6788-41ed-a218-ab6139d999cb", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.931 2 DEBUG nova.network.os_vif_util [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:02:80:1f,bridge_name='br-int',has_traffic_filtering=True,id=b2d256d9-6788-41ed-a218-ab6139d999cb,network=Network(670889c7-549b-45d0-be10-992f080979ef),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=True,vif_name='tapb2d256d9-67') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.932 2 DEBUG os_vif [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:02:80:1f,bridge_name='br-int',has_traffic_filtering=True,id=b2d256d9-6788-41ed-a218-ab6139d999cb,network=Network(670889c7-549b-45d0-be10-992f080979ef),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=True,vif_name='tapb2d256d9-67') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.932 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.933 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.933 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.936 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.936 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapb2d256d9-67, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.936 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapb2d256d9-67, col_values=(('external_ids', {'iface-id': 'b2d256d9-6788-41ed-a218-ab6139d999cb', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:02:80:1f', 'vm-uuid': '6171764a-638c-4d3c-9f01-830314a0687a'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.938 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:52 compute-0 NetworkManager[51160]: <info>  [1759408732.9391] manager: (tapb2d256d9-67): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/314)
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.940 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.944 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.945 2 INFO os_vif [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:02:80:1f,bridge_name='br-int',has_traffic_filtering=True,id=b2d256d9-6788-41ed-a218-ab6139d999cb,network=Network(670889c7-549b-45d0-be10-992f080979ef),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=True,vif_name='tapb2d256d9-67')
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.995 2 DEBUG nova.virt.libvirt.driver [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.995 2 DEBUG nova.virt.libvirt.driver [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.996 2 DEBUG nova.virt.libvirt.driver [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] No VIF found with MAC fa:16:3e:02:80:1f, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:38:52 compute-0 nova_compute[192079]: 2025-10-02 12:38:52.996 2 INFO nova.virt.libvirt.driver [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Using config drive
Oct 02 12:38:53 compute-0 nova_compute[192079]: 2025-10-02 12:38:53.496 2 INFO nova.virt.libvirt.driver [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Creating config drive at /var/lib/nova/instances/6171764a-638c-4d3c-9f01-830314a0687a/disk.config
Oct 02 12:38:53 compute-0 nova_compute[192079]: 2025-10-02 12:38:53.507 2 DEBUG oslo_concurrency.processutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/6171764a-638c-4d3c-9f01-830314a0687a/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpaivkobol execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:38:53 compute-0 nova_compute[192079]: 2025-10-02 12:38:53.654 2 DEBUG oslo_concurrency.processutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/6171764a-638c-4d3c-9f01-830314a0687a/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpaivkobol" returned: 0 in 0.147s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:38:53 compute-0 NetworkManager[51160]: <info>  [1759408733.7334] manager: (tapb2d256d9-67): new Tun device (/org/freedesktop/NetworkManager/Devices/315)
Oct 02 12:38:53 compute-0 kernel: tapb2d256d9-67: entered promiscuous mode
Oct 02 12:38:53 compute-0 nova_compute[192079]: 2025-10-02 12:38:53.762 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:53 compute-0 ovn_controller[94336]: 2025-10-02T12:38:53Z|00647|binding|INFO|Claiming lport b2d256d9-6788-41ed-a218-ab6139d999cb for this chassis.
Oct 02 12:38:53 compute-0 ovn_controller[94336]: 2025-10-02T12:38:53Z|00648|binding|INFO|b2d256d9-6788-41ed-a218-ab6139d999cb: Claiming fa:16:3e:02:80:1f 10.100.0.13
Oct 02 12:38:53 compute-0 systemd-udevd[248948]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:38:53 compute-0 nova_compute[192079]: 2025-10-02 12:38:53.767 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:53 compute-0 NetworkManager[51160]: <info>  [1759408733.7795] device (tapb2d256d9-67): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:38:53 compute-0 NetworkManager[51160]: <info>  [1759408733.7802] device (tapb2d256d9-67): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:38:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:53.796 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:02:80:1f 10.100.0.13'], port_security=['fa:16:3e:02:80:1f 10.100.0.13'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'name': 'tempest-TestNetworkBasicOps-1976934795', 'neutron:cidrs': '10.100.0.13/28', 'neutron:device_id': '6171764a-638c-4d3c-9f01-830314a0687a', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-670889c7-549b-45d0-be10-992f080979ef', 'neutron:port_capabilities': '', 'neutron:port_name': 'tempest-TestNetworkBasicOps-1976934795', 'neutron:project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'neutron:revision_number': '2', 'neutron:security_group_ids': '1aab0b39-6daf-41d1-a7da-b7bb077ff5e9', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=54854aa2-539b-45ea-833b-3fc4d3ced3bf, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=b2d256d9-6788-41ed-a218-ab6139d999cb) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:38:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:53.797 103294 INFO neutron.agent.ovn.metadata.agent [-] Port b2d256d9-6788-41ed-a218-ab6139d999cb in datapath 670889c7-549b-45d0-be10-992f080979ef bound to our chassis
Oct 02 12:38:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:53.798 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 670889c7-549b-45d0-be10-992f080979ef
Oct 02 12:38:53 compute-0 systemd-machined[152150]: New machine qemu-81-instance-000000a6.
Oct 02 12:38:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:53.808 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[49574efe-6184-4861-bee5-c5a4ddf71835]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:53.809 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap670889c7-51 in ovnmeta-670889c7-549b-45d0-be10-992f080979ef namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:38:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:53.811 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap670889c7-50 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:38:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:53.811 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[63f4ec60-77a5-42c2-b817-2669d6ce96ef]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:53.812 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f2fce10a-6ac5-4b80-940c-c0185f72460f]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:53.825 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[ce992438-de4d-4e4e-8fc8-98ae59fc3be6]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:53 compute-0 systemd[1]: Started Virtual Machine qemu-81-instance-000000a6.
Oct 02 12:38:53 compute-0 ovn_controller[94336]: 2025-10-02T12:38:53Z|00649|binding|INFO|Setting lport b2d256d9-6788-41ed-a218-ab6139d999cb ovn-installed in OVS
Oct 02 12:38:53 compute-0 ovn_controller[94336]: 2025-10-02T12:38:53Z|00650|binding|INFO|Setting lport b2d256d9-6788-41ed-a218-ab6139d999cb up in Southbound
Oct 02 12:38:53 compute-0 nova_compute[192079]: 2025-10-02 12:38:53.855 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:53.860 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d52d0754-d069-45fe-9f91-8f120ab1d656]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:53.888 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[849e48b3-493d-4cc1-b0a1-8d4b3c5dd1ce]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:53.894 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[012255f8-c15b-403b-a3f6-51347b5e7ce0]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:53 compute-0 NetworkManager[51160]: <info>  [1759408733.8953] manager: (tap670889c7-50): new Veth device (/org/freedesktop/NetworkManager/Devices/316)
Oct 02 12:38:53 compute-0 systemd-udevd[248952]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:38:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:53.925 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[2b64c647-d545-4e46-b79a-7f0e329c810c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:53.927 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[d2f15dd7-9eef-42fc-96f5-5b7355013bdb]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:53 compute-0 NetworkManager[51160]: <info>  [1759408733.9473] device (tap670889c7-50): carrier: link connected
Oct 02 12:38:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:53.952 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[096774de-6a7b-4181-a5f6-ef6ddf31c8a6]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:53.975 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a5182f0e-9aa6-486e-bb83-d31df5ef0cd1]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap670889c7-51'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:4d:42:59'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 206], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 673157, 'reachable_time': 40049, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 248984, 'error': None, 'target': 'ovnmeta-670889c7-549b-45d0-be10-992f080979ef', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:53.992 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0b2edc24-ba3a-4274-9b10-59b865cbe39a]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe4d:4259'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 673157, 'tstamp': 673157}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 248985, 'error': None, 'target': 'ovnmeta-670889c7-549b-45d0-be10-992f080979ef', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:54.012 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[dcbcc08a-21e6-4fc6-bd72-b8cc9cb8717e]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap670889c7-51'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:4d:42:59'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 2, 'tx_packets': 1, 'rx_bytes': 220, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 2, 'tx_packets': 1, 'rx_bytes': 220, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 206], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 673157, 'reachable_time': 40049, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 2, 'inoctets': 192, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 2, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 192, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 2, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 248986, 'error': None, 'target': 'ovnmeta-670889c7-549b-45d0-be10-992f080979ef', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:54.044 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6a01caa5-f08b-4557-a11b-373b90306967]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:54.107 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3aa437f7-00a8-42fa-b5cc-3a334a416aa4]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:54.109 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap670889c7-50, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:54.109 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:54.109 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap670889c7-50, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:38:54 compute-0 nova_compute[192079]: 2025-10-02 12:38:54.111 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:54 compute-0 kernel: tap670889c7-50: entered promiscuous mode
Oct 02 12:38:54 compute-0 NetworkManager[51160]: <info>  [1759408734.1126] manager: (tap670889c7-50): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/317)
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:54.115 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap670889c7-50, col_values=(('external_ids', {'iface-id': 'cdb35ee2-22af-436d-82f3-c08eadf2b2c7'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:38:54 compute-0 nova_compute[192079]: 2025-10-02 12:38:54.115 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:54 compute-0 nova_compute[192079]: 2025-10-02 12:38:54.116 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:54 compute-0 ovn_controller[94336]: 2025-10-02T12:38:54Z|00651|binding|INFO|Releasing lport cdb35ee2-22af-436d-82f3-c08eadf2b2c7 from this chassis (sb_readonly=0)
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:54.117 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/670889c7-549b-45d0-be10-992f080979ef.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/670889c7-549b-45d0-be10-992f080979ef.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:54.118 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8df75fdb-e2e4-46f5-a29e-63717c3ada13]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:54.118 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-670889c7-549b-45d0-be10-992f080979ef
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/670889c7-549b-45d0-be10-992f080979ef.pid.haproxy
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 670889c7-549b-45d0-be10-992f080979ef
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:38:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:54.119 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-670889c7-549b-45d0-be10-992f080979ef', 'env', 'PROCESS_TAG=haproxy-670889c7-549b-45d0-be10-992f080979ef', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/670889c7-549b-45d0-be10-992f080979ef.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:38:54 compute-0 nova_compute[192079]: 2025-10-02 12:38:54.131 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:54 compute-0 nova_compute[192079]: 2025-10-02 12:38:54.471 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759408719.4706695, 2f4dba21-eb3b-48e5-b17a-724f9ab6459e => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:38:54 compute-0 nova_compute[192079]: 2025-10-02 12:38:54.472 2 INFO nova.compute.manager [-] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] VM Stopped (Lifecycle Event)
Oct 02 12:38:54 compute-0 nova_compute[192079]: 2025-10-02 12:38:54.497 2 DEBUG nova.compute.manager [None req-1b686943-afe8-430c-923b-7ca4f5adb3c9 - - - - - -] [instance: 2f4dba21-eb3b-48e5-b17a-724f9ab6459e] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:38:54 compute-0 podman[249025]: 2025-10-02 12:38:54.521206226 +0000 UTC m=+0.057427074 container create 44a16113847e25688f51f398bc2aa671406b31d3ac10710c3056a90c13f7717b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-670889c7-549b-45d0-be10-992f080979ef, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:38:54 compute-0 systemd[1]: Started libpod-conmon-44a16113847e25688f51f398bc2aa671406b31d3ac10710c3056a90c13f7717b.scope.
Oct 02 12:38:54 compute-0 podman[249025]: 2025-10-02 12:38:54.487697934 +0000 UTC m=+0.023918832 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:38:54 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:38:54 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/10e2eaa3886c2cbd440ab9e9175a75b5ac45fc79f3a5749f37301f1860311331/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:38:54 compute-0 podman[249025]: 2025-10-02 12:38:54.609416218 +0000 UTC m=+0.145637146 container init 44a16113847e25688f51f398bc2aa671406b31d3ac10710c3056a90c13f7717b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-670889c7-549b-45d0-be10-992f080979ef, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001)
Oct 02 12:38:54 compute-0 podman[249025]: 2025-10-02 12:38:54.619594995 +0000 UTC m=+0.155815883 container start 44a16113847e25688f51f398bc2aa671406b31d3ac10710c3056a90c13f7717b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-670889c7-549b-45d0-be10-992f080979ef, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:38:54 compute-0 neutron-haproxy-ovnmeta-670889c7-549b-45d0-be10-992f080979ef[249040]: [NOTICE]   (249044) : New worker (249046) forked
Oct 02 12:38:54 compute-0 neutron-haproxy-ovnmeta-670889c7-549b-45d0-be10-992f080979ef[249040]: [NOTICE]   (249044) : Loading success.
Oct 02 12:38:54 compute-0 nova_compute[192079]: 2025-10-02 12:38:54.750 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408734.7496219, 6171764a-638c-4d3c-9f01-830314a0687a => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:38:54 compute-0 nova_compute[192079]: 2025-10-02 12:38:54.750 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] VM Started (Lifecycle Event)
Oct 02 12:38:54 compute-0 nova_compute[192079]: 2025-10-02 12:38:54.771 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:38:54 compute-0 nova_compute[192079]: 2025-10-02 12:38:54.775 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408734.7530406, 6171764a-638c-4d3c-9f01-830314a0687a => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:38:54 compute-0 nova_compute[192079]: 2025-10-02 12:38:54.775 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] VM Paused (Lifecycle Event)
Oct 02 12:38:54 compute-0 nova_compute[192079]: 2025-10-02 12:38:54.798 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:38:54 compute-0 nova_compute[192079]: 2025-10-02 12:38:54.799 2 DEBUG nova.network.neutron [req-d4ed7b8a-8c87-4153-bc5d-51525eedff2c req-45090cee-9ee4-4a77-97a6-e4ae155f7c14 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Updated VIF entry in instance network info cache for port b2d256d9-6788-41ed-a218-ab6139d999cb. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:38:54 compute-0 nova_compute[192079]: 2025-10-02 12:38:54.799 2 DEBUG nova.network.neutron [req-d4ed7b8a-8c87-4153-bc5d-51525eedff2c req-45090cee-9ee4-4a77-97a6-e4ae155f7c14 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Updating instance_info_cache with network_info: [{"id": "b2d256d9-6788-41ed-a218-ab6139d999cb", "address": "fa:16:3e:02:80:1f", "network": {"id": "670889c7-549b-45d0-be10-992f080979ef", "bridge": "br-int", "label": "tempest-network-smoke--1745189972", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb2d256d9-67", "ovs_interfaceid": "b2d256d9-6788-41ed-a218-ab6139d999cb", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:38:54 compute-0 nova_compute[192079]: 2025-10-02 12:38:54.803 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:38:54 compute-0 nova_compute[192079]: 2025-10-02 12:38:54.820 2 DEBUG oslo_concurrency.lockutils [req-d4ed7b8a-8c87-4153-bc5d-51525eedff2c req-45090cee-9ee4-4a77-97a6-e4ae155f7c14 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-6171764a-638c-4d3c-9f01-830314a0687a" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:38:54 compute-0 nova_compute[192079]: 2025-10-02 12:38:54.822 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:38:55 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:55.259 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:2b:a1:21 10.100.0.2 2001:db8::f816:3eff:fe2b:a121'], port_security=[], type=localport, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': ''}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.2/28 2001:db8::f816:3eff:fe2b:a121/64', 'neutron:device_id': 'ovnmeta-b9d6d69e-0327-4bcf-b8a6-b2cf69a4d177', 'neutron:device_owner': 'network:distributed', 'neutron:mtu': '', 'neutron:network_name': 'neutron-b9d6d69e-0327-4bcf-b8a6-b2cf69a4d177', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'neutron:revision_number': '3', 'neutron:security_group_ids': '', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=5fce3bea-36c3-4b1e-bdee-b694cf8990ad, chassis=[], tunnel_key=1, gateway_chassis=[], requested_chassis=[], logical_port=ad07d234-3bc8-429a-8834-7a9ae3274be2) old=Port_Binding(mac=['fa:16:3e:2b:a1:21 10.100.0.2'], external_ids={'neutron:cidrs': '10.100.0.2/28', 'neutron:device_id': 'ovnmeta-b9d6d69e-0327-4bcf-b8a6-b2cf69a4d177', 'neutron:device_owner': 'network:distributed', 'neutron:mtu': '', 'neutron:network_name': 'neutron-b9d6d69e-0327-4bcf-b8a6-b2cf69a4d177', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'neutron:revision_number': '2', 'neutron:security_group_ids': '', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}) matches 
/usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:38:55 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:55.262 103294 INFO neutron.agent.ovn.metadata.agent [-] Metadata Port ad07d234-3bc8-429a-8834-7a9ae3274be2 in datapath b9d6d69e-0327-4bcf-b8a6-b2cf69a4d177 updated
Oct 02 12:38:55 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:55.263 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network b9d6d69e-0327-4bcf-b8a6-b2cf69a4d177, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:38:55 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:55.264 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9526c623-c518-43fd-8351-df7a910f028a]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:56 compute-0 nova_compute[192079]: 2025-10-02 12:38:56.680 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:38:57 compute-0 nova_compute[192079]: 2025-10-02 12:38:57.838 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:57 compute-0 nova_compute[192079]: 2025-10-02 12:38:57.939 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:38:58 compute-0 podman[249055]: 2025-10-02 12:38:58.1663398 +0000 UTC m=+0.073927703 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, config_id=edpm, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', 
'/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute)
Oct 02 12:38:59 compute-0 nova_compute[192079]: 2025-10-02 12:38:59.559 2 DEBUG nova.compute.manager [req-3d6d4353-26db-4978-b8a5-8546f815afd2 req-8083d631-cf6e-47f4-894d-988857dab37b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Received event network-vif-plugged-b2d256d9-6788-41ed-a218-ab6139d999cb external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:38:59 compute-0 nova_compute[192079]: 2025-10-02 12:38:59.560 2 DEBUG oslo_concurrency.lockutils [req-3d6d4353-26db-4978-b8a5-8546f815afd2 req-8083d631-cf6e-47f4-894d-988857dab37b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "6171764a-638c-4d3c-9f01-830314a0687a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:38:59 compute-0 nova_compute[192079]: 2025-10-02 12:38:59.561 2 DEBUG oslo_concurrency.lockutils [req-3d6d4353-26db-4978-b8a5-8546f815afd2 req-8083d631-cf6e-47f4-894d-988857dab37b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6171764a-638c-4d3c-9f01-830314a0687a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:38:59 compute-0 nova_compute[192079]: 2025-10-02 12:38:59.562 2 DEBUG oslo_concurrency.lockutils [req-3d6d4353-26db-4978-b8a5-8546f815afd2 req-8083d631-cf6e-47f4-894d-988857dab37b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6171764a-638c-4d3c-9f01-830314a0687a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:38:59 compute-0 nova_compute[192079]: 2025-10-02 12:38:59.562 2 DEBUG nova.compute.manager [req-3d6d4353-26db-4978-b8a5-8546f815afd2 req-8083d631-cf6e-47f4-894d-988857dab37b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Processing event network-vif-plugged-b2d256d9-6788-41ed-a218-ab6139d999cb _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:38:59 compute-0 nova_compute[192079]: 2025-10-02 12:38:59.564 2 DEBUG nova.compute.manager [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Instance event wait completed in 4 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:38:59 compute-0 nova_compute[192079]: 2025-10-02 12:38:59.570 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408739.5697825, 6171764a-638c-4d3c-9f01-830314a0687a => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:38:59 compute-0 nova_compute[192079]: 2025-10-02 12:38:59.570 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] VM Resumed (Lifecycle Event)
Oct 02 12:38:59 compute-0 nova_compute[192079]: 2025-10-02 12:38:59.574 2 DEBUG nova.virt.libvirt.driver [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:38:59 compute-0 nova_compute[192079]: 2025-10-02 12:38:59.579 2 INFO nova.virt.libvirt.driver [-] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Instance spawned successfully.
Oct 02 12:38:59 compute-0 nova_compute[192079]: 2025-10-02 12:38:59.580 2 DEBUG nova.virt.libvirt.driver [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:38:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:59.583 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:2b:a1:21 10.100.0.2 2001:db8:0:1:f816:3eff:fe2b:a121 2001:db8::f816:3eff:fe2b:a121'], port_security=[], type=localport, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': ''}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.2/28 2001:db8:0:1:f816:3eff:fe2b:a121/64 2001:db8::f816:3eff:fe2b:a121/64', 'neutron:device_id': 'ovnmeta-b9d6d69e-0327-4bcf-b8a6-b2cf69a4d177', 'neutron:device_owner': 'network:distributed', 'neutron:mtu': '', 'neutron:network_name': 'neutron-b9d6d69e-0327-4bcf-b8a6-b2cf69a4d177', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'neutron:revision_number': '4', 'neutron:security_group_ids': '', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=5fce3bea-36c3-4b1e-bdee-b694cf8990ad, chassis=[], tunnel_key=1, gateway_chassis=[], requested_chassis=[], logical_port=ad07d234-3bc8-429a-8834-7a9ae3274be2) old=Port_Binding(mac=['fa:16:3e:2b:a1:21 10.100.0.2 2001:db8::f816:3eff:fe2b:a121'], external_ids={'neutron:cidrs': '10.100.0.2/28 2001:db8::f816:3eff:fe2b:a121/64', 'neutron:device_id': 'ovnmeta-b9d6d69e-0327-4bcf-b8a6-b2cf69a4d177', 'neutron:device_owner': 'network:distributed', 'neutron:mtu': '', 'neutron:network_name': 'neutron-b9d6d69e-0327-4bcf-b8a6-b2cf69a4d177', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'neutron:revision_number': '3', 'neutron:security_group_ids': '', 
'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:38:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:59.586 103294 INFO neutron.agent.ovn.metadata.agent [-] Metadata Port ad07d234-3bc8-429a-8834-7a9ae3274be2 in datapath b9d6d69e-0327-4bcf-b8a6-b2cf69a4d177 updated
Oct 02 12:38:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:59.588 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network b9d6d69e-0327-4bcf-b8a6-b2cf69a4d177, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:38:59 compute-0 nova_compute[192079]: 2025-10-02 12:38:59.589 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:38:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:38:59.590 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5dd0a2e3-2039-4551-82c8-fd4a282918ff]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:38:59 compute-0 nova_compute[192079]: 2025-10-02 12:38:59.594 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:38:59 compute-0 nova_compute[192079]: 2025-10-02 12:38:59.608 2 DEBUG nova.virt.libvirt.driver [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:38:59 compute-0 nova_compute[192079]: 2025-10-02 12:38:59.608 2 DEBUG nova.virt.libvirt.driver [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:38:59 compute-0 nova_compute[192079]: 2025-10-02 12:38:59.609 2 DEBUG nova.virt.libvirt.driver [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:38:59 compute-0 nova_compute[192079]: 2025-10-02 12:38:59.610 2 DEBUG nova.virt.libvirt.driver [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:38:59 compute-0 nova_compute[192079]: 2025-10-02 12:38:59.611 2 DEBUG nova.virt.libvirt.driver [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:38:59 compute-0 nova_compute[192079]: 2025-10-02 12:38:59.611 2 DEBUG nova.virt.libvirt.driver [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:38:59 compute-0 nova_compute[192079]: 2025-10-02 12:38:59.618 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:38:59 compute-0 nova_compute[192079]: 2025-10-02 12:38:59.687 2 INFO nova.compute.manager [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Took 10.46 seconds to spawn the instance on the hypervisor.
Oct 02 12:38:59 compute-0 nova_compute[192079]: 2025-10-02 12:38:59.687 2 DEBUG nova.compute.manager [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:38:59 compute-0 nova_compute[192079]: 2025-10-02 12:38:59.766 2 INFO nova.compute.manager [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Took 10.96 seconds to build instance.
Oct 02 12:38:59 compute-0 nova_compute[192079]: 2025-10-02 12:38:59.781 2 DEBUG oslo_concurrency.lockutils [None req-bf14974f-8bc3-4ba9-984a-8180a3103dd2 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "6171764a-638c-4d3c-9f01-830314a0687a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 11.064s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:39:01 compute-0 nova_compute[192079]: 2025-10-02 12:39:01.645 2 DEBUG nova.compute.manager [req-3e4716db-5d42-4114-bc54-ce215918a92b req-876019d5-97a8-48d9-adc4-42879705e76c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Received event network-vif-plugged-b2d256d9-6788-41ed-a218-ab6139d999cb external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:39:01 compute-0 nova_compute[192079]: 2025-10-02 12:39:01.645 2 DEBUG oslo_concurrency.lockutils [req-3e4716db-5d42-4114-bc54-ce215918a92b req-876019d5-97a8-48d9-adc4-42879705e76c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "6171764a-638c-4d3c-9f01-830314a0687a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:39:01 compute-0 nova_compute[192079]: 2025-10-02 12:39:01.645 2 DEBUG oslo_concurrency.lockutils [req-3e4716db-5d42-4114-bc54-ce215918a92b req-876019d5-97a8-48d9-adc4-42879705e76c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6171764a-638c-4d3c-9f01-830314a0687a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:39:01 compute-0 nova_compute[192079]: 2025-10-02 12:39:01.646 2 DEBUG oslo_concurrency.lockutils [req-3e4716db-5d42-4114-bc54-ce215918a92b req-876019d5-97a8-48d9-adc4-42879705e76c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "6171764a-638c-4d3c-9f01-830314a0687a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:39:01 compute-0 nova_compute[192079]: 2025-10-02 12:39:01.646 2 DEBUG nova.compute.manager [req-3e4716db-5d42-4114-bc54-ce215918a92b req-876019d5-97a8-48d9-adc4-42879705e76c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] No waiting events found dispatching network-vif-plugged-b2d256d9-6788-41ed-a218-ab6139d999cb pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:39:01 compute-0 nova_compute[192079]: 2025-10-02 12:39:01.646 2 WARNING nova.compute.manager [req-3e4716db-5d42-4114-bc54-ce215918a92b req-876019d5-97a8-48d9-adc4-42879705e76c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Received unexpected event network-vif-plugged-b2d256d9-6788-41ed-a218-ab6139d999cb for instance with vm_state active and task_state None.
Oct 02 12:39:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:02.241 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:39:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:02.242 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:39:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:02.243 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:39:02 compute-0 nova_compute[192079]: 2025-10-02 12:39:02.839 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:02 compute-0 nova_compute[192079]: 2025-10-02 12:39:02.942 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:03 compute-0 nova_compute[192079]: 2025-10-02 12:39:03.362 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:03 compute-0 NetworkManager[51160]: <info>  [1759408743.3631] manager: (patch-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d-to-br-int): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/318)
Oct 02 12:39:03 compute-0 NetworkManager[51160]: <info>  [1759408743.3642] manager: (patch-br-int-to-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/319)
Oct 02 12:39:03 compute-0 nova_compute[192079]: 2025-10-02 12:39:03.464 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:03 compute-0 ovn_controller[94336]: 2025-10-02T12:39:03Z|00652|binding|INFO|Releasing lport cdb35ee2-22af-436d-82f3-c08eadf2b2c7 from this chassis (sb_readonly=0)
Oct 02 12:39:03 compute-0 nova_compute[192079]: 2025-10-02 12:39:03.479 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:03 compute-0 nova_compute[192079]: 2025-10-02 12:39:03.748 2 DEBUG nova.compute.manager [req-0600d810-be48-49f7-baa4-6617950f6fab req-4c192806-1434-4134-b983-28bacb8c12e1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Received event network-changed-b2d256d9-6788-41ed-a218-ab6139d999cb external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:39:03 compute-0 nova_compute[192079]: 2025-10-02 12:39:03.749 2 DEBUG nova.compute.manager [req-0600d810-be48-49f7-baa4-6617950f6fab req-4c192806-1434-4134-b983-28bacb8c12e1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Refreshing instance network info cache due to event network-changed-b2d256d9-6788-41ed-a218-ab6139d999cb. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:39:03 compute-0 nova_compute[192079]: 2025-10-02 12:39:03.749 2 DEBUG oslo_concurrency.lockutils [req-0600d810-be48-49f7-baa4-6617950f6fab req-4c192806-1434-4134-b983-28bacb8c12e1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-6171764a-638c-4d3c-9f01-830314a0687a" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:39:03 compute-0 nova_compute[192079]: 2025-10-02 12:39:03.749 2 DEBUG oslo_concurrency.lockutils [req-0600d810-be48-49f7-baa4-6617950f6fab req-4c192806-1434-4134-b983-28bacb8c12e1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-6171764a-638c-4d3c-9f01-830314a0687a" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:39:03 compute-0 nova_compute[192079]: 2025-10-02 12:39:03.749 2 DEBUG nova.network.neutron [req-0600d810-be48-49f7-baa4-6617950f6fab req-4c192806-1434-4134-b983-28bacb8c12e1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Refreshing network info cache for port b2d256d9-6788-41ed-a218-ab6139d999cb _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:39:03 compute-0 nova_compute[192079]: 2025-10-02 12:39:03.997 2 DEBUG oslo_concurrency.lockutils [None req-50d791a3-b958-40d9-9ef6-e6ccbb749877 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "6171764a-638c-4d3c-9f01-830314a0687a" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:39:03 compute-0 nova_compute[192079]: 2025-10-02 12:39:03.998 2 DEBUG oslo_concurrency.lockutils [None req-50d791a3-b958-40d9-9ef6-e6ccbb749877 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "6171764a-638c-4d3c-9f01-830314a0687a" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:39:03 compute-0 nova_compute[192079]: 2025-10-02 12:39:03.998 2 DEBUG oslo_concurrency.lockutils [None req-50d791a3-b958-40d9-9ef6-e6ccbb749877 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "6171764a-638c-4d3c-9f01-830314a0687a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:39:03 compute-0 nova_compute[192079]: 2025-10-02 12:39:03.999 2 DEBUG oslo_concurrency.lockutils [None req-50d791a3-b958-40d9-9ef6-e6ccbb749877 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "6171764a-638c-4d3c-9f01-830314a0687a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:39:04 compute-0 nova_compute[192079]: 2025-10-02 12:39:03.999 2 DEBUG oslo_concurrency.lockutils [None req-50d791a3-b958-40d9-9ef6-e6ccbb749877 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "6171764a-638c-4d3c-9f01-830314a0687a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:39:04 compute-0 nova_compute[192079]: 2025-10-02 12:39:04.018 2 INFO nova.compute.manager [None req-50d791a3-b958-40d9-9ef6-e6ccbb749877 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Terminating instance
Oct 02 12:39:04 compute-0 nova_compute[192079]: 2025-10-02 12:39:04.036 2 DEBUG nova.compute.manager [None req-50d791a3-b958-40d9-9ef6-e6ccbb749877 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:39:04 compute-0 kernel: tapb2d256d9-67 (unregistering): left promiscuous mode
Oct 02 12:39:04 compute-0 NetworkManager[51160]: <info>  [1759408744.0571] device (tapb2d256d9-67): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:39:04 compute-0 ovn_controller[94336]: 2025-10-02T12:39:04Z|00653|binding|INFO|Releasing lport b2d256d9-6788-41ed-a218-ab6139d999cb from this chassis (sb_readonly=0)
Oct 02 12:39:04 compute-0 ovn_controller[94336]: 2025-10-02T12:39:04Z|00654|binding|INFO|Setting lport b2d256d9-6788-41ed-a218-ab6139d999cb down in Southbound
Oct 02 12:39:04 compute-0 nova_compute[192079]: 2025-10-02 12:39:04.110 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:04 compute-0 ovn_controller[94336]: 2025-10-02T12:39:04Z|00655|binding|INFO|Removing iface tapb2d256d9-67 ovn-installed in OVS
Oct 02 12:39:04 compute-0 nova_compute[192079]: 2025-10-02 12:39:04.113 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:04.120 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:02:80:1f 10.100.0.13'], port_security=['fa:16:3e:02:80:1f 10.100.0.13'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'name': 'tempest-TestNetworkBasicOps-1976934795', 'neutron:cidrs': '10.100.0.13/28', 'neutron:device_id': '6171764a-638c-4d3c-9f01-830314a0687a', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-670889c7-549b-45d0-be10-992f080979ef', 'neutron:port_capabilities': '', 'neutron:port_name': 'tempest-TestNetworkBasicOps-1976934795', 'neutron:project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'neutron:revision_number': '4', 'neutron:security_group_ids': '1aab0b39-6daf-41d1-a7da-b7bb077ff5e9', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com', 'neutron:port_fip': '192.168.122.172'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=54854aa2-539b-45ea-833b-3fc4d3ced3bf, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=b2d256d9-6788-41ed-a218-ab6139d999cb) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:39:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:04.122 103294 INFO neutron.agent.ovn.metadata.agent [-] Port b2d256d9-6788-41ed-a218-ab6139d999cb in datapath 670889c7-549b-45d0-be10-992f080979ef unbound from our chassis
Oct 02 12:39:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:04.123 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 670889c7-549b-45d0-be10-992f080979ef, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:39:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:04.123 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[00390de6-0b75-4f45-bad5-5fe3ad97961c]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:39:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:04.124 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-670889c7-549b-45d0-be10-992f080979ef namespace which is not needed anymore
Oct 02 12:39:04 compute-0 nova_compute[192079]: 2025-10-02 12:39:04.125 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:04 compute-0 systemd[1]: machine-qemu\x2d81\x2dinstance\x2d000000a6.scope: Deactivated successfully.
Oct 02 12:39:04 compute-0 systemd[1]: machine-qemu\x2d81\x2dinstance\x2d000000a6.scope: Consumed 5.252s CPU time.
Oct 02 12:39:04 compute-0 systemd-machined[152150]: Machine qemu-81-instance-000000a6 terminated.
Oct 02 12:39:04 compute-0 neutron-haproxy-ovnmeta-670889c7-549b-45d0-be10-992f080979ef[249040]: [NOTICE]   (249044) : haproxy version is 2.8.14-c23fe91
Oct 02 12:39:04 compute-0 neutron-haproxy-ovnmeta-670889c7-549b-45d0-be10-992f080979ef[249040]: [NOTICE]   (249044) : path to executable is /usr/sbin/haproxy
Oct 02 12:39:04 compute-0 neutron-haproxy-ovnmeta-670889c7-549b-45d0-be10-992f080979ef[249040]: [WARNING]  (249044) : Exiting Master process...
Oct 02 12:39:04 compute-0 neutron-haproxy-ovnmeta-670889c7-549b-45d0-be10-992f080979ef[249040]: [ALERT]    (249044) : Current worker (249046) exited with code 143 (Terminated)
Oct 02 12:39:04 compute-0 neutron-haproxy-ovnmeta-670889c7-549b-45d0-be10-992f080979ef[249040]: [WARNING]  (249044) : All workers exited. Exiting... (0)
Oct 02 12:39:04 compute-0 systemd[1]: libpod-44a16113847e25688f51f398bc2aa671406b31d3ac10710c3056a90c13f7717b.scope: Deactivated successfully.
Oct 02 12:39:04 compute-0 podman[249101]: 2025-10-02 12:39:04.25063162 +0000 UTC m=+0.043768262 container died 44a16113847e25688f51f398bc2aa671406b31d3ac10710c3056a90c13f7717b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-670889c7-549b-45d0-be10-992f080979ef, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:39:04 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-44a16113847e25688f51f398bc2aa671406b31d3ac10710c3056a90c13f7717b-userdata-shm.mount: Deactivated successfully.
Oct 02 12:39:04 compute-0 systemd[1]: var-lib-containers-storage-overlay-10e2eaa3886c2cbd440ab9e9175a75b5ac45fc79f3a5749f37301f1860311331-merged.mount: Deactivated successfully.
Oct 02 12:39:04 compute-0 podman[249101]: 2025-10-02 12:39:04.296876039 +0000 UTC m=+0.090012681 container cleanup 44a16113847e25688f51f398bc2aa671406b31d3ac10710c3056a90c13f7717b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-670889c7-549b-45d0-be10-992f080979ef, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:39:04 compute-0 nova_compute[192079]: 2025-10-02 12:39:04.303 2 INFO nova.virt.libvirt.driver [-] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Instance destroyed successfully.
Oct 02 12:39:04 compute-0 nova_compute[192079]: 2025-10-02 12:39:04.304 2 DEBUG nova.objects.instance [None req-50d791a3-b958-40d9-9ef6-e6ccbb749877 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lazy-loading 'resources' on Instance uuid 6171764a-638c-4d3c-9f01-830314a0687a obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:39:04 compute-0 systemd[1]: libpod-conmon-44a16113847e25688f51f398bc2aa671406b31d3ac10710c3056a90c13f7717b.scope: Deactivated successfully.
Oct 02 12:39:04 compute-0 nova_compute[192079]: 2025-10-02 12:39:04.340 2 DEBUG nova.virt.libvirt.vif [None req-50d791a3-b958-40d9-9ef6-e6ccbb749877 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:38:47Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestNetworkBasicOps-server-1254201559',display_name='tempest-TestNetworkBasicOps-server-1254201559',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkbasicops-server-1254201559',id=166,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBIMqMDbom+PK4c/Jv5XtvQfFH1fkW+FSRUAxnWfxRcw1UDYrrEG6aps2VuNnmMnWWvdKNfhliXR0Zwf+47iOmuQsbF0874Je9gYU9lBaFNGs4KfbyyC0JVuX6gQe774tNQ==',key_name='tempest-TestNetworkBasicOps-203379559',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:38:59Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='6e2a4899168a47618e377cb3ac85ddd2',ramdisk_id='',reservation_id='r-dpi192j5',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='member,reader',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-TestNetworkBasicOps-1323893370',owner_user_name='tempest-TestNetworkBasicOps-1323893370-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:38:59Z,user_data=None,user_id='a1898fdf056c4a249c33590f26d4d845',uuid=6171764a-638c-4d3c-9f01-830314a0687a,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "b2d256d9-6788-41ed-a218-ab6139d999cb", "address": "fa:16:3e:02:80:1f", "network": {"id": "670889c7-549b-45d0-be10-992f080979ef", "bridge": "br-int", "label": "tempest-network-smoke--1745189972", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", 
"version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb2d256d9-67", "ovs_interfaceid": "b2d256d9-6788-41ed-a218-ab6139d999cb", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:39:04 compute-0 nova_compute[192079]: 2025-10-02 12:39:04.340 2 DEBUG nova.network.os_vif_util [None req-50d791a3-b958-40d9-9ef6-e6ccbb749877 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converting VIF {"id": "b2d256d9-6788-41ed-a218-ab6139d999cb", "address": "fa:16:3e:02:80:1f", "network": {"id": "670889c7-549b-45d0-be10-992f080979ef", "bridge": "br-int", "label": "tempest-network-smoke--1745189972", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb2d256d9-67", "ovs_interfaceid": "b2d256d9-6788-41ed-a218-ab6139d999cb", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:39:04 compute-0 nova_compute[192079]: 2025-10-02 12:39:04.341 2 DEBUG nova.network.os_vif_util [None req-50d791a3-b958-40d9-9ef6-e6ccbb749877 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:02:80:1f,bridge_name='br-int',has_traffic_filtering=True,id=b2d256d9-6788-41ed-a218-ab6139d999cb,network=Network(670889c7-549b-45d0-be10-992f080979ef),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=True,vif_name='tapb2d256d9-67') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:39:04 compute-0 nova_compute[192079]: 2025-10-02 12:39:04.341 2 DEBUG os_vif [None req-50d791a3-b958-40d9-9ef6-e6ccbb749877 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:02:80:1f,bridge_name='br-int',has_traffic_filtering=True,id=b2d256d9-6788-41ed-a218-ab6139d999cb,network=Network(670889c7-549b-45d0-be10-992f080979ef),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=True,vif_name='tapb2d256d9-67') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:39:04 compute-0 nova_compute[192079]: 2025-10-02 12:39:04.343 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:04 compute-0 nova_compute[192079]: 2025-10-02 12:39:04.344 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapb2d256d9-67, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:39:04 compute-0 nova_compute[192079]: 2025-10-02 12:39:04.345 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:04 compute-0 nova_compute[192079]: 2025-10-02 12:39:04.346 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:04 compute-0 nova_compute[192079]: 2025-10-02 12:39:04.348 2 INFO os_vif [None req-50d791a3-b958-40d9-9ef6-e6ccbb749877 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:02:80:1f,bridge_name='br-int',has_traffic_filtering=True,id=b2d256d9-6788-41ed-a218-ab6139d999cb,network=Network(670889c7-549b-45d0-be10-992f080979ef),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=True,vif_name='tapb2d256d9-67')
Oct 02 12:39:04 compute-0 nova_compute[192079]: 2025-10-02 12:39:04.349 2 INFO nova.virt.libvirt.driver [None req-50d791a3-b958-40d9-9ef6-e6ccbb749877 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Deleting instance files /var/lib/nova/instances/6171764a-638c-4d3c-9f01-830314a0687a_del
Oct 02 12:39:04 compute-0 nova_compute[192079]: 2025-10-02 12:39:04.349 2 INFO nova.virt.libvirt.driver [None req-50d791a3-b958-40d9-9ef6-e6ccbb749877 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Deletion of /var/lib/nova/instances/6171764a-638c-4d3c-9f01-830314a0687a_del complete
Oct 02 12:39:04 compute-0 podman[249145]: 2025-10-02 12:39:04.356885042 +0000 UTC m=+0.040072651 container remove 44a16113847e25688f51f398bc2aa671406b31d3ac10710c3056a90c13f7717b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-670889c7-549b-45d0-be10-992f080979ef, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:39:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:04.362 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d5c76790-17eb-443d-b88e-a584829fef4f]: (4, ('Thu Oct  2 12:39:04 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-670889c7-549b-45d0-be10-992f080979ef (44a16113847e25688f51f398bc2aa671406b31d3ac10710c3056a90c13f7717b)\n44a16113847e25688f51f398bc2aa671406b31d3ac10710c3056a90c13f7717b\nThu Oct  2 12:39:04 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-670889c7-549b-45d0-be10-992f080979ef (44a16113847e25688f51f398bc2aa671406b31d3ac10710c3056a90c13f7717b)\n44a16113847e25688f51f398bc2aa671406b31d3ac10710c3056a90c13f7717b\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:39:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:04.363 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a45b903f-7f72-4fea-8d5c-f44d942c5a91]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:39:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:04.364 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap670889c7-50, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:39:04 compute-0 nova_compute[192079]: 2025-10-02 12:39:04.365 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:04 compute-0 kernel: tap670889c7-50: left promiscuous mode
Oct 02 12:39:04 compute-0 nova_compute[192079]: 2025-10-02 12:39:04.381 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:04.385 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ebed0c54-2f43-4f48-b68e-dcac7b1f542a]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:39:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:04.413 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b35575e1-4ea6-4d2a-b8cc-f9bc7c95d1d6]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:39:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:04.414 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e1ea5771-8b1b-49b4-b188-96f115cf881e]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:39:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:04.428 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5fd8c15d-cf9d-469c-99f6-4b23f41cf72b]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 673151, 'reachable_time': 39200, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 249162, 'error': None, 'target': 'ovnmeta-670889c7-549b-45d0-be10-992f080979ef', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:39:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:04.430 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-670889c7-549b-45d0-be10-992f080979ef deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:39:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:04.430 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[6f6905ef-ec37-4ae5-8b18-b70f2b0672ec]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:39:04 compute-0 systemd[1]: run-netns-ovnmeta\x2d670889c7\x2d549b\x2d45d0\x2dbe10\x2d992f080979ef.mount: Deactivated successfully.
Oct 02 12:39:04 compute-0 nova_compute[192079]: 2025-10-02 12:39:04.444 2 INFO nova.compute.manager [None req-50d791a3-b958-40d9-9ef6-e6ccbb749877 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Took 0.41 seconds to destroy the instance on the hypervisor.
Oct 02 12:39:04 compute-0 nova_compute[192079]: 2025-10-02 12:39:04.444 2 DEBUG oslo.service.loopingcall [None req-50d791a3-b958-40d9-9ef6-e6ccbb749877 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:39:04 compute-0 nova_compute[192079]: 2025-10-02 12:39:04.445 2 DEBUG nova.compute.manager [-] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:39:04 compute-0 nova_compute[192079]: 2025-10-02 12:39:04.445 2 DEBUG nova.network.neutron [-] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:39:04 compute-0 podman[249161]: 2025-10-02 12:39:04.496071812 +0000 UTC m=+0.063377077 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=multipathd, managed_by=edpm_ansible, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, container_name=multipathd, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS)
Oct 02 12:39:04 compute-0 podman[249159]: 2025-10-02 12:39:04.510645218 +0000 UTC m=+0.087945974 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, com.redhat.component=ubi9-minimal-container, distribution-scope=public, build-date=2025-08-20T13:12:41, release=1755695350, vcs-type=git, managed_by=edpm_ansible, io.openshift.expose-services=, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.tags=minimal rhel9, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., url=https://catalog.redhat.com/en/search?searchType=containers, maintainer=Red Hat, Inc., config_id=edpm, name=ubi9-minimal, vendor=Red Hat, Inc., io.buildah.version=1.33.7, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', 
'/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, container_name=openstack_network_exporter, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, version=9.6, architecture=x86_64)
Oct 02 12:39:05 compute-0 nova_compute[192079]: 2025-10-02 12:39:05.238 2 DEBUG nova.network.neutron [req-0600d810-be48-49f7-baa4-6617950f6fab req-4c192806-1434-4134-b983-28bacb8c12e1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Updated VIF entry in instance network info cache for port b2d256d9-6788-41ed-a218-ab6139d999cb. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:39:05 compute-0 nova_compute[192079]: 2025-10-02 12:39:05.238 2 DEBUG nova.network.neutron [req-0600d810-be48-49f7-baa4-6617950f6fab req-4c192806-1434-4134-b983-28bacb8c12e1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Updating instance_info_cache with network_info: [{"id": "b2d256d9-6788-41ed-a218-ab6139d999cb", "address": "fa:16:3e:02:80:1f", "network": {"id": "670889c7-549b-45d0-be10-992f080979ef", "bridge": "br-int", "label": "tempest-network-smoke--1745189972", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.172", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb2d256d9-67", "ovs_interfaceid": "b2d256d9-6788-41ed-a218-ab6139d999cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:39:05 compute-0 nova_compute[192079]: 2025-10-02 12:39:05.254 2 DEBUG oslo_concurrency.lockutils [req-0600d810-be48-49f7-baa4-6617950f6fab req-4c192806-1434-4134-b983-28bacb8c12e1 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-6171764a-638c-4d3c-9f01-830314a0687a" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:39:05 compute-0 nova_compute[192079]: 2025-10-02 12:39:05.648 2 DEBUG nova.network.neutron [-] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:39:05 compute-0 nova_compute[192079]: 2025-10-02 12:39:05.692 2 INFO nova.compute.manager [-] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Took 1.25 seconds to deallocate network for instance.
Oct 02 12:39:05 compute-0 nova_compute[192079]: 2025-10-02 12:39:05.793 2 DEBUG oslo_concurrency.lockutils [None req-50d791a3-b958-40d9-9ef6-e6ccbb749877 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:39:05 compute-0 nova_compute[192079]: 2025-10-02 12:39:05.793 2 DEBUG oslo_concurrency.lockutils [None req-50d791a3-b958-40d9-9ef6-e6ccbb749877 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:39:05 compute-0 nova_compute[192079]: 2025-10-02 12:39:05.846 2 DEBUG nova.compute.provider_tree [None req-50d791a3-b958-40d9-9ef6-e6ccbb749877 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:39:05 compute-0 nova_compute[192079]: 2025-10-02 12:39:05.862 2 DEBUG nova.scheduler.client.report [None req-50d791a3-b958-40d9-9ef6-e6ccbb749877 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:39:05 compute-0 nova_compute[192079]: 2025-10-02 12:39:05.895 2 DEBUG oslo_concurrency.lockutils [None req-50d791a3-b958-40d9-9ef6-e6ccbb749877 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.102s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:39:05 compute-0 nova_compute[192079]: 2025-10-02 12:39:05.935 2 INFO nova.scheduler.client.report [None req-50d791a3-b958-40d9-9ef6-e6ccbb749877 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Deleted allocations for instance 6171764a-638c-4d3c-9f01-830314a0687a
Oct 02 12:39:06 compute-0 nova_compute[192079]: 2025-10-02 12:39:06.021 2 DEBUG oslo_concurrency.lockutils [None req-50d791a3-b958-40d9-9ef6-e6ccbb749877 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "6171764a-638c-4d3c-9f01-830314a0687a" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 2.023s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:39:07 compute-0 nova_compute[192079]: 2025-10-02 12:39:07.842 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:09 compute-0 nova_compute[192079]: 2025-10-02 12:39:09.347 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:10 compute-0 podman[249200]: 2025-10-02 12:39:10.149721034 +0000 UTC m=+0.062615286 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:39:10 compute-0 podman[249201]: 2025-10-02 12:39:10.179414762 +0000 UTC m=+0.074924131 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, container_name=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.license=GPLv2)
Oct 02 12:39:12 compute-0 nova_compute[192079]: 2025-10-02 12:39:12.844 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:14 compute-0 nova_compute[192079]: 2025-10-02 12:39:14.350 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:39:17.112 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:39:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:39:17.113 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:39:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:39:17.113 12 DEBUG ceilometer.polling.manager [-] Skip pollster cpu, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:39:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:39:17.113 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:39:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:39:17.113 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:39:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:39:17.113 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:39:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:39:17.113 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:39:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:39:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.iops, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:39:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:39:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:39:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:39:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:39:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:39:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:39:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:39:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.allocation, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:39:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:39:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:39:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:39:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:39:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:39:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.capacity, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:39:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:39:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:39:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:39:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster memory.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:39:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:39:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:39:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:39:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:39:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:39:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:39:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:39:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:39:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:39:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:39:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:39:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:39:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:39:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:39:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:39:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:39:17 compute-0 nova_compute[192079]: 2025-10-02 12:39:17.846 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:18 compute-0 podman[249244]: 2025-10-02 12:39:18.137779559 +0000 UTC m=+0.048304396 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 12:39:18 compute-0 podman[249242]: 2025-10-02 12:39:18.138009876 +0000 UTC m=+0.056354116 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, container_name=ovn_metadata_agent, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, 
tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_metadata_agent, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS)
Oct 02 12:39:18 compute-0 podman[249243]: 2025-10-02 12:39:18.166745268 +0000 UTC m=+0.080807721 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, container_name=ovn_controller, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.vendor=CentOS)
Oct 02 12:39:19 compute-0 nova_compute[192079]: 2025-10-02 12:39:19.302 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759408744.301795, 6171764a-638c-4d3c-9f01-830314a0687a => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:39:19 compute-0 nova_compute[192079]: 2025-10-02 12:39:19.302 2 INFO nova.compute.manager [-] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] VM Stopped (Lifecycle Event)
Oct 02 12:39:19 compute-0 nova_compute[192079]: 2025-10-02 12:39:19.324 2 DEBUG nova.compute.manager [None req-17ece97a-b283-468c-83ff-9f2e365504ae - - - - - -] [instance: 6171764a-638c-4d3c-9f01-830314a0687a] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:39:19 compute-0 nova_compute[192079]: 2025-10-02 12:39:19.353 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:22 compute-0 nova_compute[192079]: 2025-10-02 12:39:22.889 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:23.505 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=44, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=43) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:39:23 compute-0 nova_compute[192079]: 2025-10-02 12:39:23.505 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:23.507 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 3 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:39:24 compute-0 nova_compute[192079]: 2025-10-02 12:39:24.355 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:26 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:26.508 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '44'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:39:27 compute-0 nova_compute[192079]: 2025-10-02 12:39:27.929 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:29 compute-0 podman[249313]: 2025-10-02 12:39:29.17482401 +0000 UTC m=+0.083209596 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=edpm, org.label-schema.schema-version=1.0, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, 
container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, org.label-schema.license=GPLv2)
Oct 02 12:39:29 compute-0 nova_compute[192079]: 2025-10-02 12:39:29.358 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:30 compute-0 nova_compute[192079]: 2025-10-02 12:39:30.360 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:30 compute-0 nova_compute[192079]: 2025-10-02 12:39:30.468 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:31 compute-0 sshd-session[249312]: error: kex_exchange_identification: read: Connection reset by peer
Oct 02 12:39:31 compute-0 sshd-session[249312]: Connection reset by 8.222.181.172 port 56756
Oct 02 12:39:32 compute-0 nova_compute[192079]: 2025-10-02 12:39:32.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_incomplete_migrations run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:39:32 compute-0 nova_compute[192079]: 2025-10-02 12:39:32.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Cleaning up deleted instances with incomplete migration  _cleanup_incomplete_migrations /usr/lib/python3.9/site-packages/nova/compute/manager.py:11183
Oct 02 12:39:32 compute-0 nova_compute[192079]: 2025-10-02 12:39:32.932 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:33 compute-0 sshd-session[249334]: Invalid user  from 8.222.181.172 port 56758
Oct 02 12:39:33 compute-0 sshd-session[249334]: Connection closed by invalid user  8.222.181.172 port 56758 [preauth]
Oct 02 12:39:34 compute-0 nova_compute[192079]: 2025-10-02 12:39:34.362 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:35 compute-0 podman[249336]: 2025-10-02 12:39:35.138016051 +0000 UTC m=+0.058359860 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, managed_by=edpm_ansible, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, vcs-type=git, name=ubi9-minimal, io.openshift.tags=minimal rhel9, release=1755695350, vendor=Red Hat, Inc., com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., io.buildah.version=1.33.7, url=https://catalog.redhat.com/en/search?searchType=containers, version=9.6, container_name=openstack_network_exporter, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.expose-services=, com.redhat.component=ubi9-minimal-container, distribution-scope=public, maintainer=Red Hat, Inc., summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, architecture=x86_64, build-date=2025-08-20T13:12:41)
Oct 02 12:39:35 compute-0 podman[249337]: 2025-10-02 12:39:35.17984302 +0000 UTC m=+0.092629563 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, container_name=multipathd, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_id=multipathd)
Oct 02 12:39:37 compute-0 nova_compute[192079]: 2025-10-02 12:39:37.674 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:39:37 compute-0 nova_compute[192079]: 2025-10-02 12:39:37.934 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:39 compute-0 nova_compute[192079]: 2025-10-02 12:39:39.365 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:39 compute-0 nova_compute[192079]: 2025-10-02 12:39:39.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:39:41 compute-0 podman[249377]: 2025-10-02 12:39:41.144736788 +0000 UTC m=+0.052539342 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, config_id=iscsid, container_name=iscsid, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3)
Oct 02 12:39:41 compute-0 podman[249376]: 2025-10-02 12:39:41.145048406 +0000 UTC m=+0.058225356 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>)
Oct 02 12:39:41 compute-0 nova_compute[192079]: 2025-10-02 12:39:41.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:39:42 compute-0 nova_compute[192079]: 2025-10-02 12:39:42.982 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:44 compute-0 nova_compute[192079]: 2025-10-02 12:39:44.368 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:45 compute-0 nova_compute[192079]: 2025-10-02 12:39:45.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:39:45 compute-0 nova_compute[192079]: 2025-10-02 12:39:45.684 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:39:45 compute-0 nova_compute[192079]: 2025-10-02 12:39:45.684 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:39:45 compute-0 nova_compute[192079]: 2025-10-02 12:39:45.685 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:39:45 compute-0 nova_compute[192079]: 2025-10-02 12:39:45.685 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:39:45 compute-0 nova_compute[192079]: 2025-10-02 12:39:45.832 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:39:45 compute-0 nova_compute[192079]: 2025-10-02 12:39:45.833 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5709MB free_disk=73.27241897583008GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:39:45 compute-0 nova_compute[192079]: 2025-10-02 12:39:45.833 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:39:45 compute-0 nova_compute[192079]: 2025-10-02 12:39:45.834 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:39:45 compute-0 nova_compute[192079]: 2025-10-02 12:39:45.895 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:39:45 compute-0 nova_compute[192079]: 2025-10-02 12:39:45.895 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:39:45 compute-0 nova_compute[192079]: 2025-10-02 12:39:45.929 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:39:45 compute-0 nova_compute[192079]: 2025-10-02 12:39:45.948 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:39:45 compute-0 nova_compute[192079]: 2025-10-02 12:39:45.982 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:39:45 compute-0 nova_compute[192079]: 2025-10-02 12:39:45.982 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.149s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:39:46 compute-0 nova_compute[192079]: 2025-10-02 12:39:46.983 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:39:46 compute-0 nova_compute[192079]: 2025-10-02 12:39:46.983 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:39:48 compute-0 nova_compute[192079]: 2025-10-02 12:39:48.044 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:49 compute-0 podman[249415]: 2025-10-02 12:39:49.127854751 +0000 UTC m=+0.043404073 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_metadata_agent, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', 
'/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, managed_by=edpm_ansible, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true)
Oct 02 12:39:49 compute-0 podman[249417]: 2025-10-02 12:39:49.133119705 +0000 UTC m=+0.044497613 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 12:39:49 compute-0 podman[249416]: 2025-10-02 12:39:49.157954361 +0000 UTC m=+0.071285652 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, container_name=ovn_controller, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:39:49 compute-0 nova_compute[192079]: 2025-10-02 12:39:49.369 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:49 compute-0 nova_compute[192079]: 2025-10-02 12:39:49.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:39:49 compute-0 nova_compute[192079]: 2025-10-02 12:39:49.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:39:49 compute-0 nova_compute[192079]: 2025-10-02 12:39:49.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:39:49 compute-0 nova_compute[192079]: 2025-10-02 12:39:49.781 2 DEBUG oslo_concurrency.lockutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "541184e3-5963-4add-ac60-b22fdcf3774b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:39:49 compute-0 nova_compute[192079]: 2025-10-02 12:39:49.781 2 DEBUG oslo_concurrency.lockutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "541184e3-5963-4add-ac60-b22fdcf3774b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:39:49 compute-0 nova_compute[192079]: 2025-10-02 12:39:49.801 2 DEBUG nova.compute.manager [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:39:49 compute-0 nova_compute[192079]: 2025-10-02 12:39:49.906 2 DEBUG oslo_concurrency.lockutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:39:49 compute-0 nova_compute[192079]: 2025-10-02 12:39:49.906 2 DEBUG oslo_concurrency.lockutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:39:49 compute-0 nova_compute[192079]: 2025-10-02 12:39:49.911 2 DEBUG nova.virt.hardware [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:39:49 compute-0 nova_compute[192079]: 2025-10-02 12:39:49.912 2 INFO nova.compute.claims [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.066 2 DEBUG nova.compute.provider_tree [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.087 2 DEBUG nova.scheduler.client.report [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.115 2 DEBUG oslo_concurrency.lockutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.209s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.115 2 DEBUG nova.compute.manager [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.218 2 DEBUG nova.compute.manager [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.219 2 DEBUG nova.network.neutron [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.246 2 INFO nova.virt.libvirt.driver [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.273 2 DEBUG nova.compute.manager [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.501 2 DEBUG nova.compute.manager [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.503 2 DEBUG nova.virt.libvirt.driver [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.503 2 INFO nova.virt.libvirt.driver [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Creating image(s)
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.504 2 DEBUG oslo_concurrency.lockutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "/var/lib/nova/instances/541184e3-5963-4add-ac60-b22fdcf3774b/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.504 2 DEBUG oslo_concurrency.lockutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "/var/lib/nova/instances/541184e3-5963-4add-ac60-b22fdcf3774b/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.505 2 DEBUG oslo_concurrency.lockutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "/var/lib/nova/instances/541184e3-5963-4add-ac60-b22fdcf3774b/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.523 2 DEBUG oslo_concurrency.processutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.599 2 DEBUG oslo_concurrency.processutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.076s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.600 2 DEBUG oslo_concurrency.lockutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.600 2 DEBUG oslo_concurrency.lockutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.617 2 DEBUG oslo_concurrency.processutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.687 2 DEBUG oslo_concurrency.processutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.070s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.688 2 DEBUG oslo_concurrency.processutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/541184e3-5963-4add-ac60-b22fdcf3774b/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.720 2 DEBUG oslo_concurrency.processutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/541184e3-5963-4add-ac60-b22fdcf3774b/disk 1073741824" returned: 0 in 0.032s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.721 2 DEBUG oslo_concurrency.lockutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.121s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.721 2 DEBUG oslo_concurrency.processutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.799 2 DEBUG oslo_concurrency.processutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.078s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.801 2 DEBUG nova.virt.disk.api [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Checking if we can resize image /var/lib/nova/instances/541184e3-5963-4add-ac60-b22fdcf3774b/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.802 2 DEBUG oslo_concurrency.processutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/541184e3-5963-4add-ac60-b22fdcf3774b/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.895 2 DEBUG oslo_concurrency.processutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/541184e3-5963-4add-ac60-b22fdcf3774b/disk --force-share --output=json" returned: 0 in 0.093s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.896 2 DEBUG nova.virt.disk.api [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Cannot resize image /var/lib/nova/instances/541184e3-5963-4add-ac60-b22fdcf3774b/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.896 2 DEBUG nova.objects.instance [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lazy-loading 'migration_context' on Instance uuid 541184e3-5963-4add-ac60-b22fdcf3774b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.910 2 DEBUG nova.virt.libvirt.driver [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.911 2 DEBUG nova.virt.libvirt.driver [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Ensure instance console log exists: /var/lib/nova/instances/541184e3-5963-4add-ac60-b22fdcf3774b/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.911 2 DEBUG oslo_concurrency.lockutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.912 2 DEBUG oslo_concurrency.lockutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:39:50 compute-0 nova_compute[192079]: 2025-10-02 12:39:50.912 2 DEBUG oslo_concurrency.lockutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:39:51 compute-0 nova_compute[192079]: 2025-10-02 12:39:51.320 2 DEBUG nova.policy [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:39:51 compute-0 nova_compute[192079]: 2025-10-02 12:39:51.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:39:51 compute-0 nova_compute[192079]: 2025-10-02 12:39:51.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:39:51 compute-0 nova_compute[192079]: 2025-10-02 12:39:51.667 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:39:51 compute-0 nova_compute[192079]: 2025-10-02 12:39:51.689 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Skipping network cache update for instance because it is Building. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9871
Oct 02 12:39:51 compute-0 nova_compute[192079]: 2025-10-02 12:39:51.690 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:39:53 compute-0 nova_compute[192079]: 2025-10-02 12:39:53.047 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:53 compute-0 nova_compute[192079]: 2025-10-02 12:39:53.711 2 DEBUG nova.network.neutron [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Successfully created port: f3f11d03-1ddb-4149-b2f0-d1f2020bab39 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:39:54 compute-0 nova_compute[192079]: 2025-10-02 12:39:54.373 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:54 compute-0 nova_compute[192079]: 2025-10-02 12:39:54.860 2 DEBUG nova.network.neutron [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Successfully updated port: f3f11d03-1ddb-4149-b2f0-d1f2020bab39 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:39:54 compute-0 nova_compute[192079]: 2025-10-02 12:39:54.880 2 DEBUG oslo_concurrency.lockutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "refresh_cache-541184e3-5963-4add-ac60-b22fdcf3774b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:39:54 compute-0 nova_compute[192079]: 2025-10-02 12:39:54.880 2 DEBUG oslo_concurrency.lockutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquired lock "refresh_cache-541184e3-5963-4add-ac60-b22fdcf3774b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:39:54 compute-0 nova_compute[192079]: 2025-10-02 12:39:54.880 2 DEBUG nova.network.neutron [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:39:55 compute-0 nova_compute[192079]: 2025-10-02 12:39:55.078 2 DEBUG nova.compute.manager [req-9ef2f9a8-2dab-4678-9d27-502ba0cf7910 req-dafd6c79-bc0d-4970-a2d9-0ad055fde2fd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Received event network-changed-f3f11d03-1ddb-4149-b2f0-d1f2020bab39 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:39:55 compute-0 nova_compute[192079]: 2025-10-02 12:39:55.079 2 DEBUG nova.compute.manager [req-9ef2f9a8-2dab-4678-9d27-502ba0cf7910 req-dafd6c79-bc0d-4970-a2d9-0ad055fde2fd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Refreshing instance network info cache due to event network-changed-f3f11d03-1ddb-4149-b2f0-d1f2020bab39. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:39:55 compute-0 nova_compute[192079]: 2025-10-02 12:39:55.079 2 DEBUG oslo_concurrency.lockutils [req-9ef2f9a8-2dab-4678-9d27-502ba0cf7910 req-dafd6c79-bc0d-4970-a2d9-0ad055fde2fd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-541184e3-5963-4add-ac60-b22fdcf3774b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:39:55 compute-0 nova_compute[192079]: 2025-10-02 12:39:55.234 2 DEBUG nova.network.neutron [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.520 2 DEBUG nova.network.neutron [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Updating instance_info_cache with network_info: [{"id": "f3f11d03-1ddb-4149-b2f0-d1f2020bab39", "address": "fa:16:3e:32:93:e7", "network": {"id": "c95536aa-e734-4350-9442-56cceb6f6448", "bridge": "br-int", "label": "tempest-network-smoke--847152420", "subnets": [{"cidr": "10.100.0.0/28", "dns": [{"address": "1.2.3.4", "type": "dns", "version": 4, "meta": {}}], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf3f11d03-1d", "ovs_interfaceid": "f3f11d03-1ddb-4149-b2f0-d1f2020bab39", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.613 2 DEBUG oslo_concurrency.lockutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Releasing lock "refresh_cache-541184e3-5963-4add-ac60-b22fdcf3774b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.614 2 DEBUG nova.compute.manager [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Instance network_info: |[{"id": "f3f11d03-1ddb-4149-b2f0-d1f2020bab39", "address": "fa:16:3e:32:93:e7", "network": {"id": "c95536aa-e734-4350-9442-56cceb6f6448", "bridge": "br-int", "label": "tempest-network-smoke--847152420", "subnets": [{"cidr": "10.100.0.0/28", "dns": [{"address": "1.2.3.4", "type": "dns", "version": 4, "meta": {}}], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf3f11d03-1d", "ovs_interfaceid": "f3f11d03-1ddb-4149-b2f0-d1f2020bab39", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.615 2 DEBUG oslo_concurrency.lockutils [req-9ef2f9a8-2dab-4678-9d27-502ba0cf7910 req-dafd6c79-bc0d-4970-a2d9-0ad055fde2fd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-541184e3-5963-4add-ac60-b22fdcf3774b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.615 2 DEBUG nova.network.neutron [req-9ef2f9a8-2dab-4678-9d27-502ba0cf7910 req-dafd6c79-bc0d-4970-a2d9-0ad055fde2fd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Refreshing network info cache for port f3f11d03-1ddb-4149-b2f0-d1f2020bab39 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.621 2 DEBUG nova.virt.libvirt.driver [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Start _get_guest_xml network_info=[{"id": "f3f11d03-1ddb-4149-b2f0-d1f2020bab39", "address": "fa:16:3e:32:93:e7", "network": {"id": "c95536aa-e734-4350-9442-56cceb6f6448", "bridge": "br-int", "label": "tempest-network-smoke--847152420", "subnets": [{"cidr": "10.100.0.0/28", "dns": [{"address": "1.2.3.4", "type": "dns", "version": 4, "meta": {}}], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf3f11d03-1d", "ovs_interfaceid": "f3f11d03-1ddb-4149-b2f0-d1f2020bab39", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.628 2 WARNING nova.virt.libvirt.driver [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.635 2 DEBUG nova.virt.libvirt.host [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.636 2 DEBUG nova.virt.libvirt.host [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.640 2 DEBUG nova.virt.libvirt.host [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.642 2 DEBUG nova.virt.libvirt.host [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.643 2 DEBUG nova.virt.libvirt.driver [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.644 2 DEBUG nova.virt.hardware [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.645 2 DEBUG nova.virt.hardware [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.645 2 DEBUG nova.virt.hardware [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.646 2 DEBUG nova.virt.hardware [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.646 2 DEBUG nova.virt.hardware [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.647 2 DEBUG nova.virt.hardware [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.647 2 DEBUG nova.virt.hardware [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.647 2 DEBUG nova.virt.hardware [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.648 2 DEBUG nova.virt.hardware [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.648 2 DEBUG nova.virt.hardware [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.649 2 DEBUG nova.virt.hardware [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.655 2 DEBUG nova.virt.libvirt.vif [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:39:48Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestNetworkBasicOps-server-192751529',display_name='tempest-TestNetworkBasicOps-server-192751529',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkbasicops-server-192751529',id=170,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBJ8WJUvqPBapU5qvPIR5mTLAdCXJsQfWZYc+MxWmuaUwHq+MlVGVk1Uz5Mo9dMVlpWwmSIS8E+AmIJ3Mwq4O1FZCpV/RUKmBYiyo4TwqWt0l+5wzQovSk/4k1pvsCoSsIQ==',key_name='tempest-TestNetworkBasicOps-1987012530',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='6e2a4899168a47618e377cb3ac85ddd2',ramdisk_id='',reservation_id='r-ety1r8ek',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestNetworkBasicOps-1323893370',owner_user_name='tempest-TestNetworkBasicOps-1323893370-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:39:50Z,user_data=None,user_id='a1898fdf056c4a249c33590f26d4d845',uuid=541184e3-5963-4add-ac60-b22fdcf3774b,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "f3f11d03-1ddb-4149-b2f0-d1f2020bab39", "address": "fa:16:3e:32:93:e7", "network": {"id": "c95536aa-e734-4350-9442-56cceb6f6448", "bridge": "br-int", "label": "tempest-network-smoke--847152420", "subnets": [{"cidr": "10.100.0.0/28", "dns": [{"address": "1.2.3.4", "type": "dns", "version": 4, "meta": {}}], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf3f11d03-1d", "ovs_interfaceid": "f3f11d03-1ddb-4149-b2f0-d1f2020bab39", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.656 2 DEBUG nova.network.os_vif_util [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converting VIF {"id": "f3f11d03-1ddb-4149-b2f0-d1f2020bab39", "address": "fa:16:3e:32:93:e7", "network": {"id": "c95536aa-e734-4350-9442-56cceb6f6448", "bridge": "br-int", "label": "tempest-network-smoke--847152420", "subnets": [{"cidr": "10.100.0.0/28", "dns": [{"address": "1.2.3.4", "type": "dns", "version": 4, "meta": {}}], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf3f11d03-1d", "ovs_interfaceid": "f3f11d03-1ddb-4149-b2f0-d1f2020bab39", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.657 2 DEBUG nova.network.os_vif_util [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:32:93:e7,bridge_name='br-int',has_traffic_filtering=True,id=f3f11d03-1ddb-4149-b2f0-d1f2020bab39,network=Network(c95536aa-e734-4350-9442-56cceb6f6448),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf3f11d03-1d') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.659 2 DEBUG nova.objects.instance [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lazy-loading 'pci_devices' on Instance uuid 541184e3-5963-4add-ac60-b22fdcf3774b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.691 2 DEBUG nova.virt.libvirt.driver [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:39:57 compute-0 nova_compute[192079]:   <uuid>541184e3-5963-4add-ac60-b22fdcf3774b</uuid>
Oct 02 12:39:57 compute-0 nova_compute[192079]:   <name>instance-000000aa</name>
Oct 02 12:39:57 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:39:57 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:39:57 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:39:57 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:       <nova:name>tempest-TestNetworkBasicOps-server-192751529</nova:name>
Oct 02 12:39:57 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:39:57</nova:creationTime>
Oct 02 12:39:57 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:39:57 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:39:57 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:39:57 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:39:57 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:39:57 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:39:57 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:39:57 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:39:57 compute-0 nova_compute[192079]:         <nova:user uuid="a1898fdf056c4a249c33590f26d4d845">tempest-TestNetworkBasicOps-1323893370-project-member</nova:user>
Oct 02 12:39:57 compute-0 nova_compute[192079]:         <nova:project uuid="6e2a4899168a47618e377cb3ac85ddd2">tempest-TestNetworkBasicOps-1323893370</nova:project>
Oct 02 12:39:57 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:39:57 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:39:57 compute-0 nova_compute[192079]:         <nova:port uuid="f3f11d03-1ddb-4149-b2f0-d1f2020bab39">
Oct 02 12:39:57 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.11" ipVersion="4"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:39:57 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:39:57 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:39:57 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <system>
Oct 02 12:39:57 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:39:57 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:39:57 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:39:57 compute-0 nova_compute[192079]:       <entry name="serial">541184e3-5963-4add-ac60-b22fdcf3774b</entry>
Oct 02 12:39:57 compute-0 nova_compute[192079]:       <entry name="uuid">541184e3-5963-4add-ac60-b22fdcf3774b</entry>
Oct 02 12:39:57 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     </system>
Oct 02 12:39:57 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:39:57 compute-0 nova_compute[192079]:   <os>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:   </os>
Oct 02 12:39:57 compute-0 nova_compute[192079]:   <features>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:   </features>
Oct 02 12:39:57 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:39:57 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:39:57 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:39:57 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/541184e3-5963-4add-ac60-b22fdcf3774b/disk"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:39:57 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/541184e3-5963-4add-ac60-b22fdcf3774b/disk.config"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:39:57 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:32:93:e7"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:       <target dev="tapf3f11d03-1d"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:39:57 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/541184e3-5963-4add-ac60-b22fdcf3774b/console.log" append="off"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <video>
Oct 02 12:39:57 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     </video>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:39:57 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:39:57 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:39:57 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:39:57 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:39:57 compute-0 nova_compute[192079]: </domain>
Oct 02 12:39:57 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.692 2 DEBUG nova.compute.manager [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Preparing to wait for external event network-vif-plugged-f3f11d03-1ddb-4149-b2f0-d1f2020bab39 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.693 2 DEBUG oslo_concurrency.lockutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "541184e3-5963-4add-ac60-b22fdcf3774b-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.693 2 DEBUG oslo_concurrency.lockutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "541184e3-5963-4add-ac60-b22fdcf3774b-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.693 2 DEBUG oslo_concurrency.lockutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "541184e3-5963-4add-ac60-b22fdcf3774b-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.694 2 DEBUG nova.virt.libvirt.vif [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:39:48Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestNetworkBasicOps-server-192751529',display_name='tempest-TestNetworkBasicOps-server-192751529',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkbasicops-server-192751529',id=170,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBJ8WJUvqPBapU5qvPIR5mTLAdCXJsQfWZYc+MxWmuaUwHq+MlVGVk1Uz5Mo9dMVlpWwmSIS8E+AmIJ3Mwq4O1FZCpV/RUKmBYiyo4TwqWt0l+5wzQovSk/4k1pvsCoSsIQ==',key_name='tempest-TestNetworkBasicOps-1987012530',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='6e2a4899168a47618e377cb3ac85ddd2',ramdisk_id='',reservation_id='r-ety1r8ek',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestNetworkBasicOps-1323893370',owner_user_name='tempest-TestNetworkBasicOps-1323893370-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:39:50Z,user_data=None,user_id='a1898fdf056c4a249c33590f26d4d845',uuid=541184e3-5963-4add-ac60-b22fdcf3774b,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "f3f11d03-1ddb-4149-b2f0-d1f2020bab39", "address": "fa:16:3e:32:93:e7", "network": {"id": "c95536aa-e734-4350-9442-56cceb6f6448", "bridge": "br-int", "label": "tempest-network-smoke--847152420", "subnets": [{"cidr": "10.100.0.0/28", "dns": [{"address": "1.2.3.4", "type": "dns", "version": 4, "meta": {}}], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], 
"routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf3f11d03-1d", "ovs_interfaceid": "f3f11d03-1ddb-4149-b2f0-d1f2020bab39", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.694 2 DEBUG nova.network.os_vif_util [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converting VIF {"id": "f3f11d03-1ddb-4149-b2f0-d1f2020bab39", "address": "fa:16:3e:32:93:e7", "network": {"id": "c95536aa-e734-4350-9442-56cceb6f6448", "bridge": "br-int", "label": "tempest-network-smoke--847152420", "subnets": [{"cidr": "10.100.0.0/28", "dns": [{"address": "1.2.3.4", "type": "dns", "version": 4, "meta": {}}], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf3f11d03-1d", "ovs_interfaceid": "f3f11d03-1ddb-4149-b2f0-d1f2020bab39", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.695 2 DEBUG nova.network.os_vif_util [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:32:93:e7,bridge_name='br-int',has_traffic_filtering=True,id=f3f11d03-1ddb-4149-b2f0-d1f2020bab39,network=Network(c95536aa-e734-4350-9442-56cceb6f6448),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf3f11d03-1d') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.696 2 DEBUG os_vif [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:32:93:e7,bridge_name='br-int',has_traffic_filtering=True,id=f3f11d03-1ddb-4149-b2f0-d1f2020bab39,network=Network(c95536aa-e734-4350-9442-56cceb6f6448),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf3f11d03-1d') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.696 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.697 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.697 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.701 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.701 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapf3f11d03-1d, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.702 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapf3f11d03-1d, col_values=(('external_ids', {'iface-id': 'f3f11d03-1ddb-4149-b2f0-d1f2020bab39', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:32:93:e7', 'vm-uuid': '541184e3-5963-4add-ac60-b22fdcf3774b'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:39:57 compute-0 NetworkManager[51160]: <info>  [1759408797.7048] manager: (tapf3f11d03-1d): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/320)
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.705 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.707 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.714 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.715 2 INFO os_vif [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:32:93:e7,bridge_name='br-int',has_traffic_filtering=True,id=f3f11d03-1ddb-4149-b2f0-d1f2020bab39,network=Network(c95536aa-e734-4350-9442-56cceb6f6448),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf3f11d03-1d')
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.795 2 DEBUG nova.virt.libvirt.driver [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.796 2 DEBUG nova.virt.libvirt.driver [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.796 2 DEBUG nova.virt.libvirt.driver [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] No VIF found with MAC fa:16:3e:32:93:e7, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:39:57 compute-0 nova_compute[192079]: 2025-10-02 12:39:57.797 2 INFO nova.virt.libvirt.driver [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Using config drive
Oct 02 12:39:58 compute-0 nova_compute[192079]: 2025-10-02 12:39:58.048 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:58 compute-0 nova_compute[192079]: 2025-10-02 12:39:58.425 2 INFO nova.virt.libvirt.driver [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Creating config drive at /var/lib/nova/instances/541184e3-5963-4add-ac60-b22fdcf3774b/disk.config
Oct 02 12:39:58 compute-0 nova_compute[192079]: 2025-10-02 12:39:58.433 2 DEBUG oslo_concurrency.processutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/541184e3-5963-4add-ac60-b22fdcf3774b/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp6jl00a3s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:39:58 compute-0 nova_compute[192079]: 2025-10-02 12:39:58.562 2 DEBUG oslo_concurrency.processutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/541184e3-5963-4add-ac60-b22fdcf3774b/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp6jl00a3s" returned: 0 in 0.129s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:39:58 compute-0 kernel: tapf3f11d03-1d: entered promiscuous mode
Oct 02 12:39:58 compute-0 NetworkManager[51160]: <info>  [1759408798.6142] manager: (tapf3f11d03-1d): new Tun device (/org/freedesktop/NetworkManager/Devices/321)
Oct 02 12:39:58 compute-0 nova_compute[192079]: 2025-10-02 12:39:58.615 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:58 compute-0 ovn_controller[94336]: 2025-10-02T12:39:58Z|00656|binding|INFO|Claiming lport f3f11d03-1ddb-4149-b2f0-d1f2020bab39 for this chassis.
Oct 02 12:39:58 compute-0 ovn_controller[94336]: 2025-10-02T12:39:58Z|00657|binding|INFO|f3f11d03-1ddb-4149-b2f0-d1f2020bab39: Claiming fa:16:3e:32:93:e7 10.100.0.11
Oct 02 12:39:58 compute-0 nova_compute[192079]: 2025-10-02 12:39:58.617 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:58 compute-0 nova_compute[192079]: 2025-10-02 12:39:58.621 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:58 compute-0 systemd-udevd[249510]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:39:58 compute-0 NetworkManager[51160]: <info>  [1759408798.6506] device (tapf3f11d03-1d): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:39:58 compute-0 NetworkManager[51160]: <info>  [1759408798.6518] device (tapf3f11d03-1d): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:39:58 compute-0 systemd-machined[152150]: New machine qemu-82-instance-000000aa.
Oct 02 12:39:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:58.653 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:32:93:e7 10.100.0.11'], port_security=['fa:16:3e:32:93:e7 10.100.0.11'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.11/28', 'neutron:device_id': '541184e3-5963-4add-ac60-b22fdcf3774b', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-c95536aa-e734-4350-9442-56cceb6f6448', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'neutron:revision_number': '2', 'neutron:security_group_ids': 'e0364827-2aef-4d66-9236-5a3d51998ebc', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=51088e7a-7274-41d4-82f4-8a7994a97b5a, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=f3f11d03-1ddb-4149-b2f0-d1f2020bab39) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:39:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:58.654 103294 INFO neutron.agent.ovn.metadata.agent [-] Port f3f11d03-1ddb-4149-b2f0-d1f2020bab39 in datapath c95536aa-e734-4350-9442-56cceb6f6448 bound to our chassis
Oct 02 12:39:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:58.655 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network c95536aa-e734-4350-9442-56cceb6f6448
Oct 02 12:39:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:58.666 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[33c940b9-4942-49fa-9b5b-c2940de758b2]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:39:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:58.667 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tapc95536aa-e1 in ovnmeta-c95536aa-e734-4350-9442-56cceb6f6448 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:39:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:58.669 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tapc95536aa-e0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:39:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:58.670 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[83327c5d-44d8-4467-a0f2-f38717bdcbc4]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:39:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:58.670 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[471fd7ef-9ba8-4835-a929-ae57854c856e]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:39:58 compute-0 nova_compute[192079]: 2025-10-02 12:39:58.674 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:58 compute-0 ovn_controller[94336]: 2025-10-02T12:39:58Z|00658|binding|INFO|Setting lport f3f11d03-1ddb-4149-b2f0-d1f2020bab39 ovn-installed in OVS
Oct 02 12:39:58 compute-0 ovn_controller[94336]: 2025-10-02T12:39:58Z|00659|binding|INFO|Setting lport f3f11d03-1ddb-4149-b2f0-d1f2020bab39 up in Southbound
Oct 02 12:39:58 compute-0 nova_compute[192079]: 2025-10-02 12:39:58.679 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:58 compute-0 systemd[1]: Started Virtual Machine qemu-82-instance-000000aa.
Oct 02 12:39:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:58.682 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[4f5bda5d-ab04-44aa-9f98-8400479370a4]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:39:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:58.698 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ae71435a-e9bd-462e-b94b-c8de0fdbd2c1]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:39:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:58.727 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[da27ed0f-e2db-47de-9ccc-244635753b59]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:39:58 compute-0 NetworkManager[51160]: <info>  [1759408798.7333] manager: (tapc95536aa-e0): new Veth device (/org/freedesktop/NetworkManager/Devices/322)
Oct 02 12:39:58 compute-0 systemd-udevd[249513]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:39:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:58.732 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8534e386-14ae-423e-a5dd-f7dcb92e6f40]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:39:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:58.768 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[296e44d9-999a-4a4e-9117-819d7c7d3665]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:39:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:58.771 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[02d43fb8-f277-4692-a7ac-06e493866701]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:39:58 compute-0 NetworkManager[51160]: <info>  [1759408798.7912] device (tapc95536aa-e0): carrier: link connected
Oct 02 12:39:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:58.796 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[4f6d9957-faa3-4670-a448-4146e3fa6a03]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:39:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:58.812 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[47768f71-bf19-4b97-bf4b-1b6655fc4040]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapc95536aa-e1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:9d:a0:d6'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 209], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 679641, 'reachable_time': 40595, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 249544, 'error': None, 'target': 'ovnmeta-c95536aa-e734-4350-9442-56cceb6f6448', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:39:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:58.826 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c649e370-6751-4cca-b2aa-33019bf4f7f6]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe9d:a0d6'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 679641, 'tstamp': 679641}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 249545, 'error': None, 'target': 'ovnmeta-c95536aa-e734-4350-9442-56cceb6f6448', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:39:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:58.841 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b0ac09c4-c4cc-4d42-949d-ba19b6788df9]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapc95536aa-e1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:9d:a0:d6'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 209], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 679641, 'reachable_time': 40595, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 249546, 'error': None, 'target': 'ovnmeta-c95536aa-e734-4350-9442-56cceb6f6448', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:39:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:58.876 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ed163919-116f-4b62-b9c3-71e66c3d82cc]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:39:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:58.960 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4eb5fe68-4ce9-40e7-bc8c-476407065cf8]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:39:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:58.962 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapc95536aa-e0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:39:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:58.962 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:39:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:58.963 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapc95536aa-e0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:39:58 compute-0 NetworkManager[51160]: <info>  [1759408798.9665] manager: (tapc95536aa-e0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/323)
Oct 02 12:39:58 compute-0 kernel: tapc95536aa-e0: entered promiscuous mode
Oct 02 12:39:58 compute-0 nova_compute[192079]: 2025-10-02 12:39:58.965 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:58 compute-0 nova_compute[192079]: 2025-10-02 12:39:58.969 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:58 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:58.970 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tapc95536aa-e0, col_values=(('external_ids', {'iface-id': '4a56e1f3-de9c-40ee-a72d-7e5829d8f985'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:39:58 compute-0 nova_compute[192079]: 2025-10-02 12:39:58.971 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:58 compute-0 ovn_controller[94336]: 2025-10-02T12:39:58Z|00660|binding|INFO|Releasing lport 4a56e1f3-de9c-40ee-a72d-7e5829d8f985 from this chassis (sb_readonly=0)
Oct 02 12:39:58 compute-0 nova_compute[192079]: 2025-10-02 12:39:58.998 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:59 compute-0 nova_compute[192079]: 2025-10-02 12:39:59.003 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:59.003 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/c95536aa-e734-4350-9442-56cceb6f6448.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/c95536aa-e734-4350-9442-56cceb6f6448.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:59.004 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[514b7bdd-91e5-4bbe-8f9f-c3d0b0e49233]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:59.005 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-c95536aa-e734-4350-9442-56cceb6f6448
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/c95536aa-e734-4350-9442-56cceb6f6448.pid.haproxy
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID c95536aa-e734-4350-9442-56cceb6f6448
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:39:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:39:59.007 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-c95536aa-e734-4350-9442-56cceb6f6448', 'env', 'PROCESS_TAG=haproxy-c95536aa-e734-4350-9442-56cceb6f6448', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/c95536aa-e734-4350-9442-56cceb6f6448.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:39:59 compute-0 nova_compute[192079]: 2025-10-02 12:39:59.481 2 DEBUG nova.compute.manager [req-1e695de6-e58a-4703-88de-5a37d2f39c86 req-af846426-11ae-4bd8-98bb-362037a92d79 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Received event network-vif-plugged-f3f11d03-1ddb-4149-b2f0-d1f2020bab39 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:39:59 compute-0 nova_compute[192079]: 2025-10-02 12:39:59.482 2 DEBUG oslo_concurrency.lockutils [req-1e695de6-e58a-4703-88de-5a37d2f39c86 req-af846426-11ae-4bd8-98bb-362037a92d79 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "541184e3-5963-4add-ac60-b22fdcf3774b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:39:59 compute-0 nova_compute[192079]: 2025-10-02 12:39:59.482 2 DEBUG oslo_concurrency.lockutils [req-1e695de6-e58a-4703-88de-5a37d2f39c86 req-af846426-11ae-4bd8-98bb-362037a92d79 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "541184e3-5963-4add-ac60-b22fdcf3774b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:39:59 compute-0 nova_compute[192079]: 2025-10-02 12:39:59.483 2 DEBUG oslo_concurrency.lockutils [req-1e695de6-e58a-4703-88de-5a37d2f39c86 req-af846426-11ae-4bd8-98bb-362037a92d79 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "541184e3-5963-4add-ac60-b22fdcf3774b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:39:59 compute-0 nova_compute[192079]: 2025-10-02 12:39:59.483 2 DEBUG nova.compute.manager [req-1e695de6-e58a-4703-88de-5a37d2f39c86 req-af846426-11ae-4bd8-98bb-362037a92d79 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Processing event network-vif-plugged-f3f11d03-1ddb-4149-b2f0-d1f2020bab39 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:39:59 compute-0 podman[249578]: 2025-10-02 12:39:59.489756491 +0000 UTC m=+0.079512827 container create 165496a95d9922921f007008d81c16a5053f1ec84f301eb06d21321094c7fa2b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-c95536aa-e734-4350-9442-56cceb6f6448, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_managed=true)
Oct 02 12:39:59 compute-0 systemd[1]: Started libpod-conmon-165496a95d9922921f007008d81c16a5053f1ec84f301eb06d21321094c7fa2b.scope.
Oct 02 12:39:59 compute-0 podman[249578]: 2025-10-02 12:39:59.45079014 +0000 UTC m=+0.040546546 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:39:59 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:39:59 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/10ae88263f63148e5802392bd427fdc300a5ce2d7cd6aed40623659d58fd96dc/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:39:59 compute-0 podman[249578]: 2025-10-02 12:39:59.599405045 +0000 UTC m=+0.189161411 container init 165496a95d9922921f007008d81c16a5053f1ec84f301eb06d21321094c7fa2b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-c95536aa-e734-4350-9442-56cceb6f6448, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, org.label-schema.build-date=20251001)
Oct 02 12:39:59 compute-0 podman[249578]: 2025-10-02 12:39:59.604894855 +0000 UTC m=+0.194651191 container start 165496a95d9922921f007008d81c16a5053f1ec84f301eb06d21321094c7fa2b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-c95536aa-e734-4350-9442-56cceb6f6448, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:39:59 compute-0 podman[249592]: 2025-10-02 12:39:59.622968146 +0000 UTC m=+0.092225111 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_id=edpm, container_name=ceilometer_agent_compute, 
io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible)
Oct 02 12:39:59 compute-0 neutron-haproxy-ovnmeta-c95536aa-e734-4350-9442-56cceb6f6448[249595]: [NOTICE]   (249617) : New worker (249620) forked
Oct 02 12:39:59 compute-0 neutron-haproxy-ovnmeta-c95536aa-e734-4350-9442-56cceb6f6448[249595]: [NOTICE]   (249617) : Loading success.
Oct 02 12:40:00 compute-0 nova_compute[192079]: 2025-10-02 12:40:00.244 2 DEBUG nova.network.neutron [req-9ef2f9a8-2dab-4678-9d27-502ba0cf7910 req-dafd6c79-bc0d-4970-a2d9-0ad055fde2fd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Updated VIF entry in instance network info cache for port f3f11d03-1ddb-4149-b2f0-d1f2020bab39. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:40:00 compute-0 nova_compute[192079]: 2025-10-02 12:40:00.246 2 DEBUG nova.network.neutron [req-9ef2f9a8-2dab-4678-9d27-502ba0cf7910 req-dafd6c79-bc0d-4970-a2d9-0ad055fde2fd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Updating instance_info_cache with network_info: [{"id": "f3f11d03-1ddb-4149-b2f0-d1f2020bab39", "address": "fa:16:3e:32:93:e7", "network": {"id": "c95536aa-e734-4350-9442-56cceb6f6448", "bridge": "br-int", "label": "tempest-network-smoke--847152420", "subnets": [{"cidr": "10.100.0.0/28", "dns": [{"address": "1.2.3.4", "type": "dns", "version": 4, "meta": {}}], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf3f11d03-1d", "ovs_interfaceid": "f3f11d03-1ddb-4149-b2f0-d1f2020bab39", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:40:00 compute-0 nova_compute[192079]: 2025-10-02 12:40:00.249 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408800.2491708, 541184e3-5963-4add-ac60-b22fdcf3774b => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:40:00 compute-0 nova_compute[192079]: 2025-10-02 12:40:00.250 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] VM Started (Lifecycle Event)
Oct 02 12:40:00 compute-0 nova_compute[192079]: 2025-10-02 12:40:00.254 2 DEBUG nova.compute.manager [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:40:00 compute-0 nova_compute[192079]: 2025-10-02 12:40:00.258 2 DEBUG nova.virt.libvirt.driver [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:40:00 compute-0 nova_compute[192079]: 2025-10-02 12:40:00.262 2 INFO nova.virt.libvirt.driver [-] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Instance spawned successfully.
Oct 02 12:40:00 compute-0 nova_compute[192079]: 2025-10-02 12:40:00.262 2 DEBUG nova.virt.libvirt.driver [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:40:00 compute-0 nova_compute[192079]: 2025-10-02 12:40:00.281 2 DEBUG oslo_concurrency.lockutils [req-9ef2f9a8-2dab-4678-9d27-502ba0cf7910 req-dafd6c79-bc0d-4970-a2d9-0ad055fde2fd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-541184e3-5963-4add-ac60-b22fdcf3774b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:40:00 compute-0 nova_compute[192079]: 2025-10-02 12:40:00.321 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:40:00 compute-0 nova_compute[192079]: 2025-10-02 12:40:00.327 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:40:00 compute-0 nova_compute[192079]: 2025-10-02 12:40:00.331 2 DEBUG nova.virt.libvirt.driver [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:40:00 compute-0 nova_compute[192079]: 2025-10-02 12:40:00.332 2 DEBUG nova.virt.libvirt.driver [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:40:00 compute-0 nova_compute[192079]: 2025-10-02 12:40:00.332 2 DEBUG nova.virt.libvirt.driver [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:40:00 compute-0 nova_compute[192079]: 2025-10-02 12:40:00.333 2 DEBUG nova.virt.libvirt.driver [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:40:00 compute-0 nova_compute[192079]: 2025-10-02 12:40:00.333 2 DEBUG nova.virt.libvirt.driver [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:40:00 compute-0 nova_compute[192079]: 2025-10-02 12:40:00.334 2 DEBUG nova.virt.libvirt.driver [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:40:00 compute-0 nova_compute[192079]: 2025-10-02 12:40:00.382 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:40:00 compute-0 nova_compute[192079]: 2025-10-02 12:40:00.383 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408800.2504804, 541184e3-5963-4add-ac60-b22fdcf3774b => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:40:00 compute-0 nova_compute[192079]: 2025-10-02 12:40:00.383 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] VM Paused (Lifecycle Event)
Oct 02 12:40:00 compute-0 nova_compute[192079]: 2025-10-02 12:40:00.438 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:40:00 compute-0 nova_compute[192079]: 2025-10-02 12:40:00.443 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408800.2565863, 541184e3-5963-4add-ac60-b22fdcf3774b => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:40:00 compute-0 nova_compute[192079]: 2025-10-02 12:40:00.443 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] VM Resumed (Lifecycle Event)
Oct 02 12:40:00 compute-0 nova_compute[192079]: 2025-10-02 12:40:00.474 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:40:00 compute-0 nova_compute[192079]: 2025-10-02 12:40:00.478 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:40:00 compute-0 nova_compute[192079]: 2025-10-02 12:40:00.799 2 INFO nova.compute.manager [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Took 10.30 seconds to spawn the instance on the hypervisor.
Oct 02 12:40:00 compute-0 nova_compute[192079]: 2025-10-02 12:40:00.800 2 DEBUG nova.compute.manager [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:40:01 compute-0 nova_compute[192079]: 2025-10-02 12:40:01.817 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:40:01 compute-0 nova_compute[192079]: 2025-10-02 12:40:01.921 2 DEBUG nova.compute.manager [req-42a7c213-7ce9-49b1-a4c8-28a677891e5c req-e2fe05ef-06c8-4711-83c5-cd8ed512a51d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Received event network-vif-plugged-f3f11d03-1ddb-4149-b2f0-d1f2020bab39 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:40:01 compute-0 nova_compute[192079]: 2025-10-02 12:40:01.922 2 DEBUG oslo_concurrency.lockutils [req-42a7c213-7ce9-49b1-a4c8-28a677891e5c req-e2fe05ef-06c8-4711-83c5-cd8ed512a51d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "541184e3-5963-4add-ac60-b22fdcf3774b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:40:01 compute-0 nova_compute[192079]: 2025-10-02 12:40:01.922 2 DEBUG oslo_concurrency.lockutils [req-42a7c213-7ce9-49b1-a4c8-28a677891e5c req-e2fe05ef-06c8-4711-83c5-cd8ed512a51d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "541184e3-5963-4add-ac60-b22fdcf3774b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:40:01 compute-0 nova_compute[192079]: 2025-10-02 12:40:01.923 2 DEBUG oslo_concurrency.lockutils [req-42a7c213-7ce9-49b1-a4c8-28a677891e5c req-e2fe05ef-06c8-4711-83c5-cd8ed512a51d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "541184e3-5963-4add-ac60-b22fdcf3774b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:40:01 compute-0 nova_compute[192079]: 2025-10-02 12:40:01.923 2 DEBUG nova.compute.manager [req-42a7c213-7ce9-49b1-a4c8-28a677891e5c req-e2fe05ef-06c8-4711-83c5-cd8ed512a51d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] No waiting events found dispatching network-vif-plugged-f3f11d03-1ddb-4149-b2f0-d1f2020bab39 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:40:01 compute-0 nova_compute[192079]: 2025-10-02 12:40:01.924 2 WARNING nova.compute.manager [req-42a7c213-7ce9-49b1-a4c8-28a677891e5c req-e2fe05ef-06c8-4711-83c5-cd8ed512a51d 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Received unexpected event network-vif-plugged-f3f11d03-1ddb-4149-b2f0-d1f2020bab39 for instance with vm_state building and task_state spawning.
Oct 02 12:40:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:02.244 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:40:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:02.246 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.002s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:40:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:02.246 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:40:02 compute-0 nova_compute[192079]: 2025-10-02 12:40:02.340 2 INFO nova.compute.manager [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Took 12.46 seconds to build instance.
Oct 02 12:40:02 compute-0 nova_compute[192079]: 2025-10-02 12:40:02.431 2 DEBUG oslo_concurrency.lockutils [None req-6cde0cda-1877-460c-a1d6-8782eba48958 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "541184e3-5963-4add-ac60-b22fdcf3774b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 12.649s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:40:02 compute-0 nova_compute[192079]: 2025-10-02 12:40:02.706 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:03 compute-0 nova_compute[192079]: 2025-10-02 12:40:03.052 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:05 compute-0 NetworkManager[51160]: <info>  [1759408805.0995] manager: (patch-br-int-to-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/324)
Oct 02 12:40:05 compute-0 NetworkManager[51160]: <info>  [1759408805.1007] manager: (patch-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d-to-br-int): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/325)
Oct 02 12:40:05 compute-0 nova_compute[192079]: 2025-10-02 12:40:05.098 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:05 compute-0 nova_compute[192079]: 2025-10-02 12:40:05.155 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:05 compute-0 ovn_controller[94336]: 2025-10-02T12:40:05Z|00661|binding|INFO|Releasing lport 4a56e1f3-de9c-40ee-a72d-7e5829d8f985 from this chassis (sb_readonly=0)
Oct 02 12:40:05 compute-0 nova_compute[192079]: 2025-10-02 12:40:05.225 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:05 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:05.573 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=45, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=44) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:40:05 compute-0 nova_compute[192079]: 2025-10-02 12:40:05.574 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:05 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:05.575 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 2 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:40:05 compute-0 nova_compute[192079]: 2025-10-02 12:40:05.852 2 DEBUG nova.compute.manager [req-68f695d2-fd82-4f70-8a18-b7156b0b5505 req-927edb0b-2674-4d9c-a76c-865dce44fb73 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Received event network-changed-f3f11d03-1ddb-4149-b2f0-d1f2020bab39 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:40:05 compute-0 nova_compute[192079]: 2025-10-02 12:40:05.852 2 DEBUG nova.compute.manager [req-68f695d2-fd82-4f70-8a18-b7156b0b5505 req-927edb0b-2674-4d9c-a76c-865dce44fb73 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Refreshing instance network info cache due to event network-changed-f3f11d03-1ddb-4149-b2f0-d1f2020bab39. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:40:05 compute-0 nova_compute[192079]: 2025-10-02 12:40:05.853 2 DEBUG oslo_concurrency.lockutils [req-68f695d2-fd82-4f70-8a18-b7156b0b5505 req-927edb0b-2674-4d9c-a76c-865dce44fb73 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-541184e3-5963-4add-ac60-b22fdcf3774b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:40:05 compute-0 nova_compute[192079]: 2025-10-02 12:40:05.853 2 DEBUG oslo_concurrency.lockutils [req-68f695d2-fd82-4f70-8a18-b7156b0b5505 req-927edb0b-2674-4d9c-a76c-865dce44fb73 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-541184e3-5963-4add-ac60-b22fdcf3774b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:40:05 compute-0 nova_compute[192079]: 2025-10-02 12:40:05.853 2 DEBUG nova.network.neutron [req-68f695d2-fd82-4f70-8a18-b7156b0b5505 req-927edb0b-2674-4d9c-a76c-865dce44fb73 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Refreshing network info cache for port f3f11d03-1ddb-4149-b2f0-d1f2020bab39 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:40:06 compute-0 podman[249638]: 2025-10-02 12:40:06.165294414 +0000 UTC m=+0.067107248 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, container_name=multipathd, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=multipathd, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']})
Oct 02 12:40:06 compute-0 podman[249637]: 2025-10-02 12:40:06.177984059 +0000 UTC m=+0.080693228 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Red Hat, Inc., name=ubi9-minimal, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., distribution-scope=public, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., managed_by=edpm_ansible, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, url=https://catalog.redhat.com/en/search?searchType=containers, vcs-type=git, vendor=Red Hat, Inc., config_id=edpm, io.openshift.tags=minimal rhel9, build-date=2025-08-20T13:12:41, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.buildah.version=1.33.7, io.openshift.expose-services=, release=1755695350, version=9.6, container_name=openstack_network_exporter, architecture=x86_64, com.redhat.component=ubi9-minimal-container, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal)
Oct 02 12:40:07 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:07.579 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '45'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:40:07 compute-0 nova_compute[192079]: 2025-10-02 12:40:07.710 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:08 compute-0 nova_compute[192079]: 2025-10-02 12:40:08.096 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:10 compute-0 nova_compute[192079]: 2025-10-02 12:40:10.379 2 DEBUG nova.network.neutron [req-68f695d2-fd82-4f70-8a18-b7156b0b5505 req-927edb0b-2674-4d9c-a76c-865dce44fb73 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Updated VIF entry in instance network info cache for port f3f11d03-1ddb-4149-b2f0-d1f2020bab39. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:40:10 compute-0 nova_compute[192079]: 2025-10-02 12:40:10.380 2 DEBUG nova.network.neutron [req-68f695d2-fd82-4f70-8a18-b7156b0b5505 req-927edb0b-2674-4d9c-a76c-865dce44fb73 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Updating instance_info_cache with network_info: [{"id": "f3f11d03-1ddb-4149-b2f0-d1f2020bab39", "address": "fa:16:3e:32:93:e7", "network": {"id": "c95536aa-e734-4350-9442-56cceb6f6448", "bridge": "br-int", "label": "tempest-network-smoke--847152420", "subnets": [{"cidr": "10.100.0.0/28", "dns": [{"address": "1.2.3.4", "type": "dns", "version": 4, "meta": {}}], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf3f11d03-1d", "ovs_interfaceid": "f3f11d03-1ddb-4149-b2f0-d1f2020bab39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:40:10 compute-0 nova_compute[192079]: 2025-10-02 12:40:10.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:40:10 compute-0 nova_compute[192079]: 2025-10-02 12:40:10.677 2 DEBUG oslo_concurrency.lockutils [req-68f695d2-fd82-4f70-8a18-b7156b0b5505 req-927edb0b-2674-4d9c-a76c-865dce44fb73 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-541184e3-5963-4add-ac60-b22fdcf3774b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:40:12 compute-0 podman[249691]: 2025-10-02 12:40:12.143263697 +0000 UTC m=+0.052019678 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']})
Oct 02 12:40:12 compute-0 podman[249692]: 2025-10-02 12:40:12.206904249 +0000 UTC m=+0.099388096 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_id=iscsid, container_name=iscsid, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:40:12 compute-0 nova_compute[192079]: 2025-10-02 12:40:12.714 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:12 compute-0 ovn_controller[94336]: 2025-10-02T12:40:12Z|00075|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:32:93:e7 10.100.0.11
Oct 02 12:40:12 compute-0 ovn_controller[94336]: 2025-10-02T12:40:12Z|00076|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:32:93:e7 10.100.0.11
Oct 02 12:40:13 compute-0 nova_compute[192079]: 2025-10-02 12:40:13.133 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:17 compute-0 nova_compute[192079]: 2025-10-02 12:40:17.718 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:17 compute-0 nova_compute[192079]: 2025-10-02 12:40:17.827 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._run_pending_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:40:17 compute-0 nova_compute[192079]: 2025-10-02 12:40:17.827 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Cleaning up deleted instances _run_pending_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:11145
Oct 02 12:40:17 compute-0 nova_compute[192079]: 2025-10-02 12:40:17.877 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] There are 0 instances to clean _run_pending_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:11154
Oct 02 12:40:18 compute-0 nova_compute[192079]: 2025-10-02 12:40:18.182 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:18 compute-0 nova_compute[192079]: 2025-10-02 12:40:18.311 2 INFO nova.compute.manager [None req-4ac1dfda-38e6-4a5b-93f6-061c73445083 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Get console output
Oct 02 12:40:18 compute-0 nova_compute[192079]: 2025-10-02 12:40:18.318 55 INFO nova.privsep.libvirt [-] Ignored error while reading from instance console pty: can't concat NoneType to bytes
Oct 02 12:40:20 compute-0 podman[249733]: 2025-10-02 12:40:20.17935065 +0000 UTC m=+0.081423708 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, maintainer=OpenStack 
Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_id=ovn_metadata_agent, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:40:20 compute-0 podman[249734]: 2025-10-02 12:40:20.201899344 +0000 UTC m=+0.105240776 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, container_name=ovn_controller, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true)
Oct 02 12:40:20 compute-0 ovn_controller[94336]: 2025-10-02T12:40:20Z|00077|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:32:93:e7 10.100.0.11
Oct 02 12:40:20 compute-0 podman[249735]: 2025-10-02 12:40:20.222922077 +0000 UTC m=+0.112010011 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:40:22 compute-0 nova_compute[192079]: 2025-10-02 12:40:22.722 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:23 compute-0 nova_compute[192079]: 2025-10-02 12:40:23.183 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:23 compute-0 ovn_controller[94336]: 2025-10-02T12:40:23Z|00078|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:32:93:e7 10.100.0.11
Oct 02 12:40:25 compute-0 nova_compute[192079]: 2025-10-02 12:40:25.911 2 DEBUG nova.compute.manager [req-b4128dc8-17c8-49ba-bb63-edee839d2547 req-fb1d2413-4d0c-4c0c-8468-5cb69e94cfe5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Received event network-changed-f3f11d03-1ddb-4149-b2f0-d1f2020bab39 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:40:25 compute-0 nova_compute[192079]: 2025-10-02 12:40:25.911 2 DEBUG nova.compute.manager [req-b4128dc8-17c8-49ba-bb63-edee839d2547 req-fb1d2413-4d0c-4c0c-8468-5cb69e94cfe5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Refreshing instance network info cache due to event network-changed-f3f11d03-1ddb-4149-b2f0-d1f2020bab39. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:40:25 compute-0 nova_compute[192079]: 2025-10-02 12:40:25.912 2 DEBUG oslo_concurrency.lockutils [req-b4128dc8-17c8-49ba-bb63-edee839d2547 req-fb1d2413-4d0c-4c0c-8468-5cb69e94cfe5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-541184e3-5963-4add-ac60-b22fdcf3774b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:40:25 compute-0 nova_compute[192079]: 2025-10-02 12:40:25.913 2 DEBUG oslo_concurrency.lockutils [req-b4128dc8-17c8-49ba-bb63-edee839d2547 req-fb1d2413-4d0c-4c0c-8468-5cb69e94cfe5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-541184e3-5963-4add-ac60-b22fdcf3774b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:40:25 compute-0 nova_compute[192079]: 2025-10-02 12:40:25.913 2 DEBUG nova.network.neutron [req-b4128dc8-17c8-49ba-bb63-edee839d2547 req-fb1d2413-4d0c-4c0c-8468-5cb69e94cfe5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Refreshing network info cache for port f3f11d03-1ddb-4149-b2f0-d1f2020bab39 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:40:26 compute-0 nova_compute[192079]: 2025-10-02 12:40:26.160 2 DEBUG oslo_concurrency.lockutils [None req-58ca707c-fc54-421d-8fd3-b84b5521635c a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "541184e3-5963-4add-ac60-b22fdcf3774b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:40:26 compute-0 nova_compute[192079]: 2025-10-02 12:40:26.161 2 DEBUG oslo_concurrency.lockutils [None req-58ca707c-fc54-421d-8fd3-b84b5521635c a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "541184e3-5963-4add-ac60-b22fdcf3774b" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:40:26 compute-0 nova_compute[192079]: 2025-10-02 12:40:26.162 2 DEBUG oslo_concurrency.lockutils [None req-58ca707c-fc54-421d-8fd3-b84b5521635c a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "541184e3-5963-4add-ac60-b22fdcf3774b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:40:26 compute-0 nova_compute[192079]: 2025-10-02 12:40:26.163 2 DEBUG oslo_concurrency.lockutils [None req-58ca707c-fc54-421d-8fd3-b84b5521635c a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "541184e3-5963-4add-ac60-b22fdcf3774b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:40:26 compute-0 nova_compute[192079]: 2025-10-02 12:40:26.165 2 DEBUG oslo_concurrency.lockutils [None req-58ca707c-fc54-421d-8fd3-b84b5521635c a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "541184e3-5963-4add-ac60-b22fdcf3774b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.002s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:40:26 compute-0 nova_compute[192079]: 2025-10-02 12:40:26.186 2 INFO nova.compute.manager [None req-58ca707c-fc54-421d-8fd3-b84b5521635c a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Terminating instance
Oct 02 12:40:26 compute-0 nova_compute[192079]: 2025-10-02 12:40:26.201 2 DEBUG nova.compute.manager [None req-58ca707c-fc54-421d-8fd3-b84b5521635c a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:40:27 compute-0 nova_compute[192079]: 2025-10-02 12:40:27.725 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:28 compute-0 nova_compute[192079]: 2025-10-02 12:40:28.104 2 DEBUG nova.network.neutron [req-b4128dc8-17c8-49ba-bb63-edee839d2547 req-fb1d2413-4d0c-4c0c-8468-5cb69e94cfe5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Updated VIF entry in instance network info cache for port f3f11d03-1ddb-4149-b2f0-d1f2020bab39. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:40:28 compute-0 nova_compute[192079]: 2025-10-02 12:40:28.105 2 DEBUG nova.network.neutron [req-b4128dc8-17c8-49ba-bb63-edee839d2547 req-fb1d2413-4d0c-4c0c-8468-5cb69e94cfe5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Updating instance_info_cache with network_info: [{"id": "f3f11d03-1ddb-4149-b2f0-d1f2020bab39", "address": "fa:16:3e:32:93:e7", "network": {"id": "c95536aa-e734-4350-9442-56cceb6f6448", "bridge": "br-int", "label": "tempest-network-smoke--847152420", "subnets": [{"cidr": "10.100.0.0/28", "dns": [{"address": "9.8.7.6", "type": "dns", "version": 4, "meta": {}}], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf3f11d03-1d", "ovs_interfaceid": "f3f11d03-1ddb-4149-b2f0-d1f2020bab39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:40:28 compute-0 nova_compute[192079]: 2025-10-02 12:40:28.170 2 DEBUG oslo_concurrency.lockutils [req-b4128dc8-17c8-49ba-bb63-edee839d2547 req-fb1d2413-4d0c-4c0c-8468-5cb69e94cfe5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-541184e3-5963-4add-ac60-b22fdcf3774b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:40:28 compute-0 nova_compute[192079]: 2025-10-02 12:40:28.185 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:29 compute-0 kernel: tapf3f11d03-1d (unregistering): left promiscuous mode
Oct 02 12:40:29 compute-0 NetworkManager[51160]: <info>  [1759408829.5942] device (tapf3f11d03-1d): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:40:29 compute-0 nova_compute[192079]: 2025-10-02 12:40:29.606 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:29 compute-0 nova_compute[192079]: 2025-10-02 12:40:29.611 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:29 compute-0 ovn_controller[94336]: 2025-10-02T12:40:29Z|00662|binding|INFO|Releasing lport f3f11d03-1ddb-4149-b2f0-d1f2020bab39 from this chassis (sb_readonly=0)
Oct 02 12:40:29 compute-0 ovn_controller[94336]: 2025-10-02T12:40:29Z|00663|binding|INFO|Setting lport f3f11d03-1ddb-4149-b2f0-d1f2020bab39 down in Southbound
Oct 02 12:40:29 compute-0 ovn_controller[94336]: 2025-10-02T12:40:29Z|00664|binding|INFO|Removing iface tapf3f11d03-1d ovn-installed in OVS
Oct 02 12:40:29 compute-0 nova_compute[192079]: 2025-10-02 12:40:29.639 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:29.657 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:32:93:e7 10.100.0.11'], port_security=['fa:16:3e:32:93:e7 10.100.0.11'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.11/28', 'neutron:device_id': '541184e3-5963-4add-ac60-b22fdcf3774b', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-c95536aa-e734-4350-9442-56cceb6f6448', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'neutron:revision_number': '4', 'neutron:security_group_ids': 'e0364827-2aef-4d66-9236-5a3d51998ebc', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=51088e7a-7274-41d4-82f4-8a7994a97b5a, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=f3f11d03-1ddb-4149-b2f0-d1f2020bab39) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:40:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:29.660 103294 INFO neutron.agent.ovn.metadata.agent [-] Port f3f11d03-1ddb-4149-b2f0-d1f2020bab39 in datapath c95536aa-e734-4350-9442-56cceb6f6448 unbound from our chassis
Oct 02 12:40:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:29.662 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network c95536aa-e734-4350-9442-56cceb6f6448, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:40:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:29.663 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[180f8f32-483c-4cf6-9583-375ab2d122d6]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:40:29 compute-0 systemd[1]: machine-qemu\x2d82\x2dinstance\x2d000000aa.scope: Deactivated successfully.
Oct 02 12:40:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:29.663 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-c95536aa-e734-4350-9442-56cceb6f6448 namespace which is not needed anymore
Oct 02 12:40:29 compute-0 systemd[1]: machine-qemu\x2d82\x2dinstance\x2d000000aa.scope: Consumed 14.879s CPU time.
Oct 02 12:40:29 compute-0 systemd-machined[152150]: Machine qemu-82-instance-000000aa terminated.
Oct 02 12:40:29 compute-0 podman[249800]: 2025-10-02 12:40:29.76542123 +0000 UTC m=+0.087519203 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=edpm, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, tcib_managed=true, managed_by=edpm_ansible, org.label-schema.build-date=20251001, 
org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:40:29 compute-0 kernel: tapf3f11d03-1d: entered promiscuous mode
Oct 02 12:40:29 compute-0 NetworkManager[51160]: <info>  [1759408829.8291] manager: (tapf3f11d03-1d): new Tun device (/org/freedesktop/NetworkManager/Devices/326)
Oct 02 12:40:29 compute-0 nova_compute[192079]: 2025-10-02 12:40:29.830 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:29 compute-0 ovn_controller[94336]: 2025-10-02T12:40:29Z|00665|binding|INFO|Claiming lport f3f11d03-1ddb-4149-b2f0-d1f2020bab39 for this chassis.
Oct 02 12:40:29 compute-0 ovn_controller[94336]: 2025-10-02T12:40:29Z|00666|binding|INFO|f3f11d03-1ddb-4149-b2f0-d1f2020bab39: Claiming fa:16:3e:32:93:e7 10.100.0.11
Oct 02 12:40:29 compute-0 systemd-udevd[249802]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:40:29 compute-0 kernel: tapf3f11d03-1d (unregistering): left promiscuous mode
Oct 02 12:40:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:29.854 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:32:93:e7 10.100.0.11'], port_security=['fa:16:3e:32:93:e7 10.100.0.11'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.11/28', 'neutron:device_id': '541184e3-5963-4add-ac60-b22fdcf3774b', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-c95536aa-e734-4350-9442-56cceb6f6448', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'neutron:revision_number': '4', 'neutron:security_group_ids': 'e0364827-2aef-4d66-9236-5a3d51998ebc', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=51088e7a-7274-41d4-82f4-8a7994a97b5a, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=f3f11d03-1ddb-4149-b2f0-d1f2020bab39) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:40:29 compute-0 ovn_controller[94336]: 2025-10-02T12:40:29Z|00667|binding|INFO|Setting lport f3f11d03-1ddb-4149-b2f0-d1f2020bab39 ovn-installed in OVS
Oct 02 12:40:29 compute-0 ovn_controller[94336]: 2025-10-02T12:40:29Z|00668|binding|INFO|Setting lport f3f11d03-1ddb-4149-b2f0-d1f2020bab39 up in Southbound
Oct 02 12:40:29 compute-0 nova_compute[192079]: 2025-10-02 12:40:29.858 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:29 compute-0 virtnodedevd[192380]: libvirt version: 10.10.0, package: 15.el9 (builder@centos.org, 2025-08-18-13:22:20, )
Oct 02 12:40:29 compute-0 virtnodedevd[192380]: hostname: compute-0
Oct 02 12:40:29 compute-0 virtnodedevd[192380]: ethtool ioctl error on tapf3f11d03-1d: No such device
Oct 02 12:40:29 compute-0 ovn_controller[94336]: 2025-10-02T12:40:29Z|00669|binding|INFO|Releasing lport f3f11d03-1ddb-4149-b2f0-d1f2020bab39 from this chassis (sb_readonly=1)
Oct 02 12:40:29 compute-0 nova_compute[192079]: 2025-10-02 12:40:29.863 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:29 compute-0 virtnodedevd[192380]: ethtool ioctl error on tapf3f11d03-1d: No such device
Oct 02 12:40:29 compute-0 ovn_controller[94336]: 2025-10-02T12:40:29Z|00670|binding|INFO|Removing iface tapf3f11d03-1d ovn-installed in OVS
Oct 02 12:40:29 compute-0 ovn_controller[94336]: 2025-10-02T12:40:29Z|00671|if_status|INFO|Dropped 4 log messages in last 524 seconds (most recently, 524 seconds ago) due to excessive rate
Oct 02 12:40:29 compute-0 ovn_controller[94336]: 2025-10-02T12:40:29Z|00672|if_status|INFO|Not setting lport f3f11d03-1ddb-4149-b2f0-d1f2020bab39 down as sb is readonly
Oct 02 12:40:29 compute-0 nova_compute[192079]: 2025-10-02 12:40:29.867 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:29 compute-0 ovn_controller[94336]: 2025-10-02T12:40:29Z|00673|binding|INFO|Releasing lport f3f11d03-1ddb-4149-b2f0-d1f2020bab39 from this chassis (sb_readonly=0)
Oct 02 12:40:29 compute-0 ovn_controller[94336]: 2025-10-02T12:40:29Z|00674|binding|INFO|Setting lport f3f11d03-1ddb-4149-b2f0-d1f2020bab39 down in Southbound
Oct 02 12:40:29 compute-0 virtnodedevd[192380]: ethtool ioctl error on tapf3f11d03-1d: No such device
Oct 02 12:40:29 compute-0 virtnodedevd[192380]: ethtool ioctl error on tapf3f11d03-1d: No such device
Oct 02 12:40:29 compute-0 virtnodedevd[192380]: ethtool ioctl error on tapf3f11d03-1d: No such device
Oct 02 12:40:29 compute-0 nova_compute[192079]: 2025-10-02 12:40:29.879 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:29 compute-0 virtnodedevd[192380]: ethtool ioctl error on tapf3f11d03-1d: No such device
Oct 02 12:40:29 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:29.882 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:32:93:e7 10.100.0.11'], port_security=['fa:16:3e:32:93:e7 10.100.0.11'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.11/28', 'neutron:device_id': '541184e3-5963-4add-ac60-b22fdcf3774b', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-c95536aa-e734-4350-9442-56cceb6f6448', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'neutron:revision_number': '4', 'neutron:security_group_ids': 'e0364827-2aef-4d66-9236-5a3d51998ebc', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=51088e7a-7274-41d4-82f4-8a7994a97b5a, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=f3f11d03-1ddb-4149-b2f0-d1f2020bab39) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:40:29 compute-0 virtnodedevd[192380]: ethtool ioctl error on tapf3f11d03-1d: No such device
Oct 02 12:40:29 compute-0 virtnodedevd[192380]: ethtool ioctl error on tapf3f11d03-1d: No such device
Oct 02 12:40:29 compute-0 nova_compute[192079]: 2025-10-02 12:40:29.905 2 INFO nova.virt.libvirt.driver [-] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Instance destroyed successfully.
Oct 02 12:40:29 compute-0 nova_compute[192079]: 2025-10-02 12:40:29.906 2 DEBUG nova.objects.instance [None req-58ca707c-fc54-421d-8fd3-b84b5521635c a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lazy-loading 'resources' on Instance uuid 541184e3-5963-4add-ac60-b22fdcf3774b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:40:29 compute-0 nova_compute[192079]: 2025-10-02 12:40:29.927 2 DEBUG nova.virt.libvirt.vif [None req-58ca707c-fc54-421d-8fd3-b84b5521635c a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:39:48Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestNetworkBasicOps-server-192751529',display_name='tempest-TestNetworkBasicOps-server-192751529',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkbasicops-server-192751529',id=170,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBJ8WJUvqPBapU5qvPIR5mTLAdCXJsQfWZYc+MxWmuaUwHq+MlVGVk1Uz5Mo9dMVlpWwmSIS8E+AmIJ3Mwq4O1FZCpV/RUKmBYiyo4TwqWt0l+5wzQovSk/4k1pvsCoSsIQ==',key_name='tempest-TestNetworkBasicOps-1987012530',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:40:00Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='6e2a4899168a47618e377cb3ac85ddd2',ramdisk_id='',reservation_id='r-ety1r8ek',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-TestNetworkBasicOps-1323893370',owner_user_name='tempest-TestNetworkBasicOps-1323893370-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:40:01Z,user_data=None,user_id='a1898fdf056c4a249c33590f26d4d845',uuid=541184e3-5963-4add-ac60-b22fdcf3774b,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "f3f11d03-1ddb-4149-b2f0-d1f2020bab39", "address": "fa:16:3e:32:93:e7", "network": {"id": "c95536aa-e734-4350-9442-56cceb6f6448", "bridge": "br-int", "label": "tempest-network-smoke--847152420", "subnets": [{"cidr": "10.100.0.0/28", "dns": [{"address": "1.2.3.4", "type": "dns", "version": 4, "meta": {}}], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": 
{}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf3f11d03-1d", "ovs_interfaceid": "f3f11d03-1ddb-4149-b2f0-d1f2020bab39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:40:29 compute-0 nova_compute[192079]: 2025-10-02 12:40:29.927 2 DEBUG nova.network.os_vif_util [None req-58ca707c-fc54-421d-8fd3-b84b5521635c a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converting VIF {"id": "f3f11d03-1ddb-4149-b2f0-d1f2020bab39", "address": "fa:16:3e:32:93:e7", "network": {"id": "c95536aa-e734-4350-9442-56cceb6f6448", "bridge": "br-int", "label": "tempest-network-smoke--847152420", "subnets": [{"cidr": "10.100.0.0/28", "dns": [{"address": "1.2.3.4", "type": "dns", "version": 4, "meta": {}}], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf3f11d03-1d", "ovs_interfaceid": "f3f11d03-1ddb-4149-b2f0-d1f2020bab39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:40:29 compute-0 nova_compute[192079]: 2025-10-02 12:40:29.927 2 DEBUG nova.network.os_vif_util [None req-58ca707c-fc54-421d-8fd3-b84b5521635c a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:32:93:e7,bridge_name='br-int',has_traffic_filtering=True,id=f3f11d03-1ddb-4149-b2f0-d1f2020bab39,network=Network(c95536aa-e734-4350-9442-56cceb6f6448),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf3f11d03-1d') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:40:29 compute-0 nova_compute[192079]: 2025-10-02 12:40:29.928 2 DEBUG os_vif [None req-58ca707c-fc54-421d-8fd3-b84b5521635c a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:32:93:e7,bridge_name='br-int',has_traffic_filtering=True,id=f3f11d03-1ddb-4149-b2f0-d1f2020bab39,network=Network(c95536aa-e734-4350-9442-56cceb6f6448),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf3f11d03-1d') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:40:29 compute-0 nova_compute[192079]: 2025-10-02 12:40:29.929 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:29 compute-0 nova_compute[192079]: 2025-10-02 12:40:29.929 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapf3f11d03-1d, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:40:29 compute-0 nova_compute[192079]: 2025-10-02 12:40:29.930 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:29 compute-0 nova_compute[192079]: 2025-10-02 12:40:29.933 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:40:29 compute-0 nova_compute[192079]: 2025-10-02 12:40:29.935 2 INFO os_vif [None req-58ca707c-fc54-421d-8fd3-b84b5521635c a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:32:93:e7,bridge_name='br-int',has_traffic_filtering=True,id=f3f11d03-1ddb-4149-b2f0-d1f2020bab39,network=Network(c95536aa-e734-4350-9442-56cceb6f6448),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapf3f11d03-1d')
Oct 02 12:40:29 compute-0 nova_compute[192079]: 2025-10-02 12:40:29.936 2 INFO nova.virt.libvirt.driver [None req-58ca707c-fc54-421d-8fd3-b84b5521635c a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Deleting instance files /var/lib/nova/instances/541184e3-5963-4add-ac60-b22fdcf3774b_del
Oct 02 12:40:29 compute-0 nova_compute[192079]: 2025-10-02 12:40:29.936 2 INFO nova.virt.libvirt.driver [None req-58ca707c-fc54-421d-8fd3-b84b5521635c a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Deletion of /var/lib/nova/instances/541184e3-5963-4add-ac60-b22fdcf3774b_del complete
Oct 02 12:40:29 compute-0 neutron-haproxy-ovnmeta-c95536aa-e734-4350-9442-56cceb6f6448[249595]: [NOTICE]   (249617) : haproxy version is 2.8.14-c23fe91
Oct 02 12:40:29 compute-0 neutron-haproxy-ovnmeta-c95536aa-e734-4350-9442-56cceb6f6448[249595]: [NOTICE]   (249617) : path to executable is /usr/sbin/haproxy
Oct 02 12:40:29 compute-0 neutron-haproxy-ovnmeta-c95536aa-e734-4350-9442-56cceb6f6448[249595]: [WARNING]  (249617) : Exiting Master process...
Oct 02 12:40:29 compute-0 neutron-haproxy-ovnmeta-c95536aa-e734-4350-9442-56cceb6f6448[249595]: [WARNING]  (249617) : Exiting Master process...
Oct 02 12:40:29 compute-0 neutron-haproxy-ovnmeta-c95536aa-e734-4350-9442-56cceb6f6448[249595]: [ALERT]    (249617) : Current worker (249620) exited with code 143 (Terminated)
Oct 02 12:40:29 compute-0 neutron-haproxy-ovnmeta-c95536aa-e734-4350-9442-56cceb6f6448[249595]: [WARNING]  (249617) : All workers exited. Exiting... (0)
Oct 02 12:40:29 compute-0 systemd[1]: libpod-165496a95d9922921f007008d81c16a5053f1ec84f301eb06d21321094c7fa2b.scope: Deactivated successfully.
Oct 02 12:40:29 compute-0 podman[249841]: 2025-10-02 12:40:29.993946912 +0000 UTC m=+0.202900155 container died 165496a95d9922921f007008d81c16a5053f1ec84f301eb06d21321094c7fa2b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-c95536aa-e734-4350-9442-56cceb6f6448, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS)
Oct 02 12:40:30 compute-0 nova_compute[192079]: 2025-10-02 12:40:30.053 2 INFO nova.compute.manager [None req-58ca707c-fc54-421d-8fd3-b84b5521635c a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Took 3.85 seconds to destroy the instance on the hypervisor.
Oct 02 12:40:30 compute-0 nova_compute[192079]: 2025-10-02 12:40:30.054 2 DEBUG oslo.service.loopingcall [None req-58ca707c-fc54-421d-8fd3-b84b5521635c a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:40:30 compute-0 nova_compute[192079]: 2025-10-02 12:40:30.054 2 DEBUG nova.compute.manager [-] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:40:30 compute-0 nova_compute[192079]: 2025-10-02 12:40:30.055 2 DEBUG nova.network.neutron [-] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:40:30 compute-0 systemd[1]: var-lib-containers-storage-overlay-10ae88263f63148e5802392bd427fdc300a5ce2d7cd6aed40623659d58fd96dc-merged.mount: Deactivated successfully.
Oct 02 12:40:30 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-165496a95d9922921f007008d81c16a5053f1ec84f301eb06d21321094c7fa2b-userdata-shm.mount: Deactivated successfully.
Oct 02 12:40:30 compute-0 podman[249841]: 2025-10-02 12:40:30.367067509 +0000 UTC m=+0.576020742 container cleanup 165496a95d9922921f007008d81c16a5053f1ec84f301eb06d21321094c7fa2b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-c95536aa-e734-4350-9442-56cceb6f6448, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:40:30 compute-0 systemd[1]: libpod-conmon-165496a95d9922921f007008d81c16a5053f1ec84f301eb06d21321094c7fa2b.scope: Deactivated successfully.
Oct 02 12:40:30 compute-0 nova_compute[192079]: 2025-10-02 12:40:30.463 2 DEBUG nova.compute.manager [req-70c57162-2958-4141-8f1d-7fccfe762039 req-72f365e5-4183-4bc1-a067-cdbb42c023db 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Received event network-vif-unplugged-f3f11d03-1ddb-4149-b2f0-d1f2020bab39 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:40:30 compute-0 nova_compute[192079]: 2025-10-02 12:40:30.463 2 DEBUG oslo_concurrency.lockutils [req-70c57162-2958-4141-8f1d-7fccfe762039 req-72f365e5-4183-4bc1-a067-cdbb42c023db 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "541184e3-5963-4add-ac60-b22fdcf3774b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:40:30 compute-0 nova_compute[192079]: 2025-10-02 12:40:30.464 2 DEBUG oslo_concurrency.lockutils [req-70c57162-2958-4141-8f1d-7fccfe762039 req-72f365e5-4183-4bc1-a067-cdbb42c023db 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "541184e3-5963-4add-ac60-b22fdcf3774b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:40:30 compute-0 nova_compute[192079]: 2025-10-02 12:40:30.464 2 DEBUG oslo_concurrency.lockutils [req-70c57162-2958-4141-8f1d-7fccfe762039 req-72f365e5-4183-4bc1-a067-cdbb42c023db 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "541184e3-5963-4add-ac60-b22fdcf3774b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:40:30 compute-0 nova_compute[192079]: 2025-10-02 12:40:30.464 2 DEBUG nova.compute.manager [req-70c57162-2958-4141-8f1d-7fccfe762039 req-72f365e5-4183-4bc1-a067-cdbb42c023db 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] No waiting events found dispatching network-vif-unplugged-f3f11d03-1ddb-4149-b2f0-d1f2020bab39 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:40:30 compute-0 nova_compute[192079]: 2025-10-02 12:40:30.464 2 DEBUG nova.compute.manager [req-70c57162-2958-4141-8f1d-7fccfe762039 req-72f365e5-4183-4bc1-a067-cdbb42c023db 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Received event network-vif-unplugged-f3f11d03-1ddb-4149-b2f0-d1f2020bab39 for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:40:30 compute-0 podman[249901]: 2025-10-02 12:40:30.63710254 +0000 UTC m=+0.225507520 container remove 165496a95d9922921f007008d81c16a5053f1ec84f301eb06d21321094c7fa2b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-c95536aa-e734-4350-9442-56cceb6f6448, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2)
Oct 02 12:40:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:30.646 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[707b0d75-e4dc-4fab-9ebd-6b3b5e923780]: (4, ('Thu Oct  2 12:40:29 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-c95536aa-e734-4350-9442-56cceb6f6448 (165496a95d9922921f007008d81c16a5053f1ec84f301eb06d21321094c7fa2b)\n165496a95d9922921f007008d81c16a5053f1ec84f301eb06d21321094c7fa2b\nThu Oct  2 12:40:30 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-c95536aa-e734-4350-9442-56cceb6f6448 (165496a95d9922921f007008d81c16a5053f1ec84f301eb06d21321094c7fa2b)\n165496a95d9922921f007008d81c16a5053f1ec84f301eb06d21321094c7fa2b\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:40:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:30.650 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[805dee58-748f-4c72-b467-7fcdb31622bc]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:40:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:30.652 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapc95536aa-e0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:40:30 compute-0 nova_compute[192079]: 2025-10-02 12:40:30.689 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:30 compute-0 kernel: tapc95536aa-e0: left promiscuous mode
Oct 02 12:40:30 compute-0 nova_compute[192079]: 2025-10-02 12:40:30.717 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:30.723 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[258227fa-830b-4f7e-900d-ec9968ec1c2b]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:40:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:30.757 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[25f2288d-f816-4975-80dc-8403351df8d0]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:40:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:30.759 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[27d26dd0-6c28-49c5-8fb1-502b8573f153]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:40:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:30.797 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b4ff0841-45fd-44b0-9537-5584bdd369e4]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 679634, 'reachable_time': 15696, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 249916, 'error': None, 'target': 'ovnmeta-c95536aa-e734-4350-9442-56cceb6f6448', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:40:30 compute-0 systemd[1]: run-netns-ovnmeta\x2dc95536aa\x2de734\x2d4350\x2d9442\x2d56cceb6f6448.mount: Deactivated successfully.
Oct 02 12:40:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:30.802 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-c95536aa-e734-4350-9442-56cceb6f6448 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:40:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:30.802 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[41b7c0b5-1800-4187-833e-9ad4cde5a157]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:40:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:30.804 103294 INFO neutron.agent.ovn.metadata.agent [-] Port f3f11d03-1ddb-4149-b2f0-d1f2020bab39 in datapath c95536aa-e734-4350-9442-56cceb6f6448 unbound from our chassis
Oct 02 12:40:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:30.806 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network c95536aa-e734-4350-9442-56cceb6f6448, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:40:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:30.807 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b86f5361-5f7a-4069-9e1e-fed966beebe8]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:40:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:30.808 103294 INFO neutron.agent.ovn.metadata.agent [-] Port f3f11d03-1ddb-4149-b2f0-d1f2020bab39 in datapath c95536aa-e734-4350-9442-56cceb6f6448 unbound from our chassis
Oct 02 12:40:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:30.809 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network c95536aa-e734-4350-9442-56cceb6f6448, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:40:30 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:30.810 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3e210428-318c-4ec1-bc31-aecffe9faacb]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:40:32 compute-0 nova_compute[192079]: 2025-10-02 12:40:32.721 2 DEBUG nova.compute.manager [req-caab05d4-abef-4b7f-99c9-f57b33941434 req-139815cc-59e2-4536-acd2-617eb564f05f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Received event network-vif-plugged-f3f11d03-1ddb-4149-b2f0-d1f2020bab39 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:40:32 compute-0 nova_compute[192079]: 2025-10-02 12:40:32.722 2 DEBUG oslo_concurrency.lockutils [req-caab05d4-abef-4b7f-99c9-f57b33941434 req-139815cc-59e2-4536-acd2-617eb564f05f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "541184e3-5963-4add-ac60-b22fdcf3774b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:40:32 compute-0 nova_compute[192079]: 2025-10-02 12:40:32.723 2 DEBUG oslo_concurrency.lockutils [req-caab05d4-abef-4b7f-99c9-f57b33941434 req-139815cc-59e2-4536-acd2-617eb564f05f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "541184e3-5963-4add-ac60-b22fdcf3774b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:40:32 compute-0 nova_compute[192079]: 2025-10-02 12:40:32.723 2 DEBUG oslo_concurrency.lockutils [req-caab05d4-abef-4b7f-99c9-f57b33941434 req-139815cc-59e2-4536-acd2-617eb564f05f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "541184e3-5963-4add-ac60-b22fdcf3774b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:40:32 compute-0 nova_compute[192079]: 2025-10-02 12:40:32.724 2 DEBUG nova.compute.manager [req-caab05d4-abef-4b7f-99c9-f57b33941434 req-139815cc-59e2-4536-acd2-617eb564f05f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] No waiting events found dispatching network-vif-plugged-f3f11d03-1ddb-4149-b2f0-d1f2020bab39 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:40:32 compute-0 nova_compute[192079]: 2025-10-02 12:40:32.724 2 WARNING nova.compute.manager [req-caab05d4-abef-4b7f-99c9-f57b33941434 req-139815cc-59e2-4536-acd2-617eb564f05f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Received unexpected event network-vif-plugged-f3f11d03-1ddb-4149-b2f0-d1f2020bab39 for instance with vm_state active and task_state deleting.
Oct 02 12:40:32 compute-0 nova_compute[192079]: 2025-10-02 12:40:32.726 2 DEBUG nova.compute.manager [req-caab05d4-abef-4b7f-99c9-f57b33941434 req-139815cc-59e2-4536-acd2-617eb564f05f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Received event network-vif-plugged-f3f11d03-1ddb-4149-b2f0-d1f2020bab39 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:40:32 compute-0 nova_compute[192079]: 2025-10-02 12:40:32.727 2 DEBUG oslo_concurrency.lockutils [req-caab05d4-abef-4b7f-99c9-f57b33941434 req-139815cc-59e2-4536-acd2-617eb564f05f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "541184e3-5963-4add-ac60-b22fdcf3774b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:40:32 compute-0 nova_compute[192079]: 2025-10-02 12:40:32.728 2 DEBUG oslo_concurrency.lockutils [req-caab05d4-abef-4b7f-99c9-f57b33941434 req-139815cc-59e2-4536-acd2-617eb564f05f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "541184e3-5963-4add-ac60-b22fdcf3774b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:40:32 compute-0 nova_compute[192079]: 2025-10-02 12:40:32.728 2 DEBUG oslo_concurrency.lockutils [req-caab05d4-abef-4b7f-99c9-f57b33941434 req-139815cc-59e2-4536-acd2-617eb564f05f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "541184e3-5963-4add-ac60-b22fdcf3774b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:40:32 compute-0 nova_compute[192079]: 2025-10-02 12:40:32.729 2 DEBUG nova.compute.manager [req-caab05d4-abef-4b7f-99c9-f57b33941434 req-139815cc-59e2-4536-acd2-617eb564f05f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] No waiting events found dispatching network-vif-plugged-f3f11d03-1ddb-4149-b2f0-d1f2020bab39 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:40:32 compute-0 nova_compute[192079]: 2025-10-02 12:40:32.729 2 WARNING nova.compute.manager [req-caab05d4-abef-4b7f-99c9-f57b33941434 req-139815cc-59e2-4536-acd2-617eb564f05f 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Received unexpected event network-vif-plugged-f3f11d03-1ddb-4149-b2f0-d1f2020bab39 for instance with vm_state active and task_state deleting.
Oct 02 12:40:33 compute-0 nova_compute[192079]: 2025-10-02 12:40:33.187 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:33 compute-0 nova_compute[192079]: 2025-10-02 12:40:33.282 2 DEBUG nova.network.neutron [-] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:40:33 compute-0 nova_compute[192079]: 2025-10-02 12:40:33.349 2 INFO nova.compute.manager [-] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Took 3.29 seconds to deallocate network for instance.
Oct 02 12:40:33 compute-0 nova_compute[192079]: 2025-10-02 12:40:33.520 2 DEBUG oslo_concurrency.lockutils [None req-58ca707c-fc54-421d-8fd3-b84b5521635c a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:40:33 compute-0 nova_compute[192079]: 2025-10-02 12:40:33.521 2 DEBUG oslo_concurrency.lockutils [None req-58ca707c-fc54-421d-8fd3-b84b5521635c a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:40:33 compute-0 nova_compute[192079]: 2025-10-02 12:40:33.581 2 DEBUG nova.compute.manager [req-e68251b0-dc16-4972-9dbe-bd0047b23f54 req-db6e8ab5-8c61-466c-a07b-faeb27af3c7c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Received event network-vif-deleted-f3f11d03-1ddb-4149-b2f0-d1f2020bab39 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:40:33 compute-0 nova_compute[192079]: 2025-10-02 12:40:33.637 2 DEBUG nova.compute.provider_tree [None req-58ca707c-fc54-421d-8fd3-b84b5521635c a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:40:33 compute-0 nova_compute[192079]: 2025-10-02 12:40:33.660 2 DEBUG nova.scheduler.client.report [None req-58ca707c-fc54-421d-8fd3-b84b5521635c a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:40:33 compute-0 nova_compute[192079]: 2025-10-02 12:40:33.688 2 DEBUG oslo_concurrency.lockutils [None req-58ca707c-fc54-421d-8fd3-b84b5521635c a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.167s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:40:33 compute-0 nova_compute[192079]: 2025-10-02 12:40:33.737 2 INFO nova.scheduler.client.report [None req-58ca707c-fc54-421d-8fd3-b84b5521635c a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Deleted allocations for instance 541184e3-5963-4add-ac60-b22fdcf3774b
Oct 02 12:40:33 compute-0 nova_compute[192079]: 2025-10-02 12:40:33.886 2 DEBUG oslo_concurrency.lockutils [None req-58ca707c-fc54-421d-8fd3-b84b5521635c a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "541184e3-5963-4add-ac60-b22fdcf3774b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 7.725s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:40:34 compute-0 nova_compute[192079]: 2025-10-02 12:40:34.932 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:37 compute-0 podman[249917]: 2025-10-02 12:40:37.160723824 +0000 UTC m=+0.070565500 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.openshift.tags=minimal rhel9, release=1755695350, com.redhat.component=ubi9-minimal-container, maintainer=Red Hat, Inc., managed_by=edpm_ansible, version=9.6, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, container_name=openstack_network_exporter, vcs-type=git, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., name=ubi9-minimal, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, vendor=Red Hat, Inc., architecture=x86_64, build-date=2025-08-20T13:12:41, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', 
'/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, config_id=edpm, distribution-scope=public, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., url=https://catalog.redhat.com/en/search?searchType=containers, io.buildah.version=1.33.7, io.openshift.expose-services=, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9.)
Oct 02 12:40:37 compute-0 podman[249918]: 2025-10-02 12:40:37.160847628 +0000 UTC m=+0.071555489 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, container_name=multipathd, org.label-schema.license=GPLv2, config_id=multipathd, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible)
Oct 02 12:40:37 compute-0 nova_compute[192079]: 2025-10-02 12:40:37.712 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:40:38 compute-0 nova_compute[192079]: 2025-10-02 12:40:38.191 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:38 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:38.402 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:2b:a1:21 10.100.0.2 2001:db8::f816:3eff:fe2b:a121'], port_security=[], type=localport, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': ''}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.2/28 2001:db8::f816:3eff:fe2b:a121/64', 'neutron:device_id': 'ovnmeta-b9d6d69e-0327-4bcf-b8a6-b2cf69a4d177', 'neutron:device_owner': 'network:distributed', 'neutron:mtu': '', 'neutron:network_name': 'neutron-b9d6d69e-0327-4bcf-b8a6-b2cf69a4d177', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'neutron:revision_number': '5', 'neutron:security_group_ids': '', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=5fce3bea-36c3-4b1e-bdee-b694cf8990ad, chassis=[], tunnel_key=1, gateway_chassis=[], requested_chassis=[], logical_port=ad07d234-3bc8-429a-8834-7a9ae3274be2) old=Port_Binding(mac=['fa:16:3e:2b:a1:21 10.100.0.2 2001:db8:0:1:f816:3eff:fe2b:a121 2001:db8::f816:3eff:fe2b:a121'], external_ids={'neutron:cidrs': '10.100.0.2/28 2001:db8:0:1:f816:3eff:fe2b:a121/64 2001:db8::f816:3eff:fe2b:a121/64', 'neutron:device_id': 'ovnmeta-b9d6d69e-0327-4bcf-b8a6-b2cf69a4d177', 'neutron:device_owner': 'network:distributed', 'neutron:mtu': '', 'neutron:network_name': 'neutron-b9d6d69e-0327-4bcf-b8a6-b2cf69a4d177', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'neutron:revision_number': '4', 'neutron:security_group_ids': '', 
'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:40:38 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:38.403 103294 INFO neutron.agent.ovn.metadata.agent [-] Metadata Port ad07d234-3bc8-429a-8834-7a9ae3274be2 in datapath b9d6d69e-0327-4bcf-b8a6-b2cf69a4d177 updated
Oct 02 12:40:38 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:38.404 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network b9d6d69e-0327-4bcf-b8a6-b2cf69a4d177, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:40:38 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:38.405 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[833382e9-0749-4851-9fbc-af3bd9fb7d22]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:40:39 compute-0 nova_compute[192079]: 2025-10-02 12:40:39.935 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:40 compute-0 nova_compute[192079]: 2025-10-02 12:40:40.463 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:40 compute-0 nova_compute[192079]: 2025-10-02 12:40:40.568 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:41 compute-0 nova_compute[192079]: 2025-10-02 12:40:41.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:40:41 compute-0 nova_compute[192079]: 2025-10-02 12:40:41.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:40:43 compute-0 podman[249955]: 2025-10-02 12:40:43.135002513 +0000 UTC m=+0.053093300 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter)
Oct 02 12:40:43 compute-0 podman[249956]: 2025-10-02 12:40:43.143197777 +0000 UTC m=+0.057787319 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, container_name=iscsid, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001)
Oct 02 12:40:43 compute-0 nova_compute[192079]: 2025-10-02 12:40:43.193 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:44 compute-0 nova_compute[192079]: 2025-10-02 12:40:44.904 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759408829.9034362, 541184e3-5963-4add-ac60-b22fdcf3774b => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:40:44 compute-0 nova_compute[192079]: 2025-10-02 12:40:44.905 2 INFO nova.compute.manager [-] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] VM Stopped (Lifecycle Event)
Oct 02 12:40:44 compute-0 nova_compute[192079]: 2025-10-02 12:40:44.928 2 DEBUG nova.compute.manager [None req-28858146-2891-4ae4-a1be-a30bd40b41e4 - - - - - -] [instance: 541184e3-5963-4add-ac60-b22fdcf3774b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:40:44 compute-0 nova_compute[192079]: 2025-10-02 12:40:44.939 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:46 compute-0 nova_compute[192079]: 2025-10-02 12:40:46.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:40:46 compute-0 nova_compute[192079]: 2025-10-02 12:40:46.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:40:46 compute-0 nova_compute[192079]: 2025-10-02 12:40:46.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:40:46 compute-0 nova_compute[192079]: 2025-10-02 12:40:46.695 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:40:46 compute-0 nova_compute[192079]: 2025-10-02 12:40:46.696 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:40:46 compute-0 nova_compute[192079]: 2025-10-02 12:40:46.696 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:40:46 compute-0 nova_compute[192079]: 2025-10-02 12:40:46.696 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:40:46 compute-0 nova_compute[192079]: 2025-10-02 12:40:46.833 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:40:46 compute-0 nova_compute[192079]: 2025-10-02 12:40:46.834 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5720MB free_disk=73.27241134643555GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:40:46 compute-0 nova_compute[192079]: 2025-10-02 12:40:46.834 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:40:46 compute-0 nova_compute[192079]: 2025-10-02 12:40:46.834 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:40:47 compute-0 nova_compute[192079]: 2025-10-02 12:40:47.011 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:40:47 compute-0 nova_compute[192079]: 2025-10-02 12:40:47.011 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:40:47 compute-0 nova_compute[192079]: 2025-10-02 12:40:47.042 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:40:47 compute-0 nova_compute[192079]: 2025-10-02 12:40:47.062 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:40:47 compute-0 nova_compute[192079]: 2025-10-02 12:40:47.090 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:40:47 compute-0 nova_compute[192079]: 2025-10-02 12:40:47.091 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.256s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:40:48 compute-0 nova_compute[192079]: 2025-10-02 12:40:48.305 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:49 compute-0 nova_compute[192079]: 2025-10-02 12:40:49.941 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:50 compute-0 nova_compute[192079]: 2025-10-02 12:40:50.091 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:40:50 compute-0 nova_compute[192079]: 2025-10-02 12:40:50.092 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:40:51 compute-0 podman[250003]: 2025-10-02 12:40:51.187077804 +0000 UTC m=+0.066684265 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:40:51 compute-0 podman[250001]: 2025-10-02 12:40:51.203901464 +0000 UTC m=+0.114262336 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, 
config_id=ovn_metadata_agent, managed_by=edpm_ansible, org.label-schema.license=GPLv2, container_name=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS)
Oct 02 12:40:51 compute-0 podman[250002]: 2025-10-02 12:40:51.233793822 +0000 UTC m=+0.113755893 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.build-date=20251001, container_name=ovn_controller, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:40:51 compute-0 nova_compute[192079]: 2025-10-02 12:40:51.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:40:52 compute-0 nova_compute[192079]: 2025-10-02 12:40:52.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:40:52 compute-0 nova_compute[192079]: 2025-10-02 12:40:52.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:40:52 compute-0 nova_compute[192079]: 2025-10-02 12:40:52.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:40:52 compute-0 nova_compute[192079]: 2025-10-02 12:40:52.738 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:40:53 compute-0 nova_compute[192079]: 2025-10-02 12:40:53.307 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:54 compute-0 nova_compute[192079]: 2025-10-02 12:40:54.952 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:58 compute-0 nova_compute[192079]: 2025-10-02 12:40:58.309 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:40:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:59.121 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:c1:9b:5a 10.100.0.2 2001:db8::f816:3eff:fec1:9b5a'], port_security=[], type=localport, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': ''}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.2/28 2001:db8::f816:3eff:fec1:9b5a/64', 'neutron:device_id': 'ovnmeta-c4f50473-7465-4325-8b4d-bb57fca0162f', 'neutron:device_owner': 'network:distributed', 'neutron:mtu': '', 'neutron:network_name': 'neutron-c4f50473-7465-4325-8b4d-bb57fca0162f', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'neutron:revision_number': '3', 'neutron:security_group_ids': '', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=d90df5bc-8770-4be5-937c-0abfe33bbe11, chassis=[], tunnel_key=1, gateway_chassis=[], requested_chassis=[], logical_port=f69cd95a-5b20-4a47-8acc-7e190d1dac4c) old=Port_Binding(mac=['fa:16:3e:c1:9b:5a 10.100.0.2'], external_ids={'neutron:cidrs': '10.100.0.2/28', 'neutron:device_id': 'ovnmeta-c4f50473-7465-4325-8b4d-bb57fca0162f', 'neutron:device_owner': 'network:distributed', 'neutron:mtu': '', 'neutron:network_name': 'neutron-c4f50473-7465-4325-8b4d-bb57fca0162f', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': 'fd801958556f4c8aab047ecdef6b5ee8', 'neutron:revision_number': '2', 'neutron:security_group_ids': '', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}) matches 
/usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:40:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:59.123 103294 INFO neutron.agent.ovn.metadata.agent [-] Metadata Port f69cd95a-5b20-4a47-8acc-7e190d1dac4c in datapath c4f50473-7465-4325-8b4d-bb57fca0162f updated
Oct 02 12:40:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:59.123 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network c4f50473-7465-4325-8b4d-bb57fca0162f, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:40:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:40:59.124 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9c6c11b1-cacc-494a-ad5f-ed9d02104a4a]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:40:59 compute-0 nova_compute[192079]: 2025-10-02 12:40:59.959 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:00 compute-0 podman[250070]: 2025-10-02 12:41:00.151352435 +0000 UTC m=+0.061531654 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, container_name=ceilometer_agent_compute, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=edpm, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', 
'/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']})
Oct 02 12:41:00 compute-0 nova_compute[192079]: 2025-10-02 12:41:00.734 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:41:01 compute-0 nova_compute[192079]: 2025-10-02 12:41:01.295 2 DEBUG oslo_concurrency.lockutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "e19b3c67-012d-4720-9ed5-92530129270c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:41:01 compute-0 nova_compute[192079]: 2025-10-02 12:41:01.295 2 DEBUG oslo_concurrency.lockutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "e19b3c67-012d-4720-9ed5-92530129270c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:41:01 compute-0 nova_compute[192079]: 2025-10-02 12:41:01.330 2 DEBUG nova.compute.manager [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:41:01 compute-0 nova_compute[192079]: 2025-10-02 12:41:01.474 2 DEBUG oslo_concurrency.lockutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:41:01 compute-0 nova_compute[192079]: 2025-10-02 12:41:01.475 2 DEBUG oslo_concurrency.lockutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:41:01 compute-0 nova_compute[192079]: 2025-10-02 12:41:01.485 2 DEBUG nova.virt.hardware [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:41:01 compute-0 nova_compute[192079]: 2025-10-02 12:41:01.485 2 INFO nova.compute.claims [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:41:01 compute-0 nova_compute[192079]: 2025-10-02 12:41:01.629 2 DEBUG nova.compute.provider_tree [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:41:01 compute-0 nova_compute[192079]: 2025-10-02 12:41:01.644 2 DEBUG nova.scheduler.client.report [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:41:01 compute-0 nova_compute[192079]: 2025-10-02 12:41:01.670 2 DEBUG oslo_concurrency.lockutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.196s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:41:01 compute-0 nova_compute[192079]: 2025-10-02 12:41:01.671 2 DEBUG nova.compute.manager [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:41:01 compute-0 nova_compute[192079]: 2025-10-02 12:41:01.735 2 DEBUG nova.compute.manager [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:41:01 compute-0 nova_compute[192079]: 2025-10-02 12:41:01.736 2 DEBUG nova.network.neutron [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:41:01 compute-0 nova_compute[192079]: 2025-10-02 12:41:01.761 2 INFO nova.virt.libvirt.driver [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:41:01 compute-0 nova_compute[192079]: 2025-10-02 12:41:01.780 2 DEBUG nova.compute.manager [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:41:01 compute-0 nova_compute[192079]: 2025-10-02 12:41:01.911 2 DEBUG nova.compute.manager [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:41:01 compute-0 nova_compute[192079]: 2025-10-02 12:41:01.912 2 DEBUG nova.virt.libvirt.driver [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:41:01 compute-0 nova_compute[192079]: 2025-10-02 12:41:01.912 2 INFO nova.virt.libvirt.driver [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Creating image(s)
Oct 02 12:41:01 compute-0 nova_compute[192079]: 2025-10-02 12:41:01.913 2 DEBUG oslo_concurrency.lockutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "/var/lib/nova/instances/e19b3c67-012d-4720-9ed5-92530129270c/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:41:01 compute-0 nova_compute[192079]: 2025-10-02 12:41:01.913 2 DEBUG oslo_concurrency.lockutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "/var/lib/nova/instances/e19b3c67-012d-4720-9ed5-92530129270c/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:41:01 compute-0 nova_compute[192079]: 2025-10-02 12:41:01.913 2 DEBUG oslo_concurrency.lockutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "/var/lib/nova/instances/e19b3c67-012d-4720-9ed5-92530129270c/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:41:01 compute-0 nova_compute[192079]: 2025-10-02 12:41:01.925 2 DEBUG oslo_concurrency.processutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:41:01 compute-0 nova_compute[192079]: 2025-10-02 12:41:01.977 2 DEBUG oslo_concurrency.processutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.052s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:41:01 compute-0 nova_compute[192079]: 2025-10-02 12:41:01.978 2 DEBUG oslo_concurrency.lockutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:41:01 compute-0 nova_compute[192079]: 2025-10-02 12:41:01.979 2 DEBUG oslo_concurrency.lockutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:41:01 compute-0 nova_compute[192079]: 2025-10-02 12:41:01.990 2 DEBUG oslo_concurrency.processutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:41:02 compute-0 nova_compute[192079]: 2025-10-02 12:41:02.042 2 DEBUG oslo_concurrency.processutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.052s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:41:02 compute-0 nova_compute[192079]: 2025-10-02 12:41:02.043 2 DEBUG oslo_concurrency.processutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/e19b3c67-012d-4720-9ed5-92530129270c/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:41:02 compute-0 nova_compute[192079]: 2025-10-02 12:41:02.092 2 DEBUG nova.policy [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:41:02 compute-0 nova_compute[192079]: 2025-10-02 12:41:02.095 2 DEBUG oslo_concurrency.processutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/e19b3c67-012d-4720-9ed5-92530129270c/disk 1073741824" returned: 0 in 0.051s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:41:02 compute-0 nova_compute[192079]: 2025-10-02 12:41:02.096 2 DEBUG oslo_concurrency.lockutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.117s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:41:02 compute-0 nova_compute[192079]: 2025-10-02 12:41:02.096 2 DEBUG oslo_concurrency.processutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:41:02 compute-0 nova_compute[192079]: 2025-10-02 12:41:02.152 2 DEBUG oslo_concurrency.processutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:41:02 compute-0 nova_compute[192079]: 2025-10-02 12:41:02.153 2 DEBUG nova.virt.disk.api [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Checking if we can resize image /var/lib/nova/instances/e19b3c67-012d-4720-9ed5-92530129270c/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:41:02 compute-0 nova_compute[192079]: 2025-10-02 12:41:02.154 2 DEBUG oslo_concurrency.processutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/e19b3c67-012d-4720-9ed5-92530129270c/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:41:02 compute-0 nova_compute[192079]: 2025-10-02 12:41:02.208 2 DEBUG oslo_concurrency.processutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/e19b3c67-012d-4720-9ed5-92530129270c/disk --force-share --output=json" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:41:02 compute-0 nova_compute[192079]: 2025-10-02 12:41:02.209 2 DEBUG nova.virt.disk.api [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Cannot resize image /var/lib/nova/instances/e19b3c67-012d-4720-9ed5-92530129270c/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:41:02 compute-0 nova_compute[192079]: 2025-10-02 12:41:02.209 2 DEBUG nova.objects.instance [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lazy-loading 'migration_context' on Instance uuid e19b3c67-012d-4720-9ed5-92530129270c obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:41:02 compute-0 nova_compute[192079]: 2025-10-02 12:41:02.230 2 DEBUG nova.virt.libvirt.driver [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:41:02 compute-0 nova_compute[192079]: 2025-10-02 12:41:02.231 2 DEBUG nova.virt.libvirt.driver [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Ensure instance console log exists: /var/lib/nova/instances/e19b3c67-012d-4720-9ed5-92530129270c/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:41:02 compute-0 nova_compute[192079]: 2025-10-02 12:41:02.231 2 DEBUG oslo_concurrency.lockutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:41:02 compute-0 nova_compute[192079]: 2025-10-02 12:41:02.231 2 DEBUG oslo_concurrency.lockutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:41:02 compute-0 nova_compute[192079]: 2025-10-02 12:41:02.232 2 DEBUG oslo_concurrency.lockutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:41:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:02.245 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:41:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:02.246 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:41:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:02.246 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:41:03 compute-0 nova_compute[192079]: 2025-10-02 12:41:03.347 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:04 compute-0 nova_compute[192079]: 2025-10-02 12:41:04.962 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:05 compute-0 nova_compute[192079]: 2025-10-02 12:41:05.450 2 DEBUG nova.network.neutron [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Successfully created port: 8cfd60ac-8c7b-4732-bae4-7099f5767458 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:41:07 compute-0 nova_compute[192079]: 2025-10-02 12:41:07.595 2 DEBUG nova.network.neutron [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Successfully updated port: 8cfd60ac-8c7b-4732-bae4-7099f5767458 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:41:07 compute-0 nova_compute[192079]: 2025-10-02 12:41:07.651 2 DEBUG oslo_concurrency.lockutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "refresh_cache-e19b3c67-012d-4720-9ed5-92530129270c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:41:07 compute-0 nova_compute[192079]: 2025-10-02 12:41:07.652 2 DEBUG oslo_concurrency.lockutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquired lock "refresh_cache-e19b3c67-012d-4720-9ed5-92530129270c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:41:07 compute-0 nova_compute[192079]: 2025-10-02 12:41:07.652 2 DEBUG nova.network.neutron [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:41:07 compute-0 nova_compute[192079]: 2025-10-02 12:41:07.813 2 DEBUG nova.compute.manager [req-82496f34-fe45-41a5-8f4e-c0a943cbc7ce req-24b51d7f-3505-4fd8-bf15-6217a031616c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Received event network-changed-8cfd60ac-8c7b-4732-bae4-7099f5767458 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:41:07 compute-0 nova_compute[192079]: 2025-10-02 12:41:07.814 2 DEBUG nova.compute.manager [req-82496f34-fe45-41a5-8f4e-c0a943cbc7ce req-24b51d7f-3505-4fd8-bf15-6217a031616c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Refreshing instance network info cache due to event network-changed-8cfd60ac-8c7b-4732-bae4-7099f5767458. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:41:07 compute-0 nova_compute[192079]: 2025-10-02 12:41:07.814 2 DEBUG oslo_concurrency.lockutils [req-82496f34-fe45-41a5-8f4e-c0a943cbc7ce req-24b51d7f-3505-4fd8-bf15-6217a031616c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-e19b3c67-012d-4720-9ed5-92530129270c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:41:08 compute-0 podman[250105]: 2025-10-02 12:41:08.156971827 +0000 UTC m=+0.057005800 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, vendor=Red Hat, Inc., config_id=edpm, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.openshift.expose-services=, managed_by=edpm_ansible, url=https://catalog.redhat.com/en/search?searchType=containers, version=9.6, distribution-scope=public, maintainer=Red Hat, Inc., summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., io.openshift.tags=minimal rhel9, name=ubi9-minimal, com.redhat.component=ubi9-minimal-container, container_name=openstack_network_exporter, io.buildah.version=1.33.7, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, release=1755695350, vcs-type=git, build-date=2025-08-20T13:12:41, architecture=x86_64)
Oct 02 12:41:08 compute-0 podman[250106]: 2025-10-02 12:41:08.174872666 +0000 UTC m=+0.076597145 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=multipathd, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, config_id=multipathd, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0)
Oct 02 12:41:08 compute-0 nova_compute[192079]: 2025-10-02 12:41:08.349 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:08 compute-0 nova_compute[192079]: 2025-10-02 12:41:08.407 2 DEBUG nova.network.neutron [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:41:09 compute-0 nova_compute[192079]: 2025-10-02 12:41:09.966 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.449 2 DEBUG nova.network.neutron [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Updating instance_info_cache with network_info: [{"id": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "address": "fa:16:3e:bf:f6:bb", "network": {"id": "3a127238-c3fd-4117-ae39-3087c30f09a1", "bridge": "br-int", "label": "tempest-network-smoke--12525199", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap8cfd60ac-8c", "ovs_interfaceid": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.470 2 DEBUG oslo_concurrency.lockutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Releasing lock "refresh_cache-e19b3c67-012d-4720-9ed5-92530129270c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.471 2 DEBUG nova.compute.manager [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Instance network_info: |[{"id": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "address": "fa:16:3e:bf:f6:bb", "network": {"id": "3a127238-c3fd-4117-ae39-3087c30f09a1", "bridge": "br-int", "label": "tempest-network-smoke--12525199", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap8cfd60ac-8c", "ovs_interfaceid": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.472 2 DEBUG oslo_concurrency.lockutils [req-82496f34-fe45-41a5-8f4e-c0a943cbc7ce req-24b51d7f-3505-4fd8-bf15-6217a031616c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-e19b3c67-012d-4720-9ed5-92530129270c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.473 2 DEBUG nova.network.neutron [req-82496f34-fe45-41a5-8f4e-c0a943cbc7ce req-24b51d7f-3505-4fd8-bf15-6217a031616c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Refreshing network info cache for port 8cfd60ac-8c7b-4732-bae4-7099f5767458 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.477 2 DEBUG nova.virt.libvirt.driver [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Start _get_guest_xml network_info=[{"id": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "address": "fa:16:3e:bf:f6:bb", "network": {"id": "3a127238-c3fd-4117-ae39-3087c30f09a1", "bridge": "br-int", "label": "tempest-network-smoke--12525199", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap8cfd60ac-8c", "ovs_interfaceid": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.482 2 WARNING nova.virt.libvirt.driver [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.515 2 DEBUG nova.virt.libvirt.host [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.516 2 DEBUG nova.virt.libvirt.host [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.525 2 DEBUG nova.virt.libvirt.host [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.526 2 DEBUG nova.virt.libvirt.host [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.528 2 DEBUG nova.virt.libvirt.driver [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.529 2 DEBUG nova.virt.hardware [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.530 2 DEBUG nova.virt.hardware [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.530 2 DEBUG nova.virt.hardware [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.531 2 DEBUG nova.virt.hardware [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.531 2 DEBUG nova.virt.hardware [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.532 2 DEBUG nova.virt.hardware [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.533 2 DEBUG nova.virt.hardware [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.533 2 DEBUG nova.virt.hardware [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.534 2 DEBUG nova.virt.hardware [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.534 2 DEBUG nova.virt.hardware [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.534 2 DEBUG nova.virt.hardware [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.541 2 DEBUG nova.virt.libvirt.vif [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:41:00Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestNetworkBasicOps-server-1466705682',display_name='tempest-TestNetworkBasicOps-server-1466705682',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkbasicops-server-1466705682',id=171,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBAL3IafrkUUDMBSP53gCxBwuDelBHD0YUoTUSoWqfmEzl93CdF9lTmxq9bNWf/TU7YpVINFdBsXy5LsUzhAl7hFwzu9/1LUxPhu8oeLDeXNeE9FYhi3sduX/kxI17gLspA==',key_name='tempest-TestNetworkBasicOps-134374525',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='6e2a4899168a47618e377cb3ac85ddd2',ramdisk_id='',reservation_id='r-1qbz6ygr',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestNetworkBasicOps-1323893370',owner_user_name='tempest-TestNetworkBasicOps-1323893370-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:41:01Z,user_data=None,user_id='a1898fdf056c4a249c33590f26d4d845',uuid=e19b3c67-012d-4720-9ed5-92530129270c,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "address": "fa:16:3e:bf:f6:bb", "network": {"id": "3a127238-c3fd-4117-ae39-3087c30f09a1", "bridge": "br-int", "label": "tempest-network-smoke--12525199", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": 
false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap8cfd60ac-8c", "ovs_interfaceid": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.541 2 DEBUG nova.network.os_vif_util [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converting VIF {"id": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "address": "fa:16:3e:bf:f6:bb", "network": {"id": "3a127238-c3fd-4117-ae39-3087c30f09a1", "bridge": "br-int", "label": "tempest-network-smoke--12525199", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap8cfd60ac-8c", "ovs_interfaceid": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.543 2 DEBUG nova.network.os_vif_util [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:bf:f6:bb,bridge_name='br-int',has_traffic_filtering=True,id=8cfd60ac-8c7b-4732-bae4-7099f5767458,network=Network(3a127238-c3fd-4117-ae39-3087c30f09a1),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap8cfd60ac-8c') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.544 2 DEBUG nova.objects.instance [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lazy-loading 'pci_devices' on Instance uuid e19b3c67-012d-4720-9ed5-92530129270c obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.583 2 DEBUG nova.virt.libvirt.driver [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:41:11 compute-0 nova_compute[192079]:   <uuid>e19b3c67-012d-4720-9ed5-92530129270c</uuid>
Oct 02 12:41:11 compute-0 nova_compute[192079]:   <name>instance-000000ab</name>
Oct 02 12:41:11 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:41:11 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:41:11 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:41:11 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:       <nova:name>tempest-TestNetworkBasicOps-server-1466705682</nova:name>
Oct 02 12:41:11 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:41:11</nova:creationTime>
Oct 02 12:41:11 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:41:11 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:41:11 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:41:11 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:41:11 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:41:11 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:41:11 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:41:11 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:41:11 compute-0 nova_compute[192079]:         <nova:user uuid="a1898fdf056c4a249c33590f26d4d845">tempest-TestNetworkBasicOps-1323893370-project-member</nova:user>
Oct 02 12:41:11 compute-0 nova_compute[192079]:         <nova:project uuid="6e2a4899168a47618e377cb3ac85ddd2">tempest-TestNetworkBasicOps-1323893370</nova:project>
Oct 02 12:41:11 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:41:11 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:41:11 compute-0 nova_compute[192079]:         <nova:port uuid="8cfd60ac-8c7b-4732-bae4-7099f5767458">
Oct 02 12:41:11 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.6" ipVersion="4"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:41:11 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:41:11 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:41:11 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <system>
Oct 02 12:41:11 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:41:11 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:41:11 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:41:11 compute-0 nova_compute[192079]:       <entry name="serial">e19b3c67-012d-4720-9ed5-92530129270c</entry>
Oct 02 12:41:11 compute-0 nova_compute[192079]:       <entry name="uuid">e19b3c67-012d-4720-9ed5-92530129270c</entry>
Oct 02 12:41:11 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     </system>
Oct 02 12:41:11 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:41:11 compute-0 nova_compute[192079]:   <os>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:   </os>
Oct 02 12:41:11 compute-0 nova_compute[192079]:   <features>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:   </features>
Oct 02 12:41:11 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:41:11 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:41:11 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:41:11 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/e19b3c67-012d-4720-9ed5-92530129270c/disk"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:41:11 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/e19b3c67-012d-4720-9ed5-92530129270c/disk.config"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:41:11 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:bf:f6:bb"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:       <target dev="tap8cfd60ac-8c"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:41:11 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/e19b3c67-012d-4720-9ed5-92530129270c/console.log" append="off"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <video>
Oct 02 12:41:11 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     </video>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:41:11 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:41:11 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:41:11 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:41:11 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:41:11 compute-0 nova_compute[192079]: </domain>
Oct 02 12:41:11 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.584 2 DEBUG nova.compute.manager [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Preparing to wait for external event network-vif-plugged-8cfd60ac-8c7b-4732-bae4-7099f5767458 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.585 2 DEBUG oslo_concurrency.lockutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "e19b3c67-012d-4720-9ed5-92530129270c-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.585 2 DEBUG oslo_concurrency.lockutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "e19b3c67-012d-4720-9ed5-92530129270c-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.585 2 DEBUG oslo_concurrency.lockutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "e19b3c67-012d-4720-9ed5-92530129270c-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.586 2 DEBUG nova.virt.libvirt.vif [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:41:00Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestNetworkBasicOps-server-1466705682',display_name='tempest-TestNetworkBasicOps-server-1466705682',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkbasicops-server-1466705682',id=171,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBAL3IafrkUUDMBSP53gCxBwuDelBHD0YUoTUSoWqfmEzl93CdF9lTmxq9bNWf/TU7YpVINFdBsXy5LsUzhAl7hFwzu9/1LUxPhu8oeLDeXNeE9FYhi3sduX/kxI17gLspA==',key_name='tempest-TestNetworkBasicOps-134374525',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='6e2a4899168a47618e377cb3ac85ddd2',ramdisk_id='',reservation_id='r-1qbz6ygr',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestNetworkBasicOps-1323893370',owner_user_name='tempest-TestNetworkBasicOps-1323893370-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:41:01Z,user_data=None,user_id='a1898fdf056c4a249c33590f26d4d845',uuid=e19b3c67-012d-4720-9ed5-92530129270c,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "address": "fa:16:3e:bf:f6:bb", "network": {"id": "3a127238-c3fd-4117-ae39-3087c30f09a1", "bridge": "br-int", "label": "tempest-network-smoke--12525199", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": 
{"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap8cfd60ac-8c", "ovs_interfaceid": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.586 2 DEBUG nova.network.os_vif_util [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converting VIF {"id": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "address": "fa:16:3e:bf:f6:bb", "network": {"id": "3a127238-c3fd-4117-ae39-3087c30f09a1", "bridge": "br-int", "label": "tempest-network-smoke--12525199", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap8cfd60ac-8c", "ovs_interfaceid": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.587 2 DEBUG nova.network.os_vif_util [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:bf:f6:bb,bridge_name='br-int',has_traffic_filtering=True,id=8cfd60ac-8c7b-4732-bae4-7099f5767458,network=Network(3a127238-c3fd-4117-ae39-3087c30f09a1),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap8cfd60ac-8c') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.587 2 DEBUG os_vif [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:bf:f6:bb,bridge_name='br-int',has_traffic_filtering=True,id=8cfd60ac-8c7b-4732-bae4-7099f5767458,network=Network(3a127238-c3fd-4117-ae39-3087c30f09a1),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap8cfd60ac-8c') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.587 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.588 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.588 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.590 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.590 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap8cfd60ac-8c, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.590 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap8cfd60ac-8c, col_values=(('external_ids', {'iface-id': '8cfd60ac-8c7b-4732-bae4-7099f5767458', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:bf:f6:bb', 'vm-uuid': 'e19b3c67-012d-4720-9ed5-92530129270c'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.592 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:11 compute-0 NetworkManager[51160]: <info>  [1759408871.5930] manager: (tap8cfd60ac-8c): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/327)
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.595 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.600 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.600 2 INFO os_vif [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:bf:f6:bb,bridge_name='br-int',has_traffic_filtering=True,id=8cfd60ac-8c7b-4732-bae4-7099f5767458,network=Network(3a127238-c3fd-4117-ae39-3087c30f09a1),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap8cfd60ac-8c')
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.734 2 DEBUG nova.virt.libvirt.driver [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.734 2 DEBUG nova.virt.libvirt.driver [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.735 2 DEBUG nova.virt.libvirt.driver [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] No VIF found with MAC fa:16:3e:bf:f6:bb, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:41:11 compute-0 nova_compute[192079]: 2025-10-02 12:41:11.735 2 INFO nova.virt.libvirt.driver [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Using config drive
Oct 02 12:41:12 compute-0 nova_compute[192079]: 2025-10-02 12:41:12.318 2 INFO nova.virt.libvirt.driver [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Creating config drive at /var/lib/nova/instances/e19b3c67-012d-4720-9ed5-92530129270c/disk.config
Oct 02 12:41:12 compute-0 nova_compute[192079]: 2025-10-02 12:41:12.325 2 DEBUG oslo_concurrency.processutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/e19b3c67-012d-4720-9ed5-92530129270c/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp8hyf1a6r execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:41:12 compute-0 nova_compute[192079]: 2025-10-02 12:41:12.457 2 DEBUG oslo_concurrency.processutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/e19b3c67-012d-4720-9ed5-92530129270c/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp8hyf1a6r" returned: 0 in 0.132s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:41:12 compute-0 kernel: tap8cfd60ac-8c: entered promiscuous mode
Oct 02 12:41:12 compute-0 nova_compute[192079]: 2025-10-02 12:41:12.509 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:12 compute-0 ovn_controller[94336]: 2025-10-02T12:41:12Z|00675|binding|INFO|Claiming lport 8cfd60ac-8c7b-4732-bae4-7099f5767458 for this chassis.
Oct 02 12:41:12 compute-0 ovn_controller[94336]: 2025-10-02T12:41:12Z|00676|binding|INFO|8cfd60ac-8c7b-4732-bae4-7099f5767458: Claiming fa:16:3e:bf:f6:bb 10.100.0.6
Oct 02 12:41:12 compute-0 NetworkManager[51160]: <info>  [1759408872.5161] manager: (tap8cfd60ac-8c): new Tun device (/org/freedesktop/NetworkManager/Devices/328)
Oct 02 12:41:12 compute-0 nova_compute[192079]: 2025-10-02 12:41:12.517 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:12 compute-0 nova_compute[192079]: 2025-10-02 12:41:12.529 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:12 compute-0 nova_compute[192079]: 2025-10-02 12:41:12.536 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:12 compute-0 systemd-udevd[250165]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:41:12 compute-0 NetworkManager[51160]: <info>  [1759408872.5666] device (tap8cfd60ac-8c): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:12.565 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:bf:f6:bb 10.100.0.6'], port_security=['fa:16:3e:bf:f6:bb 10.100.0.6'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.6/28', 'neutron:device_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-3a127238-c3fd-4117-ae39-3087c30f09a1', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'neutron:revision_number': '2', 'neutron:security_group_ids': '8c6b0ed7-248d-4688-8753-c9cb6fe8719c', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=5dbca848-bd3c-415e-9cb9-ed4c61904df1, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=8cfd60ac-8c7b-4732-bae4-7099f5767458) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:41:12 compute-0 NetworkManager[51160]: <info>  [1759408872.5676] device (tap8cfd60ac-8c): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:12.567 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 8cfd60ac-8c7b-4732-bae4-7099f5767458 in datapath 3a127238-c3fd-4117-ae39-3087c30f09a1 bound to our chassis
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:12.568 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 3a127238-c3fd-4117-ae39-3087c30f09a1
Oct 02 12:41:12 compute-0 systemd-machined[152150]: New machine qemu-83-instance-000000ab.
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:12.580 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[31f7710f-0f3a-4b29-9a55-969af898a69a]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:12.581 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap3a127238-c1 in ovnmeta-3a127238-c3fd-4117-ae39-3087c30f09a1 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:12.583 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap3a127238-c0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:12.583 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[fc261216-37cd-4e03-8958-caa75ff08c91]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:12.584 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[48e18001-8250-4b5f-9ea9-1380875096a7]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:12.596 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[7bf9d016-a25a-430a-9d02-8fc4ac720fc5]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:41:12 compute-0 systemd[1]: Started Virtual Machine qemu-83-instance-000000ab.
Oct 02 12:41:12 compute-0 ovn_controller[94336]: 2025-10-02T12:41:12Z|00677|binding|INFO|Setting lport 8cfd60ac-8c7b-4732-bae4-7099f5767458 ovn-installed in OVS
Oct 02 12:41:12 compute-0 ovn_controller[94336]: 2025-10-02T12:41:12Z|00678|binding|INFO|Setting lport 8cfd60ac-8c7b-4732-bae4-7099f5767458 up in Southbound
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:12.613 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[880065a2-ae48-4e63-be01-3a4cee21d61b]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:41:12 compute-0 nova_compute[192079]: 2025-10-02 12:41:12.614 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:12.642 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[32a9ff6b-5641-41e1-84ef-470a8c7fbe7d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:41:12 compute-0 systemd-udevd[250168]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:12.649 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[013c1229-fa71-433e-ae51-d733cdd06888]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:41:12 compute-0 NetworkManager[51160]: <info>  [1759408872.6504] manager: (tap3a127238-c0): new Veth device (/org/freedesktop/NetworkManager/Devices/329)
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:12.680 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[76727dad-7074-4e14-bf94-4fb83e8d3dd7]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:12.683 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[b339d5ab-f65d-4aa4-a0bb-f89203e17982]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:41:12 compute-0 NetworkManager[51160]: <info>  [1759408872.7091] device (tap3a127238-c0): carrier: link connected
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:12.718 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[1dd9b4fc-5b89-49a7-8d4c-04915a6b172f]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:12.741 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c7bafa96-8170-4dda-8c1f-81f84d98e4fa]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap3a127238-c1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:95:98:dc'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 212], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 687033, 'reachable_time': 15278, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 250199, 'error': None, 'target': 'ovnmeta-3a127238-c3fd-4117-ae39-3087c30f09a1', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:12.762 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e50786ae-10a2-4cb7-a51b-02e8ed31438a]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe95:98dc'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 687033, 'tstamp': 687033}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 250200, 'error': None, 'target': 'ovnmeta-3a127238-c3fd-4117-ae39-3087c30f09a1', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:12.785 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d1469a36-0150-4535-b1a6-627e58017b63]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap3a127238-c1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:95:98:dc'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 212], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 687033, 'reachable_time': 15278, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 250201, 'error': None, 'target': 'ovnmeta-3a127238-c3fd-4117-ae39-3087c30f09a1', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:12.816 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f224473a-9164-4126-9966-b107818ac652]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:12.887 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a52ccfc3-3c4c-4c11-a377-d515c2e87d8f]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:12.889 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap3a127238-c0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:12.889 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:12.890 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap3a127238-c0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:41:12 compute-0 NetworkManager[51160]: <info>  [1759408872.8943] manager: (tap3a127238-c0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/330)
Oct 02 12:41:12 compute-0 kernel: tap3a127238-c0: entered promiscuous mode
Oct 02 12:41:12 compute-0 nova_compute[192079]: 2025-10-02 12:41:12.893 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:12.897 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap3a127238-c0, col_values=(('external_ids', {'iface-id': '5f0a8cdb-b85d-4bfc-8a2d-3f1f2d39612e'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:41:12 compute-0 nova_compute[192079]: 2025-10-02 12:41:12.898 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:12 compute-0 ovn_controller[94336]: 2025-10-02T12:41:12Z|00679|binding|INFO|Releasing lport 5f0a8cdb-b85d-4bfc-8a2d-3f1f2d39612e from this chassis (sb_readonly=0)
Oct 02 12:41:12 compute-0 nova_compute[192079]: 2025-10-02 12:41:12.899 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:12.900 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/3a127238-c3fd-4117-ae39-3087c30f09a1.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/3a127238-c3fd-4117-ae39-3087c30f09a1.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:12.901 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[85e6a020-8940-46db-b66c-5944848d8827]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:12.901 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-3a127238-c3fd-4117-ae39-3087c30f09a1
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/3a127238-c3fd-4117-ae39-3087c30f09a1.pid.haproxy
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 3a127238-c3fd-4117-ae39-3087c30f09a1
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:41:12 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:12.902 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-3a127238-c3fd-4117-ae39-3087c30f09a1', 'env', 'PROCESS_TAG=haproxy-3a127238-c3fd-4117-ae39-3087c30f09a1', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/3a127238-c3fd-4117-ae39-3087c30f09a1.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:41:12 compute-0 nova_compute[192079]: 2025-10-02 12:41:12.910 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:13 compute-0 nova_compute[192079]: 2025-10-02 12:41:13.244 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408873.2436764, e19b3c67-012d-4720-9ed5-92530129270c => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:41:13 compute-0 nova_compute[192079]: 2025-10-02 12:41:13.245 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: e19b3c67-012d-4720-9ed5-92530129270c] VM Started (Lifecycle Event)
Oct 02 12:41:13 compute-0 podman[250241]: 2025-10-02 12:41:13.258178957 +0000 UTC m=+0.044588270 container create bb882e503a97c70bfa8d80d3a084c40458ce0cb946b431216503941c63501d7b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-3a127238-c3fd-4117-ae39-3087c30f09a1, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001)
Oct 02 12:41:13 compute-0 nova_compute[192079]: 2025-10-02 12:41:13.267 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:41:13 compute-0 nova_compute[192079]: 2025-10-02 12:41:13.271 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408873.244724, e19b3c67-012d-4720-9ed5-92530129270c => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:41:13 compute-0 nova_compute[192079]: 2025-10-02 12:41:13.271 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: e19b3c67-012d-4720-9ed5-92530129270c] VM Paused (Lifecycle Event)
Oct 02 12:41:13 compute-0 systemd[1]: Started libpod-conmon-bb882e503a97c70bfa8d80d3a084c40458ce0cb946b431216503941c63501d7b.scope.
Oct 02 12:41:13 compute-0 nova_compute[192079]: 2025-10-02 12:41:13.293 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:41:13 compute-0 nova_compute[192079]: 2025-10-02 12:41:13.300 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:41:13 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:41:13 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/46b7e79a2f06d0546737335afbf0a312009a4140aa2deb48cac52ec52c54d82a/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:41:13 compute-0 podman[250241]: 2025-10-02 12:41:13.320242654 +0000 UTC m=+0.106651967 container init bb882e503a97c70bfa8d80d3a084c40458ce0cb946b431216503941c63501d7b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-3a127238-c3fd-4117-ae39-3087c30f09a1, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, org.label-schema.schema-version=1.0)
Oct 02 12:41:13 compute-0 podman[250241]: 2025-10-02 12:41:13.32595183 +0000 UTC m=+0.112361133 container start bb882e503a97c70bfa8d80d3a084c40458ce0cb946b431216503941c63501d7b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-3a127238-c3fd-4117-ae39-3087c30f09a1, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true)
Oct 02 12:41:13 compute-0 podman[250241]: 2025-10-02 12:41:13.234091508 +0000 UTC m=+0.020500841 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:41:13 compute-0 podman[250257]: 2025-10-02 12:41:13.340122228 +0000 UTC m=+0.049346761 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=iscsid, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, container_name=iscsid, org.label-schema.vendor=CentOS)
Oct 02 12:41:13 compute-0 nova_compute[192079]: 2025-10-02 12:41:13.347 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: e19b3c67-012d-4720-9ed5-92530129270c] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:41:13 compute-0 neutron-haproxy-ovnmeta-3a127238-c3fd-4117-ae39-3087c30f09a1[250258]: [NOTICE]   (250290) : New worker (250302) forked
Oct 02 12:41:13 compute-0 neutron-haproxy-ovnmeta-3a127238-c3fd-4117-ae39-3087c30f09a1[250258]: [NOTICE]   (250290) : Loading success.
Oct 02 12:41:13 compute-0 nova_compute[192079]: 2025-10-02 12:41:13.350 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:13 compute-0 podman[250254]: 2025-10-02 12:41:13.366936451 +0000 UTC m=+0.078597831 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:41:14 compute-0 nova_compute[192079]: 2025-10-02 12:41:14.550 2 DEBUG nova.compute.manager [req-eb656c11-6644-434a-b4b8-17918c15f30c req-ffba13c4-34b4-4238-a885-75d4c88d2fed 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Received event network-vif-plugged-8cfd60ac-8c7b-4732-bae4-7099f5767458 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:41:14 compute-0 nova_compute[192079]: 2025-10-02 12:41:14.551 2 DEBUG oslo_concurrency.lockutils [req-eb656c11-6644-434a-b4b8-17918c15f30c req-ffba13c4-34b4-4238-a885-75d4c88d2fed 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "e19b3c67-012d-4720-9ed5-92530129270c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:41:14 compute-0 nova_compute[192079]: 2025-10-02 12:41:14.552 2 DEBUG oslo_concurrency.lockutils [req-eb656c11-6644-434a-b4b8-17918c15f30c req-ffba13c4-34b4-4238-a885-75d4c88d2fed 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "e19b3c67-012d-4720-9ed5-92530129270c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:41:14 compute-0 nova_compute[192079]: 2025-10-02 12:41:14.552 2 DEBUG oslo_concurrency.lockutils [req-eb656c11-6644-434a-b4b8-17918c15f30c req-ffba13c4-34b4-4238-a885-75d4c88d2fed 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "e19b3c67-012d-4720-9ed5-92530129270c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:41:14 compute-0 nova_compute[192079]: 2025-10-02 12:41:14.552 2 DEBUG nova.compute.manager [req-eb656c11-6644-434a-b4b8-17918c15f30c req-ffba13c4-34b4-4238-a885-75d4c88d2fed 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Processing event network-vif-plugged-8cfd60ac-8c7b-4732-bae4-7099f5767458 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:41:14 compute-0 nova_compute[192079]: 2025-10-02 12:41:14.553 2 DEBUG nova.compute.manager [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Instance event wait completed in 1 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:41:14 compute-0 nova_compute[192079]: 2025-10-02 12:41:14.556 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408874.5567281, e19b3c67-012d-4720-9ed5-92530129270c => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:41:14 compute-0 nova_compute[192079]: 2025-10-02 12:41:14.557 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: e19b3c67-012d-4720-9ed5-92530129270c] VM Resumed (Lifecycle Event)
Oct 02 12:41:14 compute-0 nova_compute[192079]: 2025-10-02 12:41:14.558 2 DEBUG nova.virt.libvirt.driver [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:41:14 compute-0 nova_compute[192079]: 2025-10-02 12:41:14.561 2 INFO nova.virt.libvirt.driver [-] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Instance spawned successfully.
Oct 02 12:41:14 compute-0 nova_compute[192079]: 2025-10-02 12:41:14.562 2 DEBUG nova.virt.libvirt.driver [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:41:14 compute-0 nova_compute[192079]: 2025-10-02 12:41:14.623 2 DEBUG nova.virt.libvirt.driver [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:41:14 compute-0 nova_compute[192079]: 2025-10-02 12:41:14.623 2 DEBUG nova.virt.libvirt.driver [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:41:14 compute-0 nova_compute[192079]: 2025-10-02 12:41:14.624 2 DEBUG nova.virt.libvirt.driver [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:41:14 compute-0 nova_compute[192079]: 2025-10-02 12:41:14.624 2 DEBUG nova.virt.libvirt.driver [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:41:14 compute-0 nova_compute[192079]: 2025-10-02 12:41:14.624 2 DEBUG nova.virt.libvirt.driver [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:41:14 compute-0 nova_compute[192079]: 2025-10-02 12:41:14.625 2 DEBUG nova.virt.libvirt.driver [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:41:14 compute-0 nova_compute[192079]: 2025-10-02 12:41:14.628 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:41:14 compute-0 nova_compute[192079]: 2025-10-02 12:41:14.631 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:41:14 compute-0 nova_compute[192079]: 2025-10-02 12:41:14.669 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: e19b3c67-012d-4720-9ed5-92530129270c] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:41:14 compute-0 nova_compute[192079]: 2025-10-02 12:41:14.987 2 INFO nova.compute.manager [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Took 13.08 seconds to spawn the instance on the hypervisor.
Oct 02 12:41:14 compute-0 nova_compute[192079]: 2025-10-02 12:41:14.988 2 DEBUG nova.compute.manager [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:41:15 compute-0 nova_compute[192079]: 2025-10-02 12:41:15.096 2 INFO nova.compute.manager [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Took 13.71 seconds to build instance.
Oct 02 12:41:15 compute-0 nova_compute[192079]: 2025-10-02 12:41:15.149 2 DEBUG oslo_concurrency.lockutils [None req-b8009788-1ae1-4365-8a85-cdb25c8eb330 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "e19b3c67-012d-4720-9ed5-92530129270c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 13.853s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:41:15 compute-0 nova_compute[192079]: 2025-10-02 12:41:15.526 2 DEBUG nova.network.neutron [req-82496f34-fe45-41a5-8f4e-c0a943cbc7ce req-24b51d7f-3505-4fd8-bf15-6217a031616c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Updated VIF entry in instance network info cache for port 8cfd60ac-8c7b-4732-bae4-7099f5767458. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:41:15 compute-0 nova_compute[192079]: 2025-10-02 12:41:15.526 2 DEBUG nova.network.neutron [req-82496f34-fe45-41a5-8f4e-c0a943cbc7ce req-24b51d7f-3505-4fd8-bf15-6217a031616c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Updating instance_info_cache with network_info: [{"id": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "address": "fa:16:3e:bf:f6:bb", "network": {"id": "3a127238-c3fd-4117-ae39-3087c30f09a1", "bridge": "br-int", "label": "tempest-network-smoke--12525199", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap8cfd60ac-8c", "ovs_interfaceid": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:41:15 compute-0 nova_compute[192079]: 2025-10-02 12:41:15.555 2 DEBUG oslo_concurrency.lockutils [req-82496f34-fe45-41a5-8f4e-c0a943cbc7ce req-24b51d7f-3505-4fd8-bf15-6217a031616c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-e19b3c67-012d-4720-9ed5-92530129270c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:41:16 compute-0 nova_compute[192079]: 2025-10-02 12:41:16.592 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:16 compute-0 nova_compute[192079]: 2025-10-02 12:41:16.658 2 DEBUG nova.compute.manager [req-7515fc8f-77d7-46cf-9b3b-9067806ed970 req-d969a232-3e35-4a3e-8bfa-09a9d8876612 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Received event network-vif-plugged-8cfd60ac-8c7b-4732-bae4-7099f5767458 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:41:16 compute-0 nova_compute[192079]: 2025-10-02 12:41:16.660 2 DEBUG oslo_concurrency.lockutils [req-7515fc8f-77d7-46cf-9b3b-9067806ed970 req-d969a232-3e35-4a3e-8bfa-09a9d8876612 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "e19b3c67-012d-4720-9ed5-92530129270c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:41:16 compute-0 nova_compute[192079]: 2025-10-02 12:41:16.660 2 DEBUG oslo_concurrency.lockutils [req-7515fc8f-77d7-46cf-9b3b-9067806ed970 req-d969a232-3e35-4a3e-8bfa-09a9d8876612 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "e19b3c67-012d-4720-9ed5-92530129270c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:41:16 compute-0 nova_compute[192079]: 2025-10-02 12:41:16.661 2 DEBUG oslo_concurrency.lockutils [req-7515fc8f-77d7-46cf-9b3b-9067806ed970 req-d969a232-3e35-4a3e-8bfa-09a9d8876612 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "e19b3c67-012d-4720-9ed5-92530129270c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:41:16 compute-0 nova_compute[192079]: 2025-10-02 12:41:16.662 2 DEBUG nova.compute.manager [req-7515fc8f-77d7-46cf-9b3b-9067806ed970 req-d969a232-3e35-4a3e-8bfa-09a9d8876612 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] No waiting events found dispatching network-vif-plugged-8cfd60ac-8c7b-4732-bae4-7099f5767458 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:41:16 compute-0 nova_compute[192079]: 2025-10-02 12:41:16.662 2 WARNING nova.compute.manager [req-7515fc8f-77d7-46cf-9b3b-9067806ed970 req-d969a232-3e35-4a3e-8bfa-09a9d8876612 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Received unexpected event network-vif-plugged-8cfd60ac-8c7b-4732-bae4-7099f5767458 for instance with vm_state active and task_state None.
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.115 12 DEBUG ceilometer.compute.discovery [-] instance data: {'id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'name': 'tempest-TestNetworkBasicOps-server-1466705682', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'os_type': 'hvm', 'architecture': 'x86_64', 'OS-EXT-SRV-ATTR:instance_name': 'instance-000000ab', 'OS-EXT-SRV-ATTR:host': 'compute-0.ctlplane.example.com', 'OS-EXT-STS:vm_state': 'running', 'tenant_id': '6e2a4899168a47618e377cb3ac85ddd2', 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'hostId': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'status': 'active', 'metadata': {}} discover_libvirt_polling /usr/lib/python3.9/site-packages/ceilometer/compute/discovery.py:228
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.116 12 INFO ceilometer.polling.manager [-] Polling pollster memory.usage in the context of pollsters
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.136 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/memory.usage volume: Unavailable _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.136 12 WARNING ceilometer.compute.pollsters [-] memory.usage statistic in not available for instance e19b3c67-012d-4720-9ed5-92530129270c: ceilometer.compute.pollsters.NoVolumeException
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.136 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.latency in the context of pollsters
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.136 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for PerDeviceDiskLatencyPollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.136 12 ERROR ceilometer.polling.manager [-] Prevent pollster disk.device.latency from polling [<NovaLikeServer: tempest-TestNetworkBasicOps-server-1466705682>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-TestNetworkBasicOps-server-1466705682>]
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.137 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes.delta in the context of pollsters
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.139 12 DEBUG ceilometer.compute.virt.libvirt.inspector [-] No delta meter predecessor for e19b3c67-012d-4720-9ed5-92530129270c / tap8cfd60ac-8c inspect_vnics /usr/lib/python3.9/site-packages/ceilometer/compute/virt/libvirt/inspector.py:136
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.140 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/network.outgoing.bytes.delta volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '2630eba9-7623-4d9e-bfe7-960cae8383c2', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.bytes.delta', 'counter_type': 'delta', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'instance-000000ab-e19b3c67-012d-4720-9ed5-92530129270c-tap8cfd60ac-8c', 'timestamp': '2025-10-02T12:41:17.137306', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1466705682', 'name': 'tap8cfd60ac-8c', 'instance_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:bf:f6:bb', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap8cfd60ac-8c'}, 'message_id': '16df0b8a-9f8d-11f0-af18-fa163efc5e78', 'monotonic_time': 6874.824370793, 'message_signature': 'bfe07c480c7a88d17c0820ae4e227534ef60d2063840105f36cf5e9c82dade1e'}]}, 'timestamp': '2025-10-02 12:41:17.140889', '_unique_id': 'd98f1c5694da4e9ba64975bc990bd005'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.142 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.143 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets in the context of pollsters
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.143 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/network.outgoing.packets volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '38763a40-05e0-4fe8-8c34-eed3c93f6043', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'instance-000000ab-e19b3c67-012d-4720-9ed5-92530129270c-tap8cfd60ac-8c', 'timestamp': '2025-10-02T12:41:17.143245', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1466705682', 'name': 'tap8cfd60ac-8c', 'instance_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:bf:f6:bb', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap8cfd60ac-8c'}, 'message_id': '16df769c-9f8d-11f0-af18-fa163efc5e78', 'monotonic_time': 6874.824370793, 'message_signature': 'b94d346a02fcfd17c9a18e0f893962e43421698d6c892e8c5ab5c0db92e22f23'}]}, 'timestamp': '2025-10-02 12:41:17.143571', '_unique_id': '04601a5c42c94174993291afe8a4ce7b'}: kombu.exceptions.OperationalError: [Errno 111] Connection 
refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.144 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.146 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.usage in the context of pollsters
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.160 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/disk.device.usage volume: 196624 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.161 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/disk.device.usage volume: 485376 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '8890ae8c-db3c-4716-b260-07daad664107', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 196624, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'e19b3c67-012d-4720-9ed5-92530129270c-vda', 'timestamp': '2025-10-02T12:41:17.146226', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1466705682', 'name': 'instance-000000ab', 'instance_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '16e23a12-9f8d-11f0-af18-fa163efc5e78', 'monotonic_time': 6874.833325427, 'message_signature': '49572305f913550a43dcbda65bf4bf4e1d8a26655a7c98f3741ffa9e6952ba9b'}, {'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 485376, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 
'e19b3c67-012d-4720-9ed5-92530129270c-sda', 'timestamp': '2025-10-02T12:41:17.146226', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1466705682', 'name': 'instance-000000ab', 'instance_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '16e24a52-9f8d-11f0-af18-fa163efc5e78', 'monotonic_time': 6874.833325427, 'message_signature': '3f796c1af7eef86914fb78d093f1641e3f662960a54951bb5c65cb98106822a2'}]}, 'timestamp': '2025-10-02 12:41:17.162089', '_unique_id': '7af3a2ed5e68420e98d002168b40bfe0'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.163 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.164 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets in the context of pollsters
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.164 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/network.incoming.packets volume: 1 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '2ba5f29e-370c-4890-8320-6414f221df2d', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 1, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'instance-000000ab-e19b3c67-012d-4720-9ed5-92530129270c-tap8cfd60ac-8c', 'timestamp': '2025-10-02T12:41:17.164395', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1466705682', 'name': 'tap8cfd60ac-8c', 'instance_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:bf:f6:bb', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap8cfd60ac-8c'}, 'message_id': '16e2b2a8-9f8d-11f0-af18-fa163efc5e78', 'monotonic_time': 6874.824370793, 'message_signature': 'c5dcd1fd6015d25d400fc5a436e0ff3a8942c450cc1fed2cf4e79ab00266b5c6'}]}, 'timestamp': '2025-10-02 12:41:17.164771', '_unique_id': 'f10efc44861a480a9d214359962e5c53'}: kombu.exceptions.OperationalError: [Errno 111] Connection 
refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.165 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.166 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes in the context of pollsters
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.166 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/network.incoming.bytes volume: 110 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'ee8f53e2-688a-4ded-81ff-f639254e69a8', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 110, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'instance-000000ab-e19b3c67-012d-4720-9ed5-92530129270c-tap8cfd60ac-8c', 'timestamp': '2025-10-02T12:41:17.166656', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1466705682', 'name': 'tap8cfd60ac-8c', 'instance_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:bf:f6:bb', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap8cfd60ac-8c'}, 'message_id': '16e30a0a-9f8d-11f0-af18-fa163efc5e78', 'monotonic_time': 6874.824370793, 'message_signature': '4b134528be8ca6d5e9c8b00becf7c10d763797d742a03085f8347e149470f7a1'}]}, 'timestamp': '2025-10-02 12:41:17.166962', '_unique_id': '232882e8cb9e4f22886a2e3220eb07aa'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.167 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.168 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.bytes in the context of pollsters
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.188 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/disk.device.write.bytes volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.188 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/disk.device.write.bytes volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '2f4adb69-2f33-489a-86b6-d4ac1a6d1b14', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'e19b3c67-012d-4720-9ed5-92530129270c-vda', 'timestamp': '2025-10-02T12:41:17.168480', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1466705682', 'name': 'instance-000000ab', 'instance_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '16e65d40-9f8d-11f0-af18-fa163efc5e78', 'monotonic_time': 6874.855572896, 'message_signature': 'f0341d34fc226214bb06d52e57db65119ade35a1821b4ac0b566856462014a9b'}, {'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 
'e19b3c67-012d-4720-9ed5-92530129270c-sda', 'timestamp': '2025-10-02T12:41:17.168480', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1466705682', 'name': 'instance-000000ab', 'instance_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '16e66da8-9f8d-11f0-af18-fa163efc5e78', 'monotonic_time': 6874.855572896, 'message_signature': '756bd91c203e6512c9cc2cb311c29b96603f86c19798b006a0f3702d89b60e9b'}]}, 'timestamp': '2025-10-02 12:41:17.189221', '_unique_id': '0ed57ce49a14458ab4459676bdcc2bdf'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:41:17 compute-0 rsyslogd[1013]: imjournal: journal files changed, reloading...  [v8.2506.0-2.el9 try https://www.rsyslog.com/e/0 ]
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.190 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.191 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.capacity in the context of pollsters
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.191 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/disk.device.capacity volume: 1073741824 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.192 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/disk.device.capacity volume: 485376 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '63e792e2-fd34-420e-a614-7d33ab27646c', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 1073741824, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'e19b3c67-012d-4720-9ed5-92530129270c-vda', 'timestamp': '2025-10-02T12:41:17.191738', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1466705682', 'name': 'instance-000000ab', 'instance_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '16e6de96-9f8d-11f0-af18-fa163efc5e78', 'monotonic_time': 6874.833325427, 'message_signature': 'bed7455aa3ee18d62193d77d543748ac48135bb828d6a459b1ec0750bfc85c5f'}, {'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 485376, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 
'e19b3c67-012d-4720-9ed5-92530129270c-sda', 'timestamp': '2025-10-02T12:41:17.191738', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1466705682', 'name': 'instance-000000ab', 'instance_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '16e6ec88-9f8d-11f0-af18-fa163efc5e78', 'monotonic_time': 6874.833325427, 'message_signature': '334d019cf3a8db82d9bd0c5bc5833254e9851ca19820a58a669848042efe32e9'}]}, 'timestamp': '2025-10-02 12:41:17.192444', '_unique_id': 'b84212622abc42c1898d5bda61bdb6b4'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.197 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.198 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets.drop in the context of pollsters
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.198 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/network.incoming.packets.drop volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'ca4e8a04-ecbb-4d9c-9e43-838bfd6e96ab', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets.drop', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'instance-000000ab-e19b3c67-012d-4720-9ed5-92530129270c-tap8cfd60ac-8c', 'timestamp': '2025-10-02T12:41:17.198383', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1466705682', 'name': 'tap8cfd60ac-8c', 'instance_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:bf:f6:bb', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap8cfd60ac-8c'}, 'message_id': '16e7e156-9f8d-11f0-af18-fa163efc5e78', 'monotonic_time': 6874.824370793, 'message_signature': 'c0080fdebd0a3ee79569d7c531dcaba31e2adb348cac01504b704fb1b2d3e556'}]}, 'timestamp': '2025-10-02 12:41:17.198714', '_unique_id': '2017a3eaa49a471e9511f60429dfdd15'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.199 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes in the context of pollsters
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/network.outgoing.bytes volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'b5a6089d-ca9b-4ec7-ada8-d162335802ac', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'instance-000000ab-e19b3c67-012d-4720-9ed5-92530129270c-tap8cfd60ac-8c', 'timestamp': '2025-10-02T12:41:17.200044', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1466705682', 'name': 'tap8cfd60ac-8c', 'instance_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:bf:f6:bb', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap8cfd60ac-8c'}, 'message_id': '16e821fc-9f8d-11f0-af18-fa163efc5e78', 'monotonic_time': 6874.824370793, 'message_signature': '84fc26b377c10210b908ee7a9cda1c4952befd54ac58b2ac27230564c7c19860'}]}, 'timestamp': '2025-10-02 12:41:17.200407', '_unique_id': '5416d6d3141946868f8409706e4d8d5a'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.200 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.201 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.allocation in the context of pollsters
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.202 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/disk.device.allocation volume: 204800 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.203 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/disk.device.allocation volume: 487424 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '42715a16-e880-4275-8005-61bad1b9f371', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 204800, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'e19b3c67-012d-4720-9ed5-92530129270c-vda', 'timestamp': '2025-10-02T12:41:17.202819', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1466705682', 'name': 'instance-000000ab', 'instance_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '16e890e2-9f8d-11f0-af18-fa163efc5e78', 'monotonic_time': 6874.833325427, 'message_signature': '0244dd281ae0f67ced89d4b57052216be2d3d7277bceab4f7df43bb570194efd'}, {'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 487424, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 
'e19b3c67-012d-4720-9ed5-92530129270c-sda', 'timestamp': '2025-10-02T12:41:17.202819', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1466705682', 'name': 'instance-000000ab', 'instance_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '16e89dd0-9f8d-11f0-af18-fa163efc5e78', 'monotonic_time': 6874.833325427, 'message_signature': 'cf875c8d46a395217dd6201d74024828018731875f7265d63f18307aef6109e6'}]}, 'timestamp': '2025-10-02 12:41:17.203531', '_unique_id': '6566e9ee78f044e39a0b14207e2cd2e5'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.204 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.205 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.latency in the context of pollsters
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.205 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/disk.device.read.latency volume: 387426451 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.205 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/disk.device.read.latency volume: 4091032 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '03c8011c-4190-45dc-95d7-bc82840d441d', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 387426451, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'e19b3c67-012d-4720-9ed5-92530129270c-vda', 'timestamp': '2025-10-02T12:41:17.205391', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1466705682', 'name': 'instance-000000ab', 'instance_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '16e8f3d4-9f8d-11f0-af18-fa163efc5e78', 'monotonic_time': 6874.855572896, 'message_signature': '0dadce7569c128dc10400acefbce03d1b685452c80320cc10714bf0e0a2df3ad'}, {'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 4091032, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 
'resource_id': 'e19b3c67-012d-4720-9ed5-92530129270c-sda', 'timestamp': '2025-10-02T12:41:17.205391', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1466705682', 'name': 'instance-000000ab', 'instance_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '16e90036-9f8d-11f0-af18-fa163efc5e78', 'monotonic_time': 6874.855572896, 'message_signature': '7aa0d61cdd22115a3d45c9b94d4dac47b7c1b7e7aeb8cfab1f0cf19f96c223cf'}]}, 'timestamp': '2025-10-02 12:41:17.206078', '_unique_id': 'b24603c07838486d8bb114d76a7d62f5'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.206 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.207 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.latency in the context of pollsters
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.207 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/disk.device.write.latency volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.208 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/disk.device.write.latency volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '103ed87c-ef7d-4185-bab9-8072bfd43caa', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 0, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'e19b3c67-012d-4720-9ed5-92530129270c-vda', 'timestamp': '2025-10-02T12:41:17.207920', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1466705682', 'name': 'instance-000000ab', 'instance_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '16e9572a-9f8d-11f0-af18-fa163efc5e78', 'monotonic_time': 6874.855572896, 'message_signature': '2b3a137195d71766c16d1370d4cb5935ab372095150ab8b19e30ae2e29bef576'}, {'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 0, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 
'resource_id': 'e19b3c67-012d-4720-9ed5-92530129270c-sda', 'timestamp': '2025-10-02T12:41:17.207920', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1466705682', 'name': 'instance-000000ab', 'instance_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '16e9633c-9f8d-11f0-af18-fa163efc5e78', 'monotonic_time': 6874.855572896, 'message_signature': '182ae764fba597af159739158b9bfe13822bd0bbb9242cad245cce8f88f4fedf'}]}, 'timestamp': '2025-10-02 12:41:17.208582', '_unique_id': '715ba8867a924b8f813a2343a797c73a'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.209 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.210 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.bytes in the context of pollsters
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.210 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/disk.device.read.bytes volume: 23775232 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.210 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/disk.device.read.bytes volume: 2048 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'e138cc03-5baa-41fa-8bb9-05557010a2ef', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 23775232, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'e19b3c67-012d-4720-9ed5-92530129270c-vda', 'timestamp': '2025-10-02T12:41:17.210407', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1466705682', 'name': 'instance-000000ab', 'instance_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '16e9b6ac-9f8d-11f0-af18-fa163efc5e78', 'monotonic_time': 6874.855572896, 'message_signature': 'e15d7842b4bdb7f28fb7d8eb0d431664186cbfecbd074516179e3eecb566c39e'}, {'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 2048, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 
'resource_id': 'e19b3c67-012d-4720-9ed5-92530129270c-sda', 'timestamp': '2025-10-02T12:41:17.210407', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1466705682', 'name': 'instance-000000ab', 'instance_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '16e9c23c-9f8d-11f0-af18-fa163efc5e78', 'monotonic_time': 6874.855572896, 'message_signature': 'fa59bc3392f7078e50574bc4ac9dfa3e6f9c5e5d3e383661c6826cea707fa45e'}]}, 'timestamp': '2025-10-02 12:41:17.211001', '_unique_id': '8df16f9172d242d5a20d5771e7baf7ef'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.211 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.212 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets.error in the context of pollsters
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.212 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/network.incoming.packets.error volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '50c39df1-107a-4415-b2ea-67a42b7e7be1', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets.error', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'instance-000000ab-e19b3c67-012d-4720-9ed5-92530129270c-tap8cfd60ac-8c', 'timestamp': '2025-10-02T12:41:17.212804', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1466705682', 'name': 'tap8cfd60ac-8c', 'instance_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:bf:f6:bb', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap8cfd60ac-8c'}, 'message_id': '16ea15ca-9f8d-11f0-af18-fa163efc5e78', 'monotonic_time': 6874.824370793, 'message_signature': '9ca7e800b40b1bf1b58569450639ec3701c52b101f83067a65715fa362cf77c1'}]}, 'timestamp': '2025-10-02 12:41:17.213172', '_unique_id': 'b8fc961b189849b083baded035e0f84b'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.213 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.214 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes.rate in the context of pollsters
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.215 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for OutgoingBytesRatePollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.215 12 ERROR ceilometer.polling.manager [-] Prevent pollster network.outgoing.bytes.rate from polling [<NovaLikeServer: tempest-TestNetworkBasicOps-server-1466705682>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-TestNetworkBasicOps-server-1466705682>]
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.215 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.iops in the context of pollsters
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.215 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for PerDeviceDiskIOPSPollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.215 12 ERROR ceilometer.polling.manager [-] Prevent pollster disk.device.iops from polling [<NovaLikeServer: tempest-TestNetworkBasicOps-server-1466705682>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-TestNetworkBasicOps-server-1466705682>]
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.215 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets.drop in the context of pollsters
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.216 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/network.outgoing.packets.drop volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'f678577a-47f3-4b99-88a0-4df306993a52', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets.drop', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'instance-000000ab-e19b3c67-012d-4720-9ed5-92530129270c-tap8cfd60ac-8c', 'timestamp': '2025-10-02T12:41:17.215966', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1466705682', 'name': 'tap8cfd60ac-8c', 'instance_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:bf:f6:bb', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap8cfd60ac-8c'}, 'message_id': '16ea9216-9f8d-11f0-af18-fa163efc5e78', 'monotonic_time': 6874.824370793, 'message_signature': 'cafb133d70d89861920e1309a06f6a7edc1167f38404a5d26dbef9b441e71c79'}]}, 'timestamp': '2025-10-02 12:41:17.216372', '_unique_id': 'a2a20702a64b478fb7b4915673933ce9'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.217 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.218 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets.error in the context of pollsters
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.218 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/network.outgoing.packets.error volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'acde0cee-5ab9-48ba-bf6a-18523c7eae4d', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets.error', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'instance-000000ab-e19b3c67-012d-4720-9ed5-92530129270c-tap8cfd60ac-8c', 'timestamp': '2025-10-02T12:41:17.218457', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1466705682', 'name': 'tap8cfd60ac-8c', 'instance_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:bf:f6:bb', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap8cfd60ac-8c'}, 'message_id': '16eaf1f2-9f8d-11f0-af18-fa163efc5e78', 'monotonic_time': 6874.824370793, 'message_signature': 'f245eeb0eb419a28025230e4a1f2fce8d95af2e064f9a58cd1a0775c86e13151'}]}, 'timestamp': '2025-10-02 12:41:17.218813', '_unique_id': '9343d65960b046abbd0f1669e704a505'}: kombu.exceptions.OperationalError: [Errno 111] 
Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.219 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.220 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.requests in the context of pollsters
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.220 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/disk.device.write.requests volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/disk.device.write.requests volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '08005b10-d40c-48f5-8e0a-68aaeb3c3e56', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 0, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'e19b3c67-012d-4720-9ed5-92530129270c-vda', 'timestamp': '2025-10-02T12:41:17.220745', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1466705682', 'name': 'instance-000000ab', 'instance_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '16eb4ae4-9f8d-11f0-af18-fa163efc5e78', 'monotonic_time': 6874.855572896, 'message_signature': '3da97c2c921796831b59dbd08e4d76c9e3cf593e90f19c79f9d0b0d31417eeb0'}, {'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 0, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 
'resource_id': 'e19b3c67-012d-4720-9ed5-92530129270c-sda', 'timestamp': '2025-10-02T12:41:17.220745', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1466705682', 'name': 'instance-000000ab', 'instance_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '16eb55e8-9f8d-11f0-af18-fa163efc5e78', 'monotonic_time': 6874.855572896, 'message_signature': '9022a3a8ec95121139bf65b5588e74fbf554a2aabb22a0a2677fd131ca2799cd'}]}, 'timestamp': '2025-10-02 12:41:17.221382', '_unique_id': '7818b279849a4189934fc0ae7bbab8c7'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.221 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.223 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.requests in the context of pollsters
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.223 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/disk.device.read.requests volume: 760 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.224 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/disk.device.read.requests volume: 1 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'f8678cdc-ac26-48e0-8298-184cb65ace61', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 760, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'e19b3c67-012d-4720-9ed5-92530129270c-vda', 'timestamp': '2025-10-02T12:41:17.223785', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1466705682', 'name': 'instance-000000ab', 'instance_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '16ebc0c8-9f8d-11f0-af18-fa163efc5e78', 'monotonic_time': 6874.855572896, 'message_signature': '5c8d599171d0ac2cfcb05bc220e330b737de25869b0ccffd842f5ca6100011df'}, {'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 1, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 
'resource_id': 'e19b3c67-012d-4720-9ed5-92530129270c-sda', 'timestamp': '2025-10-02T12:41:17.223785', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1466705682', 'name': 'instance-000000ab', 'instance_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '16ebcbae-9f8d-11f0-af18-fa163efc5e78', 'monotonic_time': 6874.855572896, 'message_signature': 'b3b5ac18a7385c059c457b5a00ebfaf10e5ee131d115f98fe988f0a020c56823'}]}, 'timestamp': '2025-10-02 12:41:17.224366', '_unique_id': '2e08e37147d64a678bc660a930902624'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:41:17 compute-0 rsyslogd[1013]: imjournal: journal files changed, reloading...  [v8.2506.0-2.el9 try https://www.rsyslog.com/e/0 ]
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.225 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes.delta in the context of pollsters
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.226 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/network.incoming.bytes.delta volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'b9ee45d7-ba4c-445b-8663-397ddc9f424c', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.bytes.delta', 'counter_type': 'delta', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'instance-000000ab-e19b3c67-012d-4720-9ed5-92530129270c-tap8cfd60ac-8c', 'timestamp': '2025-10-02T12:41:17.226032', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1466705682', 'name': 'tap8cfd60ac-8c', 'instance_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:bf:f6:bb', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tap8cfd60ac-8c'}, 'message_id': '16ec182a-9f8d-11f0-af18-fa163efc5e78', 'monotonic_time': 6874.824370793, 'message_signature': '2f2925128997cca089bf70a2f3cc7e8292cad6d02c9716006c8200388c0fe337'}]}, 'timestamp': '2025-10-02 12:41:17.226343', '_unique_id': 'b51e6511cc0a46b79d80ac80ef72e6e6'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.227 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.228 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes.rate in the context of pollsters
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.228 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for IncomingBytesRatePollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.228 12 ERROR ceilometer.polling.manager [-] Prevent pollster network.incoming.bytes.rate from polling [<NovaLikeServer: tempest-TestNetworkBasicOps-server-1466705682>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-TestNetworkBasicOps-server-1466705682>]
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.228 12 INFO ceilometer.polling.manager [-] Polling pollster cpu in the context of pollsters
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 DEBUG ceilometer.compute.pollsters [-] e19b3c67-012d-4720-9ed5-92530129270c/cpu volume: 2510000000 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'aa5522f3-1cf1-48d3-9b66-083891741c47', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'cpu', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 2510000000, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_name': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_name': None, 'resource_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'timestamp': '2025-10-02T12:41:17.229036', 'resource_metadata': {'display_name': 'tempest-TestNetworkBasicOps-server-1466705682', 'name': 'instance-000000ab', 'instance_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'instance_type': 'm1.nano', 'host': 'c40fe833fbcc5eb9867f228f1f9a29d1bd67dccf5bb0120cc931ad94', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'cpu_number': 1}, 'message_id': '16ec8d1e-9f8d-11f0-af18-fa163efc5e78', 'monotonic_time': 6874.823064497, 'message_signature': '4fda1d9063e11e19c2bbd14319b55ea872d057b74e202386b51f8e198e5ffead'}]}, 'timestamp': '2025-10-02 12:41:17.229348', '_unique_id': 'ff180f92005d4fd6bcb65ce4b9325218'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:41:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:41:17.229 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:41:18 compute-0 nova_compute[192079]: 2025-10-02 12:41:18.354 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:19.428 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=46, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=45) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:41:19 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:19.430 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 9 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:41:19 compute-0 nova_compute[192079]: 2025-10-02 12:41:19.473 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:21 compute-0 nova_compute[192079]: 2025-10-02 12:41:21.370 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:21 compute-0 NetworkManager[51160]: <info>  [1759408881.3725] manager: (patch-br-int-to-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/331)
Oct 02 12:41:21 compute-0 NetworkManager[51160]: <info>  [1759408881.3754] manager: (patch-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d-to-br-int): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/332)
Oct 02 12:41:21 compute-0 nova_compute[192079]: 2025-10-02 12:41:21.456 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:21 compute-0 ovn_controller[94336]: 2025-10-02T12:41:21Z|00680|binding|INFO|Releasing lport 5f0a8cdb-b85d-4bfc-8a2d-3f1f2d39612e from this chassis (sb_readonly=0)
Oct 02 12:41:21 compute-0 nova_compute[192079]: 2025-10-02 12:41:21.477 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:21 compute-0 nova_compute[192079]: 2025-10-02 12:41:21.594 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:21 compute-0 nova_compute[192079]: 2025-10-02 12:41:21.814 2 DEBUG nova.compute.manager [req-c897e565-8900-4535-9369-0189e12c5156 req-2c703db8-ca3f-41f9-9f34-e5934cdeca8b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Received event network-changed-8cfd60ac-8c7b-4732-bae4-7099f5767458 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:41:21 compute-0 nova_compute[192079]: 2025-10-02 12:41:21.816 2 DEBUG nova.compute.manager [req-c897e565-8900-4535-9369-0189e12c5156 req-2c703db8-ca3f-41f9-9f34-e5934cdeca8b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Refreshing instance network info cache due to event network-changed-8cfd60ac-8c7b-4732-bae4-7099f5767458. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:41:21 compute-0 nova_compute[192079]: 2025-10-02 12:41:21.816 2 DEBUG oslo_concurrency.lockutils [req-c897e565-8900-4535-9369-0189e12c5156 req-2c703db8-ca3f-41f9-9f34-e5934cdeca8b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-e19b3c67-012d-4720-9ed5-92530129270c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:41:21 compute-0 nova_compute[192079]: 2025-10-02 12:41:21.817 2 DEBUG oslo_concurrency.lockutils [req-c897e565-8900-4535-9369-0189e12c5156 req-2c703db8-ca3f-41f9-9f34-e5934cdeca8b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-e19b3c67-012d-4720-9ed5-92530129270c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:41:21 compute-0 nova_compute[192079]: 2025-10-02 12:41:21.817 2 DEBUG nova.network.neutron [req-c897e565-8900-4535-9369-0189e12c5156 req-2c703db8-ca3f-41f9-9f34-e5934cdeca8b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Refreshing network info cache for port 8cfd60ac-8c7b-4732-bae4-7099f5767458 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:41:22 compute-0 podman[250313]: 2025-10-02 12:41:22.170233759 +0000 UTC m=+0.079866646 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_managed=true, config_id=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, container_name=ovn_metadata_agent)
Oct 02 12:41:22 compute-0 podman[250315]: 2025-10-02 12:41:22.18341678 +0000 UTC m=+0.074208851 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']})
Oct 02 12:41:22 compute-0 podman[250314]: 2025-10-02 12:41:22.273690998 +0000 UTC m=+0.175005167 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_id=ovn_controller, container_name=ovn_controller)
Oct 02 12:41:23 compute-0 nova_compute[192079]: 2025-10-02 12:41:23.356 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:25 compute-0 nova_compute[192079]: 2025-10-02 12:41:25.347 2 DEBUG nova.network.neutron [req-c897e565-8900-4535-9369-0189e12c5156 req-2c703db8-ca3f-41f9-9f34-e5934cdeca8b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Updated VIF entry in instance network info cache for port 8cfd60ac-8c7b-4732-bae4-7099f5767458. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:41:25 compute-0 nova_compute[192079]: 2025-10-02 12:41:25.348 2 DEBUG nova.network.neutron [req-c897e565-8900-4535-9369-0189e12c5156 req-2c703db8-ca3f-41f9-9f34-e5934cdeca8b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Updating instance_info_cache with network_info: [{"id": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "address": "fa:16:3e:bf:f6:bb", "network": {"id": "3a127238-c3fd-4117-ae39-3087c30f09a1", "bridge": "br-int", "label": "tempest-network-smoke--12525199", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap8cfd60ac-8c", "ovs_interfaceid": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:41:25 compute-0 nova_compute[192079]: 2025-10-02 12:41:25.388 2 DEBUG oslo_concurrency.lockutils [req-c897e565-8900-4535-9369-0189e12c5156 req-2c703db8-ca3f-41f9-9f34-e5934cdeca8b 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-e19b3c67-012d-4720-9ed5-92530129270c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:41:26 compute-0 nova_compute[192079]: 2025-10-02 12:41:26.596 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:27 compute-0 ovn_controller[94336]: 2025-10-02T12:41:27Z|00079|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:bf:f6:bb 10.100.0.6
Oct 02 12:41:27 compute-0 ovn_controller[94336]: 2025-10-02T12:41:27Z|00080|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:bf:f6:bb 10.100.0.6
Oct 02 12:41:28 compute-0 nova_compute[192079]: 2025-10-02 12:41:28.390 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:28 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:41:28.432 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '46'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:41:31 compute-0 podman[250399]: 2025-10-02 12:41:31.154922386 +0000 UTC m=+0.069445371 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, 
org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=edpm, io.buildah.version=1.41.3)
Oct 02 12:41:31 compute-0 nova_compute[192079]: 2025-10-02 12:41:31.598 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:33 compute-0 nova_compute[192079]: 2025-10-02 12:41:33.393 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:36 compute-0 nova_compute[192079]: 2025-10-02 12:41:36.601 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:38 compute-0 nova_compute[192079]: 2025-10-02 12:41:38.396 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:39 compute-0 podman[250420]: 2025-10-02 12:41:39.150901876 +0000 UTC m=+0.059741245 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=multipathd, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, container_name=multipathd)
Oct 02 12:41:39 compute-0 podman[250419]: 2025-10-02 12:41:39.178537202 +0000 UTC m=+0.092782329 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, distribution-scope=public, container_name=openstack_network_exporter, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, vcs-type=git, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., io.openshift.expose-services=, architecture=x86_64, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.openshift.tags=minimal rhel9, com.redhat.component=ubi9-minimal-container, io.buildah.version=1.33.7, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, managed_by=edpm_ansible, vendor=Red Hat, Inc., description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. 
This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., maintainer=Red Hat, Inc., build-date=2025-08-20T13:12:41, config_id=edpm, release=1755695350, version=9.6, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., name=ubi9-minimal, url=https://catalog.redhat.com/en/search?searchType=containers, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b)
Oct 02 12:41:39 compute-0 nova_compute[192079]: 2025-10-02 12:41:39.677 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:41:41 compute-0 nova_compute[192079]: 2025-10-02 12:41:41.604 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:43 compute-0 nova_compute[192079]: 2025-10-02 12:41:43.439 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:43 compute-0 nova_compute[192079]: 2025-10-02 12:41:43.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:41:43 compute-0 nova_compute[192079]: 2025-10-02 12:41:43.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:41:44 compute-0 podman[250461]: 2025-10-02 12:41:44.164417908 +0000 UTC m=+0.068175725 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']})
Oct 02 12:41:44 compute-0 podman[250462]: 2025-10-02 12:41:44.170055712 +0000 UTC m=+0.065160163 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS)
Oct 02 12:41:46 compute-0 nova_compute[192079]: 2025-10-02 12:41:46.606 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:46 compute-0 nova_compute[192079]: 2025-10-02 12:41:46.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:41:46 compute-0 nova_compute[192079]: 2025-10-02 12:41:46.664 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:41:46 compute-0 nova_compute[192079]: 2025-10-02 12:41:46.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:41:46 compute-0 nova_compute[192079]: 2025-10-02 12:41:46.690 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:41:46 compute-0 nova_compute[192079]: 2025-10-02 12:41:46.692 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.002s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:41:46 compute-0 nova_compute[192079]: 2025-10-02 12:41:46.693 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:41:46 compute-0 nova_compute[192079]: 2025-10-02 12:41:46.693 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:41:46 compute-0 nova_compute[192079]: 2025-10-02 12:41:46.772 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/e19b3c67-012d-4720-9ed5-92530129270c/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:41:46 compute-0 nova_compute[192079]: 2025-10-02 12:41:46.873 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/e19b3c67-012d-4720-9ed5-92530129270c/disk --force-share --output=json" returned: 0 in 0.101s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:41:46 compute-0 nova_compute[192079]: 2025-10-02 12:41:46.874 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/e19b3c67-012d-4720-9ed5-92530129270c/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:41:46 compute-0 nova_compute[192079]: 2025-10-02 12:41:46.960 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/e19b3c67-012d-4720-9ed5-92530129270c/disk --force-share --output=json" returned: 0 in 0.085s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:41:47 compute-0 nova_compute[192079]: 2025-10-02 12:41:47.110 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:41:47 compute-0 nova_compute[192079]: 2025-10-02 12:41:47.111 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5557MB free_disk=73.24374389648438GB free_vcpus=7 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:41:47 compute-0 nova_compute[192079]: 2025-10-02 12:41:47.111 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:41:47 compute-0 nova_compute[192079]: 2025-10-02 12:41:47.112 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:41:47 compute-0 nova_compute[192079]: 2025-10-02 12:41:47.337 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance e19b3c67-012d-4720-9ed5-92530129270c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:41:47 compute-0 nova_compute[192079]: 2025-10-02 12:41:47.337 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 1 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:41:47 compute-0 nova_compute[192079]: 2025-10-02 12:41:47.337 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=640MB phys_disk=79GB used_disk=1GB total_vcpus=8 used_vcpus=1 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:41:47 compute-0 nova_compute[192079]: 2025-10-02 12:41:47.363 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing inventories for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708 _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:804
Oct 02 12:41:47 compute-0 nova_compute[192079]: 2025-10-02 12:41:47.393 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Updating ProviderTree inventory for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 from _refresh_and_get_inventory using data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} _refresh_and_get_inventory /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:768
Oct 02 12:41:47 compute-0 nova_compute[192079]: 2025-10-02 12:41:47.393 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Updating inventory in ProviderTree for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 with inventory: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:176
Oct 02 12:41:47 compute-0 nova_compute[192079]: 2025-10-02 12:41:47.432 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing aggregate associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, aggregates: None _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:813
Oct 02 12:41:47 compute-0 nova_compute[192079]: 2025-10-02 12:41:47.469 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing trait associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, traits: COMPUTE_SECURITY_UEFI_SECURE_BOOT,COMPUTE_VIOMMU_MODEL_VIRTIO,COMPUTE_VIOMMU_MODEL_AUTO,COMPUTE_IMAGE_TYPE_AKI,COMPUTE_GRAPHICS_MODEL_VIRTIO,COMPUTE_NET_VIF_MODEL_PCNET,HW_CPU_X86_SSE42,COMPUTE_RESCUE_BFV,COMPUTE_VOLUME_EXTEND,COMPUTE_IMAGE_TYPE_QCOW2,COMPUTE_TRUSTED_CERTS,COMPUTE_SOCKET_PCI_NUMA_AFFINITY,COMPUTE_GRAPHICS_MODEL_CIRRUS,HW_CPU_X86_MMX,COMPUTE_STORAGE_BUS_VIRTIO,COMPUTE_NET_ATTACH_INTERFACE_WITH_TAG,COMPUTE_STORAGE_BUS_FDC,COMPUTE_STORAGE_BUS_USB,COMPUTE_NODE,HW_CPU_X86_SSSE3,HW_CPU_X86_SSE2,COMPUTE_GRAPHICS_MODEL_BOCHS,COMPUTE_NET_VIF_MODEL_E1000E,COMPUTE_IMAGE_TYPE_RAW,COMPUTE_NET_VIF_MODEL_NE2K_PCI,COMPUTE_IMAGE_TYPE_AMI,COMPUTE_VIOMMU_MODEL_INTEL,COMPUTE_SECURITY_TPM_2_0,COMPUTE_STORAGE_BUS_SCSI,COMPUTE_IMAGE_TYPE_ARI,COMPUTE_NET_VIF_MODEL_VMXNET3,COMPUTE_SECURITY_TPM_1_2,COMPUTE_NET_VIF_MODEL_E1000,HW_CPU_X86_SSE,COMPUTE_VOLUME_MULTI_ATTACH,COMPUTE_STORAGE_BUS_IDE,COMPUTE_GRAPHICS_MODEL_NONE,COMPUTE_VOLUME_ATTACH_WITH_TAG,COMPUTE_NET_VIF_MODEL_VIRTIO,HW_CPU_X86_SSE41,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_DEVICE_TAGGING,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_ACCELERATORS,COMPUTE_NET_VIF_MODEL_RTL8139,COMPUTE_GRAPHICS_MODEL_VGA,COMPUTE_STORAGE_BUS_SATA,COMPUTE_NET_VIF_MODEL_SPAPR_VLAN _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:825
Oct 02 12:41:47 compute-0 nova_compute[192079]: 2025-10-02 12:41:47.542 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:41:47 compute-0 nova_compute[192079]: 2025-10-02 12:41:47.567 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:41:47 compute-0 nova_compute[192079]: 2025-10-02 12:41:47.591 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:41:47 compute-0 nova_compute[192079]: 2025-10-02 12:41:47.591 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.480s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:41:48 compute-0 nova_compute[192079]: 2025-10-02 12:41:48.441 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:51 compute-0 nova_compute[192079]: 2025-10-02 12:41:51.593 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:41:51 compute-0 nova_compute[192079]: 2025-10-02 12:41:51.593 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:41:51 compute-0 nova_compute[192079]: 2025-10-02 12:41:51.608 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:53 compute-0 podman[250513]: 2025-10-02 12:41:53.152918591 +0000 UTC m=+0.063474577 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, 
org.label-schema.license=GPLv2, tcib_managed=true, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_metadata_agent, managed_by=edpm_ansible)
Oct 02 12:41:53 compute-0 podman[250515]: 2025-10-02 12:41:53.15363781 +0000 UTC m=+0.057249447 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>)
Oct 02 12:41:53 compute-0 podman[250514]: 2025-10-02 12:41:53.182802758 +0000 UTC m=+0.089484299 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3, managed_by=edpm_ansible, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, container_name=ovn_controller)
Oct 02 12:41:53 compute-0 nova_compute[192079]: 2025-10-02 12:41:53.443 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:53 compute-0 nova_compute[192079]: 2025-10-02 12:41:53.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:41:54 compute-0 nova_compute[192079]: 2025-10-02 12:41:54.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:41:54 compute-0 nova_compute[192079]: 2025-10-02 12:41:54.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:41:54 compute-0 nova_compute[192079]: 2025-10-02 12:41:54.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:41:55 compute-0 nova_compute[192079]: 2025-10-02 12:41:55.341 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "refresh_cache-e19b3c67-012d-4720-9ed5-92530129270c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:41:55 compute-0 nova_compute[192079]: 2025-10-02 12:41:55.342 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquired lock "refresh_cache-e19b3c67-012d-4720-9ed5-92530129270c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:41:55 compute-0 nova_compute[192079]: 2025-10-02 12:41:55.342 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Forcefully refreshing network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2004
Oct 02 12:41:55 compute-0 nova_compute[192079]: 2025-10-02 12:41:55.342 2 DEBUG nova.objects.instance [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lazy-loading 'info_cache' on Instance uuid e19b3c67-012d-4720-9ed5-92530129270c obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:41:56 compute-0 nova_compute[192079]: 2025-10-02 12:41:56.610 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:41:57 compute-0 nova_compute[192079]: 2025-10-02 12:41:57.964 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Updating instance_info_cache with network_info: [{"id": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "address": "fa:16:3e:bf:f6:bb", "network": {"id": "3a127238-c3fd-4117-ae39-3087c30f09a1", "bridge": "br-int", "label": "tempest-network-smoke--12525199", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap8cfd60ac-8c", "ovs_interfaceid": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:41:57 compute-0 nova_compute[192079]: 2025-10-02 12:41:57.980 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Releasing lock "refresh_cache-e19b3c67-012d-4720-9ed5-92530129270c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:41:57 compute-0 nova_compute[192079]: 2025-10-02 12:41:57.981 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Updated the network info_cache for instance _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9929
Oct 02 12:41:58 compute-0 nova_compute[192079]: 2025-10-02 12:41:58.445 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:01 compute-0 nova_compute[192079]: 2025-10-02 12:42:01.614 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:01 compute-0 nova_compute[192079]: 2025-10-02 12:42:01.701 2 INFO nova.compute.manager [None req-e3b0489a-addc-49d2-861a-0a0766ad071b a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Get console output
Oct 02 12:42:01 compute-0 nova_compute[192079]: 2025-10-02 12:42:01.706 55 INFO nova.privsep.libvirt [-] Ignored error while reading from instance console pty: can't concat NoneType to bytes
Oct 02 12:42:01 compute-0 nova_compute[192079]: 2025-10-02 12:42:01.735 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:01.736 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=47, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=46) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:42:01 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:01.737 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 10 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:42:02 compute-0 podman[250580]: 2025-10-02 12:42:02.145048844 +0000 UTC m=+0.059276612 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=edpm, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', 
'/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:42:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:02.247 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:42:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:02.248 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:42:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:02.249 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:42:03 compute-0 nova_compute[192079]: 2025-10-02 12:42:03.449 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:03 compute-0 nova_compute[192079]: 2025-10-02 12:42:03.591 2 INFO nova.compute.manager [None req-684a7e0d-20cd-47aa-955d-9e201df87a1a a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Get console output
Oct 02 12:42:03 compute-0 nova_compute[192079]: 2025-10-02 12:42:03.597 55 INFO nova.privsep.libvirt [-] Ignored error while reading from instance console pty: can't concat NoneType to bytes
Oct 02 12:42:03 compute-0 nova_compute[192079]: 2025-10-02 12:42:03.698 2 DEBUG nova.compute.manager [req-38918ec8-8fbe-4ab1-a658-1b578d5715a5 req-e806afa2-a823-4f95-b221-d0a879558387 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Received event network-changed-8cfd60ac-8c7b-4732-bae4-7099f5767458 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:42:03 compute-0 nova_compute[192079]: 2025-10-02 12:42:03.698 2 DEBUG nova.compute.manager [req-38918ec8-8fbe-4ab1-a658-1b578d5715a5 req-e806afa2-a823-4f95-b221-d0a879558387 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Refreshing instance network info cache due to event network-changed-8cfd60ac-8c7b-4732-bae4-7099f5767458. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:42:03 compute-0 nova_compute[192079]: 2025-10-02 12:42:03.699 2 DEBUG oslo_concurrency.lockutils [req-38918ec8-8fbe-4ab1-a658-1b578d5715a5 req-e806afa2-a823-4f95-b221-d0a879558387 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-e19b3c67-012d-4720-9ed5-92530129270c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:42:03 compute-0 nova_compute[192079]: 2025-10-02 12:42:03.699 2 DEBUG oslo_concurrency.lockutils [req-38918ec8-8fbe-4ab1-a658-1b578d5715a5 req-e806afa2-a823-4f95-b221-d0a879558387 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-e19b3c67-012d-4720-9ed5-92530129270c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:42:03 compute-0 nova_compute[192079]: 2025-10-02 12:42:03.699 2 DEBUG nova.network.neutron [req-38918ec8-8fbe-4ab1-a658-1b578d5715a5 req-e806afa2-a823-4f95-b221-d0a879558387 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Refreshing network info cache for port 8cfd60ac-8c7b-4732-bae4-7099f5767458 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:42:05 compute-0 nova_compute[192079]: 2025-10-02 12:42:05.022 2 DEBUG nova.network.neutron [req-38918ec8-8fbe-4ab1-a658-1b578d5715a5 req-e806afa2-a823-4f95-b221-d0a879558387 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Updated VIF entry in instance network info cache for port 8cfd60ac-8c7b-4732-bae4-7099f5767458. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:42:05 compute-0 nova_compute[192079]: 2025-10-02 12:42:05.023 2 DEBUG nova.network.neutron [req-38918ec8-8fbe-4ab1-a658-1b578d5715a5 req-e806afa2-a823-4f95-b221-d0a879558387 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Updating instance_info_cache with network_info: [{"id": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "address": "fa:16:3e:bf:f6:bb", "network": {"id": "3a127238-c3fd-4117-ae39-3087c30f09a1", "bridge": "br-int", "label": "tempest-network-smoke--12525199", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap8cfd60ac-8c", "ovs_interfaceid": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:42:05 compute-0 nova_compute[192079]: 2025-10-02 12:42:05.044 2 DEBUG oslo_concurrency.lockutils [req-38918ec8-8fbe-4ab1-a658-1b578d5715a5 req-e806afa2-a823-4f95-b221-d0a879558387 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-e19b3c67-012d-4720-9ed5-92530129270c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:42:05 compute-0 nova_compute[192079]: 2025-10-02 12:42:05.045 2 DEBUG nova.compute.manager [req-38918ec8-8fbe-4ab1-a658-1b578d5715a5 req-e806afa2-a823-4f95-b221-d0a879558387 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Received event network-vif-unplugged-8cfd60ac-8c7b-4732-bae4-7099f5767458 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:42:05 compute-0 nova_compute[192079]: 2025-10-02 12:42:05.045 2 DEBUG oslo_concurrency.lockutils [req-38918ec8-8fbe-4ab1-a658-1b578d5715a5 req-e806afa2-a823-4f95-b221-d0a879558387 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "e19b3c67-012d-4720-9ed5-92530129270c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:42:05 compute-0 nova_compute[192079]: 2025-10-02 12:42:05.046 2 DEBUG oslo_concurrency.lockutils [req-38918ec8-8fbe-4ab1-a658-1b578d5715a5 req-e806afa2-a823-4f95-b221-d0a879558387 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "e19b3c67-012d-4720-9ed5-92530129270c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:42:05 compute-0 nova_compute[192079]: 2025-10-02 12:42:05.046 2 DEBUG oslo_concurrency.lockutils [req-38918ec8-8fbe-4ab1-a658-1b578d5715a5 req-e806afa2-a823-4f95-b221-d0a879558387 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "e19b3c67-012d-4720-9ed5-92530129270c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:42:05 compute-0 nova_compute[192079]: 2025-10-02 12:42:05.046 2 DEBUG nova.compute.manager [req-38918ec8-8fbe-4ab1-a658-1b578d5715a5 req-e806afa2-a823-4f95-b221-d0a879558387 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] No waiting events found dispatching network-vif-unplugged-8cfd60ac-8c7b-4732-bae4-7099f5767458 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:42:05 compute-0 nova_compute[192079]: 2025-10-02 12:42:05.046 2 WARNING nova.compute.manager [req-38918ec8-8fbe-4ab1-a658-1b578d5715a5 req-e806afa2-a823-4f95-b221-d0a879558387 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Received unexpected event network-vif-unplugged-8cfd60ac-8c7b-4732-bae4-7099f5767458 for instance with vm_state active and task_state None.
Oct 02 12:42:05 compute-0 nova_compute[192079]: 2025-10-02 12:42:05.046 2 DEBUG nova.compute.manager [req-38918ec8-8fbe-4ab1-a658-1b578d5715a5 req-e806afa2-a823-4f95-b221-d0a879558387 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Received event network-vif-plugged-8cfd60ac-8c7b-4732-bae4-7099f5767458 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:42:05 compute-0 nova_compute[192079]: 2025-10-02 12:42:05.047 2 DEBUG oslo_concurrency.lockutils [req-38918ec8-8fbe-4ab1-a658-1b578d5715a5 req-e806afa2-a823-4f95-b221-d0a879558387 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "e19b3c67-012d-4720-9ed5-92530129270c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:42:05 compute-0 nova_compute[192079]: 2025-10-02 12:42:05.047 2 DEBUG oslo_concurrency.lockutils [req-38918ec8-8fbe-4ab1-a658-1b578d5715a5 req-e806afa2-a823-4f95-b221-d0a879558387 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "e19b3c67-012d-4720-9ed5-92530129270c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:42:05 compute-0 nova_compute[192079]: 2025-10-02 12:42:05.047 2 DEBUG oslo_concurrency.lockutils [req-38918ec8-8fbe-4ab1-a658-1b578d5715a5 req-e806afa2-a823-4f95-b221-d0a879558387 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "e19b3c67-012d-4720-9ed5-92530129270c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:42:05 compute-0 nova_compute[192079]: 2025-10-02 12:42:05.047 2 DEBUG nova.compute.manager [req-38918ec8-8fbe-4ab1-a658-1b578d5715a5 req-e806afa2-a823-4f95-b221-d0a879558387 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] No waiting events found dispatching network-vif-plugged-8cfd60ac-8c7b-4732-bae4-7099f5767458 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:42:05 compute-0 nova_compute[192079]: 2025-10-02 12:42:05.048 2 WARNING nova.compute.manager [req-38918ec8-8fbe-4ab1-a658-1b578d5715a5 req-e806afa2-a823-4f95-b221-d0a879558387 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Received unexpected event network-vif-plugged-8cfd60ac-8c7b-4732-bae4-7099f5767458 for instance with vm_state active and task_state None.
Oct 02 12:42:05 compute-0 nova_compute[192079]: 2025-10-02 12:42:05.587 2 INFO nova.compute.manager [None req-f2e9b272-6f58-4bc3-ae28-595e3f752e96 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Get console output
Oct 02 12:42:05 compute-0 nova_compute[192079]: 2025-10-02 12:42:05.593 55 INFO nova.privsep.libvirt [-] Ignored error while reading from instance console pty: can't concat NoneType to bytes
Oct 02 12:42:05 compute-0 nova_compute[192079]: 2025-10-02 12:42:05.780 2 DEBUG nova.compute.manager [req-e47fe06e-9064-4bdd-83ee-dcd1d2fcb31b req-06252ecd-e918-4e37-80a7-eb88ce0f43ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Received event network-changed-8cfd60ac-8c7b-4732-bae4-7099f5767458 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:42:05 compute-0 nova_compute[192079]: 2025-10-02 12:42:05.780 2 DEBUG nova.compute.manager [req-e47fe06e-9064-4bdd-83ee-dcd1d2fcb31b req-06252ecd-e918-4e37-80a7-eb88ce0f43ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Refreshing instance network info cache due to event network-changed-8cfd60ac-8c7b-4732-bae4-7099f5767458. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:42:05 compute-0 nova_compute[192079]: 2025-10-02 12:42:05.781 2 DEBUG oslo_concurrency.lockutils [req-e47fe06e-9064-4bdd-83ee-dcd1d2fcb31b req-06252ecd-e918-4e37-80a7-eb88ce0f43ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-e19b3c67-012d-4720-9ed5-92530129270c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:42:05 compute-0 nova_compute[192079]: 2025-10-02 12:42:05.781 2 DEBUG oslo_concurrency.lockutils [req-e47fe06e-9064-4bdd-83ee-dcd1d2fcb31b req-06252ecd-e918-4e37-80a7-eb88ce0f43ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-e19b3c67-012d-4720-9ed5-92530129270c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:42:05 compute-0 nova_compute[192079]: 2025-10-02 12:42:05.782 2 DEBUG nova.network.neutron [req-e47fe06e-9064-4bdd-83ee-dcd1d2fcb31b req-06252ecd-e918-4e37-80a7-eb88ce0f43ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Refreshing network info cache for port 8cfd60ac-8c7b-4732-bae4-7099f5767458 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:42:06 compute-0 nova_compute[192079]: 2025-10-02 12:42:06.616 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:06 compute-0 nova_compute[192079]: 2025-10-02 12:42:06.997 2 DEBUG nova.network.neutron [req-e47fe06e-9064-4bdd-83ee-dcd1d2fcb31b req-06252ecd-e918-4e37-80a7-eb88ce0f43ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Updated VIF entry in instance network info cache for port 8cfd60ac-8c7b-4732-bae4-7099f5767458. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:42:06 compute-0 nova_compute[192079]: 2025-10-02 12:42:06.998 2 DEBUG nova.network.neutron [req-e47fe06e-9064-4bdd-83ee-dcd1d2fcb31b req-06252ecd-e918-4e37-80a7-eb88ce0f43ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Updating instance_info_cache with network_info: [{"id": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "address": "fa:16:3e:bf:f6:bb", "network": {"id": "3a127238-c3fd-4117-ae39-3087c30f09a1", "bridge": "br-int", "label": "tempest-network-smoke--12525199", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap8cfd60ac-8c", "ovs_interfaceid": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:42:07 compute-0 nova_compute[192079]: 2025-10-02 12:42:07.016 2 DEBUG oslo_concurrency.lockutils [req-e47fe06e-9064-4bdd-83ee-dcd1d2fcb31b req-06252ecd-e918-4e37-80a7-eb88ce0f43ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-e19b3c67-012d-4720-9ed5-92530129270c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:42:07 compute-0 nova_compute[192079]: 2025-10-02 12:42:07.017 2 DEBUG nova.compute.manager [req-e47fe06e-9064-4bdd-83ee-dcd1d2fcb31b req-06252ecd-e918-4e37-80a7-eb88ce0f43ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Received event network-vif-plugged-8cfd60ac-8c7b-4732-bae4-7099f5767458 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:42:07 compute-0 nova_compute[192079]: 2025-10-02 12:42:07.017 2 DEBUG oslo_concurrency.lockutils [req-e47fe06e-9064-4bdd-83ee-dcd1d2fcb31b req-06252ecd-e918-4e37-80a7-eb88ce0f43ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "e19b3c67-012d-4720-9ed5-92530129270c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:42:07 compute-0 nova_compute[192079]: 2025-10-02 12:42:07.017 2 DEBUG oslo_concurrency.lockutils [req-e47fe06e-9064-4bdd-83ee-dcd1d2fcb31b req-06252ecd-e918-4e37-80a7-eb88ce0f43ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "e19b3c67-012d-4720-9ed5-92530129270c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:42:07 compute-0 nova_compute[192079]: 2025-10-02 12:42:07.018 2 DEBUG oslo_concurrency.lockutils [req-e47fe06e-9064-4bdd-83ee-dcd1d2fcb31b req-06252ecd-e918-4e37-80a7-eb88ce0f43ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "e19b3c67-012d-4720-9ed5-92530129270c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:42:07 compute-0 nova_compute[192079]: 2025-10-02 12:42:07.018 2 DEBUG nova.compute.manager [req-e47fe06e-9064-4bdd-83ee-dcd1d2fcb31b req-06252ecd-e918-4e37-80a7-eb88ce0f43ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] No waiting events found dispatching network-vif-plugged-8cfd60ac-8c7b-4732-bae4-7099f5767458 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:42:07 compute-0 nova_compute[192079]: 2025-10-02 12:42:07.018 2 WARNING nova.compute.manager [req-e47fe06e-9064-4bdd-83ee-dcd1d2fcb31b req-06252ecd-e918-4e37-80a7-eb88ce0f43ee 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Received unexpected event network-vif-plugged-8cfd60ac-8c7b-4732-bae4-7099f5767458 for instance with vm_state active and task_state None.
Oct 02 12:42:07 compute-0 nova_compute[192079]: 2025-10-02 12:42:07.874 2 DEBUG nova.compute.manager [req-901d2ad6-21f8-4533-bb39-33acafe8ca15 req-80ba738d-bdf0-496b-a246-ccf7c23060d2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Received event network-vif-plugged-8cfd60ac-8c7b-4732-bae4-7099f5767458 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:42:07 compute-0 nova_compute[192079]: 2025-10-02 12:42:07.874 2 DEBUG oslo_concurrency.lockutils [req-901d2ad6-21f8-4533-bb39-33acafe8ca15 req-80ba738d-bdf0-496b-a246-ccf7c23060d2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "e19b3c67-012d-4720-9ed5-92530129270c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:42:07 compute-0 nova_compute[192079]: 2025-10-02 12:42:07.874 2 DEBUG oslo_concurrency.lockutils [req-901d2ad6-21f8-4533-bb39-33acafe8ca15 req-80ba738d-bdf0-496b-a246-ccf7c23060d2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "e19b3c67-012d-4720-9ed5-92530129270c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:42:07 compute-0 nova_compute[192079]: 2025-10-02 12:42:07.874 2 DEBUG oslo_concurrency.lockutils [req-901d2ad6-21f8-4533-bb39-33acafe8ca15 req-80ba738d-bdf0-496b-a246-ccf7c23060d2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "e19b3c67-012d-4720-9ed5-92530129270c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:42:07 compute-0 nova_compute[192079]: 2025-10-02 12:42:07.875 2 DEBUG nova.compute.manager [req-901d2ad6-21f8-4533-bb39-33acafe8ca15 req-80ba738d-bdf0-496b-a246-ccf7c23060d2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] No waiting events found dispatching network-vif-plugged-8cfd60ac-8c7b-4732-bae4-7099f5767458 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:42:07 compute-0 nova_compute[192079]: 2025-10-02 12:42:07.875 2 WARNING nova.compute.manager [req-901d2ad6-21f8-4533-bb39-33acafe8ca15 req-80ba738d-bdf0-496b-a246-ccf7c23060d2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Received unexpected event network-vif-plugged-8cfd60ac-8c7b-4732-bae4-7099f5767458 for instance with vm_state active and task_state None.
Oct 02 12:42:08 compute-0 nova_compute[192079]: 2025-10-02 12:42:08.450 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:09 compute-0 nova_compute[192079]: 2025-10-02 12:42:09.807 2 DEBUG nova.compute.manager [req-3422c9d9-20f5-4ce4-b15b-ae5ff7896f59 req-9a2f22d7-8d6c-4253-993a-85190c66331c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Received event network-changed-8cfd60ac-8c7b-4732-bae4-7099f5767458 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:42:09 compute-0 nova_compute[192079]: 2025-10-02 12:42:09.807 2 DEBUG nova.compute.manager [req-3422c9d9-20f5-4ce4-b15b-ae5ff7896f59 req-9a2f22d7-8d6c-4253-993a-85190c66331c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Refreshing instance network info cache due to event network-changed-8cfd60ac-8c7b-4732-bae4-7099f5767458. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:42:09 compute-0 nova_compute[192079]: 2025-10-02 12:42:09.807 2 DEBUG oslo_concurrency.lockutils [req-3422c9d9-20f5-4ce4-b15b-ae5ff7896f59 req-9a2f22d7-8d6c-4253-993a-85190c66331c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-e19b3c67-012d-4720-9ed5-92530129270c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:42:09 compute-0 nova_compute[192079]: 2025-10-02 12:42:09.807 2 DEBUG oslo_concurrency.lockutils [req-3422c9d9-20f5-4ce4-b15b-ae5ff7896f59 req-9a2f22d7-8d6c-4253-993a-85190c66331c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-e19b3c67-012d-4720-9ed5-92530129270c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:42:09 compute-0 nova_compute[192079]: 2025-10-02 12:42:09.808 2 DEBUG nova.network.neutron [req-3422c9d9-20f5-4ce4-b15b-ae5ff7896f59 req-9a2f22d7-8d6c-4253-993a-85190c66331c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Refreshing network info cache for port 8cfd60ac-8c7b-4732-bae4-7099f5767458 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:42:09 compute-0 nova_compute[192079]: 2025-10-02 12:42:09.906 2 DEBUG oslo_concurrency.lockutils [None req-c7353325-b0f3-48ce-848b-4f13e0078e70 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "e19b3c67-012d-4720-9ed5-92530129270c" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:42:09 compute-0 nova_compute[192079]: 2025-10-02 12:42:09.907 2 DEBUG oslo_concurrency.lockutils [None req-c7353325-b0f3-48ce-848b-4f13e0078e70 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "e19b3c67-012d-4720-9ed5-92530129270c" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:42:09 compute-0 nova_compute[192079]: 2025-10-02 12:42:09.907 2 DEBUG oslo_concurrency.lockutils [None req-c7353325-b0f3-48ce-848b-4f13e0078e70 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "e19b3c67-012d-4720-9ed5-92530129270c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:42:09 compute-0 nova_compute[192079]: 2025-10-02 12:42:09.907 2 DEBUG oslo_concurrency.lockutils [None req-c7353325-b0f3-48ce-848b-4f13e0078e70 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "e19b3c67-012d-4720-9ed5-92530129270c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:42:09 compute-0 nova_compute[192079]: 2025-10-02 12:42:09.907 2 DEBUG oslo_concurrency.lockutils [None req-c7353325-b0f3-48ce-848b-4f13e0078e70 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "e19b3c67-012d-4720-9ed5-92530129270c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:42:09 compute-0 nova_compute[192079]: 2025-10-02 12:42:09.923 2 INFO nova.compute.manager [None req-c7353325-b0f3-48ce-848b-4f13e0078e70 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Terminating instance
Oct 02 12:42:09 compute-0 nova_compute[192079]: 2025-10-02 12:42:09.935 2 DEBUG nova.compute.manager [None req-c7353325-b0f3-48ce-848b-4f13e0078e70 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:42:09 compute-0 kernel: tap8cfd60ac-8c (unregistering): left promiscuous mode
Oct 02 12:42:09 compute-0 NetworkManager[51160]: <info>  [1759408929.9631] device (tap8cfd60ac-8c): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:42:09 compute-0 ovn_controller[94336]: 2025-10-02T12:42:09Z|00681|binding|INFO|Releasing lport 8cfd60ac-8c7b-4732-bae4-7099f5767458 from this chassis (sb_readonly=0)
Oct 02 12:42:09 compute-0 ovn_controller[94336]: 2025-10-02T12:42:09Z|00682|binding|INFO|Setting lport 8cfd60ac-8c7b-4732-bae4-7099f5767458 down in Southbound
Oct 02 12:42:09 compute-0 ovn_controller[94336]: 2025-10-02T12:42:09Z|00683|binding|INFO|Removing iface tap8cfd60ac-8c ovn-installed in OVS
Oct 02 12:42:09 compute-0 nova_compute[192079]: 2025-10-02 12:42:09.976 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:09.982 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:bf:f6:bb 10.100.0.6'], port_security=['fa:16:3e:bf:f6:bb 10.100.0.6'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.6/28', 'neutron:device_id': 'e19b3c67-012d-4720-9ed5-92530129270c', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-3a127238-c3fd-4117-ae39-3087c30f09a1', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'neutron:revision_number': '8', 'neutron:security_group_ids': '8c6b0ed7-248d-4688-8753-c9cb6fe8719c', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=5dbca848-bd3c-415e-9cb9-ed4c61904df1, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=8cfd60ac-8c7b-4732-bae4-7099f5767458) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:42:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:09.983 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 8cfd60ac-8c7b-4732-bae4-7099f5767458 in datapath 3a127238-c3fd-4117-ae39-3087c30f09a1 unbound from our chassis
Oct 02 12:42:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:09.985 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 3a127238-c3fd-4117-ae39-3087c30f09a1, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:42:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:09.987 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ac9b2cb8-5f83-44fe-9c4d-7863589379b5]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:42:09 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:09.987 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-3a127238-c3fd-4117-ae39-3087c30f09a1 namespace which is not needed anymore
Oct 02 12:42:09 compute-0 nova_compute[192079]: 2025-10-02 12:42:09.994 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:10 compute-0 systemd[1]: machine-qemu\x2d83\x2dinstance\x2d000000ab.scope: Deactivated successfully.
Oct 02 12:42:10 compute-0 systemd[1]: machine-qemu\x2d83\x2dinstance\x2d000000ab.scope: Consumed 14.465s CPU time.
Oct 02 12:42:10 compute-0 systemd-machined[152150]: Machine qemu-83-instance-000000ab terminated.
Oct 02 12:42:10 compute-0 podman[250601]: 2025-10-02 12:42:10.06901222 +0000 UTC m=+0.070781856 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, url=https://catalog.redhat.com/en/search?searchType=containers, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, name=ubi9-minimal, distribution-scope=public, maintainer=Red Hat, Inc., com.redhat.component=ubi9-minimal-container, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, build-date=2025-08-20T13:12:41, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., version=9.6, container_name=openstack_network_exporter, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., architecture=x86_64, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.openshift.expose-services=, io.openshift.tags=minimal rhel9, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', 
'/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., vendor=Red Hat, Inc., release=1755695350, io.buildah.version=1.33.7, vcs-type=git, config_id=edpm)
Oct 02 12:42:10 compute-0 podman[250604]: 2025-10-02 12:42:10.089523281 +0000 UTC m=+0.089822048 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, container_name=multipathd, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:42:10 compute-0 neutron-haproxy-ovnmeta-3a127238-c3fd-4117-ae39-3087c30f09a1[250258]: [NOTICE]   (250290) : haproxy version is 2.8.14-c23fe91
Oct 02 12:42:10 compute-0 neutron-haproxy-ovnmeta-3a127238-c3fd-4117-ae39-3087c30f09a1[250258]: [NOTICE]   (250290) : path to executable is /usr/sbin/haproxy
Oct 02 12:42:10 compute-0 neutron-haproxy-ovnmeta-3a127238-c3fd-4117-ae39-3087c30f09a1[250258]: [ALERT]    (250290) : Current worker (250302) exited with code 143 (Terminated)
Oct 02 12:42:10 compute-0 neutron-haproxy-ovnmeta-3a127238-c3fd-4117-ae39-3087c30f09a1[250258]: [WARNING]  (250290) : All workers exited. Exiting... (0)
Oct 02 12:42:10 compute-0 systemd[1]: libpod-bb882e503a97c70bfa8d80d3a084c40458ce0cb946b431216503941c63501d7b.scope: Deactivated successfully.
Oct 02 12:42:10 compute-0 podman[250663]: 2025-10-02 12:42:10.127661354 +0000 UTC m=+0.045274129 container died bb882e503a97c70bfa8d80d3a084c40458ce0cb946b431216503941c63501d7b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-3a127238-c3fd-4117-ae39-3087c30f09a1, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS)
Oct 02 12:42:10 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-bb882e503a97c70bfa8d80d3a084c40458ce0cb946b431216503941c63501d7b-userdata-shm.mount: Deactivated successfully.
Oct 02 12:42:10 compute-0 systemd[1]: var-lib-containers-storage-overlay-46b7e79a2f06d0546737335afbf0a312009a4140aa2deb48cac52ec52c54d82a-merged.mount: Deactivated successfully.
Oct 02 12:42:10 compute-0 podman[250663]: 2025-10-02 12:42:10.169878819 +0000 UTC m=+0.087491594 container cleanup bb882e503a97c70bfa8d80d3a084c40458ce0cb946b431216503941c63501d7b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-3a127238-c3fd-4117-ae39-3087c30f09a1, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:42:10 compute-0 systemd[1]: libpod-conmon-bb882e503a97c70bfa8d80d3a084c40458ce0cb946b431216503941c63501d7b.scope: Deactivated successfully.
Oct 02 12:42:10 compute-0 nova_compute[192079]: 2025-10-02 12:42:10.192 2 INFO nova.virt.libvirt.driver [-] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Instance destroyed successfully.
Oct 02 12:42:10 compute-0 nova_compute[192079]: 2025-10-02 12:42:10.193 2 DEBUG nova.objects.instance [None req-c7353325-b0f3-48ce-848b-4f13e0078e70 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lazy-loading 'resources' on Instance uuid e19b3c67-012d-4720-9ed5-92530129270c obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:42:10 compute-0 nova_compute[192079]: 2025-10-02 12:42:10.206 2 DEBUG nova.virt.libvirt.vif [None req-c7353325-b0f3-48ce-848b-4f13e0078e70 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:41:00Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestNetworkBasicOps-server-1466705682',display_name='tempest-TestNetworkBasicOps-server-1466705682',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkbasicops-server-1466705682',id=171,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBAL3IafrkUUDMBSP53gCxBwuDelBHD0YUoTUSoWqfmEzl93CdF9lTmxq9bNWf/TU7YpVINFdBsXy5LsUzhAl7hFwzu9/1LUxPhu8oeLDeXNeE9FYhi3sduX/kxI17gLspA==',key_name='tempest-TestNetworkBasicOps-134374525',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:41:14Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='6e2a4899168a47618e377cb3ac85ddd2',ramdisk_id='',reservation_id='r-1qbz6ygr',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-TestNetworkBasicOps-1323893370',owner_user_name='tempest-TestNetworkBasicOps-1323893370-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:41:15Z,user_data=None,user_id='a1898fdf056c4a249c33590f26d4d845',uuid=e19b3c67-012d-4720-9ed5-92530129270c,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "address": "fa:16:3e:bf:f6:bb", "network": {"id": "3a127238-c3fd-4117-ae39-3087c30f09a1", "bridge": "br-int", "label": "tempest-network-smoke--12525199", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", 
"version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap8cfd60ac-8c", "ovs_interfaceid": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:42:10 compute-0 nova_compute[192079]: 2025-10-02 12:42:10.207 2 DEBUG nova.network.os_vif_util [None req-c7353325-b0f3-48ce-848b-4f13e0078e70 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converting VIF {"id": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "address": "fa:16:3e:bf:f6:bb", "network": {"id": "3a127238-c3fd-4117-ae39-3087c30f09a1", "bridge": "br-int", "label": "tempest-network-smoke--12525199", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap8cfd60ac-8c", "ovs_interfaceid": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:42:10 compute-0 nova_compute[192079]: 2025-10-02 12:42:10.208 2 DEBUG nova.network.os_vif_util [None req-c7353325-b0f3-48ce-848b-4f13e0078e70 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:bf:f6:bb,bridge_name='br-int',has_traffic_filtering=True,id=8cfd60ac-8c7b-4732-bae4-7099f5767458,network=Network(3a127238-c3fd-4117-ae39-3087c30f09a1),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap8cfd60ac-8c') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:42:10 compute-0 nova_compute[192079]: 2025-10-02 12:42:10.208 2 DEBUG os_vif [None req-c7353325-b0f3-48ce-848b-4f13e0078e70 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:bf:f6:bb,bridge_name='br-int',has_traffic_filtering=True,id=8cfd60ac-8c7b-4732-bae4-7099f5767458,network=Network(3a127238-c3fd-4117-ae39-3087c30f09a1),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap8cfd60ac-8c') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:42:10 compute-0 nova_compute[192079]: 2025-10-02 12:42:10.210 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:10 compute-0 nova_compute[192079]: 2025-10-02 12:42:10.211 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap8cfd60ac-8c, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:42:10 compute-0 nova_compute[192079]: 2025-10-02 12:42:10.212 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:10 compute-0 nova_compute[192079]: 2025-10-02 12:42:10.215 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:42:10 compute-0 nova_compute[192079]: 2025-10-02 12:42:10.217 2 INFO os_vif [None req-c7353325-b0f3-48ce-848b-4f13e0078e70 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:bf:f6:bb,bridge_name='br-int',has_traffic_filtering=True,id=8cfd60ac-8c7b-4732-bae4-7099f5767458,network=Network(3a127238-c3fd-4117-ae39-3087c30f09a1),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap8cfd60ac-8c')
Oct 02 12:42:10 compute-0 nova_compute[192079]: 2025-10-02 12:42:10.218 2 INFO nova.virt.libvirt.driver [None req-c7353325-b0f3-48ce-848b-4f13e0078e70 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Deleting instance files /var/lib/nova/instances/e19b3c67-012d-4720-9ed5-92530129270c_del
Oct 02 12:42:10 compute-0 nova_compute[192079]: 2025-10-02 12:42:10.218 2 INFO nova.virt.libvirt.driver [None req-c7353325-b0f3-48ce-848b-4f13e0078e70 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Deletion of /var/lib/nova/instances/e19b3c67-012d-4720-9ed5-92530129270c_del complete
Oct 02 12:42:10 compute-0 podman[250708]: 2025-10-02 12:42:10.237040326 +0000 UTC m=+0.046425101 container remove bb882e503a97c70bfa8d80d3a084c40458ce0cb946b431216503941c63501d7b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-3a127238-c3fd-4117-ae39-3087c30f09a1, tcib_managed=true, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:42:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:10.242 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a8eb0adb-bb5c-4654-8880-f301af38b0a5]: (4, ('Thu Oct  2 12:42:10 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-3a127238-c3fd-4117-ae39-3087c30f09a1 (bb882e503a97c70bfa8d80d3a084c40458ce0cb946b431216503941c63501d7b)\nbb882e503a97c70bfa8d80d3a084c40458ce0cb946b431216503941c63501d7b\nThu Oct  2 12:42:10 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-3a127238-c3fd-4117-ae39-3087c30f09a1 (bb882e503a97c70bfa8d80d3a084c40458ce0cb946b431216503941c63501d7b)\nbb882e503a97c70bfa8d80d3a084c40458ce0cb946b431216503941c63501d7b\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:42:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:10.243 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2117db78-9aba-4c46-9909-d9e2a9608882]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:42:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:10.244 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap3a127238-c0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:42:10 compute-0 nova_compute[192079]: 2025-10-02 12:42:10.245 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:10 compute-0 kernel: tap3a127238-c0: left promiscuous mode
Oct 02 12:42:10 compute-0 nova_compute[192079]: 2025-10-02 12:42:10.257 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:10 compute-0 nova_compute[192079]: 2025-10-02 12:42:10.258 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:10.261 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[484f819d-4eb7-46e5-9301-52c2a60db9ff]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:42:10 compute-0 nova_compute[192079]: 2025-10-02 12:42:10.276 2 INFO nova.compute.manager [None req-c7353325-b0f3-48ce-848b-4f13e0078e70 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Took 0.34 seconds to destroy the instance on the hypervisor.
Oct 02 12:42:10 compute-0 nova_compute[192079]: 2025-10-02 12:42:10.277 2 DEBUG oslo.service.loopingcall [None req-c7353325-b0f3-48ce-848b-4f13e0078e70 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:42:10 compute-0 nova_compute[192079]: 2025-10-02 12:42:10.278 2 DEBUG nova.compute.manager [-] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:42:10 compute-0 nova_compute[192079]: 2025-10-02 12:42:10.278 2 DEBUG nova.network.neutron [-] [instance: e19b3c67-012d-4720-9ed5-92530129270c] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:42:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:10.289 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[07534df8-482c-4104-88bf-5cc7d8e9e370]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:42:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:10.290 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[eef1934b-7696-4b4b-9c5a-716f10261265]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:42:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:10.305 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c3d2f426-10bb-431b-a015-72806970127a]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 687026, 'reachable_time': 20525, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 250725, 'error': None, 'target': 'ovnmeta-3a127238-c3fd-4117-ae39-3087c30f09a1', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:42:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:10.308 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-3a127238-c3fd-4117-ae39-3087c30f09a1 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:42:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:10.308 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[00626812-b238-4a63-a49d-498e8a17d5fe]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:42:10 compute-0 systemd[1]: run-netns-ovnmeta\x2d3a127238\x2dc3fd\x2d4117\x2dae39\x2d3087c30f09a1.mount: Deactivated successfully.
Oct 02 12:42:11 compute-0 nova_compute[192079]: 2025-10-02 12:42:11.082 2 DEBUG nova.network.neutron [-] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:42:11 compute-0 nova_compute[192079]: 2025-10-02 12:42:11.099 2 INFO nova.compute.manager [-] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Took 0.82 seconds to deallocate network for instance.
Oct 02 12:42:11 compute-0 nova_compute[192079]: 2025-10-02 12:42:11.172 2 DEBUG oslo_concurrency.lockutils [None req-c7353325-b0f3-48ce-848b-4f13e0078e70 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:42:11 compute-0 nova_compute[192079]: 2025-10-02 12:42:11.173 2 DEBUG oslo_concurrency.lockutils [None req-c7353325-b0f3-48ce-848b-4f13e0078e70 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:42:11 compute-0 nova_compute[192079]: 2025-10-02 12:42:11.241 2 DEBUG nova.compute.provider_tree [None req-c7353325-b0f3-48ce-848b-4f13e0078e70 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:42:11 compute-0 nova_compute[192079]: 2025-10-02 12:42:11.256 2 DEBUG nova.scheduler.client.report [None req-c7353325-b0f3-48ce-848b-4f13e0078e70 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:42:11 compute-0 nova_compute[192079]: 2025-10-02 12:42:11.280 2 DEBUG oslo_concurrency.lockutils [None req-c7353325-b0f3-48ce-848b-4f13e0078e70 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.106s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:42:11 compute-0 nova_compute[192079]: 2025-10-02 12:42:11.311 2 INFO nova.scheduler.client.report [None req-c7353325-b0f3-48ce-848b-4f13e0078e70 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Deleted allocations for instance e19b3c67-012d-4720-9ed5-92530129270c
Oct 02 12:42:11 compute-0 nova_compute[192079]: 2025-10-02 12:42:11.382 2 DEBUG nova.network.neutron [req-3422c9d9-20f5-4ce4-b15b-ae5ff7896f59 req-9a2f22d7-8d6c-4253-993a-85190c66331c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Updated VIF entry in instance network info cache for port 8cfd60ac-8c7b-4732-bae4-7099f5767458. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:42:11 compute-0 nova_compute[192079]: 2025-10-02 12:42:11.382 2 DEBUG nova.network.neutron [req-3422c9d9-20f5-4ce4-b15b-ae5ff7896f59 req-9a2f22d7-8d6c-4253-993a-85190c66331c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Updating instance_info_cache with network_info: [{"id": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "address": "fa:16:3e:bf:f6:bb", "network": {"id": "3a127238-c3fd-4117-ae39-3087c30f09a1", "bridge": "br-int", "label": "tempest-network-smoke--12525199", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap8cfd60ac-8c", "ovs_interfaceid": "8cfd60ac-8c7b-4732-bae4-7099f5767458", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:42:11 compute-0 nova_compute[192079]: 2025-10-02 12:42:11.392 2 DEBUG oslo_concurrency.lockutils [None req-c7353325-b0f3-48ce-848b-4f13e0078e70 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "e19b3c67-012d-4720-9ed5-92530129270c" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.486s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:42:11 compute-0 nova_compute[192079]: 2025-10-02 12:42:11.404 2 DEBUG oslo_concurrency.lockutils [req-3422c9d9-20f5-4ce4-b15b-ae5ff7896f59 req-9a2f22d7-8d6c-4253-993a-85190c66331c 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-e19b3c67-012d-4720-9ed5-92530129270c" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:42:11 compute-0 nova_compute[192079]: 2025-10-02 12:42:11.726 2 DEBUG nova.compute.manager [req-4b4b12ae-ff64-4739-ad7b-2a30f2392edc req-f853bb81-d56e-4970-81bb-fc9cb2c6574a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Received event network-vif-deleted-8cfd60ac-8c7b-4732-bae4-7099f5767458 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:42:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:11.740 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '47'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:42:12 compute-0 nova_compute[192079]: 2025-10-02 12:42:12.078 2 DEBUG nova.compute.manager [req-d0021f86-f613-497d-98b8-8e9348aecbe0 req-2ff7d53a-1776-40fd-9f7b-8fb796678317 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Received event network-vif-unplugged-8cfd60ac-8c7b-4732-bae4-7099f5767458 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:42:12 compute-0 nova_compute[192079]: 2025-10-02 12:42:12.079 2 DEBUG oslo_concurrency.lockutils [req-d0021f86-f613-497d-98b8-8e9348aecbe0 req-2ff7d53a-1776-40fd-9f7b-8fb796678317 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "e19b3c67-012d-4720-9ed5-92530129270c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:42:12 compute-0 nova_compute[192079]: 2025-10-02 12:42:12.079 2 DEBUG oslo_concurrency.lockutils [req-d0021f86-f613-497d-98b8-8e9348aecbe0 req-2ff7d53a-1776-40fd-9f7b-8fb796678317 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "e19b3c67-012d-4720-9ed5-92530129270c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:42:12 compute-0 nova_compute[192079]: 2025-10-02 12:42:12.079 2 DEBUG oslo_concurrency.lockutils [req-d0021f86-f613-497d-98b8-8e9348aecbe0 req-2ff7d53a-1776-40fd-9f7b-8fb796678317 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "e19b3c67-012d-4720-9ed5-92530129270c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:42:12 compute-0 nova_compute[192079]: 2025-10-02 12:42:12.079 2 DEBUG nova.compute.manager [req-d0021f86-f613-497d-98b8-8e9348aecbe0 req-2ff7d53a-1776-40fd-9f7b-8fb796678317 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] No waiting events found dispatching network-vif-unplugged-8cfd60ac-8c7b-4732-bae4-7099f5767458 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:42:12 compute-0 nova_compute[192079]: 2025-10-02 12:42:12.080 2 WARNING nova.compute.manager [req-d0021f86-f613-497d-98b8-8e9348aecbe0 req-2ff7d53a-1776-40fd-9f7b-8fb796678317 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Received unexpected event network-vif-unplugged-8cfd60ac-8c7b-4732-bae4-7099f5767458 for instance with vm_state deleted and task_state None.
Oct 02 12:42:12 compute-0 nova_compute[192079]: 2025-10-02 12:42:12.080 2 DEBUG nova.compute.manager [req-d0021f86-f613-497d-98b8-8e9348aecbe0 req-2ff7d53a-1776-40fd-9f7b-8fb796678317 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Received event network-vif-plugged-8cfd60ac-8c7b-4732-bae4-7099f5767458 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:42:12 compute-0 nova_compute[192079]: 2025-10-02 12:42:12.080 2 DEBUG oslo_concurrency.lockutils [req-d0021f86-f613-497d-98b8-8e9348aecbe0 req-2ff7d53a-1776-40fd-9f7b-8fb796678317 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "e19b3c67-012d-4720-9ed5-92530129270c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:42:12 compute-0 nova_compute[192079]: 2025-10-02 12:42:12.080 2 DEBUG oslo_concurrency.lockutils [req-d0021f86-f613-497d-98b8-8e9348aecbe0 req-2ff7d53a-1776-40fd-9f7b-8fb796678317 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "e19b3c67-012d-4720-9ed5-92530129270c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:42:12 compute-0 nova_compute[192079]: 2025-10-02 12:42:12.081 2 DEBUG oslo_concurrency.lockutils [req-d0021f86-f613-497d-98b8-8e9348aecbe0 req-2ff7d53a-1776-40fd-9f7b-8fb796678317 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "e19b3c67-012d-4720-9ed5-92530129270c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:42:12 compute-0 nova_compute[192079]: 2025-10-02 12:42:12.081 2 DEBUG nova.compute.manager [req-d0021f86-f613-497d-98b8-8e9348aecbe0 req-2ff7d53a-1776-40fd-9f7b-8fb796678317 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] No waiting events found dispatching network-vif-plugged-8cfd60ac-8c7b-4732-bae4-7099f5767458 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:42:12 compute-0 nova_compute[192079]: 2025-10-02 12:42:12.081 2 WARNING nova.compute.manager [req-d0021f86-f613-497d-98b8-8e9348aecbe0 req-2ff7d53a-1776-40fd-9f7b-8fb796678317 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Received unexpected event network-vif-plugged-8cfd60ac-8c7b-4732-bae4-7099f5767458 for instance with vm_state deleted and task_state None.
Oct 02 12:42:13 compute-0 nova_compute[192079]: 2025-10-02 12:42:13.453 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:14 compute-0 nova_compute[192079]: 2025-10-02 12:42:14.175 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:14 compute-0 nova_compute[192079]: 2025-10-02 12:42:14.283 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:15 compute-0 podman[250727]: 2025-10-02 12:42:15.18407466 +0000 UTC m=+0.093531758 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']})
Oct 02 12:42:15 compute-0 podman[250728]: 2025-10-02 12:42:15.189833098 +0000 UTC m=+0.091883784 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, container_name=iscsid, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, tcib_managed=true, config_id=iscsid, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:42:15 compute-0 nova_compute[192079]: 2025-10-02 12:42:15.213 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:18 compute-0 nova_compute[192079]: 2025-10-02 12:42:18.500 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:20 compute-0 nova_compute[192079]: 2025-10-02 12:42:20.216 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:23 compute-0 nova_compute[192079]: 2025-10-02 12:42:23.501 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:24 compute-0 podman[250769]: 2025-10-02 12:42:24.133816693 +0000 UTC m=+0.051523731 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_id=ovn_metadata_agent)
Oct 02 12:42:24 compute-0 podman[250771]: 2025-10-02 12:42:24.135712494 +0000 UTC m=+0.047233152 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>)
Oct 02 12:42:24 compute-0 podman[250770]: 2025-10-02 12:42:24.188401045 +0000 UTC m=+0.101545578 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_managed=true, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, config_id=ovn_controller, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:42:25 compute-0 nova_compute[192079]: 2025-10-02 12:42:25.192 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759408930.1897078, e19b3c67-012d-4720-9ed5-92530129270c => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:42:25 compute-0 nova_compute[192079]: 2025-10-02 12:42:25.192 2 INFO nova.compute.manager [-] [instance: e19b3c67-012d-4720-9ed5-92530129270c] VM Stopped (Lifecycle Event)
Oct 02 12:42:25 compute-0 nova_compute[192079]: 2025-10-02 12:42:25.212 2 DEBUG nova.compute.manager [None req-ddb56cd1-850c-4f0d-8807-98259a8cd53e - - - - - -] [instance: e19b3c67-012d-4720-9ed5-92530129270c] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:42:25 compute-0 nova_compute[192079]: 2025-10-02 12:42:25.279 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:28 compute-0 nova_compute[192079]: 2025-10-02 12:42:28.503 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:30 compute-0 nova_compute[192079]: 2025-10-02 12:42:30.281 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:33 compute-0 podman[250834]: 2025-10-02 12:42:33.188145905 +0000 UTC m=+0.086578279 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, org.label-schema.name=CentOS 
Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:42:33 compute-0 nova_compute[192079]: 2025-10-02 12:42:33.554 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:34 compute-0 nova_compute[192079]: 2025-10-02 12:42:34.805 2 DEBUG oslo_concurrency.lockutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "31d1c03c-8272-4aa0-8a60-469bd8ca0853" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:42:34 compute-0 nova_compute[192079]: 2025-10-02 12:42:34.806 2 DEBUG oslo_concurrency.lockutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "31d1c03c-8272-4aa0-8a60-469bd8ca0853" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:42:34 compute-0 nova_compute[192079]: 2025-10-02 12:42:34.821 2 DEBUG nova.compute.manager [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:42:34 compute-0 nova_compute[192079]: 2025-10-02 12:42:34.935 2 DEBUG oslo_concurrency.lockutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:42:34 compute-0 nova_compute[192079]: 2025-10-02 12:42:34.936 2 DEBUG oslo_concurrency.lockutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:42:34 compute-0 nova_compute[192079]: 2025-10-02 12:42:34.944 2 DEBUG nova.virt.hardware [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:42:34 compute-0 nova_compute[192079]: 2025-10-02 12:42:34.945 2 INFO nova.compute.claims [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.049 2 DEBUG nova.compute.provider_tree [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.069 2 DEBUG nova.scheduler.client.report [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.099 2 DEBUG oslo_concurrency.lockutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.164s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.100 2 DEBUG nova.compute.manager [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.154 2 DEBUG nova.compute.manager [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.154 2 DEBUG nova.network.neutron [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.170 2 INFO nova.virt.libvirt.driver [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.190 2 DEBUG nova.compute.manager [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.283 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.301 2 DEBUG nova.compute.manager [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.303 2 DEBUG nova.virt.libvirt.driver [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.304 2 INFO nova.virt.libvirt.driver [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Creating image(s)
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.305 2 DEBUG oslo_concurrency.lockutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "/var/lib/nova/instances/31d1c03c-8272-4aa0-8a60-469bd8ca0853/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.305 2 DEBUG oslo_concurrency.lockutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "/var/lib/nova/instances/31d1c03c-8272-4aa0-8a60-469bd8ca0853/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.306 2 DEBUG oslo_concurrency.lockutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "/var/lib/nova/instances/31d1c03c-8272-4aa0-8a60-469bd8ca0853/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.329 2 DEBUG oslo_concurrency.processutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.424 2 DEBUG oslo_concurrency.processutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.095s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.425 2 DEBUG oslo_concurrency.lockutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.426 2 DEBUG oslo_concurrency.lockutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.437 2 DEBUG oslo_concurrency.processutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.462 2 DEBUG nova.policy [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a1898fdf056c4a249c33590f26d4d845', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.505 2 DEBUG oslo_concurrency.processutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.068s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.506 2 DEBUG oslo_concurrency.processutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/31d1c03c-8272-4aa0-8a60-469bd8ca0853/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.539 2 DEBUG oslo_concurrency.processutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/31d1c03c-8272-4aa0-8a60-469bd8ca0853/disk 1073741824" returned: 0 in 0.033s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.540 2 DEBUG oslo_concurrency.lockutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.114s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.540 2 DEBUG oslo_concurrency.processutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.598 2 DEBUG oslo_concurrency.processutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.057s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.599 2 DEBUG nova.virt.disk.api [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Checking if we can resize image /var/lib/nova/instances/31d1c03c-8272-4aa0-8a60-469bd8ca0853/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.599 2 DEBUG oslo_concurrency.processutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/31d1c03c-8272-4aa0-8a60-469bd8ca0853/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.652 2 DEBUG oslo_concurrency.processutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/31d1c03c-8272-4aa0-8a60-469bd8ca0853/disk --force-share --output=json" returned: 0 in 0.053s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.653 2 DEBUG nova.virt.disk.api [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Cannot resize image /var/lib/nova/instances/31d1c03c-8272-4aa0-8a60-469bd8ca0853/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.654 2 DEBUG nova.objects.instance [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lazy-loading 'migration_context' on Instance uuid 31d1c03c-8272-4aa0-8a60-469bd8ca0853 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.667 2 DEBUG nova.virt.libvirt.driver [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.667 2 DEBUG nova.virt.libvirt.driver [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Ensure instance console log exists: /var/lib/nova/instances/31d1c03c-8272-4aa0-8a60-469bd8ca0853/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.668 2 DEBUG oslo_concurrency.lockutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.668 2 DEBUG oslo_concurrency.lockutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:42:35 compute-0 nova_compute[192079]: 2025-10-02 12:42:35.668 2 DEBUG oslo_concurrency.lockutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:42:36 compute-0 nova_compute[192079]: 2025-10-02 12:42:36.128 2 DEBUG nova.network.neutron [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Successfully created port: 92e7c855-ede8-4edf-9cc1-bd4d683e87ca _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:42:36 compute-0 nova_compute[192079]: 2025-10-02 12:42:36.756 2 DEBUG nova.network.neutron [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Successfully updated port: 92e7c855-ede8-4edf-9cc1-bd4d683e87ca _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:42:36 compute-0 nova_compute[192079]: 2025-10-02 12:42:36.768 2 DEBUG oslo_concurrency.lockutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "refresh_cache-31d1c03c-8272-4aa0-8a60-469bd8ca0853" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:42:36 compute-0 nova_compute[192079]: 2025-10-02 12:42:36.769 2 DEBUG oslo_concurrency.lockutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquired lock "refresh_cache-31d1c03c-8272-4aa0-8a60-469bd8ca0853" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:42:36 compute-0 nova_compute[192079]: 2025-10-02 12:42:36.769 2 DEBUG nova.network.neutron [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:42:36 compute-0 nova_compute[192079]: 2025-10-02 12:42:36.851 2 DEBUG nova.compute.manager [req-4ed81bf4-a3a4-46d2-aa45-9c1a19fc0747 req-c490313b-968f-40d7-a7ef-f1a1415694d0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Received event network-changed-92e7c855-ede8-4edf-9cc1-bd4d683e87ca external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:42:36 compute-0 nova_compute[192079]: 2025-10-02 12:42:36.852 2 DEBUG nova.compute.manager [req-4ed81bf4-a3a4-46d2-aa45-9c1a19fc0747 req-c490313b-968f-40d7-a7ef-f1a1415694d0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Refreshing instance network info cache due to event network-changed-92e7c855-ede8-4edf-9cc1-bd4d683e87ca. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:42:36 compute-0 nova_compute[192079]: 2025-10-02 12:42:36.852 2 DEBUG oslo_concurrency.lockutils [req-4ed81bf4-a3a4-46d2-aa45-9c1a19fc0747 req-c490313b-968f-40d7-a7ef-f1a1415694d0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-31d1c03c-8272-4aa0-8a60-469bd8ca0853" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:42:36 compute-0 nova_compute[192079]: 2025-10-02 12:42:36.892 2 DEBUG nova.network.neutron [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.456 2 DEBUG nova.network.neutron [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Updating instance_info_cache with network_info: [{"id": "92e7c855-ede8-4edf-9cc1-bd4d683e87ca", "address": "fa:16:3e:7e:0a:d3", "network": {"id": "308a9d2e-30f5-4a94-9bc3-d0e8463f1653", "bridge": "br-int", "label": "tempest-network-smoke--650526529", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92e7c855-ed", "ovs_interfaceid": "92e7c855-ede8-4edf-9cc1-bd4d683e87ca", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.475 2 DEBUG oslo_concurrency.lockutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Releasing lock "refresh_cache-31d1c03c-8272-4aa0-8a60-469bd8ca0853" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.475 2 DEBUG nova.compute.manager [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Instance network_info: |[{"id": "92e7c855-ede8-4edf-9cc1-bd4d683e87ca", "address": "fa:16:3e:7e:0a:d3", "network": {"id": "308a9d2e-30f5-4a94-9bc3-d0e8463f1653", "bridge": "br-int", "label": "tempest-network-smoke--650526529", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92e7c855-ed", "ovs_interfaceid": "92e7c855-ede8-4edf-9cc1-bd4d683e87ca", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.476 2 DEBUG oslo_concurrency.lockutils [req-4ed81bf4-a3a4-46d2-aa45-9c1a19fc0747 req-c490313b-968f-40d7-a7ef-f1a1415694d0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-31d1c03c-8272-4aa0-8a60-469bd8ca0853" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.476 2 DEBUG nova.network.neutron [req-4ed81bf4-a3a4-46d2-aa45-9c1a19fc0747 req-c490313b-968f-40d7-a7ef-f1a1415694d0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Refreshing network info cache for port 92e7c855-ede8-4edf-9cc1-bd4d683e87ca _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.478 2 DEBUG nova.virt.libvirt.driver [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Start _get_guest_xml network_info=[{"id": "92e7c855-ede8-4edf-9cc1-bd4d683e87ca", "address": "fa:16:3e:7e:0a:d3", "network": {"id": "308a9d2e-30f5-4a94-9bc3-d0e8463f1653", "bridge": "br-int", "label": "tempest-network-smoke--650526529", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92e7c855-ed", "ovs_interfaceid": "92e7c855-ede8-4edf-9cc1-bd4d683e87ca", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.483 2 WARNING nova.virt.libvirt.driver [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.487 2 DEBUG nova.virt.libvirt.host [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.488 2 DEBUG nova.virt.libvirt.host [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.493 2 DEBUG nova.virt.libvirt.host [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.494 2 DEBUG nova.virt.libvirt.host [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.495 2 DEBUG nova.virt.libvirt.driver [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.495 2 DEBUG nova.virt.hardware [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.496 2 DEBUG nova.virt.hardware [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.496 2 DEBUG nova.virt.hardware [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.496 2 DEBUG nova.virt.hardware [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.497 2 DEBUG nova.virt.hardware [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.497 2 DEBUG nova.virt.hardware [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.497 2 DEBUG nova.virt.hardware [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.497 2 DEBUG nova.virt.hardware [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.497 2 DEBUG nova.virt.hardware [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.498 2 DEBUG nova.virt.hardware [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.498 2 DEBUG nova.virt.hardware [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.502 2 DEBUG nova.virt.libvirt.vif [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:42:33Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestNetworkBasicOps-server-79511923',display_name='tempest-TestNetworkBasicOps-server-79511923',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkbasicops-server-79511923',id=175,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBGjFhtEvmK/YgxCkhguNflXQ4seJsulXHSpWXpH9lcREJOgYaYrJzPAfiUZjq8nFW6YdSh6VSco6K6tZV6JddWrYCJDJZlx/bYzvTXxdCXyphJZTmlouEQenKk9vkKqn5Q==',key_name='tempest-TestNetworkBasicOps-1093149800',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='6e2a4899168a47618e377cb3ac85ddd2',ramdisk_id='',reservation_id='r-6bnlzwip',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestNetworkBasicOps-1323893370',owner_user_name='tempest-TestNetworkBasicOps-1323893370-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:42:35Z,user_data=None,user_id='a1898fdf056c4a249c33590f26d4d845',uuid=31d1c03c-8272-4aa0-8a60-469bd8ca0853,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "92e7c855-ede8-4edf-9cc1-bd4d683e87ca", "address": "fa:16:3e:7e:0a:d3", "network": {"id": "308a9d2e-30f5-4a94-9bc3-d0e8463f1653", "bridge": "br-int", "label": "tempest-network-smoke--650526529", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": 
{"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92e7c855-ed", "ovs_interfaceid": "92e7c855-ede8-4edf-9cc1-bd4d683e87ca", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.502 2 DEBUG nova.network.os_vif_util [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converting VIF {"id": "92e7c855-ede8-4edf-9cc1-bd4d683e87ca", "address": "fa:16:3e:7e:0a:d3", "network": {"id": "308a9d2e-30f5-4a94-9bc3-d0e8463f1653", "bridge": "br-int", "label": "tempest-network-smoke--650526529", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92e7c855-ed", "ovs_interfaceid": "92e7c855-ede8-4edf-9cc1-bd4d683e87ca", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.503 2 DEBUG nova.network.os_vif_util [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:7e:0a:d3,bridge_name='br-int',has_traffic_filtering=True,id=92e7c855-ede8-4edf-9cc1-bd4d683e87ca,network=Network(308a9d2e-30f5-4a94-9bc3-d0e8463f1653),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap92e7c855-ed') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.504 2 DEBUG nova.objects.instance [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lazy-loading 'pci_devices' on Instance uuid 31d1c03c-8272-4aa0-8a60-469bd8ca0853 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.518 2 DEBUG nova.virt.libvirt.driver [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:42:38 compute-0 nova_compute[192079]:   <uuid>31d1c03c-8272-4aa0-8a60-469bd8ca0853</uuid>
Oct 02 12:42:38 compute-0 nova_compute[192079]:   <name>instance-000000af</name>
Oct 02 12:42:38 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:42:38 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:42:38 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:42:38 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:       <nova:name>tempest-TestNetworkBasicOps-server-79511923</nova:name>
Oct 02 12:42:38 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:42:38</nova:creationTime>
Oct 02 12:42:38 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:42:38 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:42:38 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:42:38 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:42:38 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:42:38 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:42:38 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:42:38 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:42:38 compute-0 nova_compute[192079]:         <nova:user uuid="a1898fdf056c4a249c33590f26d4d845">tempest-TestNetworkBasicOps-1323893370-project-member</nova:user>
Oct 02 12:42:38 compute-0 nova_compute[192079]:         <nova:project uuid="6e2a4899168a47618e377cb3ac85ddd2">tempest-TestNetworkBasicOps-1323893370</nova:project>
Oct 02 12:42:38 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:42:38 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:42:38 compute-0 nova_compute[192079]:         <nova:port uuid="92e7c855-ede8-4edf-9cc1-bd4d683e87ca">
Oct 02 12:42:38 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.14" ipVersion="4"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:42:38 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:42:38 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:42:38 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <system>
Oct 02 12:42:38 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:42:38 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:42:38 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:42:38 compute-0 nova_compute[192079]:       <entry name="serial">31d1c03c-8272-4aa0-8a60-469bd8ca0853</entry>
Oct 02 12:42:38 compute-0 nova_compute[192079]:       <entry name="uuid">31d1c03c-8272-4aa0-8a60-469bd8ca0853</entry>
Oct 02 12:42:38 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     </system>
Oct 02 12:42:38 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:42:38 compute-0 nova_compute[192079]:   <os>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:   </os>
Oct 02 12:42:38 compute-0 nova_compute[192079]:   <features>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:   </features>
Oct 02 12:42:38 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:42:38 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:42:38 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:42:38 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/31d1c03c-8272-4aa0-8a60-469bd8ca0853/disk"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:42:38 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/31d1c03c-8272-4aa0-8a60-469bd8ca0853/disk.config"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:42:38 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:7e:0a:d3"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:       <target dev="tap92e7c855-ed"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:42:38 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/31d1c03c-8272-4aa0-8a60-469bd8ca0853/console.log" append="off"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <video>
Oct 02 12:42:38 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     </video>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:42:38 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:42:38 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:42:38 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:42:38 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:42:38 compute-0 nova_compute[192079]: </domain>
Oct 02 12:42:38 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.519 2 DEBUG nova.compute.manager [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Preparing to wait for external event network-vif-plugged-92e7c855-ede8-4edf-9cc1-bd4d683e87ca prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.520 2 DEBUG oslo_concurrency.lockutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "31d1c03c-8272-4aa0-8a60-469bd8ca0853-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.520 2 DEBUG oslo_concurrency.lockutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "31d1c03c-8272-4aa0-8a60-469bd8ca0853-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.520 2 DEBUG oslo_concurrency.lockutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "31d1c03c-8272-4aa0-8a60-469bd8ca0853-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.521 2 DEBUG nova.virt.libvirt.vif [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:42:33Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestNetworkBasicOps-server-79511923',display_name='tempest-TestNetworkBasicOps-server-79511923',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkbasicops-server-79511923',id=175,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBGjFhtEvmK/YgxCkhguNflXQ4seJsulXHSpWXpH9lcREJOgYaYrJzPAfiUZjq8nFW6YdSh6VSco6K6tZV6JddWrYCJDJZlx/bYzvTXxdCXyphJZTmlouEQenKk9vkKqn5Q==',key_name='tempest-TestNetworkBasicOps-1093149800',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='6e2a4899168a47618e377cb3ac85ddd2',ramdisk_id='',reservation_id='r-6bnlzwip',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestNetworkBasicOps-1323893370',owner_user_name='tempest-TestNetworkBasicOps-1323893370-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:42:35Z,user_data=None,user_id='a1898fdf056c4a249c33590f26d4d845',uuid=31d1c03c-8272-4aa0-8a60-469bd8ca0853,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "92e7c855-ede8-4edf-9cc1-bd4d683e87ca", "address": "fa:16:3e:7e:0a:d3", "network": {"id": "308a9d2e-30f5-4a94-9bc3-d0e8463f1653", "bridge": "br-int", "label": "tempest-network-smoke--650526529", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": 
{"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92e7c855-ed", "ovs_interfaceid": "92e7c855-ede8-4edf-9cc1-bd4d683e87ca", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.521 2 DEBUG nova.network.os_vif_util [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converting VIF {"id": "92e7c855-ede8-4edf-9cc1-bd4d683e87ca", "address": "fa:16:3e:7e:0a:d3", "network": {"id": "308a9d2e-30f5-4a94-9bc3-d0e8463f1653", "bridge": "br-int", "label": "tempest-network-smoke--650526529", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92e7c855-ed", "ovs_interfaceid": "92e7c855-ede8-4edf-9cc1-bd4d683e87ca", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.522 2 DEBUG nova.network.os_vif_util [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:7e:0a:d3,bridge_name='br-int',has_traffic_filtering=True,id=92e7c855-ede8-4edf-9cc1-bd4d683e87ca,network=Network(308a9d2e-30f5-4a94-9bc3-d0e8463f1653),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap92e7c855-ed') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.522 2 DEBUG os_vif [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:7e:0a:d3,bridge_name='br-int',has_traffic_filtering=True,id=92e7c855-ede8-4edf-9cc1-bd4d683e87ca,network=Network(308a9d2e-30f5-4a94-9bc3-d0e8463f1653),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap92e7c855-ed') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.522 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.523 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.523 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.525 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.525 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap92e7c855-ed, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.526 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap92e7c855-ed, col_values=(('external_ids', {'iface-id': '92e7c855-ede8-4edf-9cc1-bd4d683e87ca', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:7e:0a:d3', 'vm-uuid': '31d1c03c-8272-4aa0-8a60-469bd8ca0853'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.527 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:38 compute-0 NetworkManager[51160]: <info>  [1759408958.5283] manager: (tap92e7c855-ed): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/333)
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.530 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.534 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.535 2 INFO os_vif [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:7e:0a:d3,bridge_name='br-int',has_traffic_filtering=True,id=92e7c855-ede8-4edf-9cc1-bd4d683e87ca,network=Network(308a9d2e-30f5-4a94-9bc3-d0e8463f1653),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap92e7c855-ed')
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.556 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.584 2 DEBUG nova.virt.libvirt.driver [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.585 2 DEBUG nova.virt.libvirt.driver [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.585 2 DEBUG nova.virt.libvirt.driver [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] No VIF found with MAC fa:16:3e:7e:0a:d3, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:42:38 compute-0 nova_compute[192079]: 2025-10-02 12:42:38.585 2 INFO nova.virt.libvirt.driver [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Using config drive
Oct 02 12:42:39 compute-0 nova_compute[192079]: 2025-10-02 12:42:39.559 2 INFO nova.virt.libvirt.driver [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Creating config drive at /var/lib/nova/instances/31d1c03c-8272-4aa0-8a60-469bd8ca0853/disk.config
Oct 02 12:42:39 compute-0 nova_compute[192079]: 2025-10-02 12:42:39.564 2 DEBUG oslo_concurrency.processutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/31d1c03c-8272-4aa0-8a60-469bd8ca0853/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpc4mblp58 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:42:39 compute-0 nova_compute[192079]: 2025-10-02 12:42:39.707 2 DEBUG oslo_concurrency.processutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/31d1c03c-8272-4aa0-8a60-469bd8ca0853/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpc4mblp58" returned: 0 in 0.143s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:42:39 compute-0 kernel: tap92e7c855-ed: entered promiscuous mode
Oct 02 12:42:39 compute-0 NetworkManager[51160]: <info>  [1759408959.8061] manager: (tap92e7c855-ed): new Tun device (/org/freedesktop/NetworkManager/Devices/334)
Oct 02 12:42:39 compute-0 ovn_controller[94336]: 2025-10-02T12:42:39Z|00684|binding|INFO|Claiming lport 92e7c855-ede8-4edf-9cc1-bd4d683e87ca for this chassis.
Oct 02 12:42:39 compute-0 ovn_controller[94336]: 2025-10-02T12:42:39Z|00685|binding|INFO|92e7c855-ede8-4edf-9cc1-bd4d683e87ca: Claiming fa:16:3e:7e:0a:d3 10.100.0.14
Oct 02 12:42:39 compute-0 nova_compute[192079]: 2025-10-02 12:42:39.807 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:39.841 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:7e:0a:d3 10.100.0.14'], port_security=['fa:16:3e:7e:0a:d3 10.100.0.14'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.14/28', 'neutron:device_id': '31d1c03c-8272-4aa0-8a60-469bd8ca0853', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-308a9d2e-30f5-4a94-9bc3-d0e8463f1653', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'neutron:revision_number': '2', 'neutron:security_group_ids': '47be5342-2ddc-4caf-96cd-a5b8f37810bd', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=f2112ece-3902-43a8-9e6c-27d4f44d0f07, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=92e7c855-ede8-4edf-9cc1-bd4d683e87ca) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:42:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:39.843 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 92e7c855-ede8-4edf-9cc1-bd4d683e87ca in datapath 308a9d2e-30f5-4a94-9bc3-d0e8463f1653 bound to our chassis
Oct 02 12:42:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:39.845 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 308a9d2e-30f5-4a94-9bc3-d0e8463f1653
Oct 02 12:42:39 compute-0 systemd-udevd[250889]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:42:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:39.864 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e567851c-947a-47c8-8575-21a29b471d08]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:42:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:39.865 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap308a9d2e-31 in ovnmeta-308a9d2e-30f5-4a94-9bc3-d0e8463f1653 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:42:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:39.869 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap308a9d2e-30 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:42:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:39.869 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7acecb02-0bd2-4eed-b4c8-85b43de7b8bf]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:42:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:39.870 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[bb3d298e-4aaf-4e05-8d39-615ce6daac13]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:42:39 compute-0 systemd-machined[152150]: New machine qemu-84-instance-000000af.
Oct 02 12:42:39 compute-0 NetworkManager[51160]: <info>  [1759408959.8818] device (tap92e7c855-ed): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:42:39 compute-0 NetworkManager[51160]: <info>  [1759408959.8825] device (tap92e7c855-ed): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:42:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:39.888 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[6b88f3a7-3120-47b4-81b3-9d2c67ae0b32]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:42:39 compute-0 nova_compute[192079]: 2025-10-02 12:42:39.896 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:39 compute-0 nova_compute[192079]: 2025-10-02 12:42:39.901 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:39 compute-0 ovn_controller[94336]: 2025-10-02T12:42:39Z|00686|binding|INFO|Setting lport 92e7c855-ede8-4edf-9cc1-bd4d683e87ca ovn-installed in OVS
Oct 02 12:42:39 compute-0 ovn_controller[94336]: 2025-10-02T12:42:39Z|00687|binding|INFO|Setting lport 92e7c855-ede8-4edf-9cc1-bd4d683e87ca up in Southbound
Oct 02 12:42:39 compute-0 nova_compute[192079]: 2025-10-02 12:42:39.905 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:39 compute-0 systemd[1]: Started Virtual Machine qemu-84-instance-000000af.
Oct 02 12:42:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:39.913 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d9d6d00b-aaed-4c62-adf2-db7cfadbd643]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:42:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:39.945 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[9aa6b1e2-f9d1-4919-84b4-47bddf989968]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:42:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:39.951 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[70fbed56-fccf-46e0-9bf2-3dce2613376b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:42:39 compute-0 systemd-udevd[250894]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:42:39 compute-0 NetworkManager[51160]: <info>  [1759408959.9519] manager: (tap308a9d2e-30): new Veth device (/org/freedesktop/NetworkManager/Devices/335)
Oct 02 12:42:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:39.981 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[71bd8dc2-b4f5-43cf-bba2-d06aceff40a4]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:42:39 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:39.984 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[eb378255-5d28-480e-af74-63f55dc14622]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:42:40 compute-0 NetworkManager[51160]: <info>  [1759408960.0071] device (tap308a9d2e-30): carrier: link connected
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:40.011 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[6595a63d-0e35-4a6f-a041-95fccef7a215]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:40.028 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a7cff80f-1926-4c7e-80ed-8dd011549254]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap308a9d2e-31'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:c5:10:70'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 215], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 695763, 'reachable_time': 41408, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 250923, 'error': None, 'target': 'ovnmeta-308a9d2e-30f5-4a94-9bc3-d0e8463f1653', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:40.043 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[05a3d7d1-94a2-4fce-acf8-421ff72791ce]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fec5:1070'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 695763, 'tstamp': 695763}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 250924, 'error': None, 'target': 'ovnmeta-308a9d2e-30f5-4a94-9bc3-d0e8463f1653', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:40.059 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[86778587-627b-4ffd-b43d-8a90abc48856]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap308a9d2e-31'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:c5:10:70'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 215], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 695763, 'reachable_time': 41408, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 250925, 'error': None, 'target': 'ovnmeta-308a9d2e-30f5-4a94-9bc3-d0e8463f1653', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.089 2 DEBUG nova.compute.manager [req-1bc2d277-fb65-47f3-866d-51009d65cf7a req-9bb53caf-db95-4241-9010-9aff3a72f871 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Received event network-vif-plugged-92e7c855-ede8-4edf-9cc1-bd4d683e87ca external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.090 2 DEBUG oslo_concurrency.lockutils [req-1bc2d277-fb65-47f3-866d-51009d65cf7a req-9bb53caf-db95-4241-9010-9aff3a72f871 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "31d1c03c-8272-4aa0-8a60-469bd8ca0853-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.090 2 DEBUG oslo_concurrency.lockutils [req-1bc2d277-fb65-47f3-866d-51009d65cf7a req-9bb53caf-db95-4241-9010-9aff3a72f871 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "31d1c03c-8272-4aa0-8a60-469bd8ca0853-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.090 2 DEBUG oslo_concurrency.lockutils [req-1bc2d277-fb65-47f3-866d-51009d65cf7a req-9bb53caf-db95-4241-9010-9aff3a72f871 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "31d1c03c-8272-4aa0-8a60-469bd8ca0853-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.091 2 DEBUG nova.compute.manager [req-1bc2d277-fb65-47f3-866d-51009d65cf7a req-9bb53caf-db95-4241-9010-9aff3a72f871 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Processing event network-vif-plugged-92e7c855-ede8-4edf-9cc1-bd4d683e87ca _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:40.092 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[542f123a-ee25-48d3-a390-566a6fe0dbd7]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:40.147 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8faca661-2baf-4b31-bfe3-f4eacf3ca309]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:40.148 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap308a9d2e-30, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:40.148 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:40.148 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap308a9d2e-30, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:42:40 compute-0 NetworkManager[51160]: <info>  [1759408960.1513] manager: (tap308a9d2e-30): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/336)
Oct 02 12:42:40 compute-0 kernel: tap308a9d2e-30: entered promiscuous mode
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.150 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:40.154 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap308a9d2e-30, col_values=(('external_ids', {'iface-id': '921996ce-f2f8-4f13-9a74-ab75ec6374d6'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.155 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:40 compute-0 ovn_controller[94336]: 2025-10-02T12:42:40Z|00688|binding|INFO|Releasing lport 921996ce-f2f8-4f13-9a74-ab75ec6374d6 from this chassis (sb_readonly=0)
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:40.181 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/308a9d2e-30f5-4a94-9bc3-d0e8463f1653.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/308a9d2e-30f5-4a94-9bc3-d0e8463f1653.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.181 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:40.183 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b3e9cbc6-da15-4e79-af22-3acba6b3ee24]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:40.184 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-308a9d2e-30f5-4a94-9bc3-d0e8463f1653
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/308a9d2e-30f5-4a94-9bc3-d0e8463f1653.pid.haproxy
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 308a9d2e-30f5-4a94-9bc3-d0e8463f1653
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:40.184 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-308a9d2e-30f5-4a94-9bc3-d0e8463f1653', 'env', 'PROCESS_TAG=haproxy-308a9d2e-30f5-4a94-9bc3-d0e8463f1653', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/308a9d2e-30f5-4a94-9bc3-d0e8463f1653.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.211 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:40.211 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=48, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=47) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:42:40 compute-0 podman[250964]: 2025-10-02 12:42:40.546552457 +0000 UTC m=+0.059135479 container create 5322e8270d1af74d671b854da4a1745ddbb2f04f9b536314f66672c488488b81 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-308a9d2e-30f5-4a94-9bc3-d0e8463f1653, io.buildah.version=1.41.3, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:42:40 compute-0 podman[250964]: 2025-10-02 12:42:40.509432701 +0000 UTC m=+0.022015703 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:42:40 compute-0 systemd[1]: Started libpod-conmon-5322e8270d1af74d671b854da4a1745ddbb2f04f9b536314f66672c488488b81.scope.
Oct 02 12:42:40 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:42:40 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/97e9024b2ddf20bf12e566a2dbde6a7b50cd9fdfabe5191568f5a0efa795ac81/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:42:40 compute-0 podman[250977]: 2025-10-02 12:42:40.653509532 +0000 UTC m=+0.072034381 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, com.redhat.component=ubi9-minimal-container, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.tags=minimal rhel9, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, maintainer=Red Hat, Inc., url=https://catalog.redhat.com/en/search?searchType=containers, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, config_id=edpm, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, vendor=Red Hat, Inc., architecture=x86_64, 
build-date=2025-08-20T13:12:41, distribution-scope=public, vcs-type=git, io.buildah.version=1.33.7, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, container_name=openstack_network_exporter, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., managed_by=edpm_ansible, version=9.6, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., release=1755695350, io.openshift.expose-services=, name=ubi9-minimal)
Oct 02 12:42:40 compute-0 podman[250980]: 2025-10-02 12:42:40.660272027 +0000 UTC m=+0.070728545 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, container_name=multipathd, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.license=GPLv2, config_id=multipathd, io.buildah.version=1.41.3, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:42:40 compute-0 podman[250964]: 2025-10-02 12:42:40.665664414 +0000 UTC m=+0.178247446 container init 5322e8270d1af74d671b854da4a1745ddbb2f04f9b536314f66672c488488b81 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-308a9d2e-30f5-4a94-9bc3-d0e8463f1653, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, tcib_managed=true)
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.666 2 DEBUG nova.network.neutron [req-4ed81bf4-a3a4-46d2-aa45-9c1a19fc0747 req-c490313b-968f-40d7-a7ef-f1a1415694d0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Updated VIF entry in instance network info cache for port 92e7c855-ede8-4edf-9cc1-bd4d683e87ca. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.666 2 DEBUG nova.network.neutron [req-4ed81bf4-a3a4-46d2-aa45-9c1a19fc0747 req-c490313b-968f-40d7-a7ef-f1a1415694d0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Updating instance_info_cache with network_info: [{"id": "92e7c855-ede8-4edf-9cc1-bd4d683e87ca", "address": "fa:16:3e:7e:0a:d3", "network": {"id": "308a9d2e-30f5-4a94-9bc3-d0e8463f1653", "bridge": "br-int", "label": "tempest-network-smoke--650526529", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92e7c855-ed", "ovs_interfaceid": "92e7c855-ede8-4edf-9cc1-bd4d683e87ca", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:42:40 compute-0 podman[250964]: 2025-10-02 12:42:40.673519829 +0000 UTC m=+0.186102831 container start 5322e8270d1af74d671b854da4a1745ddbb2f04f9b536314f66672c488488b81 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-308a9d2e-30f5-4a94-9bc3-d0e8463f1653, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001)
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.696 2 DEBUG oslo_concurrency.lockutils [req-4ed81bf4-a3a4-46d2-aa45-9c1a19fc0747 req-c490313b-968f-40d7-a7ef-f1a1415694d0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-31d1c03c-8272-4aa0-8a60-469bd8ca0853" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:42:40 compute-0 neutron-haproxy-ovnmeta-308a9d2e-30f5-4a94-9bc3-d0e8463f1653[251004]: [NOTICE]   (251023) : New worker (251025) forked
Oct 02 12:42:40 compute-0 neutron-haproxy-ovnmeta-308a9d2e-30f5-4a94-9bc3-d0e8463f1653[251004]: [NOTICE]   (251023) : Loading success.
Oct 02 12:42:40 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:40.733 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 9 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.792 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408960.792215, 31d1c03c-8272-4aa0-8a60-469bd8ca0853 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.793 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] VM Started (Lifecycle Event)
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.795 2 DEBUG nova.compute.manager [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.801 2 DEBUG nova.virt.libvirt.driver [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.809 2 INFO nova.virt.libvirt.driver [-] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Instance spawned successfully.
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.810 2 DEBUG nova.virt.libvirt.driver [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.814 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.818 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.845 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.846 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408960.792964, 31d1c03c-8272-4aa0-8a60-469bd8ca0853 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.846 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] VM Paused (Lifecycle Event)
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.854 2 DEBUG nova.virt.libvirt.driver [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.855 2 DEBUG nova.virt.libvirt.driver [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.855 2 DEBUG nova.virt.libvirt.driver [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.856 2 DEBUG nova.virt.libvirt.driver [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.857 2 DEBUG nova.virt.libvirt.driver [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.858 2 DEBUG nova.virt.libvirt.driver [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.867 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.871 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759408960.7977114, 31d1c03c-8272-4aa0-8a60-469bd8ca0853 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.871 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] VM Resumed (Lifecycle Event)
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.894 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.898 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.916 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.939 2 INFO nova.compute.manager [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Took 5.64 seconds to spawn the instance on the hypervisor.
Oct 02 12:42:40 compute-0 nova_compute[192079]: 2025-10-02 12:42:40.940 2 DEBUG nova.compute.manager [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:42:41 compute-0 nova_compute[192079]: 2025-10-02 12:42:41.023 2 INFO nova.compute.manager [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Took 6.13 seconds to build instance.
Oct 02 12:42:41 compute-0 nova_compute[192079]: 2025-10-02 12:42:41.047 2 DEBUG oslo_concurrency.lockutils [None req-78467fd9-17ca-4793-9dbb-808ff2892d28 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "31d1c03c-8272-4aa0-8a60-469bd8ca0853" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 6.242s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:42:42 compute-0 nova_compute[192079]: 2025-10-02 12:42:42.215 2 DEBUG nova.compute.manager [req-f5161805-b49e-4e40-91ce-d84724209bb7 req-17b62939-1f83-42e7-837b-122e225a7bb4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Received event network-vif-plugged-92e7c855-ede8-4edf-9cc1-bd4d683e87ca external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:42:42 compute-0 nova_compute[192079]: 2025-10-02 12:42:42.216 2 DEBUG oslo_concurrency.lockutils [req-f5161805-b49e-4e40-91ce-d84724209bb7 req-17b62939-1f83-42e7-837b-122e225a7bb4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "31d1c03c-8272-4aa0-8a60-469bd8ca0853-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:42:42 compute-0 nova_compute[192079]: 2025-10-02 12:42:42.216 2 DEBUG oslo_concurrency.lockutils [req-f5161805-b49e-4e40-91ce-d84724209bb7 req-17b62939-1f83-42e7-837b-122e225a7bb4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "31d1c03c-8272-4aa0-8a60-469bd8ca0853-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:42:42 compute-0 nova_compute[192079]: 2025-10-02 12:42:42.217 2 DEBUG oslo_concurrency.lockutils [req-f5161805-b49e-4e40-91ce-d84724209bb7 req-17b62939-1f83-42e7-837b-122e225a7bb4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "31d1c03c-8272-4aa0-8a60-469bd8ca0853-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:42:42 compute-0 nova_compute[192079]: 2025-10-02 12:42:42.217 2 DEBUG nova.compute.manager [req-f5161805-b49e-4e40-91ce-d84724209bb7 req-17b62939-1f83-42e7-837b-122e225a7bb4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] No waiting events found dispatching network-vif-plugged-92e7c855-ede8-4edf-9cc1-bd4d683e87ca pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:42:42 compute-0 nova_compute[192079]: 2025-10-02 12:42:42.217 2 WARNING nova.compute.manager [req-f5161805-b49e-4e40-91ce-d84724209bb7 req-17b62939-1f83-42e7-837b-122e225a7bb4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Received unexpected event network-vif-plugged-92e7c855-ede8-4edf-9cc1-bd4d683e87ca for instance with vm_state active and task_state None.
Oct 02 12:42:42 compute-0 nova_compute[192079]: 2025-10-02 12:42:42.975 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:42:43 compute-0 nova_compute[192079]: 2025-10-02 12:42:43.562 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:43 compute-0 nova_compute[192079]: 2025-10-02 12:42:43.565 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:42:44 compute-0 NetworkManager[51160]: <info>  [1759408964.6645] manager: (patch-br-int-to-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/337)
Oct 02 12:42:44 compute-0 NetworkManager[51160]: <info>  [1759408964.6652] manager: (patch-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d-to-br-int): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/338)
Oct 02 12:42:44 compute-0 nova_compute[192079]: 2025-10-02 12:42:44.663 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:44 compute-0 nova_compute[192079]: 2025-10-02 12:42:44.679 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:44 compute-0 ovn_controller[94336]: 2025-10-02T12:42:44Z|00689|binding|INFO|Releasing lport 921996ce-f2f8-4f13-9a74-ab75ec6374d6 from this chassis (sb_readonly=0)
Oct 02 12:42:44 compute-0 nova_compute[192079]: 2025-10-02 12:42:44.690 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:45 compute-0 nova_compute[192079]: 2025-10-02 12:42:45.047 2 DEBUG nova.compute.manager [req-88598e72-138d-4e42-9d98-8a5caa1f7ace req-cf5a9183-cca7-4350-8b5b-e1883b598959 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Received event network-changed-92e7c855-ede8-4edf-9cc1-bd4d683e87ca external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:42:45 compute-0 nova_compute[192079]: 2025-10-02 12:42:45.048 2 DEBUG nova.compute.manager [req-88598e72-138d-4e42-9d98-8a5caa1f7ace req-cf5a9183-cca7-4350-8b5b-e1883b598959 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Refreshing instance network info cache due to event network-changed-92e7c855-ede8-4edf-9cc1-bd4d683e87ca. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:42:45 compute-0 nova_compute[192079]: 2025-10-02 12:42:45.048 2 DEBUG oslo_concurrency.lockutils [req-88598e72-138d-4e42-9d98-8a5caa1f7ace req-cf5a9183-cca7-4350-8b5b-e1883b598959 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-31d1c03c-8272-4aa0-8a60-469bd8ca0853" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:42:45 compute-0 nova_compute[192079]: 2025-10-02 12:42:45.048 2 DEBUG oslo_concurrency.lockutils [req-88598e72-138d-4e42-9d98-8a5caa1f7ace req-cf5a9183-cca7-4350-8b5b-e1883b598959 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-31d1c03c-8272-4aa0-8a60-469bd8ca0853" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:42:45 compute-0 nova_compute[192079]: 2025-10-02 12:42:45.049 2 DEBUG nova.network.neutron [req-88598e72-138d-4e42-9d98-8a5caa1f7ace req-cf5a9183-cca7-4350-8b5b-e1883b598959 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Refreshing network info cache for port 92e7c855-ede8-4edf-9cc1-bd4d683e87ca _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:42:45 compute-0 nova_compute[192079]: 2025-10-02 12:42:45.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:42:45 compute-0 nova_compute[192079]: 2025-10-02 12:42:45.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:42:45 compute-0 nova_compute[192079]: 2025-10-02 12:42:45.930 2 DEBUG nova.network.neutron [req-88598e72-138d-4e42-9d98-8a5caa1f7ace req-cf5a9183-cca7-4350-8b5b-e1883b598959 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Updated VIF entry in instance network info cache for port 92e7c855-ede8-4edf-9cc1-bd4d683e87ca. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:42:45 compute-0 nova_compute[192079]: 2025-10-02 12:42:45.931 2 DEBUG nova.network.neutron [req-88598e72-138d-4e42-9d98-8a5caa1f7ace req-cf5a9183-cca7-4350-8b5b-e1883b598959 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Updating instance_info_cache with network_info: [{"id": "92e7c855-ede8-4edf-9cc1-bd4d683e87ca", "address": "fa:16:3e:7e:0a:d3", "network": {"id": "308a9d2e-30f5-4a94-9bc3-d0e8463f1653", "bridge": "br-int", "label": "tempest-network-smoke--650526529", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.244", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92e7c855-ed", "ovs_interfaceid": "92e7c855-ede8-4edf-9cc1-bd4d683e87ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:42:45 compute-0 nova_compute[192079]: 2025-10-02 12:42:45.948 2 DEBUG oslo_concurrency.lockutils [req-88598e72-138d-4e42-9d98-8a5caa1f7ace req-cf5a9183-cca7-4350-8b5b-e1883b598959 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-31d1c03c-8272-4aa0-8a60-469bd8ca0853" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:42:46 compute-0 podman[251035]: 2025-10-02 12:42:46.155295459 +0000 UTC m=+0.061755650 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:42:46 compute-0 podman[251036]: 2025-10-02 12:42:46.177921508 +0000 UTC m=+0.074164900 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, config_id=iscsid, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0)
Oct 02 12:42:47 compute-0 nova_compute[192079]: 2025-10-02 12:42:47.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:42:47 compute-0 nova_compute[192079]: 2025-10-02 12:42:47.690 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:42:47 compute-0 nova_compute[192079]: 2025-10-02 12:42:47.690 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:42:47 compute-0 nova_compute[192079]: 2025-10-02 12:42:47.690 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:42:47 compute-0 nova_compute[192079]: 2025-10-02 12:42:47.690 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:42:47 compute-0 nova_compute[192079]: 2025-10-02 12:42:47.772 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/31d1c03c-8272-4aa0-8a60-469bd8ca0853/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:42:47 compute-0 nova_compute[192079]: 2025-10-02 12:42:47.830 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/31d1c03c-8272-4aa0-8a60-469bd8ca0853/disk --force-share --output=json" returned: 0 in 0.058s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:42:47 compute-0 nova_compute[192079]: 2025-10-02 12:42:47.832 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/31d1c03c-8272-4aa0-8a60-469bd8ca0853/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:42:47 compute-0 nova_compute[192079]: 2025-10-02 12:42:47.888 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/31d1c03c-8272-4aa0-8a60-469bd8ca0853/disk --force-share --output=json" returned: 0 in 0.057s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:42:48 compute-0 nova_compute[192079]: 2025-10-02 12:42:48.051 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:42:48 compute-0 nova_compute[192079]: 2025-10-02 12:42:48.053 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5593MB free_disk=73.27143478393555GB free_vcpus=7 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:42:48 compute-0 nova_compute[192079]: 2025-10-02 12:42:48.053 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:42:48 compute-0 nova_compute[192079]: 2025-10-02 12:42:48.053 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:42:48 compute-0 nova_compute[192079]: 2025-10-02 12:42:48.117 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance 31d1c03c-8272-4aa0-8a60-469bd8ca0853 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:42:48 compute-0 nova_compute[192079]: 2025-10-02 12:42:48.117 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 1 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:42:48 compute-0 nova_compute[192079]: 2025-10-02 12:42:48.118 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=640MB phys_disk=79GB used_disk=1GB total_vcpus=8 used_vcpus=1 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:42:48 compute-0 nova_compute[192079]: 2025-10-02 12:42:48.158 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:42:48 compute-0 nova_compute[192079]: 2025-10-02 12:42:48.174 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:42:48 compute-0 nova_compute[192079]: 2025-10-02 12:42:48.196 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:42:48 compute-0 nova_compute[192079]: 2025-10-02 12:42:48.196 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.143s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:42:48 compute-0 nova_compute[192079]: 2025-10-02 12:42:48.564 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:49 compute-0 nova_compute[192079]: 2025-10-02 12:42:49.198 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:42:49 compute-0 nova_compute[192079]: 2025-10-02 12:42:49.198 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:42:49 compute-0 ovn_controller[94336]: 2025-10-02T12:42:49Z|00690|binding|INFO|Releasing lport 921996ce-f2f8-4f13-9a74-ab75ec6374d6 from this chassis (sb_readonly=0)
Oct 02 12:42:49 compute-0 nova_compute[192079]: 2025-10-02 12:42:49.511 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:49 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:42:49.735 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '48'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:42:50 compute-0 nova_compute[192079]: 2025-10-02 12:42:50.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:42:52 compute-0 nova_compute[192079]: 2025-10-02 12:42:52.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:42:53 compute-0 ovn_controller[94336]: 2025-10-02T12:42:53Z|00081|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:7e:0a:d3 10.100.0.14
Oct 02 12:42:53 compute-0 ovn_controller[94336]: 2025-10-02T12:42:53Z|00082|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:7e:0a:d3 10.100.0.14
Oct 02 12:42:53 compute-0 nova_compute[192079]: 2025-10-02 12:42:53.566 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:53 compute-0 nova_compute[192079]: 2025-10-02 12:42:53.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:42:55 compute-0 podman[251101]: 2025-10-02 12:42:55.133560982 +0000 UTC m=+0.051130650 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, managed_by=edpm_ansible, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, config_id=ovn_metadata_agent)
Oct 02 12:42:55 compute-0 podman[251103]: 2025-10-02 12:42:55.137926811 +0000 UTC m=+0.049751972 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:42:55 compute-0 podman[251102]: 2025-10-02 12:42:55.17004317 +0000 UTC m=+0.084611316 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_controller, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, config_id=ovn_controller, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:42:55 compute-0 nova_compute[192079]: 2025-10-02 12:42:55.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:42:55 compute-0 nova_compute[192079]: 2025-10-02 12:42:55.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:42:55 compute-0 nova_compute[192079]: 2025-10-02 12:42:55.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:42:56 compute-0 nova_compute[192079]: 2025-10-02 12:42:56.375 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "refresh_cache-31d1c03c-8272-4aa0-8a60-469bd8ca0853" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:42:56 compute-0 nova_compute[192079]: 2025-10-02 12:42:56.376 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquired lock "refresh_cache-31d1c03c-8272-4aa0-8a60-469bd8ca0853" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:42:56 compute-0 nova_compute[192079]: 2025-10-02 12:42:56.377 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Forcefully refreshing network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2004
Oct 02 12:42:56 compute-0 nova_compute[192079]: 2025-10-02 12:42:56.377 2 DEBUG nova.objects.instance [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lazy-loading 'info_cache' on Instance uuid 31d1c03c-8272-4aa0-8a60-469bd8ca0853 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:42:58 compute-0 nova_compute[192079]: 2025-10-02 12:42:58.458 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Updating instance_info_cache with network_info: [{"id": "92e7c855-ede8-4edf-9cc1-bd4d683e87ca", "address": "fa:16:3e:7e:0a:d3", "network": {"id": "308a9d2e-30f5-4a94-9bc3-d0e8463f1653", "bridge": "br-int", "label": "tempest-network-smoke--650526529", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.244", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92e7c855-ed", "ovs_interfaceid": "92e7c855-ede8-4edf-9cc1-bd4d683e87ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:42:58 compute-0 nova_compute[192079]: 2025-10-02 12:42:58.479 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Releasing lock "refresh_cache-31d1c03c-8272-4aa0-8a60-469bd8ca0853" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:42:58 compute-0 nova_compute[192079]: 2025-10-02 12:42:58.480 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Updated the network info_cache for instance _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9929
Oct 02 12:42:58 compute-0 nova_compute[192079]: 2025-10-02 12:42:58.567 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:42:59 compute-0 nova_compute[192079]: 2025-10-02 12:42:59.687 2 INFO nova.compute.manager [None req-cd060a8c-a5ad-4f12-ba5a-262a6986fe54 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Get console output
Oct 02 12:42:59 compute-0 nova_compute[192079]: 2025-10-02 12:42:59.694 55 INFO nova.privsep.libvirt [-] Ignored error while reading from instance console pty: can't concat NoneType to bytes
Oct 02 12:43:00 compute-0 ovn_controller[94336]: 2025-10-02T12:43:00Z|00691|binding|INFO|Releasing lport 921996ce-f2f8-4f13-9a74-ab75ec6374d6 from this chassis (sb_readonly=0)
Oct 02 12:43:00 compute-0 nova_compute[192079]: 2025-10-02 12:43:00.276 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:00 compute-0 ovn_controller[94336]: 2025-10-02T12:43:00Z|00692|binding|INFO|Releasing lport 921996ce-f2f8-4f13-9a74-ab75ec6374d6 from this chassis (sb_readonly=0)
Oct 02 12:43:00 compute-0 nova_compute[192079]: 2025-10-02 12:43:00.328 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:01 compute-0 nova_compute[192079]: 2025-10-02 12:43:01.574 2 INFO nova.compute.manager [None req-121f4937-469c-4f41-a181-5130f7038a90 a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Get console output
Oct 02 12:43:01 compute-0 nova_compute[192079]: 2025-10-02 12:43:01.579 55 INFO nova.privsep.libvirt [-] Ignored error while reading from instance console pty: can't concat NoneType to bytes
Oct 02 12:43:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:02.249 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:43:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:02.250 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:43:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:02.250 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:43:02 compute-0 nova_compute[192079]: 2025-10-02 12:43:02.763 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:02 compute-0 NetworkManager[51160]: <info>  [1759408982.7639] manager: (patch-br-int-to-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/339)
Oct 02 12:43:02 compute-0 NetworkManager[51160]: <info>  [1759408982.7653] manager: (patch-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d-to-br-int): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/340)
Oct 02 12:43:02 compute-0 nova_compute[192079]: 2025-10-02 12:43:02.827 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:02 compute-0 ovn_controller[94336]: 2025-10-02T12:43:02Z|00693|binding|INFO|Releasing lport 921996ce-f2f8-4f13-9a74-ab75ec6374d6 from this chassis (sb_readonly=0)
Oct 02 12:43:02 compute-0 nova_compute[192079]: 2025-10-02 12:43:02.835 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:03 compute-0 nova_compute[192079]: 2025-10-02 12:43:03.064 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:03 compute-0 nova_compute[192079]: 2025-10-02 12:43:03.218 2 INFO nova.compute.manager [None req-00b07d00-f40a-41fb-be10-e224c00cbb8c a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Get console output
Oct 02 12:43:03 compute-0 nova_compute[192079]: 2025-10-02 12:43:03.221 55 INFO nova.privsep.libvirt [-] Ignored error while reading from instance console pty: can't concat NoneType to bytes
Oct 02 12:43:03 compute-0 nova_compute[192079]: 2025-10-02 12:43:03.569 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.033 2 DEBUG nova.compute.manager [req-88e4e03d-370b-48ab-9eb0-761ff6ec7a5b req-9a74845a-3d5b-4473-b8f9-6d094f9be8a5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Received event network-changed-92e7c855-ede8-4edf-9cc1-bd4d683e87ca external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.034 2 DEBUG nova.compute.manager [req-88e4e03d-370b-48ab-9eb0-761ff6ec7a5b req-9a74845a-3d5b-4473-b8f9-6d094f9be8a5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Refreshing instance network info cache due to event network-changed-92e7c855-ede8-4edf-9cc1-bd4d683e87ca. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.034 2 DEBUG oslo_concurrency.lockutils [req-88e4e03d-370b-48ab-9eb0-761ff6ec7a5b req-9a74845a-3d5b-4473-b8f9-6d094f9be8a5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-31d1c03c-8272-4aa0-8a60-469bd8ca0853" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.034 2 DEBUG oslo_concurrency.lockutils [req-88e4e03d-370b-48ab-9eb0-761ff6ec7a5b req-9a74845a-3d5b-4473-b8f9-6d094f9be8a5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-31d1c03c-8272-4aa0-8a60-469bd8ca0853" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.034 2 DEBUG nova.network.neutron [req-88e4e03d-370b-48ab-9eb0-761ff6ec7a5b req-9a74845a-3d5b-4473-b8f9-6d094f9be8a5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Refreshing network info cache for port 92e7c855-ede8-4edf-9cc1-bd4d683e87ca _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:43:04 compute-0 podman[251170]: 2025-10-02 12:43:04.144526258 +0000 UTC m=+0.061404160 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, container_name=ceilometer_agent_compute, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=edpm, org.label-schema.build-date=20251001, 
org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, tcib_managed=true)
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.314 2 DEBUG oslo_concurrency.lockutils [None req-2d2c06e7-787e-4616-b5c5-85edd164592a a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "31d1c03c-8272-4aa0-8a60-469bd8ca0853" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.314 2 DEBUG oslo_concurrency.lockutils [None req-2d2c06e7-787e-4616-b5c5-85edd164592a a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "31d1c03c-8272-4aa0-8a60-469bd8ca0853" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.315 2 DEBUG oslo_concurrency.lockutils [None req-2d2c06e7-787e-4616-b5c5-85edd164592a a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "31d1c03c-8272-4aa0-8a60-469bd8ca0853-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.315 2 DEBUG oslo_concurrency.lockutils [None req-2d2c06e7-787e-4616-b5c5-85edd164592a a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "31d1c03c-8272-4aa0-8a60-469bd8ca0853-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.315 2 DEBUG oslo_concurrency.lockutils [None req-2d2c06e7-787e-4616-b5c5-85edd164592a a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "31d1c03c-8272-4aa0-8a60-469bd8ca0853-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.357 2 INFO nova.compute.manager [None req-2d2c06e7-787e-4616-b5c5-85edd164592a a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Terminating instance
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.418 2 DEBUG nova.compute.manager [None req-2d2c06e7-787e-4616-b5c5-85edd164592a a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:43:04 compute-0 kernel: tap92e7c855-ed (unregistering): left promiscuous mode
Oct 02 12:43:04 compute-0 NetworkManager[51160]: <info>  [1759408984.4386] device (tap92e7c855-ed): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.446 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.449 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:04 compute-0 ovn_controller[94336]: 2025-10-02T12:43:04Z|00694|binding|INFO|Releasing lport 92e7c855-ede8-4edf-9cc1-bd4d683e87ca from this chassis (sb_readonly=0)
Oct 02 12:43:04 compute-0 ovn_controller[94336]: 2025-10-02T12:43:04Z|00695|binding|INFO|Setting lport 92e7c855-ede8-4edf-9cc1-bd4d683e87ca down in Southbound
Oct 02 12:43:04 compute-0 ovn_controller[94336]: 2025-10-02T12:43:04Z|00696|binding|INFO|Removing iface tap92e7c855-ed ovn-installed in OVS
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.462 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:04.463 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:7e:0a:d3 10.100.0.14'], port_security=['fa:16:3e:7e:0a:d3 10.100.0.14'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.14/28', 'neutron:device_id': '31d1c03c-8272-4aa0-8a60-469bd8ca0853', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-308a9d2e-30f5-4a94-9bc3-d0e8463f1653', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '6e2a4899168a47618e377cb3ac85ddd2', 'neutron:revision_number': '4', 'neutron:security_group_ids': '47be5342-2ddc-4caf-96cd-a5b8f37810bd', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=f2112ece-3902-43a8-9e6c-27d4f44d0f07, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=92e7c855-ede8-4edf-9cc1-bd4d683e87ca) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:43:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:04.464 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 92e7c855-ede8-4edf-9cc1-bd4d683e87ca in datapath 308a9d2e-30f5-4a94-9bc3-d0e8463f1653 unbound from our chassis
Oct 02 12:43:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:04.465 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 308a9d2e-30f5-4a94-9bc3-d0e8463f1653, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:43:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:04.466 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9ee7d8c4-8c84-480b-94f2-dbd9d1d60e04]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:04.467 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-308a9d2e-30f5-4a94-9bc3-d0e8463f1653 namespace which is not needed anymore
Oct 02 12:43:04 compute-0 systemd[1]: machine-qemu\x2d84\x2dinstance\x2d000000af.scope: Deactivated successfully.
Oct 02 12:43:04 compute-0 systemd[1]: machine-qemu\x2d84\x2dinstance\x2d000000af.scope: Consumed 13.356s CPU time.
Oct 02 12:43:04 compute-0 systemd-machined[152150]: Machine qemu-84-instance-000000af terminated.
Oct 02 12:43:04 compute-0 neutron-haproxy-ovnmeta-308a9d2e-30f5-4a94-9bc3-d0e8463f1653[251004]: [NOTICE]   (251023) : haproxy version is 2.8.14-c23fe91
Oct 02 12:43:04 compute-0 neutron-haproxy-ovnmeta-308a9d2e-30f5-4a94-9bc3-d0e8463f1653[251004]: [NOTICE]   (251023) : path to executable is /usr/sbin/haproxy
Oct 02 12:43:04 compute-0 neutron-haproxy-ovnmeta-308a9d2e-30f5-4a94-9bc3-d0e8463f1653[251004]: [WARNING]  (251023) : Exiting Master process...
Oct 02 12:43:04 compute-0 neutron-haproxy-ovnmeta-308a9d2e-30f5-4a94-9bc3-d0e8463f1653[251004]: [WARNING]  (251023) : Exiting Master process...
Oct 02 12:43:04 compute-0 neutron-haproxy-ovnmeta-308a9d2e-30f5-4a94-9bc3-d0e8463f1653[251004]: [ALERT]    (251023) : Current worker (251025) exited with code 143 (Terminated)
Oct 02 12:43:04 compute-0 neutron-haproxy-ovnmeta-308a9d2e-30f5-4a94-9bc3-d0e8463f1653[251004]: [WARNING]  (251023) : All workers exited. Exiting... (0)
Oct 02 12:43:04 compute-0 systemd[1]: libpod-5322e8270d1af74d671b854da4a1745ddbb2f04f9b536314f66672c488488b81.scope: Deactivated successfully.
Oct 02 12:43:04 compute-0 podman[251212]: 2025-10-02 12:43:04.609142895 +0000 UTC m=+0.060385522 container died 5322e8270d1af74d671b854da4a1745ddbb2f04f9b536314f66672c488488b81 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-308a9d2e-30f5-4a94-9bc3-d0e8463f1653, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001)
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.644 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:04 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-5322e8270d1af74d671b854da4a1745ddbb2f04f9b536314f66672c488488b81-userdata-shm.mount: Deactivated successfully.
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.652 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:04 compute-0 systemd[1]: var-lib-containers-storage-overlay-97e9024b2ddf20bf12e566a2dbde6a7b50cd9fdfabe5191568f5a0efa795ac81-merged.mount: Deactivated successfully.
Oct 02 12:43:04 compute-0 podman[251212]: 2025-10-02 12:43:04.683405486 +0000 UTC m=+0.134648113 container cleanup 5322e8270d1af74d671b854da4a1745ddbb2f04f9b536314f66672c488488b81 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-308a9d2e-30f5-4a94-9bc3-d0e8463f1653, org.label-schema.vendor=CentOS, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:43:04 compute-0 systemd[1]: libpod-conmon-5322e8270d1af74d671b854da4a1745ddbb2f04f9b536314f66672c488488b81.scope: Deactivated successfully.
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.696 2 INFO nova.virt.libvirt.driver [-] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Instance destroyed successfully.
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.697 2 DEBUG nova.objects.instance [None req-2d2c06e7-787e-4616-b5c5-85edd164592a a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lazy-loading 'resources' on Instance uuid 31d1c03c-8272-4aa0-8a60-469bd8ca0853 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.714 2 DEBUG nova.compute.manager [req-08cc2e13-2783-445c-acbc-04eaa6e97291 req-53507156-c0ca-400f-84d9-34d0323688c4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Received event network-vif-unplugged-92e7c855-ede8-4edf-9cc1-bd4d683e87ca external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.715 2 DEBUG oslo_concurrency.lockutils [req-08cc2e13-2783-445c-acbc-04eaa6e97291 req-53507156-c0ca-400f-84d9-34d0323688c4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "31d1c03c-8272-4aa0-8a60-469bd8ca0853-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.715 2 DEBUG oslo_concurrency.lockutils [req-08cc2e13-2783-445c-acbc-04eaa6e97291 req-53507156-c0ca-400f-84d9-34d0323688c4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "31d1c03c-8272-4aa0-8a60-469bd8ca0853-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.715 2 DEBUG oslo_concurrency.lockutils [req-08cc2e13-2783-445c-acbc-04eaa6e97291 req-53507156-c0ca-400f-84d9-34d0323688c4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "31d1c03c-8272-4aa0-8a60-469bd8ca0853-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.716 2 DEBUG nova.compute.manager [req-08cc2e13-2783-445c-acbc-04eaa6e97291 req-53507156-c0ca-400f-84d9-34d0323688c4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] No waiting events found dispatching network-vif-unplugged-92e7c855-ede8-4edf-9cc1-bd4d683e87ca pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.716 2 DEBUG nova.compute.manager [req-08cc2e13-2783-445c-acbc-04eaa6e97291 req-53507156-c0ca-400f-84d9-34d0323688c4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Received event network-vif-unplugged-92e7c855-ede8-4edf-9cc1-bd4d683e87ca for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.720 2 DEBUG nova.virt.libvirt.vif [None req-2d2c06e7-787e-4616-b5c5-85edd164592a a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:42:33Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestNetworkBasicOps-server-79511923',display_name='tempest-TestNetworkBasicOps-server-79511923',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testnetworkbasicops-server-79511923',id=175,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBGjFhtEvmK/YgxCkhguNflXQ4seJsulXHSpWXpH9lcREJOgYaYrJzPAfiUZjq8nFW6YdSh6VSco6K6tZV6JddWrYCJDJZlx/bYzvTXxdCXyphJZTmlouEQenKk9vkKqn5Q==',key_name='tempest-TestNetworkBasicOps-1093149800',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:42:40Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='6e2a4899168a47618e377cb3ac85ddd2',ramdisk_id='',reservation_id='r-6bnlzwip',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-TestNetworkBasicOps-1323893370',owner_user_name='tempest-TestNetworkBasicOps-1323893370-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:42:40Z,user_data=None,user_id='a1898fdf056c4a249c33590f26d4d845',uuid=31d1c03c-8272-4aa0-8a60-469bd8ca0853,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "92e7c855-ede8-4edf-9cc1-bd4d683e87ca", "address": "fa:16:3e:7e:0a:d3", "network": {"id": "308a9d2e-30f5-4a94-9bc3-d0e8463f1653", "bridge": "br-int", "label": "tempest-network-smoke--650526529", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", 
"version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.244", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92e7c855-ed", "ovs_interfaceid": "92e7c855-ede8-4edf-9cc1-bd4d683e87ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.721 2 DEBUG nova.network.os_vif_util [None req-2d2c06e7-787e-4616-b5c5-85edd164592a a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converting VIF {"id": "92e7c855-ede8-4edf-9cc1-bd4d683e87ca", "address": "fa:16:3e:7e:0a:d3", "network": {"id": "308a9d2e-30f5-4a94-9bc3-d0e8463f1653", "bridge": "br-int", "label": "tempest-network-smoke--650526529", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.244", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92e7c855-ed", "ovs_interfaceid": "92e7c855-ede8-4edf-9cc1-bd4d683e87ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.722 2 DEBUG nova.network.os_vif_util [None req-2d2c06e7-787e-4616-b5c5-85edd164592a a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:7e:0a:d3,bridge_name='br-int',has_traffic_filtering=True,id=92e7c855-ede8-4edf-9cc1-bd4d683e87ca,network=Network(308a9d2e-30f5-4a94-9bc3-d0e8463f1653),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap92e7c855-ed') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.722 2 DEBUG os_vif [None req-2d2c06e7-787e-4616-b5c5-85edd164592a a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:7e:0a:d3,bridge_name='br-int',has_traffic_filtering=True,id=92e7c855-ede8-4edf-9cc1-bd4d683e87ca,network=Network(308a9d2e-30f5-4a94-9bc3-d0e8463f1653),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap92e7c855-ed') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.724 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.724 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap92e7c855-ed, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.727 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.730 2 INFO os_vif [None req-2d2c06e7-787e-4616-b5c5-85edd164592a a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:7e:0a:d3,bridge_name='br-int',has_traffic_filtering=True,id=92e7c855-ede8-4edf-9cc1-bd4d683e87ca,network=Network(308a9d2e-30f5-4a94-9bc3-d0e8463f1653),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap92e7c855-ed')
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.730 2 INFO nova.virt.libvirt.driver [None req-2d2c06e7-787e-4616-b5c5-85edd164592a a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Deleting instance files /var/lib/nova/instances/31d1c03c-8272-4aa0-8a60-469bd8ca0853_del
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.731 2 INFO nova.virt.libvirt.driver [None req-2d2c06e7-787e-4616-b5c5-85edd164592a a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Deletion of /var/lib/nova/instances/31d1c03c-8272-4aa0-8a60-469bd8ca0853_del complete
Oct 02 12:43:04 compute-0 podman[251258]: 2025-10-02 12:43:04.791308107 +0000 UTC m=+0.083898626 container remove 5322e8270d1af74d671b854da4a1745ddbb2f04f9b536314f66672c488488b81 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-308a9d2e-30f5-4a94-9bc3-d0e8463f1653, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2)
Oct 02 12:43:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:04.797 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[cc20e688-bf19-4510-a34e-5a5d924c1020]: (4, ('Thu Oct  2 12:43:04 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-308a9d2e-30f5-4a94-9bc3-d0e8463f1653 (5322e8270d1af74d671b854da4a1745ddbb2f04f9b536314f66672c488488b81)\n5322e8270d1af74d671b854da4a1745ddbb2f04f9b536314f66672c488488b81\nThu Oct  2 12:43:04 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-308a9d2e-30f5-4a94-9bc3-d0e8463f1653 (5322e8270d1af74d671b854da4a1745ddbb2f04f9b536314f66672c488488b81)\n5322e8270d1af74d671b854da4a1745ddbb2f04f9b536314f66672c488488b81\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:04.798 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ffe76517-ddcb-4c1c-a836-8b3d18c40ae0]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:04.799 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap308a9d2e-30, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.800 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:04 compute-0 kernel: tap308a9d2e-30: left promiscuous mode
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.812 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:04.815 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5b2dbf64-0982-41b5-bb17-3b77b496e1a9]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:04.853 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[828bb85c-2aad-44f7-9aeb-ee04c12d6efe]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:04.856 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[dcf381cf-cd3e-4a55-97fa-fd8ee2225792]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:04.877 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6c71cdce-9b3d-4fd7-80ee-54eab58a3df4]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 695756, 'reachable_time': 17420, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 251271, 'error': None, 'target': 'ovnmeta-308a9d2e-30f5-4a94-9bc3-d0e8463f1653', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:04 compute-0 systemd[1]: run-netns-ovnmeta\x2d308a9d2e\x2d30f5\x2d4a94\x2d9bc3\x2dd0e8463f1653.mount: Deactivated successfully.
Oct 02 12:43:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:04.881 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-308a9d2e-30f5-4a94-9bc3-d0e8463f1653 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:43:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:04.881 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[1a5fd1f7-ab03-4ca4-9a8d-8dbe19b1cc15]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.891 2 INFO nova.compute.manager [None req-2d2c06e7-787e-4616-b5c5-85edd164592a a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Took 0.47 seconds to destroy the instance on the hypervisor.
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.892 2 DEBUG oslo.service.loopingcall [None req-2d2c06e7-787e-4616-b5c5-85edd164592a a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.892 2 DEBUG nova.compute.manager [-] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:43:04 compute-0 nova_compute[192079]: 2025-10-02 12:43:04.892 2 DEBUG nova.network.neutron [-] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:43:05 compute-0 nova_compute[192079]: 2025-10-02 12:43:05.417 2 DEBUG nova.network.neutron [req-88e4e03d-370b-48ab-9eb0-761ff6ec7a5b req-9a74845a-3d5b-4473-b8f9-6d094f9be8a5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Updated VIF entry in instance network info cache for port 92e7c855-ede8-4edf-9cc1-bd4d683e87ca. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:43:05 compute-0 nova_compute[192079]: 2025-10-02 12:43:05.418 2 DEBUG nova.network.neutron [req-88e4e03d-370b-48ab-9eb0-761ff6ec7a5b req-9a74845a-3d5b-4473-b8f9-6d094f9be8a5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Updating instance_info_cache with network_info: [{"id": "92e7c855-ede8-4edf-9cc1-bd4d683e87ca", "address": "fa:16:3e:7e:0a:d3", "network": {"id": "308a9d2e-30f5-4a94-9bc3-d0e8463f1653", "bridge": "br-int", "label": "tempest-network-smoke--650526529", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6e2a4899168a47618e377cb3ac85ddd2", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap92e7c855-ed", "ovs_interfaceid": "92e7c855-ede8-4edf-9cc1-bd4d683e87ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:43:05 compute-0 nova_compute[192079]: 2025-10-02 12:43:05.475 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:43:05 compute-0 nova_compute[192079]: 2025-10-02 12:43:05.578 2 DEBUG oslo_concurrency.lockutils [req-88e4e03d-370b-48ab-9eb0-761ff6ec7a5b req-9a74845a-3d5b-4473-b8f9-6d094f9be8a5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-31d1c03c-8272-4aa0-8a60-469bd8ca0853" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:43:05 compute-0 nova_compute[192079]: 2025-10-02 12:43:05.828 2 DEBUG nova.network.neutron [-] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:43:05 compute-0 nova_compute[192079]: 2025-10-02 12:43:05.858 2 INFO nova.compute.manager [-] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Took 0.97 seconds to deallocate network for instance.
Oct 02 12:43:06 compute-0 nova_compute[192079]: 2025-10-02 12:43:06.014 2 DEBUG oslo_concurrency.lockutils [None req-2d2c06e7-787e-4616-b5c5-85edd164592a a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:43:06 compute-0 nova_compute[192079]: 2025-10-02 12:43:06.014 2 DEBUG oslo_concurrency.lockutils [None req-2d2c06e7-787e-4616-b5c5-85edd164592a a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:43:06 compute-0 nova_compute[192079]: 2025-10-02 12:43:06.077 2 DEBUG nova.compute.provider_tree [None req-2d2c06e7-787e-4616-b5c5-85edd164592a a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:43:06 compute-0 nova_compute[192079]: 2025-10-02 12:43:06.104 2 DEBUG nova.scheduler.client.report [None req-2d2c06e7-787e-4616-b5c5-85edd164592a a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:43:06 compute-0 nova_compute[192079]: 2025-10-02 12:43:06.137 2 DEBUG nova.compute.manager [req-d2fbcfb3-4b20-4b61-9cb7-dbd1f335c093 req-f70658df-f95b-46e9-a932-c8470d4c7441 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Received event network-vif-deleted-92e7c855-ede8-4edf-9cc1-bd4d683e87ca external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:43:06 compute-0 nova_compute[192079]: 2025-10-02 12:43:06.139 2 DEBUG oslo_concurrency.lockutils [None req-2d2c06e7-787e-4616-b5c5-85edd164592a a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.125s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:43:06 compute-0 nova_compute[192079]: 2025-10-02 12:43:06.200 2 INFO nova.scheduler.client.report [None req-2d2c06e7-787e-4616-b5c5-85edd164592a a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Deleted allocations for instance 31d1c03c-8272-4aa0-8a60-469bd8ca0853
Oct 02 12:43:06 compute-0 nova_compute[192079]: 2025-10-02 12:43:06.277 2 DEBUG oslo_concurrency.lockutils [None req-2d2c06e7-787e-4616-b5c5-85edd164592a a1898fdf056c4a249c33590f26d4d845 6e2a4899168a47618e377cb3ac85ddd2 - - default default] Lock "31d1c03c-8272-4aa0-8a60-469bd8ca0853" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.962s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:43:06 compute-0 nova_compute[192079]: 2025-10-02 12:43:06.816 2 DEBUG nova.compute.manager [req-be38bd95-caba-43e9-b09a-625b15c94cb7 req-9186a0b0-71a3-459c-a7ab-a89e9fd52192 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Received event network-vif-plugged-92e7c855-ede8-4edf-9cc1-bd4d683e87ca external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:43:06 compute-0 nova_compute[192079]: 2025-10-02 12:43:06.816 2 DEBUG oslo_concurrency.lockutils [req-be38bd95-caba-43e9-b09a-625b15c94cb7 req-9186a0b0-71a3-459c-a7ab-a89e9fd52192 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "31d1c03c-8272-4aa0-8a60-469bd8ca0853-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:43:06 compute-0 nova_compute[192079]: 2025-10-02 12:43:06.816 2 DEBUG oslo_concurrency.lockutils [req-be38bd95-caba-43e9-b09a-625b15c94cb7 req-9186a0b0-71a3-459c-a7ab-a89e9fd52192 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "31d1c03c-8272-4aa0-8a60-469bd8ca0853-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:43:06 compute-0 nova_compute[192079]: 2025-10-02 12:43:06.817 2 DEBUG oslo_concurrency.lockutils [req-be38bd95-caba-43e9-b09a-625b15c94cb7 req-9186a0b0-71a3-459c-a7ab-a89e9fd52192 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "31d1c03c-8272-4aa0-8a60-469bd8ca0853-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:43:06 compute-0 nova_compute[192079]: 2025-10-02 12:43:06.817 2 DEBUG nova.compute.manager [req-be38bd95-caba-43e9-b09a-625b15c94cb7 req-9186a0b0-71a3-459c-a7ab-a89e9fd52192 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] No waiting events found dispatching network-vif-plugged-92e7c855-ede8-4edf-9cc1-bd4d683e87ca pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:43:06 compute-0 nova_compute[192079]: 2025-10-02 12:43:06.817 2 WARNING nova.compute.manager [req-be38bd95-caba-43e9-b09a-625b15c94cb7 req-9186a0b0-71a3-459c-a7ab-a89e9fd52192 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Received unexpected event network-vif-plugged-92e7c855-ede8-4edf-9cc1-bd4d683e87ca for instance with vm_state deleted and task_state None.
Oct 02 12:43:07 compute-0 nova_compute[192079]: 2025-10-02 12:43:07.908 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:08 compute-0 nova_compute[192079]: 2025-10-02 12:43:08.572 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:09 compute-0 nova_compute[192079]: 2025-10-02 12:43:09.727 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:10 compute-0 nova_compute[192079]: 2025-10-02 12:43:10.755 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:10 compute-0 nova_compute[192079]: 2025-10-02 12:43:10.902 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:11 compute-0 podman[251275]: 2025-10-02 12:43:11.157789622 +0000 UTC m=+0.066820299 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, config_id=multipathd, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, container_name=multipathd, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:43:11 compute-0 podman[251274]: 2025-10-02 12:43:11.19209989 +0000 UTC m=+0.093804257 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Red Hat, Inc., url=https://catalog.redhat.com/en/search?searchType=containers, build-date=2025-08-20T13:12:41, com.redhat.component=ubi9-minimal-container, config_id=edpm, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.tags=minimal rhel9, version=9.6, name=ubi9-minimal, io.openshift.expose-services=, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., architecture=x86_64, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', 
'/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, vcs-type=git, container_name=openstack_network_exporter, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, vendor=Red Hat, Inc., description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., managed_by=edpm_ansible, release=1755695350, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, distribution-scope=public, io.buildah.version=1.33.7)
Oct 02 12:43:13 compute-0 nova_compute[192079]: 2025-10-02 12:43:13.573 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:14 compute-0 nova_compute[192079]: 2025-10-02 12:43:14.773 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:43:17.113 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:43:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:43:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:43:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:43:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:43:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:43:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:43:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:43:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:43:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:43:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:43:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:43:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:43:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:43:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:43:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:43:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:43:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:43:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:43:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:43:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:43:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:43:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster cpu, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:43:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:43:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:43:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:43:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.capacity, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:43:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:43:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:43:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:43:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster memory.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:43:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:43:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:43:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:43:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:43:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:43:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.iops, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:43:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:43:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:43:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:43:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:43:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:43:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:43:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:43:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:43:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:43:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:43:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:43:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.allocation, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:43:17 compute-0 podman[251314]: 2025-10-02 12:43:17.136382418 +0000 UTC m=+0.049489285 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:43:17 compute-0 podman[251315]: 2025-10-02 12:43:17.16573337 +0000 UTC m=+0.068847133 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=iscsid, managed_by=edpm_ansible, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true)
Oct 02 12:43:18 compute-0 nova_compute[192079]: 2025-10-02 12:43:18.225 2 DEBUG oslo_concurrency.lockutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Acquiring lock "05cbee12-b07a-4b63-9f29-17f035a58f9b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:43:18 compute-0 nova_compute[192079]: 2025-10-02 12:43:18.226 2 DEBUG oslo_concurrency.lockutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Lock "05cbee12-b07a-4b63-9f29-17f035a58f9b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:43:18 compute-0 nova_compute[192079]: 2025-10-02 12:43:18.241 2 DEBUG nova.compute.manager [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:43:18 compute-0 nova_compute[192079]: 2025-10-02 12:43:18.339 2 DEBUG oslo_concurrency.lockutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:43:18 compute-0 nova_compute[192079]: 2025-10-02 12:43:18.340 2 DEBUG oslo_concurrency.lockutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:43:18 compute-0 nova_compute[192079]: 2025-10-02 12:43:18.346 2 DEBUG nova.virt.hardware [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:43:18 compute-0 nova_compute[192079]: 2025-10-02 12:43:18.346 2 INFO nova.compute.claims [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:43:18 compute-0 nova_compute[192079]: 2025-10-02 12:43:18.473 2 DEBUG nova.compute.provider_tree [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:43:18 compute-0 nova_compute[192079]: 2025-10-02 12:43:18.500 2 DEBUG nova.scheduler.client.report [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:43:18 compute-0 nova_compute[192079]: 2025-10-02 12:43:18.527 2 DEBUG oslo_concurrency.lockutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.187s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:43:18 compute-0 nova_compute[192079]: 2025-10-02 12:43:18.527 2 DEBUG nova.compute.manager [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:43:18 compute-0 nova_compute[192079]: 2025-10-02 12:43:18.574 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:18 compute-0 nova_compute[192079]: 2025-10-02 12:43:18.588 2 DEBUG nova.compute.manager [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:43:18 compute-0 nova_compute[192079]: 2025-10-02 12:43:18.588 2 DEBUG nova.network.neutron [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:43:18 compute-0 nova_compute[192079]: 2025-10-02 12:43:18.613 2 INFO nova.virt.libvirt.driver [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:43:18 compute-0 nova_compute[192079]: 2025-10-02 12:43:18.643 2 DEBUG nova.compute.manager [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:43:18 compute-0 nova_compute[192079]: 2025-10-02 12:43:18.781 2 DEBUG nova.compute.manager [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:43:18 compute-0 nova_compute[192079]: 2025-10-02 12:43:18.784 2 DEBUG nova.virt.libvirt.driver [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:43:18 compute-0 nova_compute[192079]: 2025-10-02 12:43:18.785 2 INFO nova.virt.libvirt.driver [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Creating image(s)
Oct 02 12:43:18 compute-0 nova_compute[192079]: 2025-10-02 12:43:18.786 2 DEBUG oslo_concurrency.lockutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Acquiring lock "/var/lib/nova/instances/05cbee12-b07a-4b63-9f29-17f035a58f9b/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:43:18 compute-0 nova_compute[192079]: 2025-10-02 12:43:18.786 2 DEBUG oslo_concurrency.lockutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Lock "/var/lib/nova/instances/05cbee12-b07a-4b63-9f29-17f035a58f9b/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:43:18 compute-0 nova_compute[192079]: 2025-10-02 12:43:18.787 2 DEBUG oslo_concurrency.lockutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Lock "/var/lib/nova/instances/05cbee12-b07a-4b63-9f29-17f035a58f9b/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:43:18 compute-0 nova_compute[192079]: 2025-10-02 12:43:18.816 2 DEBUG oslo_concurrency.processutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:43:18 compute-0 nova_compute[192079]: 2025-10-02 12:43:18.890 2 DEBUG oslo_concurrency.processutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.074s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:43:18 compute-0 nova_compute[192079]: 2025-10-02 12:43:18.891 2 DEBUG oslo_concurrency.lockutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:43:18 compute-0 nova_compute[192079]: 2025-10-02 12:43:18.892 2 DEBUG oslo_concurrency.lockutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:43:18 compute-0 nova_compute[192079]: 2025-10-02 12:43:18.907 2 DEBUG oslo_concurrency.processutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:43:18 compute-0 nova_compute[192079]: 2025-10-02 12:43:18.970 2 DEBUG oslo_concurrency.processutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.063s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:43:18 compute-0 nova_compute[192079]: 2025-10-02 12:43:18.972 2 DEBUG oslo_concurrency.processutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/05cbee12-b07a-4b63-9f29-17f035a58f9b/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:43:19 compute-0 nova_compute[192079]: 2025-10-02 12:43:19.138 2 DEBUG oslo_concurrency.processutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/05cbee12-b07a-4b63-9f29-17f035a58f9b/disk 1073741824" returned: 0 in 0.166s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:43:19 compute-0 nova_compute[192079]: 2025-10-02 12:43:19.139 2 DEBUG oslo_concurrency.lockutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.247s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:43:19 compute-0 nova_compute[192079]: 2025-10-02 12:43:19.139 2 DEBUG oslo_concurrency.processutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:43:19 compute-0 nova_compute[192079]: 2025-10-02 12:43:19.193 2 DEBUG oslo_concurrency.processutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.054s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:43:19 compute-0 nova_compute[192079]: 2025-10-02 12:43:19.195 2 DEBUG nova.virt.disk.api [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Checking if we can resize image /var/lib/nova/instances/05cbee12-b07a-4b63-9f29-17f035a58f9b/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:43:19 compute-0 nova_compute[192079]: 2025-10-02 12:43:19.195 2 DEBUG oslo_concurrency.processutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/05cbee12-b07a-4b63-9f29-17f035a58f9b/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:43:19 compute-0 nova_compute[192079]: 2025-10-02 12:43:19.287 2 DEBUG oslo_concurrency.processutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/05cbee12-b07a-4b63-9f29-17f035a58f9b/disk --force-share --output=json" returned: 0 in 0.091s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:43:19 compute-0 nova_compute[192079]: 2025-10-02 12:43:19.288 2 DEBUG nova.virt.disk.api [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Cannot resize image /var/lib/nova/instances/05cbee12-b07a-4b63-9f29-17f035a58f9b/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:43:19 compute-0 nova_compute[192079]: 2025-10-02 12:43:19.288 2 DEBUG nova.objects.instance [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Lazy-loading 'migration_context' on Instance uuid 05cbee12-b07a-4b63-9f29-17f035a58f9b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:43:19 compute-0 nova_compute[192079]: 2025-10-02 12:43:19.316 2 DEBUG nova.virt.libvirt.driver [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:43:19 compute-0 nova_compute[192079]: 2025-10-02 12:43:19.316 2 DEBUG nova.virt.libvirt.driver [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Ensure instance console log exists: /var/lib/nova/instances/05cbee12-b07a-4b63-9f29-17f035a58f9b/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:43:19 compute-0 nova_compute[192079]: 2025-10-02 12:43:19.317 2 DEBUG oslo_concurrency.lockutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:43:19 compute-0 nova_compute[192079]: 2025-10-02 12:43:19.317 2 DEBUG oslo_concurrency.lockutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:43:19 compute-0 nova_compute[192079]: 2025-10-02 12:43:19.318 2 DEBUG oslo_concurrency.lockutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:43:19 compute-0 nova_compute[192079]: 2025-10-02 12:43:19.694 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759408984.6932614, 31d1c03c-8272-4aa0-8a60-469bd8ca0853 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:43:19 compute-0 nova_compute[192079]: 2025-10-02 12:43:19.695 2 INFO nova.compute.manager [-] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] VM Stopped (Lifecycle Event)
Oct 02 12:43:19 compute-0 nova_compute[192079]: 2025-10-02 12:43:19.776 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:19 compute-0 nova_compute[192079]: 2025-10-02 12:43:19.799 2 DEBUG nova.compute.manager [None req-15333118-ccfe-410b-b3d7-e84ec1e23593 - - - - - -] [instance: 31d1c03c-8272-4aa0-8a60-469bd8ca0853] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:43:19 compute-0 nova_compute[192079]: 2025-10-02 12:43:19.909 2 DEBUG nova.network.neutron [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Successfully created port: 6b9bd275-f90c-46d0-938f-7a949ade1669 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:43:21 compute-0 nova_compute[192079]: 2025-10-02 12:43:21.506 2 DEBUG nova.network.neutron [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Successfully updated port: 6b9bd275-f90c-46d0-938f-7a949ade1669 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:43:21 compute-0 nova_compute[192079]: 2025-10-02 12:43:21.525 2 DEBUG oslo_concurrency.lockutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Acquiring lock "refresh_cache-05cbee12-b07a-4b63-9f29-17f035a58f9b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:43:21 compute-0 nova_compute[192079]: 2025-10-02 12:43:21.525 2 DEBUG oslo_concurrency.lockutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Acquired lock "refresh_cache-05cbee12-b07a-4b63-9f29-17f035a58f9b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:43:21 compute-0 nova_compute[192079]: 2025-10-02 12:43:21.526 2 DEBUG nova.network.neutron [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:43:21 compute-0 nova_compute[192079]: 2025-10-02 12:43:21.633 2 DEBUG nova.compute.manager [req-71295ebc-6ed3-4384-bd4a-db9834ecb4ac req-c37e069e-8ab2-4102-94f2-5e6c7df1e1ed 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Received event network-changed-6b9bd275-f90c-46d0-938f-7a949ade1669 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:43:21 compute-0 nova_compute[192079]: 2025-10-02 12:43:21.634 2 DEBUG nova.compute.manager [req-71295ebc-6ed3-4384-bd4a-db9834ecb4ac req-c37e069e-8ab2-4102-94f2-5e6c7df1e1ed 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Refreshing instance network info cache due to event network-changed-6b9bd275-f90c-46d0-938f-7a949ade1669. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:43:21 compute-0 nova_compute[192079]: 2025-10-02 12:43:21.634 2 DEBUG oslo_concurrency.lockutils [req-71295ebc-6ed3-4384-bd4a-db9834ecb4ac req-c37e069e-8ab2-4102-94f2-5e6c7df1e1ed 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-05cbee12-b07a-4b63-9f29-17f035a58f9b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:43:21 compute-0 nova_compute[192079]: 2025-10-02 12:43:21.757 2 DEBUG nova.network.neutron [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.793 2 DEBUG nova.network.neutron [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Updating instance_info_cache with network_info: [{"id": "6b9bd275-f90c-46d0-938f-7a949ade1669", "address": "fa:16:3e:f5:c5:b5", "network": {"id": "89c6a9c2-23c1-4b8b-81b9-3050a42a016f", "bridge": "br-int", "label": "tempest-TestServerMultinode-1758818255-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "4a5d17af56da453cb0073e5e2be72803", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap6b9bd275-f9", "ovs_interfaceid": "6b9bd275-f90c-46d0-938f-7a949ade1669", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.822 2 DEBUG oslo_concurrency.lockutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Releasing lock "refresh_cache-05cbee12-b07a-4b63-9f29-17f035a58f9b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.823 2 DEBUG nova.compute.manager [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Instance network_info: |[{"id": "6b9bd275-f90c-46d0-938f-7a949ade1669", "address": "fa:16:3e:f5:c5:b5", "network": {"id": "89c6a9c2-23c1-4b8b-81b9-3050a42a016f", "bridge": "br-int", "label": "tempest-TestServerMultinode-1758818255-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "4a5d17af56da453cb0073e5e2be72803", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap6b9bd275-f9", "ovs_interfaceid": "6b9bd275-f90c-46d0-938f-7a949ade1669", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.823 2 DEBUG oslo_concurrency.lockutils [req-71295ebc-6ed3-4384-bd4a-db9834ecb4ac req-c37e069e-8ab2-4102-94f2-5e6c7df1e1ed 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-05cbee12-b07a-4b63-9f29-17f035a58f9b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.823 2 DEBUG nova.network.neutron [req-71295ebc-6ed3-4384-bd4a-db9834ecb4ac req-c37e069e-8ab2-4102-94f2-5e6c7df1e1ed 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Refreshing network info cache for port 6b9bd275-f90c-46d0-938f-7a949ade1669 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.826 2 DEBUG nova.virt.libvirt.driver [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Start _get_guest_xml network_info=[{"id": "6b9bd275-f90c-46d0-938f-7a949ade1669", "address": "fa:16:3e:f5:c5:b5", "network": {"id": "89c6a9c2-23c1-4b8b-81b9-3050a42a016f", "bridge": "br-int", "label": "tempest-TestServerMultinode-1758818255-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "4a5d17af56da453cb0073e5e2be72803", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap6b9bd275-f9", "ovs_interfaceid": "6b9bd275-f90c-46d0-938f-7a949ade1669", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.830 2 WARNING nova.virt.libvirt.driver [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.834 2 DEBUG nova.virt.libvirt.host [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.835 2 DEBUG nova.virt.libvirt.host [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.838 2 DEBUG nova.virt.libvirt.host [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.838 2 DEBUG nova.virt.libvirt.host [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.839 2 DEBUG nova.virt.libvirt.driver [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.840 2 DEBUG nova.virt.hardware [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.840 2 DEBUG nova.virt.hardware [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.840 2 DEBUG nova.virt.hardware [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.840 2 DEBUG nova.virt.hardware [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.840 2 DEBUG nova.virt.hardware [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.841 2 DEBUG nova.virt.hardware [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.841 2 DEBUG nova.virt.hardware [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.841 2 DEBUG nova.virt.hardware [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.841 2 DEBUG nova.virt.hardware [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.842 2 DEBUG nova.virt.hardware [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.842 2 DEBUG nova.virt.hardware [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.845 2 DEBUG nova.virt.libvirt.vif [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:43:17Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestServerMultinode-server-164108363',display_name='tempest-TestServerMultinode-server-164108363',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testservermultinode-server-164108363',id=177,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='0acd1c52a26d4654b24111e5ad4814f2',ramdisk_id='',reservation_id='r-5h040yme',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='admin,reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestServerMultinode-1539275040',owner_user_name='tempest-TestServerMultinode-1539275040-
project-admin'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:43:18Z,user_data=None,user_id='7ed2a973cfed4867a095aecf0c6453fb',uuid=05cbee12-b07a-4b63-9f29-17f035a58f9b,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "6b9bd275-f90c-46d0-938f-7a949ade1669", "address": "fa:16:3e:f5:c5:b5", "network": {"id": "89c6a9c2-23c1-4b8b-81b9-3050a42a016f", "bridge": "br-int", "label": "tempest-TestServerMultinode-1758818255-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "4a5d17af56da453cb0073e5e2be72803", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap6b9bd275-f9", "ovs_interfaceid": "6b9bd275-f90c-46d0-938f-7a949ade1669", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.845 2 DEBUG nova.network.os_vif_util [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Converting VIF {"id": "6b9bd275-f90c-46d0-938f-7a949ade1669", "address": "fa:16:3e:f5:c5:b5", "network": {"id": "89c6a9c2-23c1-4b8b-81b9-3050a42a016f", "bridge": "br-int", "label": "tempest-TestServerMultinode-1758818255-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "4a5d17af56da453cb0073e5e2be72803", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap6b9bd275-f9", "ovs_interfaceid": "6b9bd275-f90c-46d0-938f-7a949ade1669", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.845 2 DEBUG nova.network.os_vif_util [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:f5:c5:b5,bridge_name='br-int',has_traffic_filtering=True,id=6b9bd275-f90c-46d0-938f-7a949ade1669,network=Network(89c6a9c2-23c1-4b8b-81b9-3050a42a016f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap6b9bd275-f9') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.846 2 DEBUG nova.objects.instance [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Lazy-loading 'pci_devices' on Instance uuid 05cbee12-b07a-4b63-9f29-17f035a58f9b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.873 2 DEBUG nova.virt.libvirt.driver [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:43:22 compute-0 nova_compute[192079]:   <uuid>05cbee12-b07a-4b63-9f29-17f035a58f9b</uuid>
Oct 02 12:43:22 compute-0 nova_compute[192079]:   <name>instance-000000b1</name>
Oct 02 12:43:22 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:43:22 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:43:22 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:43:22 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:       <nova:name>tempest-TestServerMultinode-server-164108363</nova:name>
Oct 02 12:43:22 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:43:22</nova:creationTime>
Oct 02 12:43:22 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:43:22 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:43:22 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:43:22 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:43:22 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:43:22 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:43:22 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:43:22 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:43:22 compute-0 nova_compute[192079]:         <nova:user uuid="7ed2a973cfed4867a095aecf0c6453fb">tempest-TestServerMultinode-1539275040-project-admin</nova:user>
Oct 02 12:43:22 compute-0 nova_compute[192079]:         <nova:project uuid="0acd1c52a26d4654b24111e5ad4814f2">tempest-TestServerMultinode-1539275040</nova:project>
Oct 02 12:43:22 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:43:22 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:43:22 compute-0 nova_compute[192079]:         <nova:port uuid="6b9bd275-f90c-46d0-938f-7a949ade1669">
Oct 02 12:43:22 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.13" ipVersion="4"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:43:22 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:43:22 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:43:22 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <system>
Oct 02 12:43:22 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:43:22 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:43:22 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:43:22 compute-0 nova_compute[192079]:       <entry name="serial">05cbee12-b07a-4b63-9f29-17f035a58f9b</entry>
Oct 02 12:43:22 compute-0 nova_compute[192079]:       <entry name="uuid">05cbee12-b07a-4b63-9f29-17f035a58f9b</entry>
Oct 02 12:43:22 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     </system>
Oct 02 12:43:22 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:43:22 compute-0 nova_compute[192079]:   <os>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:   </os>
Oct 02 12:43:22 compute-0 nova_compute[192079]:   <features>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:   </features>
Oct 02 12:43:22 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:43:22 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:43:22 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:43:22 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/05cbee12-b07a-4b63-9f29-17f035a58f9b/disk"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:43:22 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/05cbee12-b07a-4b63-9f29-17f035a58f9b/disk.config"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:43:22 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:f5:c5:b5"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:       <target dev="tap6b9bd275-f9"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:43:22 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/05cbee12-b07a-4b63-9f29-17f035a58f9b/console.log" append="off"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <video>
Oct 02 12:43:22 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     </video>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:43:22 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:43:22 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:43:22 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:43:22 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:43:22 compute-0 nova_compute[192079]: </domain>
Oct 02 12:43:22 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.874 2 DEBUG nova.compute.manager [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Preparing to wait for external event network-vif-plugged-6b9bd275-f90c-46d0-938f-7a949ade1669 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.875 2 DEBUG oslo_concurrency.lockutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Acquiring lock "05cbee12-b07a-4b63-9f29-17f035a58f9b-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.875 2 DEBUG oslo_concurrency.lockutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Lock "05cbee12-b07a-4b63-9f29-17f035a58f9b-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.876 2 DEBUG oslo_concurrency.lockutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Lock "05cbee12-b07a-4b63-9f29-17f035a58f9b-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.877 2 DEBUG nova.virt.libvirt.vif [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:43:17Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-TestServerMultinode-server-164108363',display_name='tempest-TestServerMultinode-server-164108363',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testservermultinode-server-164108363',id=177,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='0acd1c52a26d4654b24111e5ad4814f2',ramdisk_id='',reservation_id='r-5h040yme',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='admin,reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestServerMultinode-1539275040',owner_user_name='tempest-TestServerMultinode-1
539275040-project-admin'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:43:18Z,user_data=None,user_id='7ed2a973cfed4867a095aecf0c6453fb',uuid=05cbee12-b07a-4b63-9f29-17f035a58f9b,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "6b9bd275-f90c-46d0-938f-7a949ade1669", "address": "fa:16:3e:f5:c5:b5", "network": {"id": "89c6a9c2-23c1-4b8b-81b9-3050a42a016f", "bridge": "br-int", "label": "tempest-TestServerMultinode-1758818255-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "4a5d17af56da453cb0073e5e2be72803", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap6b9bd275-f9", "ovs_interfaceid": "6b9bd275-f90c-46d0-938f-7a949ade1669", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.878 2 DEBUG nova.network.os_vif_util [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Converting VIF {"id": "6b9bd275-f90c-46d0-938f-7a949ade1669", "address": "fa:16:3e:f5:c5:b5", "network": {"id": "89c6a9c2-23c1-4b8b-81b9-3050a42a016f", "bridge": "br-int", "label": "tempest-TestServerMultinode-1758818255-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "4a5d17af56da453cb0073e5e2be72803", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap6b9bd275-f9", "ovs_interfaceid": "6b9bd275-f90c-46d0-938f-7a949ade1669", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.879 2 DEBUG nova.network.os_vif_util [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:f5:c5:b5,bridge_name='br-int',has_traffic_filtering=True,id=6b9bd275-f90c-46d0-938f-7a949ade1669,network=Network(89c6a9c2-23c1-4b8b-81b9-3050a42a016f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap6b9bd275-f9') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.879 2 DEBUG os_vif [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:f5:c5:b5,bridge_name='br-int',has_traffic_filtering=True,id=6b9bd275-f90c-46d0-938f-7a949ade1669,network=Network(89c6a9c2-23c1-4b8b-81b9-3050a42a016f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap6b9bd275-f9') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.880 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.881 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.882 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.885 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.885 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap6b9bd275-f9, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.886 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap6b9bd275-f9, col_values=(('external_ids', {'iface-id': '6b9bd275-f90c-46d0-938f-7a949ade1669', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:f5:c5:b5', 'vm-uuid': '05cbee12-b07a-4b63-9f29-17f035a58f9b'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:43:22 compute-0 NetworkManager[51160]: <info>  [1759409002.9242] manager: (tap6b9bd275-f9): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/341)
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.924 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.927 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.931 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:22 compute-0 nova_compute[192079]: 2025-10-02 12:43:22.932 2 INFO os_vif [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:f5:c5:b5,bridge_name='br-int',has_traffic_filtering=True,id=6b9bd275-f90c-46d0-938f-7a949ade1669,network=Network(89c6a9c2-23c1-4b8b-81b9-3050a42a016f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap6b9bd275-f9')
Oct 02 12:43:23 compute-0 nova_compute[192079]: 2025-10-02 12:43:23.056 2 DEBUG nova.virt.libvirt.driver [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:43:23 compute-0 nova_compute[192079]: 2025-10-02 12:43:23.056 2 DEBUG nova.virt.libvirt.driver [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:43:23 compute-0 nova_compute[192079]: 2025-10-02 12:43:23.057 2 DEBUG nova.virt.libvirt.driver [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] No VIF found with MAC fa:16:3e:f5:c5:b5, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:43:23 compute-0 nova_compute[192079]: 2025-10-02 12:43:23.058 2 INFO nova.virt.libvirt.driver [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Using config drive
Oct 02 12:43:23 compute-0 nova_compute[192079]: 2025-10-02 12:43:23.491 2 INFO nova.virt.libvirt.driver [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Creating config drive at /var/lib/nova/instances/05cbee12-b07a-4b63-9f29-17f035a58f9b/disk.config
Oct 02 12:43:23 compute-0 nova_compute[192079]: 2025-10-02 12:43:23.496 2 DEBUG oslo_concurrency.processutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/05cbee12-b07a-4b63-9f29-17f035a58f9b/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp0fvsfsfo execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:43:23 compute-0 nova_compute[192079]: 2025-10-02 12:43:23.576 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:23 compute-0 nova_compute[192079]: 2025-10-02 12:43:23.620 2 DEBUG oslo_concurrency.processutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/05cbee12-b07a-4b63-9f29-17f035a58f9b/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmp0fvsfsfo" returned: 0 in 0.124s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:43:23 compute-0 kernel: tap6b9bd275-f9: entered promiscuous mode
Oct 02 12:43:23 compute-0 NetworkManager[51160]: <info>  [1759409003.6860] manager: (tap6b9bd275-f9): new Tun device (/org/freedesktop/NetworkManager/Devices/342)
Oct 02 12:43:23 compute-0 ovn_controller[94336]: 2025-10-02T12:43:23Z|00697|binding|INFO|Claiming lport 6b9bd275-f90c-46d0-938f-7a949ade1669 for this chassis.
Oct 02 12:43:23 compute-0 ovn_controller[94336]: 2025-10-02T12:43:23Z|00698|binding|INFO|6b9bd275-f90c-46d0-938f-7a949ade1669: Claiming fa:16:3e:f5:c5:b5 10.100.0.13
Oct 02 12:43:23 compute-0 nova_compute[192079]: 2025-10-02 12:43:23.688 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:23 compute-0 nova_compute[192079]: 2025-10-02 12:43:23.691 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:23.707 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:f5:c5:b5 10.100.0.13'], port_security=['fa:16:3e:f5:c5:b5 10.100.0.13'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.13/28', 'neutron:device_id': '05cbee12-b07a-4b63-9f29-17f035a58f9b', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-89c6a9c2-23c1-4b8b-81b9-3050a42a016f', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '0acd1c52a26d4654b24111e5ad4814f2', 'neutron:revision_number': '2', 'neutron:security_group_ids': '5f9d53e4-02f4-4598-9a8f-67bc82369860', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=7c1b0270-8f0a-4540-b305-4a4654e80399, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=6b9bd275-f90c-46d0-938f-7a949ade1669) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:43:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:23.708 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 6b9bd275-f90c-46d0-938f-7a949ade1669 in datapath 89c6a9c2-23c1-4b8b-81b9-3050a42a016f bound to our chassis
Oct 02 12:43:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:23.709 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 89c6a9c2-23c1-4b8b-81b9-3050a42a016f
Oct 02 12:43:23 compute-0 systemd-udevd[251395]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:43:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:23.719 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1ccc7140-7b79-4429-b09e-c6a667709ec9]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:23.720 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap89c6a9c2-21 in ovnmeta-89c6a9c2-23c1-4b8b-81b9-3050a42a016f namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:43:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:23.723 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap89c6a9c2-20 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:43:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:23.724 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f1ac1836-bee7-45ee-8239-ce9698f1096f]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:23.725 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[643ac167-18aa-49f8-946e-1e979113ab31]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:23 compute-0 NetworkManager[51160]: <info>  [1759409003.7318] device (tap6b9bd275-f9): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:43:23 compute-0 NetworkManager[51160]: <info>  [1759409003.7329] device (tap6b9bd275-f9): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:43:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:23.737 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[7e8bfbca-c709-4b26-80af-d38a6ec4bffd]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:23 compute-0 systemd-machined[152150]: New machine qemu-85-instance-000000b1.
Oct 02 12:43:23 compute-0 nova_compute[192079]: 2025-10-02 12:43:23.750 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:23 compute-0 systemd[1]: Started Virtual Machine qemu-85-instance-000000b1.
Oct 02 12:43:23 compute-0 ovn_controller[94336]: 2025-10-02T12:43:23Z|00699|binding|INFO|Setting lport 6b9bd275-f90c-46d0-938f-7a949ade1669 ovn-installed in OVS
Oct 02 12:43:23 compute-0 ovn_controller[94336]: 2025-10-02T12:43:23Z|00700|binding|INFO|Setting lport 6b9bd275-f90c-46d0-938f-7a949ade1669 up in Southbound
Oct 02 12:43:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:23.757 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[adb4d338-6652-4faf-bda2-10ceb7e3094f]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:23 compute-0 nova_compute[192079]: 2025-10-02 12:43:23.757 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:23.791 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[3e367085-a1b4-4b2f-be86-22fab576182a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:23.795 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[948f1d3b-c038-4feb-bfea-9cb5fd5f0faf]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:23 compute-0 NetworkManager[51160]: <info>  [1759409003.7977] manager: (tap89c6a9c2-20): new Veth device (/org/freedesktop/NetworkManager/Devices/343)
Oct 02 12:43:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:23.835 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[c11a15be-4435-48e0-a935-cc0fc2a2cbb2]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:23.838 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[b8c9ed48-179d-4acd-aaac-f6585e41d103]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:23 compute-0 NetworkManager[51160]: <info>  [1759409003.8614] device (tap89c6a9c2-20): carrier: link connected
Oct 02 12:43:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:23.866 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[9279be41-cbc3-47de-8af4-6cb0b8d74b67]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:23.881 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[72217329-8c11-457d-a377-d6c531af910e]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap89c6a9c2-21'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:d4:39:14'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 218], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 700149, 'reachable_time': 42281, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 251429, 'error': None, 'target': 'ovnmeta-89c6a9c2-23c1-4b8b-81b9-3050a42a016f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:23.900 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3f4ad6ce-6cd8-41ca-8e93-e46002a9528a]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fed4:3914'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 700148, 'tstamp': 700148}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 251430, 'error': None, 'target': 'ovnmeta-89c6a9c2-23c1-4b8b-81b9-3050a42a016f', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:23.919 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2513caa9-e3dc-4611-b94e-96dddb946fc8]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap89c6a9c2-21'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:d4:39:14'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 2, 'tx_packets': 1, 'rx_bytes': 196, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 2, 'tx_packets': 1, 'rx_bytes': 196, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 218], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 700149, 'reachable_time': 42281, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 2, 'inoctets': 168, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 2, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 168, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 2, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 251431, 'error': None, 'target': 'ovnmeta-89c6a9c2-23c1-4b8b-81b9-3050a42a016f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:23.949 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b6833ca2-613a-4242-842f-92c78ebbf05f]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:24.007 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f6596d0d-d973-489b-9fd7-bb12c26e4f36]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:24.008 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap89c6a9c2-20, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:24.009 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:24.009 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap89c6a9c2-20, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:43:24 compute-0 nova_compute[192079]: 2025-10-02 12:43:24.088 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:24 compute-0 NetworkManager[51160]: <info>  [1759409004.0891] manager: (tap89c6a9c2-20): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/344)
Oct 02 12:43:24 compute-0 kernel: tap89c6a9c2-20: entered promiscuous mode
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:24.091 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap89c6a9c2-20, col_values=(('external_ids', {'iface-id': 'f668a745-fb31-4662-9099-e8e7982b3bbb'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:43:24 compute-0 nova_compute[192079]: 2025-10-02 12:43:24.093 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:24 compute-0 ovn_controller[94336]: 2025-10-02T12:43:24Z|00701|binding|INFO|Releasing lport f668a745-fb31-4662-9099-e8e7982b3bbb from this chassis (sb_readonly=0)
Oct 02 12:43:24 compute-0 nova_compute[192079]: 2025-10-02 12:43:24.107 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:24 compute-0 nova_compute[192079]: 2025-10-02 12:43:24.108 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:24.109 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/89c6a9c2-23c1-4b8b-81b9-3050a42a016f.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/89c6a9c2-23c1-4b8b-81b9-3050a42a016f.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:24.110 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e406aa22-600e-4a8a-87fd-d98e630eea04]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:24.111 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-89c6a9c2-23c1-4b8b-81b9-3050a42a016f
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/89c6a9c2-23c1-4b8b-81b9-3050a42a016f.pid.haproxy
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 89c6a9c2-23c1-4b8b-81b9-3050a42a016f
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:43:24 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:24.113 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-89c6a9c2-23c1-4b8b-81b9-3050a42a016f', 'env', 'PROCESS_TAG=haproxy-89c6a9c2-23c1-4b8b-81b9-3050a42a016f', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/89c6a9c2-23c1-4b8b-81b9-3050a42a016f.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:43:24 compute-0 podman[251470]: 2025-10-02 12:43:24.462852724 +0000 UTC m=+0.021886359 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:43:24 compute-0 nova_compute[192079]: 2025-10-02 12:43:24.600 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759409004.599435, 05cbee12-b07a-4b63-9f29-17f035a58f9b => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:43:24 compute-0 nova_compute[192079]: 2025-10-02 12:43:24.600 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] VM Started (Lifecycle Event)
Oct 02 12:43:24 compute-0 nova_compute[192079]: 2025-10-02 12:43:24.627 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:43:24 compute-0 nova_compute[192079]: 2025-10-02 12:43:24.631 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759409004.5996346, 05cbee12-b07a-4b63-9f29-17f035a58f9b => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:43:24 compute-0 nova_compute[192079]: 2025-10-02 12:43:24.631 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] VM Paused (Lifecycle Event)
Oct 02 12:43:24 compute-0 nova_compute[192079]: 2025-10-02 12:43:24.650 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:43:24 compute-0 nova_compute[192079]: 2025-10-02 12:43:24.653 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:43:24 compute-0 nova_compute[192079]: 2025-10-02 12:43:24.680 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:43:24 compute-0 podman[251470]: 2025-10-02 12:43:24.923202054 +0000 UTC m=+0.482235699 container create 61e01a9764ac67eb5eb65d1c4e36d71627d9f8f95e1cc01db5889f4d01ec0043 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-89c6a9c2-23c1-4b8b-81b9-3050a42a016f, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:43:25 compute-0 systemd[1]: Started libpod-conmon-61e01a9764ac67eb5eb65d1c4e36d71627d9f8f95e1cc01db5889f4d01ec0043.scope.
Oct 02 12:43:25 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:43:25 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/bd21919468ff3871eec2b9826a903ac084c3adf3646b859e414f4e5618586998/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:43:25 compute-0 podman[251470]: 2025-10-02 12:43:25.228931796 +0000 UTC m=+0.787965431 container init 61e01a9764ac67eb5eb65d1c4e36d71627d9f8f95e1cc01db5889f4d01ec0043 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-89c6a9c2-23c1-4b8b-81b9-3050a42a016f, org.label-schema.schema-version=1.0, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:43:25 compute-0 podman[251470]: 2025-10-02 12:43:25.239542115 +0000 UTC m=+0.798575750 container start 61e01a9764ac67eb5eb65d1c4e36d71627d9f8f95e1cc01db5889f4d01ec0043 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-89c6a9c2-23c1-4b8b-81b9-3050a42a016f, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.vendor=CentOS)
Oct 02 12:43:25 compute-0 neutron-haproxy-ovnmeta-89c6a9c2-23c1-4b8b-81b9-3050a42a016f[251486]: [NOTICE]   (251490) : New worker (251492) forked
Oct 02 12:43:25 compute-0 neutron-haproxy-ovnmeta-89c6a9c2-23c1-4b8b-81b9-3050a42a016f[251486]: [NOTICE]   (251490) : Loading success.
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.494 2 DEBUG nova.network.neutron [req-71295ebc-6ed3-4384-bd4a-db9834ecb4ac req-c37e069e-8ab2-4102-94f2-5e6c7df1e1ed 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Updated VIF entry in instance network info cache for port 6b9bd275-f90c-46d0-938f-7a949ade1669. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.495 2 DEBUG nova.network.neutron [req-71295ebc-6ed3-4384-bd4a-db9834ecb4ac req-c37e069e-8ab2-4102-94f2-5e6c7df1e1ed 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Updating instance_info_cache with network_info: [{"id": "6b9bd275-f90c-46d0-938f-7a949ade1669", "address": "fa:16:3e:f5:c5:b5", "network": {"id": "89c6a9c2-23c1-4b8b-81b9-3050a42a016f", "bridge": "br-int", "label": "tempest-TestServerMultinode-1758818255-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "4a5d17af56da453cb0073e5e2be72803", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap6b9bd275-f9", "ovs_interfaceid": "6b9bd275-f90c-46d0-938f-7a949ade1669", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.512 2 DEBUG oslo_concurrency.lockutils [req-71295ebc-6ed3-4384-bd4a-db9834ecb4ac req-c37e069e-8ab2-4102-94f2-5e6c7df1e1ed 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-05cbee12-b07a-4b63-9f29-17f035a58f9b" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.801 2 DEBUG nova.compute.manager [req-b2ee8171-fd01-4aa8-b030-edcf4acd287e req-341cba1c-ddfc-4b2f-8398-7e667c65021a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Received event network-vif-plugged-6b9bd275-f90c-46d0-938f-7a949ade1669 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.801 2 DEBUG oslo_concurrency.lockutils [req-b2ee8171-fd01-4aa8-b030-edcf4acd287e req-341cba1c-ddfc-4b2f-8398-7e667c65021a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "05cbee12-b07a-4b63-9f29-17f035a58f9b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.801 2 DEBUG oslo_concurrency.lockutils [req-b2ee8171-fd01-4aa8-b030-edcf4acd287e req-341cba1c-ddfc-4b2f-8398-7e667c65021a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "05cbee12-b07a-4b63-9f29-17f035a58f9b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.802 2 DEBUG oslo_concurrency.lockutils [req-b2ee8171-fd01-4aa8-b030-edcf4acd287e req-341cba1c-ddfc-4b2f-8398-7e667c65021a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "05cbee12-b07a-4b63-9f29-17f035a58f9b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.802 2 DEBUG nova.compute.manager [req-b2ee8171-fd01-4aa8-b030-edcf4acd287e req-341cba1c-ddfc-4b2f-8398-7e667c65021a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Processing event network-vif-plugged-6b9bd275-f90c-46d0-938f-7a949ade1669 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.802 2 DEBUG nova.compute.manager [req-b2ee8171-fd01-4aa8-b030-edcf4acd287e req-341cba1c-ddfc-4b2f-8398-7e667c65021a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Received event network-vif-plugged-6b9bd275-f90c-46d0-938f-7a949ade1669 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.802 2 DEBUG oslo_concurrency.lockutils [req-b2ee8171-fd01-4aa8-b030-edcf4acd287e req-341cba1c-ddfc-4b2f-8398-7e667c65021a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "05cbee12-b07a-4b63-9f29-17f035a58f9b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.803 2 DEBUG oslo_concurrency.lockutils [req-b2ee8171-fd01-4aa8-b030-edcf4acd287e req-341cba1c-ddfc-4b2f-8398-7e667c65021a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "05cbee12-b07a-4b63-9f29-17f035a58f9b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.803 2 DEBUG oslo_concurrency.lockutils [req-b2ee8171-fd01-4aa8-b030-edcf4acd287e req-341cba1c-ddfc-4b2f-8398-7e667c65021a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "05cbee12-b07a-4b63-9f29-17f035a58f9b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.803 2 DEBUG nova.compute.manager [req-b2ee8171-fd01-4aa8-b030-edcf4acd287e req-341cba1c-ddfc-4b2f-8398-7e667c65021a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] No waiting events found dispatching network-vif-plugged-6b9bd275-f90c-46d0-938f-7a949ade1669 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.803 2 WARNING nova.compute.manager [req-b2ee8171-fd01-4aa8-b030-edcf4acd287e req-341cba1c-ddfc-4b2f-8398-7e667c65021a 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Received unexpected event network-vif-plugged-6b9bd275-f90c-46d0-938f-7a949ade1669 for instance with vm_state building and task_state spawning.
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.804 2 DEBUG nova.compute.manager [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Instance event wait completed in 1 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.807 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759409005.8076572, 05cbee12-b07a-4b63-9f29-17f035a58f9b => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.808 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] VM Resumed (Lifecycle Event)
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.809 2 DEBUG nova.virt.libvirt.driver [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.812 2 INFO nova.virt.libvirt.driver [-] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Instance spawned successfully.
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.813 2 DEBUG nova.virt.libvirt.driver [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.830 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.836 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.841 2 DEBUG nova.virt.libvirt.driver [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.842 2 DEBUG nova.virt.libvirt.driver [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.842 2 DEBUG nova.virt.libvirt.driver [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.843 2 DEBUG nova.virt.libvirt.driver [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.843 2 DEBUG nova.virt.libvirt.driver [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.843 2 DEBUG nova.virt.libvirt.driver [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.877 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.920 2 INFO nova.compute.manager [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Took 7.14 seconds to spawn the instance on the hypervisor.
Oct 02 12:43:25 compute-0 nova_compute[192079]: 2025-10-02 12:43:25.921 2 DEBUG nova.compute.manager [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:43:26 compute-0 nova_compute[192079]: 2025-10-02 12:43:26.026 2 INFO nova.compute.manager [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Took 7.74 seconds to build instance.
Oct 02 12:43:26 compute-0 nova_compute[192079]: 2025-10-02 12:43:26.042 2 DEBUG oslo_concurrency.lockutils [None req-e1ee1fcb-fd53-4be4-aa34-f7f55972f283 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Lock "05cbee12-b07a-4b63-9f29-17f035a58f9b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 7.816s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:43:26 compute-0 podman[251501]: 2025-10-02 12:43:26.154033676 +0000 UTC m=+0.058647835 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, org.label-schema.schema-version=1.0)
Oct 02 12:43:26 compute-0 podman[251503]: 2025-10-02 12:43:26.161052818 +0000 UTC m=+0.056741533 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']})
Oct 02 12:43:26 compute-0 podman[251502]: 2025-10-02 12:43:26.243508773 +0000 UTC m=+0.139790974 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, config_id=ovn_controller, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, container_name=ovn_controller, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:43:27 compute-0 nova_compute[192079]: 2025-10-02 12:43:27.925 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:28 compute-0 nova_compute[192079]: 2025-10-02 12:43:28.577 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:31.866 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=49, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=48) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:43:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:31.867 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 0 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:43:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:31.868 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '49'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:43:31 compute-0 nova_compute[192079]: 2025-10-02 12:43:31.868 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:32 compute-0 nova_compute[192079]: 2025-10-02 12:43:32.928 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:33 compute-0 nova_compute[192079]: 2025-10-02 12:43:33.580 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:34 compute-0 nova_compute[192079]: 2025-10-02 12:43:34.717 2 DEBUG oslo_concurrency.lockutils [None req-902046ec-e0cf-4865-ab18-09b232eb4224 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Acquiring lock "05cbee12-b07a-4b63-9f29-17f035a58f9b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:43:34 compute-0 nova_compute[192079]: 2025-10-02 12:43:34.718 2 DEBUG oslo_concurrency.lockutils [None req-902046ec-e0cf-4865-ab18-09b232eb4224 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Lock "05cbee12-b07a-4b63-9f29-17f035a58f9b" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:43:34 compute-0 nova_compute[192079]: 2025-10-02 12:43:34.719 2 DEBUG oslo_concurrency.lockutils [None req-902046ec-e0cf-4865-ab18-09b232eb4224 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Acquiring lock "05cbee12-b07a-4b63-9f29-17f035a58f9b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:43:34 compute-0 nova_compute[192079]: 2025-10-02 12:43:34.719 2 DEBUG oslo_concurrency.lockutils [None req-902046ec-e0cf-4865-ab18-09b232eb4224 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Lock "05cbee12-b07a-4b63-9f29-17f035a58f9b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:43:34 compute-0 nova_compute[192079]: 2025-10-02 12:43:34.720 2 DEBUG oslo_concurrency.lockutils [None req-902046ec-e0cf-4865-ab18-09b232eb4224 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Lock "05cbee12-b07a-4b63-9f29-17f035a58f9b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:43:34 compute-0 nova_compute[192079]: 2025-10-02 12:43:34.733 2 INFO nova.compute.manager [None req-902046ec-e0cf-4865-ab18-09b232eb4224 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Terminating instance
Oct 02 12:43:34 compute-0 nova_compute[192079]: 2025-10-02 12:43:34.745 2 DEBUG nova.compute.manager [None req-902046ec-e0cf-4865-ab18-09b232eb4224 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:43:34 compute-0 kernel: tap6b9bd275-f9 (unregistering): left promiscuous mode
Oct 02 12:43:34 compute-0 NetworkManager[51160]: <info>  [1759409014.7776] device (tap6b9bd275-f9): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:43:34 compute-0 ovn_controller[94336]: 2025-10-02T12:43:34Z|00702|binding|INFO|Releasing lport 6b9bd275-f90c-46d0-938f-7a949ade1669 from this chassis (sb_readonly=0)
Oct 02 12:43:34 compute-0 ovn_controller[94336]: 2025-10-02T12:43:34Z|00703|binding|INFO|Setting lport 6b9bd275-f90c-46d0-938f-7a949ade1669 down in Southbound
Oct 02 12:43:34 compute-0 nova_compute[192079]: 2025-10-02 12:43:34.801 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:34 compute-0 ovn_controller[94336]: 2025-10-02T12:43:34Z|00704|binding|INFO|Removing iface tap6b9bd275-f9 ovn-installed in OVS
Oct 02 12:43:34 compute-0 nova_compute[192079]: 2025-10-02 12:43:34.805 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:34.832 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:f5:c5:b5 10.100.0.13'], port_security=['fa:16:3e:f5:c5:b5 10.100.0.13'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.13/28', 'neutron:device_id': '05cbee12-b07a-4b63-9f29-17f035a58f9b', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-89c6a9c2-23c1-4b8b-81b9-3050a42a016f', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '0acd1c52a26d4654b24111e5ad4814f2', 'neutron:revision_number': '4', 'neutron:security_group_ids': '5f9d53e4-02f4-4598-9a8f-67bc82369860', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=7c1b0270-8f0a-4540-b305-4a4654e80399, chassis=[], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=6b9bd275-f90c-46d0-938f-7a949ade1669) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:43:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:34.833 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 6b9bd275-f90c-46d0-938f-7a949ade1669 in datapath 89c6a9c2-23c1-4b8b-81b9-3050a42a016f unbound from our chassis
Oct 02 12:43:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:34.834 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 89c6a9c2-23c1-4b8b-81b9-3050a42a016f, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:43:34 compute-0 systemd[1]: machine-qemu\x2d85\x2dinstance\x2d000000b1.scope: Deactivated successfully.
Oct 02 12:43:34 compute-0 systemd[1]: machine-qemu\x2d85\x2dinstance\x2d000000b1.scope: Consumed 9.780s CPU time.
Oct 02 12:43:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:34.837 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b940ba46-93fd-4863-98b8-b661260a0a67]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:34 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:34.837 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-89c6a9c2-23c1-4b8b-81b9-3050a42a016f namespace which is not needed anymore
Oct 02 12:43:34 compute-0 nova_compute[192079]: 2025-10-02 12:43:34.845 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:34 compute-0 systemd-machined[152150]: Machine qemu-85-instance-000000b1 terminated.
Oct 02 12:43:34 compute-0 podman[251571]: 2025-10-02 12:43:34.878899458 +0000 UTC m=+0.073281285 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, org.label-schema.build-date=20251001, 
org.label-schema.schema-version=1.0, config_id=edpm, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:43:34 compute-0 nova_compute[192079]: 2025-10-02 12:43:34.970 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:34 compute-0 nova_compute[192079]: 2025-10-02 12:43:34.976 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:35 compute-0 nova_compute[192079]: 2025-10-02 12:43:35.008 2 INFO nova.virt.libvirt.driver [-] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Instance destroyed successfully.
Oct 02 12:43:35 compute-0 nova_compute[192079]: 2025-10-02 12:43:35.008 2 DEBUG nova.objects.instance [None req-902046ec-e0cf-4865-ab18-09b232eb4224 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Lazy-loading 'resources' on Instance uuid 05cbee12-b07a-4b63-9f29-17f035a58f9b obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:43:35 compute-0 nova_compute[192079]: 2025-10-02 12:43:35.022 2 DEBUG nova.virt.libvirt.vif [None req-902046ec-e0cf-4865-ab18-09b232eb4224 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:43:17Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-TestServerMultinode-server-164108363',display_name='tempest-TestServerMultinode-server-164108363',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-testservermultinode-server-164108363',id=177,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data=None,key_name=None,keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:43:25Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='0acd1c52a26d4654b24111e5ad4814f2',ramdisk_id='',reservation_id='r-5h040yme',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='admin,reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='
1',image_min_ram='0',owner_project_name='tempest-TestServerMultinode-1539275040',owner_user_name='tempest-TestServerMultinode-1539275040-project-admin'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:43:25Z,user_data=None,user_id='7ed2a973cfed4867a095aecf0c6453fb',uuid=05cbee12-b07a-4b63-9f29-17f035a58f9b,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "6b9bd275-f90c-46d0-938f-7a949ade1669", "address": "fa:16:3e:f5:c5:b5", "network": {"id": "89c6a9c2-23c1-4b8b-81b9-3050a42a016f", "bridge": "br-int", "label": "tempest-TestServerMultinode-1758818255-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "4a5d17af56da453cb0073e5e2be72803", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap6b9bd275-f9", "ovs_interfaceid": "6b9bd275-f90c-46d0-938f-7a949ade1669", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:43:35 compute-0 nova_compute[192079]: 2025-10-02 12:43:35.023 2 DEBUG nova.network.os_vif_util [None req-902046ec-e0cf-4865-ab18-09b232eb4224 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Converting VIF {"id": "6b9bd275-f90c-46d0-938f-7a949ade1669", "address": "fa:16:3e:f5:c5:b5", "network": {"id": "89c6a9c2-23c1-4b8b-81b9-3050a42a016f", "bridge": "br-int", "label": "tempest-TestServerMultinode-1758818255-network", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "4a5d17af56da453cb0073e5e2be72803", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap6b9bd275-f9", "ovs_interfaceid": "6b9bd275-f90c-46d0-938f-7a949ade1669", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:43:35 compute-0 nova_compute[192079]: 2025-10-02 12:43:35.023 2 DEBUG nova.network.os_vif_util [None req-902046ec-e0cf-4865-ab18-09b232eb4224 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:f5:c5:b5,bridge_name='br-int',has_traffic_filtering=True,id=6b9bd275-f90c-46d0-938f-7a949ade1669,network=Network(89c6a9c2-23c1-4b8b-81b9-3050a42a016f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap6b9bd275-f9') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:43:35 compute-0 nova_compute[192079]: 2025-10-02 12:43:35.023 2 DEBUG os_vif [None req-902046ec-e0cf-4865-ab18-09b232eb4224 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Unplugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:f5:c5:b5,bridge_name='br-int',has_traffic_filtering=True,id=6b9bd275-f90c-46d0-938f-7a949ade1669,network=Network(89c6a9c2-23c1-4b8b-81b9-3050a42a016f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap6b9bd275-f9') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:43:35 compute-0 nova_compute[192079]: 2025-10-02 12:43:35.025 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:35 compute-0 nova_compute[192079]: 2025-10-02 12:43:35.025 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap6b9bd275-f9, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:43:35 compute-0 nova_compute[192079]: 2025-10-02 12:43:35.026 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:35 compute-0 nova_compute[192079]: 2025-10-02 12:43:35.028 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:43:35 compute-0 nova_compute[192079]: 2025-10-02 12:43:35.030 2 INFO os_vif [None req-902046ec-e0cf-4865-ab18-09b232eb4224 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Successfully unplugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:f5:c5:b5,bridge_name='br-int',has_traffic_filtering=True,id=6b9bd275-f90c-46d0-938f-7a949ade1669,network=Network(89c6a9c2-23c1-4b8b-81b9-3050a42a016f),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap6b9bd275-f9')
Oct 02 12:43:35 compute-0 nova_compute[192079]: 2025-10-02 12:43:35.031 2 INFO nova.virt.libvirt.driver [None req-902046ec-e0cf-4865-ab18-09b232eb4224 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Deleting instance files /var/lib/nova/instances/05cbee12-b07a-4b63-9f29-17f035a58f9b_del
Oct 02 12:43:35 compute-0 nova_compute[192079]: 2025-10-02 12:43:35.032 2 INFO nova.virt.libvirt.driver [None req-902046ec-e0cf-4865-ab18-09b232eb4224 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Deletion of /var/lib/nova/instances/05cbee12-b07a-4b63-9f29-17f035a58f9b_del complete
Oct 02 12:43:35 compute-0 nova_compute[192079]: 2025-10-02 12:43:35.047 2 DEBUG nova.compute.manager [req-1e8e5277-7f47-4e76-95c6-c2d329c0b90d req-7c587e64-8109-4049-8e45-f41f2b6ca8b6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Received event network-vif-unplugged-6b9bd275-f90c-46d0-938f-7a949ade1669 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:43:35 compute-0 nova_compute[192079]: 2025-10-02 12:43:35.048 2 DEBUG oslo_concurrency.lockutils [req-1e8e5277-7f47-4e76-95c6-c2d329c0b90d req-7c587e64-8109-4049-8e45-f41f2b6ca8b6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "05cbee12-b07a-4b63-9f29-17f035a58f9b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:43:35 compute-0 nova_compute[192079]: 2025-10-02 12:43:35.048 2 DEBUG oslo_concurrency.lockutils [req-1e8e5277-7f47-4e76-95c6-c2d329c0b90d req-7c587e64-8109-4049-8e45-f41f2b6ca8b6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "05cbee12-b07a-4b63-9f29-17f035a58f9b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:43:35 compute-0 nova_compute[192079]: 2025-10-02 12:43:35.048 2 DEBUG oslo_concurrency.lockutils [req-1e8e5277-7f47-4e76-95c6-c2d329c0b90d req-7c587e64-8109-4049-8e45-f41f2b6ca8b6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "05cbee12-b07a-4b63-9f29-17f035a58f9b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:43:35 compute-0 nova_compute[192079]: 2025-10-02 12:43:35.048 2 DEBUG nova.compute.manager [req-1e8e5277-7f47-4e76-95c6-c2d329c0b90d req-7c587e64-8109-4049-8e45-f41f2b6ca8b6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] No waiting events found dispatching network-vif-unplugged-6b9bd275-f90c-46d0-938f-7a949ade1669 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:43:35 compute-0 nova_compute[192079]: 2025-10-02 12:43:35.048 2 DEBUG nova.compute.manager [req-1e8e5277-7f47-4e76-95c6-c2d329c0b90d req-7c587e64-8109-4049-8e45-f41f2b6ca8b6 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Received event network-vif-unplugged-6b9bd275-f90c-46d0-938f-7a949ade1669 for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:43:35 compute-0 nova_compute[192079]: 2025-10-02 12:43:35.113 2 INFO nova.compute.manager [None req-902046ec-e0cf-4865-ab18-09b232eb4224 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Took 0.37 seconds to destroy the instance on the hypervisor.
Oct 02 12:43:35 compute-0 nova_compute[192079]: 2025-10-02 12:43:35.114 2 DEBUG oslo.service.loopingcall [None req-902046ec-e0cf-4865-ab18-09b232eb4224 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:43:35 compute-0 nova_compute[192079]: 2025-10-02 12:43:35.114 2 DEBUG nova.compute.manager [-] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:43:35 compute-0 nova_compute[192079]: 2025-10-02 12:43:35.114 2 DEBUG nova.network.neutron [-] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:43:35 compute-0 neutron-haproxy-ovnmeta-89c6a9c2-23c1-4b8b-81b9-3050a42a016f[251486]: [NOTICE]   (251490) : haproxy version is 2.8.14-c23fe91
Oct 02 12:43:35 compute-0 neutron-haproxy-ovnmeta-89c6a9c2-23c1-4b8b-81b9-3050a42a016f[251486]: [NOTICE]   (251490) : path to executable is /usr/sbin/haproxy
Oct 02 12:43:35 compute-0 neutron-haproxy-ovnmeta-89c6a9c2-23c1-4b8b-81b9-3050a42a016f[251486]: [WARNING]  (251490) : Exiting Master process...
Oct 02 12:43:35 compute-0 neutron-haproxy-ovnmeta-89c6a9c2-23c1-4b8b-81b9-3050a42a016f[251486]: [WARNING]  (251490) : Exiting Master process...
Oct 02 12:43:35 compute-0 neutron-haproxy-ovnmeta-89c6a9c2-23c1-4b8b-81b9-3050a42a016f[251486]: [ALERT]    (251490) : Current worker (251492) exited with code 143 (Terminated)
Oct 02 12:43:35 compute-0 neutron-haproxy-ovnmeta-89c6a9c2-23c1-4b8b-81b9-3050a42a016f[251486]: [WARNING]  (251490) : All workers exited. Exiting... (0)
Oct 02 12:43:35 compute-0 systemd[1]: libpod-61e01a9764ac67eb5eb65d1c4e36d71627d9f8f95e1cc01db5889f4d01ec0043.scope: Deactivated successfully.
Oct 02 12:43:35 compute-0 podman[251611]: 2025-10-02 12:43:35.367036058 +0000 UTC m=+0.440361984 container died 61e01a9764ac67eb5eb65d1c4e36d71627d9f8f95e1cc01db5889f4d01ec0043 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-89c6a9c2-23c1-4b8b-81b9-3050a42a016f, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS)
Oct 02 12:43:35 compute-0 systemd[1]: var-lib-containers-storage-overlay-bd21919468ff3871eec2b9826a903ac084c3adf3646b859e414f4e5618586998-merged.mount: Deactivated successfully.
Oct 02 12:43:35 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-61e01a9764ac67eb5eb65d1c4e36d71627d9f8f95e1cc01db5889f4d01ec0043-userdata-shm.mount: Deactivated successfully.
Oct 02 12:43:36 compute-0 podman[251611]: 2025-10-02 12:43:36.159883622 +0000 UTC m=+1.233209598 container cleanup 61e01a9764ac67eb5eb65d1c4e36d71627d9f8f95e1cc01db5889f4d01ec0043 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-89c6a9c2-23c1-4b8b-81b9-3050a42a016f, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001)
Oct 02 12:43:36 compute-0 systemd[1]: libpod-conmon-61e01a9764ac67eb5eb65d1c4e36d71627d9f8f95e1cc01db5889f4d01ec0043.scope: Deactivated successfully.
Oct 02 12:43:36 compute-0 podman[251661]: 2025-10-02 12:43:36.339163895 +0000 UTC m=+0.135121197 container remove 61e01a9764ac67eb5eb65d1c4e36d71627d9f8f95e1cc01db5889f4d01ec0043 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-89c6a9c2-23c1-4b8b-81b9-3050a42a016f, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001)
Oct 02 12:43:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:36.348 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[766530ef-8727-4182-93fb-1963dee80a36]: (4, ('Thu Oct  2 12:43:34 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-89c6a9c2-23c1-4b8b-81b9-3050a42a016f (61e01a9764ac67eb5eb65d1c4e36d71627d9f8f95e1cc01db5889f4d01ec0043)\n61e01a9764ac67eb5eb65d1c4e36d71627d9f8f95e1cc01db5889f4d01ec0043\nThu Oct  2 12:43:36 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-89c6a9c2-23c1-4b8b-81b9-3050a42a016f (61e01a9764ac67eb5eb65d1c4e36d71627d9f8f95e1cc01db5889f4d01ec0043)\n61e01a9764ac67eb5eb65d1c4e36d71627d9f8f95e1cc01db5889f4d01ec0043\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:36.351 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f995b198-c930-4afc-b852-e05513e13bbe]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:36.352 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap89c6a9c2-20, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:43:36 compute-0 nova_compute[192079]: 2025-10-02 12:43:36.355 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:36 compute-0 kernel: tap89c6a9c2-20: left promiscuous mode
Oct 02 12:43:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:36.361 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[75bc5a11-eff3-490c-af83-ca452349bec3]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:36 compute-0 nova_compute[192079]: 2025-10-02 12:43:36.371 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:36.397 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e3cffee3-250f-4889-b85a-4a5db19aefe6]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:36.398 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2ffb8306-a2dd-46d0-a97c-d570f98d1743]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:36.415 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[948920e4-0a6d-4de3-b1b7-96c50fe4aa25]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 700141, 'reachable_time': 24267, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 251677, 'error': None, 'target': 'ovnmeta-89c6a9c2-23c1-4b8b-81b9-3050a42a016f', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:36 compute-0 systemd[1]: run-netns-ovnmeta\x2d89c6a9c2\x2d23c1\x2d4b8b\x2d81b9\x2d3050a42a016f.mount: Deactivated successfully.
Oct 02 12:43:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:36.418 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-89c6a9c2-23c1-4b8b-81b9-3050a42a016f deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:43:36 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:43:36.418 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[c97542ba-6550-47ef-b2c6-aa978d79ae4d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:43:36 compute-0 nova_compute[192079]: 2025-10-02 12:43:36.820 2 DEBUG nova.network.neutron [-] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:43:36 compute-0 nova_compute[192079]: 2025-10-02 12:43:36.852 2 INFO nova.compute.manager [-] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Took 1.74 seconds to deallocate network for instance.
Oct 02 12:43:36 compute-0 nova_compute[192079]: 2025-10-02 12:43:36.916 2 DEBUG oslo_concurrency.lockutils [None req-902046ec-e0cf-4865-ab18-09b232eb4224 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:43:36 compute-0 nova_compute[192079]: 2025-10-02 12:43:36.916 2 DEBUG oslo_concurrency.lockutils [None req-902046ec-e0cf-4865-ab18-09b232eb4224 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:43:36 compute-0 nova_compute[192079]: 2025-10-02 12:43:36.962 2 DEBUG nova.compute.provider_tree [None req-902046ec-e0cf-4865-ab18-09b232eb4224 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:43:36 compute-0 nova_compute[192079]: 2025-10-02 12:43:36.976 2 DEBUG nova.scheduler.client.report [None req-902046ec-e0cf-4865-ab18-09b232eb4224 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:43:36 compute-0 nova_compute[192079]: 2025-10-02 12:43:36.996 2 DEBUG oslo_concurrency.lockutils [None req-902046ec-e0cf-4865-ab18-09b232eb4224 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.080s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:43:37 compute-0 nova_compute[192079]: 2025-10-02 12:43:37.021 2 INFO nova.scheduler.client.report [None req-902046ec-e0cf-4865-ab18-09b232eb4224 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Deleted allocations for instance 05cbee12-b07a-4b63-9f29-17f035a58f9b
Oct 02 12:43:37 compute-0 nova_compute[192079]: 2025-10-02 12:43:37.093 2 DEBUG oslo_concurrency.lockutils [None req-902046ec-e0cf-4865-ab18-09b232eb4224 7ed2a973cfed4867a095aecf0c6453fb 0acd1c52a26d4654b24111e5ad4814f2 - - default default] Lock "05cbee12-b07a-4b63-9f29-17f035a58f9b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 2.374s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:43:37 compute-0 nova_compute[192079]: 2025-10-02 12:43:37.409 2 DEBUG nova.compute.manager [req-b0c58342-e625-4267-85f7-04bd373f4746 req-4644062e-2003-43f9-bcf1-57b9f93721f8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Received event network-vif-plugged-6b9bd275-f90c-46d0-938f-7a949ade1669 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:43:37 compute-0 nova_compute[192079]: 2025-10-02 12:43:37.410 2 DEBUG oslo_concurrency.lockutils [req-b0c58342-e625-4267-85f7-04bd373f4746 req-4644062e-2003-43f9-bcf1-57b9f93721f8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "05cbee12-b07a-4b63-9f29-17f035a58f9b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:43:37 compute-0 nova_compute[192079]: 2025-10-02 12:43:37.410 2 DEBUG oslo_concurrency.lockutils [req-b0c58342-e625-4267-85f7-04bd373f4746 req-4644062e-2003-43f9-bcf1-57b9f93721f8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "05cbee12-b07a-4b63-9f29-17f035a58f9b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:43:37 compute-0 nova_compute[192079]: 2025-10-02 12:43:37.411 2 DEBUG oslo_concurrency.lockutils [req-b0c58342-e625-4267-85f7-04bd373f4746 req-4644062e-2003-43f9-bcf1-57b9f93721f8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "05cbee12-b07a-4b63-9f29-17f035a58f9b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:43:37 compute-0 nova_compute[192079]: 2025-10-02 12:43:37.411 2 DEBUG nova.compute.manager [req-b0c58342-e625-4267-85f7-04bd373f4746 req-4644062e-2003-43f9-bcf1-57b9f93721f8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] No waiting events found dispatching network-vif-plugged-6b9bd275-f90c-46d0-938f-7a949ade1669 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:43:37 compute-0 nova_compute[192079]: 2025-10-02 12:43:37.412 2 WARNING nova.compute.manager [req-b0c58342-e625-4267-85f7-04bd373f4746 req-4644062e-2003-43f9-bcf1-57b9f93721f8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Received unexpected event network-vif-plugged-6b9bd275-f90c-46d0-938f-7a949ade1669 for instance with vm_state deleted and task_state None.
Oct 02 12:43:37 compute-0 nova_compute[192079]: 2025-10-02 12:43:37.412 2 DEBUG nova.compute.manager [req-b0c58342-e625-4267-85f7-04bd373f4746 req-4644062e-2003-43f9-bcf1-57b9f93721f8 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Received event network-vif-deleted-6b9bd275-f90c-46d0-938f-7a949ade1669 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:43:38 compute-0 nova_compute[192079]: 2025-10-02 12:43:38.580 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:40 compute-0 nova_compute[192079]: 2025-10-02 12:43:40.027 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:41 compute-0 nova_compute[192079]: 2025-10-02 12:43:41.781 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:43:42 compute-0 podman[251679]: 2025-10-02 12:43:42.165064495 +0000 UTC m=+0.068607677 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, config_id=multipathd, org.label-schema.license=GPLv2, tcib_managed=true, container_name=multipathd, org.label-schema.vendor=CentOS)
Oct 02 12:43:42 compute-0 podman[251678]: 2025-10-02 12:43:42.165874548 +0000 UTC m=+0.072689419 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.33.7, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., com.redhat.component=ubi9-minimal-container, maintainer=Red Hat, Inc., release=1755695350, vendor=Red Hat, Inc., architecture=x86_64, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, io.openshift.expose-services=, build-date=2025-08-20T13:12:41, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, managed_by=edpm_ansible, vcs-type=git, io.openshift.tags=minimal rhel9, version=9.6, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', 
'/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, config_id=edpm, distribution-scope=public, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., name=ubi9-minimal, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., container_name=openstack_network_exporter, url=https://catalog.redhat.com/en/search?searchType=containers)
Oct 02 12:43:43 compute-0 nova_compute[192079]: 2025-10-02 12:43:43.355 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:43 compute-0 nova_compute[192079]: 2025-10-02 12:43:43.582 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:45 compute-0 nova_compute[192079]: 2025-10-02 12:43:45.030 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:46 compute-0 nova_compute[192079]: 2025-10-02 12:43:46.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:43:47 compute-0 nova_compute[192079]: 2025-10-02 12:43:47.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:43:48 compute-0 podman[251721]: 2025-10-02 12:43:48.146941661 +0000 UTC m=+0.058507951 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_id=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible, org.label-schema.build-date=20251001, container_name=iscsid, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, io.buildah.version=1.41.3)
Oct 02 12:43:48 compute-0 podman[251720]: 2025-10-02 12:43:48.147227659 +0000 UTC m=+0.060984189 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']})
Oct 02 12:43:48 compute-0 nova_compute[192079]: 2025-10-02 12:43:48.584 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:49 compute-0 nova_compute[192079]: 2025-10-02 12:43:49.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:43:49 compute-0 nova_compute[192079]: 2025-10-02 12:43:49.694 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:43:49 compute-0 nova_compute[192079]: 2025-10-02 12:43:49.695 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:43:49 compute-0 nova_compute[192079]: 2025-10-02 12:43:49.695 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:43:49 compute-0 nova_compute[192079]: 2025-10-02 12:43:49.695 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:43:49 compute-0 nova_compute[192079]: 2025-10-02 12:43:49.852 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:43:49 compute-0 nova_compute[192079]: 2025-10-02 12:43:49.853 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5715MB free_disk=73.27226638793945GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:43:49 compute-0 nova_compute[192079]: 2025-10-02 12:43:49.853 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:43:49 compute-0 nova_compute[192079]: 2025-10-02 12:43:49.853 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:43:49 compute-0 nova_compute[192079]: 2025-10-02 12:43:49.923 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:43:49 compute-0 nova_compute[192079]: 2025-10-02 12:43:49.924 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:43:49 compute-0 nova_compute[192079]: 2025-10-02 12:43:49.949 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:43:49 compute-0 nova_compute[192079]: 2025-10-02 12:43:49.984 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:43:50 compute-0 nova_compute[192079]: 2025-10-02 12:43:50.006 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759409015.005547, 05cbee12-b07a-4b63-9f29-17f035a58f9b => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:43:50 compute-0 nova_compute[192079]: 2025-10-02 12:43:50.007 2 INFO nova.compute.manager [-] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] VM Stopped (Lifecycle Event)
Oct 02 12:43:50 compute-0 nova_compute[192079]: 2025-10-02 12:43:50.013 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:43:50 compute-0 nova_compute[192079]: 2025-10-02 12:43:50.013 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.160s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:43:50 compute-0 nova_compute[192079]: 2025-10-02 12:43:50.031 2 DEBUG nova.compute.manager [None req-b8271994-8dab-403b-8d53-74526ee03b07 - - - - - -] [instance: 05cbee12-b07a-4b63-9f29-17f035a58f9b] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:43:50 compute-0 nova_compute[192079]: 2025-10-02 12:43:50.033 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:51 compute-0 nova_compute[192079]: 2025-10-02 12:43:51.014 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:43:51 compute-0 nova_compute[192079]: 2025-10-02 12:43:51.014 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:43:51 compute-0 nova_compute[192079]: 2025-10-02 12:43:51.014 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:43:53 compute-0 nova_compute[192079]: 2025-10-02 12:43:53.586 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:53 compute-0 nova_compute[192079]: 2025-10-02 12:43:53.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:43:54 compute-0 nova_compute[192079]: 2025-10-02 12:43:54.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:43:55 compute-0 nova_compute[192079]: 2025-10-02 12:43:55.036 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:43:57 compute-0 podman[251764]: 2025-10-02 12:43:57.155195221 +0000 UTC m=+0.072756400 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, 
container_name=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.schema-version=1.0)
Oct 02 12:43:57 compute-0 podman[251766]: 2025-10-02 12:43:57.172083973 +0000 UTC m=+0.068048352 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']})
Oct 02 12:43:57 compute-0 podman[251765]: 2025-10-02 12:43:57.203224985 +0000 UTC m=+0.106938056 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, config_id=ovn_controller, container_name=ovn_controller, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, managed_by=edpm_ansible, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']})
Oct 02 12:43:57 compute-0 nova_compute[192079]: 2025-10-02 12:43:57.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:43:57 compute-0 nova_compute[192079]: 2025-10-02 12:43:57.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:43:57 compute-0 nova_compute[192079]: 2025-10-02 12:43:57.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:43:57 compute-0 nova_compute[192079]: 2025-10-02 12:43:57.681 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:43:58 compute-0 nova_compute[192079]: 2025-10-02 12:43:58.588 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:44:00 compute-0 nova_compute[192079]: 2025-10-02 12:44:00.038 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:44:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:44:02.250 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:44:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:44:02.251 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:44:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:44:02.251 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:44:03 compute-0 nova_compute[192079]: 2025-10-02 12:44:03.590 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:44:05 compute-0 nova_compute[192079]: 2025-10-02 12:44:05.040 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:44:05 compute-0 podman[251829]: 2025-10-02 12:44:05.162468129 +0000 UTC m=+0.073918113 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=edpm, io.buildah.version=1.41.3, managed_by=edpm_ansible, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, org.label-schema.name=CentOS Stream 9 Base Image, 
org.label-schema.vendor=CentOS, tcib_managed=true, container_name=ceilometer_agent_compute)
Oct 02 12:44:08 compute-0 nova_compute[192079]: 2025-10-02 12:44:08.592 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:44:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:44:10.027 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=50, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=49) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:44:10 compute-0 nova_compute[192079]: 2025-10-02 12:44:10.028 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:44:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:44:10.029 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 0 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:44:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:44:10.029 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '50'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:44:10 compute-0 nova_compute[192079]: 2025-10-02 12:44:10.041 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:44:13 compute-0 podman[251850]: 2025-10-02 12:44:13.148838854 +0000 UTC m=+0.055952692 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, config_id=multipathd, io.buildah.version=1.41.3, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, managed_by=edpm_ansible)
Oct 02 12:44:13 compute-0 podman[251849]: 2025-10-02 12:44:13.161957683 +0000 UTC m=+0.068382031 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.33.7, managed_by=edpm_ansible, vcs-type=git, com.redhat.component=ubi9-minimal-container, config_id=edpm, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, url=https://catalog.redhat.com/en/search?searchType=containers, architecture=x86_64, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', 
'/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.openshift.tags=minimal rhel9, maintainer=Red Hat, Inc., container_name=openstack_network_exporter, io.openshift.expose-services=, vendor=Red Hat, Inc., release=1755695350, version=9.6, build-date=2025-08-20T13:12:41, distribution-scope=public, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, name=ubi9-minimal, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly.)
Oct 02 12:44:13 compute-0 nova_compute[192079]: 2025-10-02 12:44:13.594 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:44:15 compute-0 nova_compute[192079]: 2025-10-02 12:44:15.044 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:44:17 compute-0 ovn_controller[94336]: 2025-10-02T12:44:17Z|00705|memory_trim|INFO|Detected inactivity (last active 30005 ms ago): trimming memory
Oct 02 12:44:18 compute-0 nova_compute[192079]: 2025-10-02 12:44:18.596 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:44:19 compute-0 podman[251889]: 2025-10-02 12:44:19.133881176 +0000 UTC m=+0.050181283 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:44:19 compute-0 podman[251890]: 2025-10-02 12:44:19.168809741 +0000 UTC m=+0.081274314 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=iscsid, container_name=iscsid, managed_by=edpm_ansible, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2)
Oct 02 12:44:20 compute-0 nova_compute[192079]: 2025-10-02 12:44:20.048 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:44:23 compute-0 nova_compute[192079]: 2025-10-02 12:44:23.599 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:44:25 compute-0 nova_compute[192079]: 2025-10-02 12:44:25.050 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:44:28 compute-0 podman[251933]: 2025-10-02 12:44:28.138226701 +0000 UTC m=+0.055584282 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, 
container_name=ovn_metadata_agent, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_managed=true, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2)
Oct 02 12:44:28 compute-0 podman[251934]: 2025-10-02 12:44:28.162753422 +0000 UTC m=+0.078157399 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller, container_name=ovn_controller, io.buildah.version=1.41.3, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:44:28 compute-0 podman[251935]: 2025-10-02 12:44:28.190665335 +0000 UTC m=+0.092969233 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:44:28 compute-0 nova_compute[192079]: 2025-10-02 12:44:28.600 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:44:30 compute-0 nova_compute[192079]: 2025-10-02 12:44:30.054 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:44:33 compute-0 nova_compute[192079]: 2025-10-02 12:44:33.602 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:44:33 compute-0 nova_compute[192079]: 2025-10-02 12:44:33.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_incomplete_migrations run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:44:33 compute-0 nova_compute[192079]: 2025-10-02 12:44:33.664 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Cleaning up deleted instances with incomplete migration  _cleanup_incomplete_migrations /usr/lib/python3.9/site-packages/nova/compute/manager.py:11183
Oct 02 12:44:35 compute-0 nova_compute[192079]: 2025-10-02 12:44:35.057 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:44:36 compute-0 podman[252003]: 2025-10-02 12:44:36.171884308 +0000 UTC m=+0.087619997 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, container_name=ceilometer_agent_compute, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', 
'/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, tcib_managed=true, config_id=edpm)
Oct 02 12:44:38 compute-0 nova_compute[192079]: 2025-10-02 12:44:38.662 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:44:39 compute-0 sshd-session[252024]: Invalid user solana from 45.148.10.240 port 38658
Oct 02 12:44:39 compute-0 sshd-session[252024]: pam_unix(sshd:auth): check pass; user unknown
Oct 02 12:44:39 compute-0 sshd-session[252024]: pam_unix(sshd:auth): authentication failure; logname= uid=0 euid=0 tty=ssh ruser= rhost=45.148.10.240
Oct 02 12:44:40 compute-0 nova_compute[192079]: 2025-10-02 12:44:40.060 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:44:41 compute-0 sshd-session[252024]: Failed password for invalid user solana from 45.148.10.240 port 38658 ssh2
Oct 02 12:44:42 compute-0 sshd-session[252024]: Connection closed by invalid user solana 45.148.10.240 port 38658 [preauth]
Oct 02 12:44:43 compute-0 nova_compute[192079]: 2025-10-02 12:44:43.665 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:44:43 compute-0 nova_compute[192079]: 2025-10-02 12:44:43.676 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:44:44 compute-0 podman[252027]: 2025-10-02 12:44:44.1556115 +0000 UTC m=+0.066229173 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, container_name=multipathd, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:44:44 compute-0 podman[252026]: 2025-10-02 12:44:44.163746212 +0000 UTC m=+0.075192497 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.buildah.version=1.33.7, vendor=Red Hat, Inc., com.redhat.component=ubi9-minimal-container, io.openshift.expose-services=, release=1755695350, version=9.6, architecture=x86_64, container_name=openstack_network_exporter, build-date=2025-08-20T13:12:41, maintainer=Red Hat, Inc., config_id=edpm, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, distribution-scope=public, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, io.openshift.tags=minimal rhel9, name=ubi9-minimal, managed_by=edpm_ansible, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., vcs-type=git, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., url=https://catalog.redhat.com/en/search?searchType=containers, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI)
Oct 02 12:44:45 compute-0 nova_compute[192079]: 2025-10-02 12:44:45.063 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:44:46 compute-0 nova_compute[192079]: 2025-10-02 12:44:46.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:44:48 compute-0 nova_compute[192079]: 2025-10-02 12:44:48.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:44:48 compute-0 nova_compute[192079]: 2025-10-02 12:44:48.666 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:44:50 compute-0 nova_compute[192079]: 2025-10-02 12:44:50.065 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:44:50 compute-0 podman[252067]: 2025-10-02 12:44:50.148783893 +0000 UTC m=+0.054905572 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, container_name=iscsid, managed_by=edpm_ansible, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2)
Oct 02 12:44:50 compute-0 podman[252066]: 2025-10-02 12:44:50.1658552 +0000 UTC m=+0.075999759 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']})
Oct 02 12:44:51 compute-0 ovn_controller[94336]: 2025-10-02T12:44:51Z|00706|memory_trim|INFO|Detected inactivity (last active 30009 ms ago): trimming memory
Oct 02 12:44:51 compute-0 nova_compute[192079]: 2025-10-02 12:44:51.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:44:51 compute-0 nova_compute[192079]: 2025-10-02 12:44:51.730 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:44:51 compute-0 nova_compute[192079]: 2025-10-02 12:44:51.730 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:44:51 compute-0 nova_compute[192079]: 2025-10-02 12:44:51.731 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:44:51 compute-0 nova_compute[192079]: 2025-10-02 12:44:51.731 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:44:51 compute-0 nova_compute[192079]: 2025-10-02 12:44:51.877 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:44:51 compute-0 nova_compute[192079]: 2025-10-02 12:44:51.878 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5737MB free_disk=73.27226638793945GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:44:51 compute-0 nova_compute[192079]: 2025-10-02 12:44:51.878 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:44:51 compute-0 nova_compute[192079]: 2025-10-02 12:44:51.878 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:44:51 compute-0 nova_compute[192079]: 2025-10-02 12:44:51.960 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:44:51 compute-0 nova_compute[192079]: 2025-10-02 12:44:51.961 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:44:51 compute-0 nova_compute[192079]: 2025-10-02 12:44:51.981 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:44:52 compute-0 nova_compute[192079]: 2025-10-02 12:44:52.022 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:44:52 compute-0 nova_compute[192079]: 2025-10-02 12:44:52.024 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:44:52 compute-0 nova_compute[192079]: 2025-10-02 12:44:52.024 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.146s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:44:53 compute-0 nova_compute[192079]: 2025-10-02 12:44:53.024 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:44:53 compute-0 nova_compute[192079]: 2025-10-02 12:44:53.024 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:44:53 compute-0 nova_compute[192079]: 2025-10-02 12:44:53.025 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:44:53 compute-0 nova_compute[192079]: 2025-10-02 12:44:53.667 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:44:54 compute-0 nova_compute[192079]: 2025-10-02 12:44:54.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:44:55 compute-0 nova_compute[192079]: 2025-10-02 12:44:55.113 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:44:56 compute-0 nova_compute[192079]: 2025-10-02 12:44:56.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:44:57 compute-0 nova_compute[192079]: 2025-10-02 12:44:57.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:44:57 compute-0 nova_compute[192079]: 2025-10-02 12:44:57.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:44:57 compute-0 nova_compute[192079]: 2025-10-02 12:44:57.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:44:57 compute-0 nova_compute[192079]: 2025-10-02 12:44:57.689 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:44:58 compute-0 nova_compute[192079]: 2025-10-02 12:44:58.668 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:44:59 compute-0 podman[252110]: 2025-10-02 12:44:59.140807761 +0000 UTC m=+0.052431855 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, container_name=ovn_metadata_agent, 
tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible)
Oct 02 12:44:59 compute-0 podman[252111]: 2025-10-02 12:44:59.183794227 +0000 UTC m=+0.090484486 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_controller, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, managed_by=edpm_ansible, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, config_id=ovn_controller, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_managed=true)
Oct 02 12:44:59 compute-0 podman[252117]: 2025-10-02 12:44:59.186570773 +0000 UTC m=+0.077220083 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>)
Oct 02 12:45:00 compute-0 nova_compute[192079]: 2025-10-02 12:45:00.115 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:02.251 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:45:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:02.251 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:45:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:02.251 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:45:03 compute-0 nova_compute[192079]: 2025-10-02 12:45:03.670 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:04 compute-0 nova_compute[192079]: 2025-10-02 12:45:04.684 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:45:05 compute-0 nova_compute[192079]: 2025-10-02 12:45:05.117 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:07 compute-0 podman[252175]: 2025-10-02 12:45:07.199935277 +0000 UTC m=+0.095705638 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, container_name=ceilometer_agent_compute, managed_by=edpm_ansible, org.label-schema.build-date=20251001, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=edpm, org.label-schema.vendor=CentOS, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', 
'/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']})
Oct 02 12:45:08 compute-0 nova_compute[192079]: 2025-10-02 12:45:08.672 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:08 compute-0 nova_compute[192079]: 2025-10-02 12:45:08.786 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:08.786 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=51, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=50) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:45:08 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:08.788 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 9 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:45:10 compute-0 nova_compute[192079]: 2025-10-02 12:45:10.120 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:13 compute-0 nova_compute[192079]: 2025-10-02 12:45:13.675 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:15 compute-0 nova_compute[192079]: 2025-10-02 12:45:15.122 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:15 compute-0 podman[252195]: 2025-10-02 12:45:15.146249155 +0000 UTC m=+0.055281502 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, architecture=x86_64, url=https://catalog.redhat.com/en/search?searchType=containers, build-date=2025-08-20T13:12:41, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.openshift.expose-services=, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, com.redhat.component=ubi9-minimal-container, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., name=ubi9-minimal, vendor=Red Hat, Inc., io.openshift.tags=minimal rhel9, vcs-type=git, maintainer=Red Hat, Inc., summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., release=1755695350, container_name=openstack_network_exporter, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., version=9.6, managed_by=edpm_ansible, distribution-scope=public, config_id=edpm, io.buildah.version=1.33.7)
Oct 02 12:45:15 compute-0 podman[252196]: 2025-10-02 12:45:15.150705978 +0000 UTC m=+0.053286399 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=multipathd, container_name=multipathd, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']})
Oct 02 12:45:15 compute-0 nova_compute[192079]: 2025-10-02 12:45:15.474 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_power_states run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:45:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:45:17.113 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.capacity, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:45:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:45:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:45:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:45:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:45:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:45:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.allocation, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:45:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:45:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:45:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:45:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:45:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:45:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:45:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:45:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:45:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:45:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster memory.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:45:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:45:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:45:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:45:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:45:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:45:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:45:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:45:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:45:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:45:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:45:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:45:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:45:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:45:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster cpu, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:45:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:45:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:45:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:45:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:45:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:45:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:45:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:45:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:45:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:45:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:45:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:45:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:45:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:45:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:45:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:45:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.iops, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:45:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:45:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:45:17 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:17.792 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '51'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:45:18 compute-0 nova_compute[192079]: 2025-10-02 12:45:18.679 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:20 compute-0 nova_compute[192079]: 2025-10-02 12:45:20.128 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:20 compute-0 nova_compute[192079]: 2025-10-02 12:45:20.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:45:21 compute-0 podman[252232]: 2025-10-02 12:45:21.132546842 +0000 UTC m=+0.048936429 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']})
Oct 02 12:45:21 compute-0 podman[252233]: 2025-10-02 12:45:21.144626193 +0000 UTC m=+0.053087663 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, config_id=iscsid, container_name=iscsid)
Oct 02 12:45:23 compute-0 nova_compute[192079]: 2025-10-02 12:45:23.678 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._run_pending_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:45:23 compute-0 nova_compute[192079]: 2025-10-02 12:45:23.678 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Cleaning up deleted instances _run_pending_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:11145
Oct 02 12:45:23 compute-0 nova_compute[192079]: 2025-10-02 12:45:23.681 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:23 compute-0 nova_compute[192079]: 2025-10-02 12:45:23.704 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] There are 0 instances to clean _run_pending_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:11154
Oct 02 12:45:25 compute-0 nova_compute[192079]: 2025-10-02 12:45:25.132 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:28 compute-0 nova_compute[192079]: 2025-10-02 12:45:28.683 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:30 compute-0 nova_compute[192079]: 2025-10-02 12:45:30.134 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:30 compute-0 podman[252271]: 2025-10-02 12:45:30.138765789 +0000 UTC m=+0.053952806 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes 
Operator team, managed_by=edpm_ansible, org.label-schema.license=GPLv2, tcib_managed=true, config_id=ovn_metadata_agent, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0)
Oct 02 12:45:30 compute-0 podman[252273]: 2025-10-02 12:45:30.143754786 +0000 UTC m=+0.051797637 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 12:45:30 compute-0 podman[252272]: 2025-10-02 12:45:30.186811913 +0000 UTC m=+0.090252709 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, container_name=ovn_controller, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, config_id=ovn_controller, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:45:33 compute-0 nova_compute[192079]: 2025-10-02 12:45:33.686 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:35 compute-0 nova_compute[192079]: 2025-10-02 12:45:35.137 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:38 compute-0 podman[252341]: 2025-10-02 12:45:38.16186194 +0000 UTC m=+0.072707910 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, 
org.label-schema.name=CentOS Stream 9 Base Image, config_id=edpm, org.label-schema.license=GPLv2)
Oct 02 12:45:38 compute-0 nova_compute[192079]: 2025-10-02 12:45:38.686 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:40 compute-0 nova_compute[192079]: 2025-10-02 12:45:40.140 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:43 compute-0 nova_compute[192079]: 2025-10-02 12:45:43.688 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:44 compute-0 nova_compute[192079]: 2025-10-02 12:45:44.687 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:45:45 compute-0 nova_compute[192079]: 2025-10-02 12:45:45.142 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:45 compute-0 nova_compute[192079]: 2025-10-02 12:45:45.974 2 DEBUG oslo_concurrency.lockutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "475567cc-a1f1-46b1-ae67-fb3b0ef2e230" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:45:45 compute-0 nova_compute[192079]: 2025-10-02 12:45:45.974 2 DEBUG oslo_concurrency.lockutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "475567cc-a1f1-46b1-ae67-fb3b0ef2e230" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:45:45 compute-0 nova_compute[192079]: 2025-10-02 12:45:45.991 2 DEBUG nova.compute.manager [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:45:46 compute-0 podman[252362]: 2025-10-02 12:45:46.148380899 +0000 UTC m=+0.055302244 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., config_id=edpm, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., version=9.6, release=1755695350, architecture=x86_64, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, 
vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, build-date=2025-08-20T13:12:41, url=https://catalog.redhat.com/en/search?searchType=containers, io.openshift.tags=minimal rhel9, vcs-type=git, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.buildah.version=1.33.7, vendor=Red Hat, Inc., name=ubi9-minimal, com.redhat.component=ubi9-minimal-container, container_name=openstack_network_exporter, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., distribution-scope=public, io.openshift.expose-services=, maintainer=Red Hat, Inc., managed_by=edpm_ansible)
Oct 02 12:45:46 compute-0 podman[252363]: 2025-10-02 12:45:46.176768675 +0000 UTC m=+0.077909422 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, config_id=multipathd, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=multipathd, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.build-date=20251001)
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.186 2 DEBUG oslo_concurrency.lockutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.186 2 DEBUG oslo_concurrency.lockutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.197 2 DEBUG nova.virt.hardware [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.198 2 INFO nova.compute.claims [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.342 2 DEBUG nova.compute.provider_tree [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.355 2 DEBUG nova.scheduler.client.report [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.374 2 DEBUG oslo_concurrency.lockutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.188s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.375 2 DEBUG nova.compute.manager [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.423 2 DEBUG nova.compute.manager [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.424 2 DEBUG nova.network.neutron [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.444 2 INFO nova.virt.libvirt.driver [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.461 2 DEBUG nova.compute.manager [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.620 2 DEBUG nova.compute.manager [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.622 2 DEBUG nova.virt.libvirt.driver [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.623 2 INFO nova.virt.libvirt.driver [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Creating image(s)
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.624 2 DEBUG oslo_concurrency.lockutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "/var/lib/nova/instances/475567cc-a1f1-46b1-ae67-fb3b0ef2e230/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.624 2 DEBUG oslo_concurrency.lockutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "/var/lib/nova/instances/475567cc-a1f1-46b1-ae67-fb3b0ef2e230/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.624 2 DEBUG oslo_concurrency.lockutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "/var/lib/nova/instances/475567cc-a1f1-46b1-ae67-fb3b0ef2e230/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.636 2 DEBUG oslo_concurrency.processutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.696 2 DEBUG oslo_concurrency.processutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.060s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.697 2 DEBUG oslo_concurrency.lockutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.698 2 DEBUG oslo_concurrency.lockutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.708 2 DEBUG oslo_concurrency.processutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.771 2 DEBUG oslo_concurrency.processutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.063s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.772 2 DEBUG oslo_concurrency.processutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/475567cc-a1f1-46b1-ae67-fb3b0ef2e230/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.816 2 DEBUG oslo_concurrency.processutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/475567cc-a1f1-46b1-ae67-fb3b0ef2e230/disk 1073741824" returned: 0 in 0.044s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.818 2 DEBUG oslo_concurrency.lockutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.120s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.819 2 DEBUG oslo_concurrency.processutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.902 2 DEBUG oslo_concurrency.processutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.083s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.904 2 DEBUG nova.virt.disk.api [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Checking if we can resize image /var/lib/nova/instances/475567cc-a1f1-46b1-ae67-fb3b0ef2e230/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.904 2 DEBUG oslo_concurrency.processutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/475567cc-a1f1-46b1-ae67-fb3b0ef2e230/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.983 2 DEBUG oslo_concurrency.processutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/475567cc-a1f1-46b1-ae67-fb3b0ef2e230/disk --force-share --output=json" returned: 0 in 0.078s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.984 2 DEBUG nova.virt.disk.api [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Cannot resize image /var/lib/nova/instances/475567cc-a1f1-46b1-ae67-fb3b0ef2e230/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:45:46 compute-0 nova_compute[192079]: 2025-10-02 12:45:46.985 2 DEBUG nova.objects.instance [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lazy-loading 'migration_context' on Instance uuid 475567cc-a1f1-46b1-ae67-fb3b0ef2e230 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:45:47 compute-0 nova_compute[192079]: 2025-10-02 12:45:47.006 2 DEBUG nova.virt.libvirt.driver [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:45:47 compute-0 nova_compute[192079]: 2025-10-02 12:45:47.006 2 DEBUG nova.virt.libvirt.driver [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Ensure instance console log exists: /var/lib/nova/instances/475567cc-a1f1-46b1-ae67-fb3b0ef2e230/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:45:47 compute-0 nova_compute[192079]: 2025-10-02 12:45:47.007 2 DEBUG oslo_concurrency.lockutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:45:47 compute-0 nova_compute[192079]: 2025-10-02 12:45:47.007 2 DEBUG oslo_concurrency.lockutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:45:47 compute-0 nova_compute[192079]: 2025-10-02 12:45:47.007 2 DEBUG oslo_concurrency.lockutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:45:47 compute-0 nova_compute[192079]: 2025-10-02 12:45:47.558 2 DEBUG nova.policy [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:45:48 compute-0 nova_compute[192079]: 2025-10-02 12:45:48.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:45:48 compute-0 nova_compute[192079]: 2025-10-02 12:45:48.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:45:48 compute-0 nova_compute[192079]: 2025-10-02 12:45:48.690 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:50 compute-0 nova_compute[192079]: 2025-10-02 12:45:50.144 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:50 compute-0 nova_compute[192079]: 2025-10-02 12:45:50.649 2 DEBUG nova.network.neutron [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Successfully created port: 483b5333-614f-4867-bf07-c9c1e37d1ce4 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:45:51 compute-0 nova_compute[192079]: 2025-10-02 12:45:51.858 2 DEBUG nova.network.neutron [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Successfully updated port: 483b5333-614f-4867-bf07-c9c1e37d1ce4 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:45:51 compute-0 nova_compute[192079]: 2025-10-02 12:45:51.874 2 DEBUG oslo_concurrency.lockutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "refresh_cache-475567cc-a1f1-46b1-ae67-fb3b0ef2e230" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:45:51 compute-0 nova_compute[192079]: 2025-10-02 12:45:51.874 2 DEBUG oslo_concurrency.lockutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquired lock "refresh_cache-475567cc-a1f1-46b1-ae67-fb3b0ef2e230" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:45:51 compute-0 nova_compute[192079]: 2025-10-02 12:45:51.874 2 DEBUG nova.network.neutron [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:45:51 compute-0 nova_compute[192079]: 2025-10-02 12:45:51.974 2 DEBUG nova.compute.manager [req-903c7f89-af4a-4408-9a23-7ca596cecf60 req-47697bab-2a28-4bb7-9641-7fd94fabab75 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Received event network-changed-483b5333-614f-4867-bf07-c9c1e37d1ce4 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:45:51 compute-0 nova_compute[192079]: 2025-10-02 12:45:51.975 2 DEBUG nova.compute.manager [req-903c7f89-af4a-4408-9a23-7ca596cecf60 req-47697bab-2a28-4bb7-9641-7fd94fabab75 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Refreshing instance network info cache due to event network-changed-483b5333-614f-4867-bf07-c9c1e37d1ce4. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:45:51 compute-0 nova_compute[192079]: 2025-10-02 12:45:51.975 2 DEBUG oslo_concurrency.lockutils [req-903c7f89-af4a-4408-9a23-7ca596cecf60 req-47697bab-2a28-4bb7-9641-7fd94fabab75 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-475567cc-a1f1-46b1-ae67-fb3b0ef2e230" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:45:52 compute-0 ovn_controller[94336]: 2025-10-02T12:45:52Z|00707|memory_trim|INFO|Detected inactivity (last active 30001 ms ago): trimming memory
Oct 02 12:45:52 compute-0 nova_compute[192079]: 2025-10-02 12:45:52.061 2 DEBUG nova.network.neutron [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:45:52 compute-0 podman[252417]: 2025-10-02 12:45:52.14582507 +0000 UTC m=+0.057748460 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, config_id=iscsid, container_name=iscsid, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001)
Oct 02 12:45:52 compute-0 podman[252416]: 2025-10-02 12:45:52.154828976 +0000 UTC m=+0.063242840 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:45:52 compute-0 nova_compute[192079]: 2025-10-02 12:45:52.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:45:52 compute-0 nova_compute[192079]: 2025-10-02 12:45:52.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:45:52 compute-0 nova_compute[192079]: 2025-10-02 12:45:52.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:45:52 compute-0 nova_compute[192079]: 2025-10-02 12:45:52.685 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:45:52 compute-0 nova_compute[192079]: 2025-10-02 12:45:52.686 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:45:52 compute-0 nova_compute[192079]: 2025-10-02 12:45:52.686 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:45:52 compute-0 nova_compute[192079]: 2025-10-02 12:45:52.686 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:45:52 compute-0 nova_compute[192079]: 2025-10-02 12:45:52.842 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:45:52 compute-0 nova_compute[192079]: 2025-10-02 12:45:52.843 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5735MB free_disk=73.27205657958984GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:45:52 compute-0 nova_compute[192079]: 2025-10-02 12:45:52.843 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:45:52 compute-0 nova_compute[192079]: 2025-10-02 12:45:52.844 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:45:52 compute-0 nova_compute[192079]: 2025-10-02 12:45:52.933 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance 475567cc-a1f1-46b1-ae67-fb3b0ef2e230 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:45:52 compute-0 nova_compute[192079]: 2025-10-02 12:45:52.933 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 1 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:45:52 compute-0 nova_compute[192079]: 2025-10-02 12:45:52.933 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=640MB phys_disk=79GB used_disk=1GB total_vcpus=8 used_vcpus=1 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:45:52 compute-0 nova_compute[192079]: 2025-10-02 12:45:52.980 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.001 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.016 2 DEBUG nova.network.neutron [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Updating instance_info_cache with network_info: [{"id": "483b5333-614f-4867-bf07-c9c1e37d1ce4", "address": "fa:16:3e:70:a7:5e", "network": {"id": "85e4aed1-4716-45af-bcd8-38b9aeff1c42", "bridge": "br-int", "label": "tempest-network-smoke--13042790", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap483b5333-61", "ovs_interfaceid": "483b5333-614f-4867-bf07-c9c1e37d1ce4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.023 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.024 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.180s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.034 2 DEBUG oslo_concurrency.lockutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Releasing lock "refresh_cache-475567cc-a1f1-46b1-ae67-fb3b0ef2e230" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.034 2 DEBUG nova.compute.manager [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Instance network_info: |[{"id": "483b5333-614f-4867-bf07-c9c1e37d1ce4", "address": "fa:16:3e:70:a7:5e", "network": {"id": "85e4aed1-4716-45af-bcd8-38b9aeff1c42", "bridge": "br-int", "label": "tempest-network-smoke--13042790", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap483b5333-61", "ovs_interfaceid": "483b5333-614f-4867-bf07-c9c1e37d1ce4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.034 2 DEBUG oslo_concurrency.lockutils [req-903c7f89-af4a-4408-9a23-7ca596cecf60 req-47697bab-2a28-4bb7-9641-7fd94fabab75 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-475567cc-a1f1-46b1-ae67-fb3b0ef2e230" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.035 2 DEBUG nova.network.neutron [req-903c7f89-af4a-4408-9a23-7ca596cecf60 req-47697bab-2a28-4bb7-9641-7fd94fabab75 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Refreshing network info cache for port 483b5333-614f-4867-bf07-c9c1e37d1ce4 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.037 2 DEBUG nova.virt.libvirt.driver [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Start _get_guest_xml network_info=[{"id": "483b5333-614f-4867-bf07-c9c1e37d1ce4", "address": "fa:16:3e:70:a7:5e", "network": {"id": "85e4aed1-4716-45af-bcd8-38b9aeff1c42", "bridge": "br-int", "label": "tempest-network-smoke--13042790", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap483b5333-61", "ovs_interfaceid": "483b5333-614f-4867-bf07-c9c1e37d1ce4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.041 2 WARNING nova.virt.libvirt.driver [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.048 2 DEBUG nova.virt.libvirt.host [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.049 2 DEBUG nova.virt.libvirt.host [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.054 2 DEBUG nova.virt.libvirt.host [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.055 2 DEBUG nova.virt.libvirt.host [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.056 2 DEBUG nova.virt.libvirt.driver [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.056 2 DEBUG nova.virt.hardware [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.056 2 DEBUG nova.virt.hardware [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.057 2 DEBUG nova.virt.hardware [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.057 2 DEBUG nova.virt.hardware [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.057 2 DEBUG nova.virt.hardware [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.057 2 DEBUG nova.virt.hardware [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.058 2 DEBUG nova.virt.hardware [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.058 2 DEBUG nova.virt.hardware [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.058 2 DEBUG nova.virt.hardware [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.058 2 DEBUG nova.virt.hardware [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.058 2 DEBUG nova.virt.hardware [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.061 2 DEBUG nova.virt.libvirt.vif [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:45:45Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-474871786',display_name='tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-474871786',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-server-tempest-testsecuritygroupsbasicops-1020134341-ac',id=181,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBGsUKfvQsRFH/GldSVzED6JnM2R8DeZMSLqFM+7ZoEbCSUSgEpS2XwQTay0eRWx3t/E5S4rEWdCjCoc+0nrAH+n3s9z8s5WA+sL/sdupqrDO9IWm9qn8ROfjJ4EtbzYHtg==',key_name='tempest-TestSecurityGroupsBasicOps-880121214',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='575f3d227ab24f2daa62e65e14a4cd9c',ramdisk_id='',reservation_id='r-evajko44',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestSecurityGroupsBasicOps-1020134341',owner_user_name='tempest-TestSecurityGroupsBasicOps-1020134341-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:45:46Z,user_data=None,user_id='2d2b4a2da57543ef88e44ae28ad61647',uuid=475567cc-a1f1-46b1-ae67-fb3b0ef2e230,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "483b5333-614f-4867-bf07-c9c1e37d1ce4", "address": "fa:16:3e:70:a7:5e", "network": {"id": "85e4aed1-4716-45af-bcd8-38b9aeff1c42", "bridge": "br-int", "label": "tempest-network-smoke--13042790", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], 
"meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap483b5333-61", "ovs_interfaceid": "483b5333-614f-4867-bf07-c9c1e37d1ce4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.062 2 DEBUG nova.network.os_vif_util [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Converting VIF {"id": "483b5333-614f-4867-bf07-c9c1e37d1ce4", "address": "fa:16:3e:70:a7:5e", "network": {"id": "85e4aed1-4716-45af-bcd8-38b9aeff1c42", "bridge": "br-int", "label": "tempest-network-smoke--13042790", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap483b5333-61", "ovs_interfaceid": "483b5333-614f-4867-bf07-c9c1e37d1ce4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.062 2 DEBUG nova.network.os_vif_util [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:70:a7:5e,bridge_name='br-int',has_traffic_filtering=True,id=483b5333-614f-4867-bf07-c9c1e37d1ce4,network=Network(85e4aed1-4716-45af-bcd8-38b9aeff1c42),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap483b5333-61') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.063 2 DEBUG nova.objects.instance [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lazy-loading 'pci_devices' on Instance uuid 475567cc-a1f1-46b1-ae67-fb3b0ef2e230 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.076 2 DEBUG nova.virt.libvirt.driver [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:45:53 compute-0 nova_compute[192079]:   <uuid>475567cc-a1f1-46b1-ae67-fb3b0ef2e230</uuid>
Oct 02 12:45:53 compute-0 nova_compute[192079]:   <name>instance-000000b5</name>
Oct 02 12:45:53 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:45:53 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:45:53 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:45:53 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:       <nova:name>tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-474871786</nova:name>
Oct 02 12:45:53 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:45:53</nova:creationTime>
Oct 02 12:45:53 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:45:53 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:45:53 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:45:53 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:45:53 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:45:53 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:45:53 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:45:53 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:45:53 compute-0 nova_compute[192079]:         <nova:user uuid="2d2b4a2da57543ef88e44ae28ad61647">tempest-TestSecurityGroupsBasicOps-1020134341-project-member</nova:user>
Oct 02 12:45:53 compute-0 nova_compute[192079]:         <nova:project uuid="575f3d227ab24f2daa62e65e14a4cd9c">tempest-TestSecurityGroupsBasicOps-1020134341</nova:project>
Oct 02 12:45:53 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:45:53 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:45:53 compute-0 nova_compute[192079]:         <nova:port uuid="483b5333-614f-4867-bf07-c9c1e37d1ce4">
Oct 02 12:45:53 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.6" ipVersion="4"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:45:53 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:45:53 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:45:53 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <system>
Oct 02 12:45:53 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:45:53 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:45:53 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:45:53 compute-0 nova_compute[192079]:       <entry name="serial">475567cc-a1f1-46b1-ae67-fb3b0ef2e230</entry>
Oct 02 12:45:53 compute-0 nova_compute[192079]:       <entry name="uuid">475567cc-a1f1-46b1-ae67-fb3b0ef2e230</entry>
Oct 02 12:45:53 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     </system>
Oct 02 12:45:53 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:45:53 compute-0 nova_compute[192079]:   <os>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:   </os>
Oct 02 12:45:53 compute-0 nova_compute[192079]:   <features>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:   </features>
Oct 02 12:45:53 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:45:53 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:45:53 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:45:53 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/475567cc-a1f1-46b1-ae67-fb3b0ef2e230/disk"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:45:53 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/475567cc-a1f1-46b1-ae67-fb3b0ef2e230/disk.config"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:45:53 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:70:a7:5e"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:       <target dev="tap483b5333-61"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:45:53 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/475567cc-a1f1-46b1-ae67-fb3b0ef2e230/console.log" append="off"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <video>
Oct 02 12:45:53 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     </video>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:45:53 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:45:53 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:45:53 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:45:53 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:45:53 compute-0 nova_compute[192079]: </domain>
Oct 02 12:45:53 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.077 2 DEBUG nova.compute.manager [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Preparing to wait for external event network-vif-plugged-483b5333-614f-4867-bf07-c9c1e37d1ce4 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.077 2 DEBUG oslo_concurrency.lockutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "475567cc-a1f1-46b1-ae67-fb3b0ef2e230-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.078 2 DEBUG oslo_concurrency.lockutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "475567cc-a1f1-46b1-ae67-fb3b0ef2e230-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.078 2 DEBUG oslo_concurrency.lockutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "475567cc-a1f1-46b1-ae67-fb3b0ef2e230-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.078 2 DEBUG nova.virt.libvirt.vif [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:45:45Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-474871786',display_name='tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-474871786',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-server-tempest-testsecuritygroupsbasicops-1020134341-ac',id=181,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBGsUKfvQsRFH/GldSVzED6JnM2R8DeZMSLqFM+7ZoEbCSUSgEpS2XwQTay0eRWx3t/E5S4rEWdCjCoc+0nrAH+n3s9z8s5WA+sL/sdupqrDO9IWm9qn8ROfjJ4EtbzYHtg==',key_name='tempest-TestSecurityGroupsBasicOps-880121214',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='575f3d227ab24f2daa62e65e14a4cd9c',ramdisk_id='',reservation_id='r-evajko44',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestSecurityGroupsBasicOps-1020134341',owner_user_name='tempest-TestSecurityGroupsBasicOps-1020134341-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:45:46Z,user_data=None,user_id='2d2b4a2da57543ef88e44ae28ad61647',uuid=475567cc-a1f1-46b1-ae67-fb3b0ef2e230,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "483b5333-614f-4867-bf07-c9c1e37d1ce4", "address": "fa:16:3e:70:a7:5e", "network": {"id": "85e4aed1-4716-45af-bcd8-38b9aeff1c42", "bridge": "br-int", "label": "tempest-network-smoke--13042790", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": 
true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap483b5333-61", "ovs_interfaceid": "483b5333-614f-4867-bf07-c9c1e37d1ce4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.079 2 DEBUG nova.network.os_vif_util [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Converting VIF {"id": "483b5333-614f-4867-bf07-c9c1e37d1ce4", "address": "fa:16:3e:70:a7:5e", "network": {"id": "85e4aed1-4716-45af-bcd8-38b9aeff1c42", "bridge": "br-int", "label": "tempest-network-smoke--13042790", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap483b5333-61", "ovs_interfaceid": "483b5333-614f-4867-bf07-c9c1e37d1ce4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.079 2 DEBUG nova.network.os_vif_util [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:70:a7:5e,bridge_name='br-int',has_traffic_filtering=True,id=483b5333-614f-4867-bf07-c9c1e37d1ce4,network=Network(85e4aed1-4716-45af-bcd8-38b9aeff1c42),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap483b5333-61') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.079 2 DEBUG os_vif [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:70:a7:5e,bridge_name='br-int',has_traffic_filtering=True,id=483b5333-614f-4867-bf07-c9c1e37d1ce4,network=Network(85e4aed1-4716-45af-bcd8-38b9aeff1c42),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap483b5333-61') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.080 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.080 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.080 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.083 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.083 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap483b5333-61, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.083 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tap483b5333-61, col_values=(('external_ids', {'iface-id': '483b5333-614f-4867-bf07-c9c1e37d1ce4', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:70:a7:5e', 'vm-uuid': '475567cc-a1f1-46b1-ae67-fb3b0ef2e230'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.084 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:53 compute-0 NetworkManager[51160]: <info>  [1759409153.0855] manager: (tap483b5333-61): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/345)
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.086 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.090 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.091 2 INFO os_vif [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:70:a7:5e,bridge_name='br-int',has_traffic_filtering=True,id=483b5333-614f-4867-bf07-c9c1e37d1ce4,network=Network(85e4aed1-4716-45af-bcd8-38b9aeff1c42),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap483b5333-61')
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.134 2 DEBUG nova.virt.libvirt.driver [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.134 2 DEBUG nova.virt.libvirt.driver [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.135 2 DEBUG nova.virt.libvirt.driver [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] No VIF found with MAC fa:16:3e:70:a7:5e, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.135 2 INFO nova.virt.libvirt.driver [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Using config drive
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.692 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.735 2 INFO nova.virt.libvirt.driver [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Creating config drive at /var/lib/nova/instances/475567cc-a1f1-46b1-ae67-fb3b0ef2e230/disk.config
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.740 2 DEBUG oslo_concurrency.processutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/475567cc-a1f1-46b1-ae67-fb3b0ef2e230/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmplpkijd9m execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.864 2 DEBUG oslo_concurrency.processutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/475567cc-a1f1-46b1-ae67-fb3b0ef2e230/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmplpkijd9m" returned: 0 in 0.124s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:45:53 compute-0 kernel: tap483b5333-61: entered promiscuous mode
Oct 02 12:45:53 compute-0 ovn_controller[94336]: 2025-10-02T12:45:53Z|00708|binding|INFO|Claiming lport 483b5333-614f-4867-bf07-c9c1e37d1ce4 for this chassis.
Oct 02 12:45:53 compute-0 ovn_controller[94336]: 2025-10-02T12:45:53Z|00709|binding|INFO|483b5333-614f-4867-bf07-c9c1e37d1ce4: Claiming fa:16:3e:70:a7:5e 10.100.0.6
Oct 02 12:45:53 compute-0 NetworkManager[51160]: <info>  [1759409153.9304] manager: (tap483b5333-61): new Tun device (/org/freedesktop/NetworkManager/Devices/346)
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.929 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.932 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:53.943 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:70:a7:5e 10.100.0.6'], port_security=['fa:16:3e:70:a7:5e 10.100.0.6'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.6/28', 'neutron:device_id': '475567cc-a1f1-46b1-ae67-fb3b0ef2e230', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-85e4aed1-4716-45af-bcd8-38b9aeff1c42', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'neutron:revision_number': '2', 'neutron:security_group_ids': '50db741e-f4cd-4cfd-91f3-cd58fd84561b e1be73b2-0596-4634-bbd8-c2bd6eae245c', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=170a325d-4ee6-4f9e-99b6-aa2be81235b5, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=483b5333-614f-4867-bf07-c9c1e37d1ce4) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:45:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:53.945 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 483b5333-614f-4867-bf07-c9c1e37d1ce4 in datapath 85e4aed1-4716-45af-bcd8-38b9aeff1c42 bound to our chassis
Oct 02 12:45:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:53.945 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 85e4aed1-4716-45af-bcd8-38b9aeff1c42
Oct 02 12:45:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:53.958 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[8b2b99e6-eb01-4e79-ab74-7f6c214ff9a7]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:45:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:53.959 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap85e4aed1-41 in ovnmeta-85e4aed1-4716-45af-bcd8-38b9aeff1c42 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:45:53 compute-0 systemd-udevd[252478]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:45:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:53.961 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap85e4aed1-40 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:45:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:53.961 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7fcabbf7-a54d-4f72-be6b-90b4d6d7d069]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:45:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:53.962 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a0eb8751-b6e1-47f5-ab2e-1aca4e2bfbff]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:45:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:53.974 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[af7ce253-61ab-441a-b58d-c68a1a13ccb3]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:45:53 compute-0 NetworkManager[51160]: <info>  [1759409153.9775] device (tap483b5333-61): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:45:53 compute-0 NetworkManager[51160]: <info>  [1759409153.9787] device (tap483b5333-61): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:45:53 compute-0 systemd-machined[152150]: New machine qemu-86-instance-000000b5.
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.987 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:53 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:53.990 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[52f0ea2e-d7f8-4918-921a-5b7096ff141c]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:45:53 compute-0 ovn_controller[94336]: 2025-10-02T12:45:53Z|00710|binding|INFO|Setting lport 483b5333-614f-4867-bf07-c9c1e37d1ce4 ovn-installed in OVS
Oct 02 12:45:53 compute-0 ovn_controller[94336]: 2025-10-02T12:45:53Z|00711|binding|INFO|Setting lport 483b5333-614f-4867-bf07-c9c1e37d1ce4 up in Southbound
Oct 02 12:45:53 compute-0 nova_compute[192079]: 2025-10-02 12:45:53.995 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:54 compute-0 systemd[1]: Started Virtual Machine qemu-86-instance-000000b5.
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:54.016 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[a3088a7b-fde2-409d-b714-606a5b8d5965]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:45:54 compute-0 systemd-udevd[252483]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:45:54 compute-0 NetworkManager[51160]: <info>  [1759409154.0219] manager: (tap85e4aed1-40): new Veth device (/org/freedesktop/NetworkManager/Devices/347)
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:54.023 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e8bb83ed-d4ba-4015-b068-8de35e9a4e9f]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.024 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:54.056 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[5b2e89b5-89a0-40cf-9b7d-494a27ce26d5]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:54.059 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[cbf9bf7b-6c4e-41e1-be5f-8d822a5808ad]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:45:54 compute-0 NetworkManager[51160]: <info>  [1759409154.0769] device (tap85e4aed1-40): carrier: link connected
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:54.081 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[b6004d69-dfcb-474d-9652-ca54431629f5]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:54.095 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[61ae6962-7b7a-469b-8a8e-f6de3345f17e]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap85e4aed1-41'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:c6:bd:06'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 221], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 715170, 'reachable_time': 32394, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 252512, 'error': None, 'target': 'ovnmeta-85e4aed1-4716-45af-bcd8-38b9aeff1c42', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:54.106 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[36e41f71-ddef-4885-9bbd-e8b823f497a0]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fec6:bd06'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 715170, 'tstamp': 715170}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 252513, 'error': None, 'target': 'ovnmeta-85e4aed1-4716-45af-bcd8-38b9aeff1c42', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:54.119 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[09e11d45-4af2-4ad4-b05c-7ce71247f565]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap85e4aed1-41'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:c6:bd:06'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 221], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 715170, 'reachable_time': 32394, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 252514, 'error': None, 'target': 'ovnmeta-85e4aed1-4716-45af-bcd8-38b9aeff1c42', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:54.148 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b81eab99-e90e-4efa-a5a4-56dc2e0cce58]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:54.203 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[296c1ce9-8b3a-44b1-be6f-587287ecc40d]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:54.205 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap85e4aed1-40, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:54.205 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:54.206 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap85e4aed1-40, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:45:54 compute-0 kernel: tap85e4aed1-40: entered promiscuous mode
Oct 02 12:45:54 compute-0 NetworkManager[51160]: <info>  [1759409154.2083] manager: (tap85e4aed1-40): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/348)
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.210 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:54.216 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap85e4aed1-40, col_values=(('external_ids', {'iface-id': 'cc53a2f5-12aa-4c54-8e29-196f3f838f7d'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.217 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:54 compute-0 ovn_controller[94336]: 2025-10-02T12:45:54Z|00712|binding|INFO|Releasing lport cc53a2f5-12aa-4c54-8e29-196f3f838f7d from this chassis (sb_readonly=0)
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.218 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:54.224 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/85e4aed1-4716-45af-bcd8-38b9aeff1c42.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/85e4aed1-4716-45af-bcd8-38b9aeff1c42.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:54.225 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[20314316-35ce-4e42-a2d1-8b9ea94091da]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:54.226 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-85e4aed1-4716-45af-bcd8-38b9aeff1c42
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/85e4aed1-4716-45af-bcd8-38b9aeff1c42.pid.haproxy
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 85e4aed1-4716-45af-bcd8-38b9aeff1c42
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:45:54 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:45:54.227 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-85e4aed1-4716-45af-bcd8-38b9aeff1c42', 'env', 'PROCESS_TAG=haproxy-85e4aed1-4716-45af-bcd8-38b9aeff1c42', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/85e4aed1-4716-45af-bcd8-38b9aeff1c42.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.231 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.447 2 DEBUG nova.compute.manager [req-0c3ccf69-3791-4f89-bd40-5e4eaa281255 req-b4e1d4e4-27e6-415a-99e7-b19cd239a4c0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Received event network-vif-plugged-483b5333-614f-4867-bf07-c9c1e37d1ce4 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.447 2 DEBUG oslo_concurrency.lockutils [req-0c3ccf69-3791-4f89-bd40-5e4eaa281255 req-b4e1d4e4-27e6-415a-99e7-b19cd239a4c0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "475567cc-a1f1-46b1-ae67-fb3b0ef2e230-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.448 2 DEBUG oslo_concurrency.lockutils [req-0c3ccf69-3791-4f89-bd40-5e4eaa281255 req-b4e1d4e4-27e6-415a-99e7-b19cd239a4c0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "475567cc-a1f1-46b1-ae67-fb3b0ef2e230-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.448 2 DEBUG oslo_concurrency.lockutils [req-0c3ccf69-3791-4f89-bd40-5e4eaa281255 req-b4e1d4e4-27e6-415a-99e7-b19cd239a4c0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "475567cc-a1f1-46b1-ae67-fb3b0ef2e230-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.448 2 DEBUG nova.compute.manager [req-0c3ccf69-3791-4f89-bd40-5e4eaa281255 req-b4e1d4e4-27e6-415a-99e7-b19cd239a4c0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Processing event network-vif-plugged-483b5333-614f-4867-bf07-c9c1e37d1ce4 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:45:54 compute-0 podman[252551]: 2025-10-02 12:45:54.555775199 +0000 UTC m=+0.046358588 container create 2e436edcfc84cd24e43a178e20d20d418f8a9a34ca9651b38379c5981f6f8406 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-85e4aed1-4716-45af-bcd8-38b9aeff1c42, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:45:54 compute-0 systemd[1]: Started libpod-conmon-2e436edcfc84cd24e43a178e20d20d418f8a9a34ca9651b38379c5981f6f8406.scope.
Oct 02 12:45:54 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:45:54 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/07de8edb4237dc9abf2f0425d078fde42f5fe166139ef45d80886e09dc6e92d7/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:45:54 compute-0 podman[252551]: 2025-10-02 12:45:54.52984861 +0000 UTC m=+0.020432019 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:45:54 compute-0 podman[252551]: 2025-10-02 12:45:54.640534798 +0000 UTC m=+0.131118207 container init 2e436edcfc84cd24e43a178e20d20d418f8a9a34ca9651b38379c5981f6f8406 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-85e4aed1-4716-45af-bcd8-38b9aeff1c42, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:45:54 compute-0 podman[252551]: 2025-10-02 12:45:54.645693758 +0000 UTC m=+0.136277137 container start 2e436edcfc84cd24e43a178e20d20d418f8a9a34ca9651b38379c5981f6f8406 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-85e4aed1-4716-45af-bcd8-38b9aeff1c42, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:45:54 compute-0 neutron-haproxy-ovnmeta-85e4aed1-4716-45af-bcd8-38b9aeff1c42[252566]: [NOTICE]   (252570) : New worker (252572) forked
Oct 02 12:45:54 compute-0 neutron-haproxy-ovnmeta-85e4aed1-4716-45af-bcd8-38b9aeff1c42[252566]: [NOTICE]   (252570) : Loading success.
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.693 2 DEBUG nova.network.neutron [req-903c7f89-af4a-4408-9a23-7ca596cecf60 req-47697bab-2a28-4bb7-9641-7fd94fabab75 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Updated VIF entry in instance network info cache for port 483b5333-614f-4867-bf07-c9c1e37d1ce4. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.694 2 DEBUG nova.network.neutron [req-903c7f89-af4a-4408-9a23-7ca596cecf60 req-47697bab-2a28-4bb7-9641-7fd94fabab75 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Updating instance_info_cache with network_info: [{"id": "483b5333-614f-4867-bf07-c9c1e37d1ce4", "address": "fa:16:3e:70:a7:5e", "network": {"id": "85e4aed1-4716-45af-bcd8-38b9aeff1c42", "bridge": "br-int", "label": "tempest-network-smoke--13042790", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap483b5333-61", "ovs_interfaceid": "483b5333-614f-4867-bf07-c9c1e37d1ce4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.696 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759409154.6963658, 475567cc-a1f1-46b1-ae67-fb3b0ef2e230 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.697 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] VM Started (Lifecycle Event)
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.698 2 DEBUG nova.compute.manager [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.701 2 DEBUG nova.virt.libvirt.driver [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.705 2 INFO nova.virt.libvirt.driver [-] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Instance spawned successfully.
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.705 2 DEBUG nova.virt.libvirt.driver [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.713 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.715 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.717 2 DEBUG oslo_concurrency.lockutils [req-903c7f89-af4a-4408-9a23-7ca596cecf60 req-47697bab-2a28-4bb7-9641-7fd94fabab75 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-475567cc-a1f1-46b1-ae67-fb3b0ef2e230" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.724 2 DEBUG nova.virt.libvirt.driver [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.724 2 DEBUG nova.virt.libvirt.driver [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.724 2 DEBUG nova.virt.libvirt.driver [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.724 2 DEBUG nova.virt.libvirt.driver [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.725 2 DEBUG nova.virt.libvirt.driver [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.725 2 DEBUG nova.virt.libvirt.driver [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.733 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.733 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759409154.6988041, 475567cc-a1f1-46b1-ae67-fb3b0ef2e230 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.733 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] VM Paused (Lifecycle Event)
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.755 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.758 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759409154.7013206, 475567cc-a1f1-46b1-ae67-fb3b0ef2e230 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.758 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] VM Resumed (Lifecycle Event)
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.787 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.789 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.813 2 INFO nova.compute.manager [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Took 8.19 seconds to spawn the instance on the hypervisor.
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.813 2 DEBUG nova.compute.manager [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.815 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.907 2 INFO nova.compute.manager [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Took 8.84 seconds to build instance.
Oct 02 12:45:54 compute-0 nova_compute[192079]: 2025-10-02 12:45:54.925 2 DEBUG oslo_concurrency.lockutils [None req-b70c2de5-f592-48fa-b04c-d7d2c636b878 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "475567cc-a1f1-46b1-ae67-fb3b0ef2e230" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 8.951s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:45:56 compute-0 nova_compute[192079]: 2025-10-02 12:45:56.596 2 DEBUG nova.compute.manager [req-9b97d601-7baa-421a-b300-df29a549d690 req-0390bbe5-9a03-4e81-b2c7-df0f72ad3691 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Received event network-vif-plugged-483b5333-614f-4867-bf07-c9c1e37d1ce4 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:45:56 compute-0 nova_compute[192079]: 2025-10-02 12:45:56.596 2 DEBUG oslo_concurrency.lockutils [req-9b97d601-7baa-421a-b300-df29a549d690 req-0390bbe5-9a03-4e81-b2c7-df0f72ad3691 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "475567cc-a1f1-46b1-ae67-fb3b0ef2e230-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:45:56 compute-0 nova_compute[192079]: 2025-10-02 12:45:56.597 2 DEBUG oslo_concurrency.lockutils [req-9b97d601-7baa-421a-b300-df29a549d690 req-0390bbe5-9a03-4e81-b2c7-df0f72ad3691 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "475567cc-a1f1-46b1-ae67-fb3b0ef2e230-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:45:56 compute-0 nova_compute[192079]: 2025-10-02 12:45:56.597 2 DEBUG oslo_concurrency.lockutils [req-9b97d601-7baa-421a-b300-df29a549d690 req-0390bbe5-9a03-4e81-b2c7-df0f72ad3691 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "475567cc-a1f1-46b1-ae67-fb3b0ef2e230-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:45:56 compute-0 nova_compute[192079]: 2025-10-02 12:45:56.597 2 DEBUG nova.compute.manager [req-9b97d601-7baa-421a-b300-df29a549d690 req-0390bbe5-9a03-4e81-b2c7-df0f72ad3691 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] No waiting events found dispatching network-vif-plugged-483b5333-614f-4867-bf07-c9c1e37d1ce4 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:45:56 compute-0 nova_compute[192079]: 2025-10-02 12:45:56.597 2 WARNING nova.compute.manager [req-9b97d601-7baa-421a-b300-df29a549d690 req-0390bbe5-9a03-4e81-b2c7-df0f72ad3691 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Received unexpected event network-vif-plugged-483b5333-614f-4867-bf07-c9c1e37d1ce4 for instance with vm_state active and task_state None.
Oct 02 12:45:58 compute-0 nova_compute[192079]: 2025-10-02 12:45:58.086 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:58 compute-0 nova_compute[192079]: 2025-10-02 12:45:58.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:45:58 compute-0 nova_compute[192079]: 2025-10-02 12:45:58.664 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:45:58 compute-0 nova_compute[192079]: 2025-10-02 12:45:58.664 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:45:58 compute-0 nova_compute[192079]: 2025-10-02 12:45:58.694 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:58 compute-0 nova_compute[192079]: 2025-10-02 12:45:58.851 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "refresh_cache-475567cc-a1f1-46b1-ae67-fb3b0ef2e230" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:45:58 compute-0 nova_compute[192079]: 2025-10-02 12:45:58.852 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquired lock "refresh_cache-475567cc-a1f1-46b1-ae67-fb3b0ef2e230" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:45:58 compute-0 nova_compute[192079]: 2025-10-02 12:45:58.852 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Forcefully refreshing network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2004
Oct 02 12:45:58 compute-0 nova_compute[192079]: 2025-10-02 12:45:58.853 2 DEBUG nova.objects.instance [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lazy-loading 'info_cache' on Instance uuid 475567cc-a1f1-46b1-ae67-fb3b0ef2e230 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:45:59 compute-0 NetworkManager[51160]: <info>  [1759409159.2827] manager: (patch-br-int-to-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/349)
Oct 02 12:45:59 compute-0 NetworkManager[51160]: <info>  [1759409159.2836] manager: (patch-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d-to-br-int): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/350)
Oct 02 12:45:59 compute-0 nova_compute[192079]: 2025-10-02 12:45:59.282 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:59 compute-0 ovn_controller[94336]: 2025-10-02T12:45:59Z|00713|binding|INFO|Releasing lport cc53a2f5-12aa-4c54-8e29-196f3f838f7d from this chassis (sb_readonly=0)
Oct 02 12:45:59 compute-0 ovn_controller[94336]: 2025-10-02T12:45:59Z|00714|binding|INFO|Releasing lport cc53a2f5-12aa-4c54-8e29-196f3f838f7d from this chassis (sb_readonly=0)
Oct 02 12:45:59 compute-0 nova_compute[192079]: 2025-10-02 12:45:59.310 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:59 compute-0 nova_compute[192079]: 2025-10-02 12:45:59.314 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:45:59 compute-0 nova_compute[192079]: 2025-10-02 12:45:59.718 2 DEBUG nova.compute.manager [req-bbdae5f6-9d91-412a-a49a-2a27e48b0e68 req-0a3005dc-d671-4ab1-9376-b8d17d24d207 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Received event network-changed-483b5333-614f-4867-bf07-c9c1e37d1ce4 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:45:59 compute-0 nova_compute[192079]: 2025-10-02 12:45:59.719 2 DEBUG nova.compute.manager [req-bbdae5f6-9d91-412a-a49a-2a27e48b0e68 req-0a3005dc-d671-4ab1-9376-b8d17d24d207 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Refreshing instance network info cache due to event network-changed-483b5333-614f-4867-bf07-c9c1e37d1ce4. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:45:59 compute-0 nova_compute[192079]: 2025-10-02 12:45:59.719 2 DEBUG oslo_concurrency.lockutils [req-bbdae5f6-9d91-412a-a49a-2a27e48b0e68 req-0a3005dc-d671-4ab1-9376-b8d17d24d207 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-475567cc-a1f1-46b1-ae67-fb3b0ef2e230" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:46:00 compute-0 nova_compute[192079]: 2025-10-02 12:46:00.820 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Updating instance_info_cache with network_info: [{"id": "483b5333-614f-4867-bf07-c9c1e37d1ce4", "address": "fa:16:3e:70:a7:5e", "network": {"id": "85e4aed1-4716-45af-bcd8-38b9aeff1c42", "bridge": "br-int", "label": "tempest-network-smoke--13042790", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap483b5333-61", "ovs_interfaceid": "483b5333-614f-4867-bf07-c9c1e37d1ce4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:46:00 compute-0 nova_compute[192079]: 2025-10-02 12:46:00.844 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Releasing lock "refresh_cache-475567cc-a1f1-46b1-ae67-fb3b0ef2e230" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:46:00 compute-0 nova_compute[192079]: 2025-10-02 12:46:00.845 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Updated the network info_cache for instance _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9929
Oct 02 12:46:00 compute-0 nova_compute[192079]: 2025-10-02 12:46:00.846 2 DEBUG oslo_concurrency.lockutils [req-bbdae5f6-9d91-412a-a49a-2a27e48b0e68 req-0a3005dc-d671-4ab1-9376-b8d17d24d207 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-475567cc-a1f1-46b1-ae67-fb3b0ef2e230" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:46:00 compute-0 nova_compute[192079]: 2025-10-02 12:46:00.847 2 DEBUG nova.network.neutron [req-bbdae5f6-9d91-412a-a49a-2a27e48b0e68 req-0a3005dc-d671-4ab1-9376-b8d17d24d207 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Refreshing network info cache for port 483b5333-614f-4867-bf07-c9c1e37d1ce4 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:46:00 compute-0 nova_compute[192079]: 2025-10-02 12:46:00.848 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:46:01 compute-0 podman[252584]: 2025-10-02 12:46:01.16016117 +0000 UTC m=+0.056001783 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:46:01 compute-0 podman[252582]: 2025-10-02 12:46:01.182963603 +0000 UTC m=+0.084817470 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, container_name=ovn_metadata_agent, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, 
config_id=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:46:01 compute-0 podman[252583]: 2025-10-02 12:46:01.185328688 +0000 UTC m=+0.084896264 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, config_id=ovn_controller, container_name=ovn_controller, org.label-schema.schema-version=1.0)
Oct 02 12:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:46:02.252 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:46:02.253 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:46:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:46:02.254 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:46:02 compute-0 nova_compute[192079]: 2025-10-02 12:46:02.537 2 DEBUG nova.network.neutron [req-bbdae5f6-9d91-412a-a49a-2a27e48b0e68 req-0a3005dc-d671-4ab1-9376-b8d17d24d207 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Updated VIF entry in instance network info cache for port 483b5333-614f-4867-bf07-c9c1e37d1ce4. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:46:02 compute-0 nova_compute[192079]: 2025-10-02 12:46:02.539 2 DEBUG nova.network.neutron [req-bbdae5f6-9d91-412a-a49a-2a27e48b0e68 req-0a3005dc-d671-4ab1-9376-b8d17d24d207 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Updating instance_info_cache with network_info: [{"id": "483b5333-614f-4867-bf07-c9c1e37d1ce4", "address": "fa:16:3e:70:a7:5e", "network": {"id": "85e4aed1-4716-45af-bcd8-38b9aeff1c42", "bridge": "br-int", "label": "tempest-network-smoke--13042790", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap483b5333-61", "ovs_interfaceid": "483b5333-614f-4867-bf07-c9c1e37d1ce4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:46:02 compute-0 nova_compute[192079]: 2025-10-02 12:46:02.555 2 DEBUG oslo_concurrency.lockutils [req-bbdae5f6-9d91-412a-a49a-2a27e48b0e68 req-0a3005dc-d671-4ab1-9376-b8d17d24d207 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-475567cc-a1f1-46b1-ae67-fb3b0ef2e230" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:46:03 compute-0 nova_compute[192079]: 2025-10-02 12:46:03.122 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:03 compute-0 nova_compute[192079]: 2025-10-02 12:46:03.697 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:06 compute-0 ovn_controller[94336]: 2025-10-02T12:46:06Z|00083|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:70:a7:5e 10.100.0.6
Oct 02 12:46:06 compute-0 ovn_controller[94336]: 2025-10-02T12:46:06Z|00084|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:70:a7:5e 10.100.0.6
Oct 02 12:46:08 compute-0 nova_compute[192079]: 2025-10-02 12:46:08.126 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:08 compute-0 nova_compute[192079]: 2025-10-02 12:46:08.699 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:09 compute-0 podman[252660]: 2025-10-02 12:46:09.132584064 +0000 UTC m=+0.051204542 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, container_name=ceilometer_agent_compute, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=edpm, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', 
'/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, managed_by=edpm_ansible)
Oct 02 12:46:13 compute-0 nova_compute[192079]: 2025-10-02 12:46:13.129 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:13 compute-0 nova_compute[192079]: 2025-10-02 12:46:13.709 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:17 compute-0 podman[252681]: 2025-10-02 12:46:17.144352524 +0000 UTC m=+0.056059014 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, container_name=multipathd, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, io.buildah.version=1.41.3)
Oct 02 12:46:17 compute-0 podman[252680]: 2025-10-02 12:46:17.152510637 +0000 UTC m=+0.063614121 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, io.openshift.expose-services=, container_name=openstack_network_exporter, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, url=https://catalog.redhat.com/en/search?searchType=containers, vcs-type=git, vendor=Red Hat, Inc., architecture=x86_64, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, maintainer=Red Hat, Inc., com.redhat.component=ubi9-minimal-container, release=1755695350, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.buildah.version=1.33.7, managed_by=edpm_ansible, version=9.6, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., distribution-scope=public, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., io.openshift.tags=minimal rhel9, build-date=2025-08-20T13:12:41, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., name=ubi9-minimal)
Oct 02 12:46:18 compute-0 nova_compute[192079]: 2025-10-02 12:46:18.131 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:18 compute-0 nova_compute[192079]: 2025-10-02 12:46:18.755 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:46:20.450 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=52, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=51) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:46:20 compute-0 nova_compute[192079]: 2025-10-02 12:46:20.451 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:46:20.451 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 1 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:46:21 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:46:21.453 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '52'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:46:23 compute-0 nova_compute[192079]: 2025-10-02 12:46:23.134 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:23 compute-0 podman[252724]: 2025-10-02 12:46:23.140728414 +0000 UTC m=+0.050092491 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, config_id=iscsid, container_name=iscsid, io.buildah.version=1.41.3, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0)
Oct 02 12:46:23 compute-0 podman[252723]: 2025-10-02 12:46:23.148681202 +0000 UTC m=+0.057700750 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:46:23 compute-0 nova_compute[192079]: 2025-10-02 12:46:23.756 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:28 compute-0 nova_compute[192079]: 2025-10-02 12:46:28.137 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:28 compute-0 nova_compute[192079]: 2025-10-02 12:46:28.813 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:32 compute-0 podman[252767]: 2025-10-02 12:46:32.143780643 +0000 UTC m=+0.053310659 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, 
org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, container_name=ovn_metadata_agent, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_managed=true, config_id=ovn_metadata_agent, io.buildah.version=1.41.3)
Oct 02 12:46:32 compute-0 podman[252769]: 2025-10-02 12:46:32.158769083 +0000 UTC m=+0.060972199 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:46:32 compute-0 podman[252768]: 2025-10-02 12:46:32.180920569 +0000 UTC m=+0.086959830 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, config_id=ovn_controller, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:46:33 compute-0 nova_compute[192079]: 2025-10-02 12:46:33.140 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:33 compute-0 nova_compute[192079]: 2025-10-02 12:46:33.841 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:38 compute-0 nova_compute[192079]: 2025-10-02 12:46:38.143 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:38 compute-0 nova_compute[192079]: 2025-10-02 12:46:38.843 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:40 compute-0 podman[252837]: 2025-10-02 12:46:40.129644396 +0000 UTC m=+0.049518936 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, config_id=edpm, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ceilometer_agent_compute, 
managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2)
Oct 02 12:46:43 compute-0 nova_compute[192079]: 2025-10-02 12:46:43.146 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:43 compute-0 nova_compute[192079]: 2025-10-02 12:46:43.845 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:47 compute-0 nova_compute[192079]: 2025-10-02 12:46:47.844 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:46:48 compute-0 podman[252857]: 2025-10-02 12:46:48.184089211 +0000 UTC m=+0.087443412 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.openshift.tags=minimal rhel9, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, build-date=2025-08-20T13:12:41, name=ubi9-minimal, vcs-type=git, architecture=x86_64, release=1755695350, maintainer=Red Hat, Inc., managed_by=edpm_ansible, container_name=openstack_network_exporter, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, config_id=edpm, vendor=Red Hat, Inc., io.openshift.expose-services=, 
io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, version=9.6, url=https://catalog.redhat.com/en/search?searchType=containers, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., com.redhat.component=ubi9-minimal-container, distribution-scope=public, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.buildah.version=1.33.7)
Oct 02 12:46:48 compute-0 nova_compute[192079]: 2025-10-02 12:46:48.195 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:48 compute-0 podman[252858]: 2025-10-02 12:46:48.221014291 +0000 UTC m=+0.119128310 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, container_name=multipathd, org.label-schema.build-date=20251001, tcib_managed=true, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=multipathd, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3)
Oct 02 12:46:48 compute-0 nova_compute[192079]: 2025-10-02 12:46:48.846 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:49 compute-0 nova_compute[192079]: 2025-10-02 12:46:49.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:46:49 compute-0 nova_compute[192079]: 2025-10-02 12:46:49.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.417 2 DEBUG oslo_concurrency.lockutils [None req-fac2566c-31cb-4b2a-878c-cd1e31868f00 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "475567cc-a1f1-46b1-ae67-fb3b0ef2e230" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.417 2 DEBUG oslo_concurrency.lockutils [None req-fac2566c-31cb-4b2a-878c-cd1e31868f00 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "475567cc-a1f1-46b1-ae67-fb3b0ef2e230" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.418 2 DEBUG oslo_concurrency.lockutils [None req-fac2566c-31cb-4b2a-878c-cd1e31868f00 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "475567cc-a1f1-46b1-ae67-fb3b0ef2e230-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.418 2 DEBUG oslo_concurrency.lockutils [None req-fac2566c-31cb-4b2a-878c-cd1e31868f00 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "475567cc-a1f1-46b1-ae67-fb3b0ef2e230-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.419 2 DEBUG oslo_concurrency.lockutils [None req-fac2566c-31cb-4b2a-878c-cd1e31868f00 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "475567cc-a1f1-46b1-ae67-fb3b0ef2e230-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.432 2 INFO nova.compute.manager [None req-fac2566c-31cb-4b2a-878c-cd1e31868f00 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Terminating instance
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.443 2 DEBUG nova.compute.manager [None req-fac2566c-31cb-4b2a-878c-cd1e31868f00 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:46:52 compute-0 kernel: tap483b5333-61 (unregistering): left promiscuous mode
Oct 02 12:46:52 compute-0 NetworkManager[51160]: <info>  [1759409212.4706] device (tap483b5333-61): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:46:52 compute-0 ovn_controller[94336]: 2025-10-02T12:46:52Z|00715|binding|INFO|Releasing lport 483b5333-614f-4867-bf07-c9c1e37d1ce4 from this chassis (sb_readonly=0)
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.479 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:52 compute-0 ovn_controller[94336]: 2025-10-02T12:46:52Z|00716|binding|INFO|Setting lport 483b5333-614f-4867-bf07-c9c1e37d1ce4 down in Southbound
Oct 02 12:46:52 compute-0 ovn_controller[94336]: 2025-10-02T12:46:52Z|00717|binding|INFO|Removing iface tap483b5333-61 ovn-installed in OVS
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.480 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:46:52.487 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:70:a7:5e 10.100.0.6'], port_security=['fa:16:3e:70:a7:5e 10.100.0.6'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.6/28', 'neutron:device_id': '475567cc-a1f1-46b1-ae67-fb3b0ef2e230', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-85e4aed1-4716-45af-bcd8-38b9aeff1c42', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'neutron:revision_number': '4', 'neutron:security_group_ids': '50db741e-f4cd-4cfd-91f3-cd58fd84561b e1be73b2-0596-4634-bbd8-c2bd6eae245c', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=170a325d-4ee6-4f9e-99b6-aa2be81235b5, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=483b5333-614f-4867-bf07-c9c1e37d1ce4) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:46:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:46:52.488 103294 INFO neutron.agent.ovn.metadata.agent [-] Port 483b5333-614f-4867-bf07-c9c1e37d1ce4 in datapath 85e4aed1-4716-45af-bcd8-38b9aeff1c42 unbound from our chassis
Oct 02 12:46:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:46:52.489 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 85e4aed1-4716-45af-bcd8-38b9aeff1c42, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:46:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:46:52.490 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[a7a2f123-faa6-400d-b498-e4ca7516f44b]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:46:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:46:52.490 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-85e4aed1-4716-45af-bcd8-38b9aeff1c42 namespace which is not needed anymore
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.497 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:52 compute-0 systemd[1]: machine-qemu\x2d86\x2dinstance\x2d000000b5.scope: Deactivated successfully.
Oct 02 12:46:52 compute-0 systemd[1]: machine-qemu\x2d86\x2dinstance\x2d000000b5.scope: Consumed 14.882s CPU time.
Oct 02 12:46:52 compute-0 systemd-machined[152150]: Machine qemu-86-instance-000000b5 terminated.
Oct 02 12:46:52 compute-0 neutron-haproxy-ovnmeta-85e4aed1-4716-45af-bcd8-38b9aeff1c42[252566]: [NOTICE]   (252570) : haproxy version is 2.8.14-c23fe91
Oct 02 12:46:52 compute-0 neutron-haproxy-ovnmeta-85e4aed1-4716-45af-bcd8-38b9aeff1c42[252566]: [NOTICE]   (252570) : path to executable is /usr/sbin/haproxy
Oct 02 12:46:52 compute-0 neutron-haproxy-ovnmeta-85e4aed1-4716-45af-bcd8-38b9aeff1c42[252566]: [WARNING]  (252570) : Exiting Master process...
Oct 02 12:46:52 compute-0 neutron-haproxy-ovnmeta-85e4aed1-4716-45af-bcd8-38b9aeff1c42[252566]: [ALERT]    (252570) : Current worker (252572) exited with code 143 (Terminated)
Oct 02 12:46:52 compute-0 neutron-haproxy-ovnmeta-85e4aed1-4716-45af-bcd8-38b9aeff1c42[252566]: [WARNING]  (252570) : All workers exited. Exiting... (0)
Oct 02 12:46:52 compute-0 systemd[1]: libpod-2e436edcfc84cd24e43a178e20d20d418f8a9a34ca9651b38379c5981f6f8406.scope: Deactivated successfully.
Oct 02 12:46:52 compute-0 podman[252922]: 2025-10-02 12:46:52.612472092 +0000 UTC m=+0.041830785 container died 2e436edcfc84cd24e43a178e20d20d418f8a9a34ca9651b38379c5981f6f8406 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-85e4aed1-4716-45af-bcd8-38b9aeff1c42, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:46:52 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-2e436edcfc84cd24e43a178e20d20d418f8a9a34ca9651b38379c5981f6f8406-userdata-shm.mount: Deactivated successfully.
Oct 02 12:46:52 compute-0 systemd[1]: var-lib-containers-storage-overlay-07de8edb4237dc9abf2f0425d078fde42f5fe166139ef45d80886e09dc6e92d7-merged.mount: Deactivated successfully.
Oct 02 12:46:52 compute-0 podman[252922]: 2025-10-02 12:46:52.648878017 +0000 UTC m=+0.078236710 container cleanup 2e436edcfc84cd24e43a178e20d20d418f8a9a34ca9651b38379c5981f6f8406 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-85e4aed1-4716-45af-bcd8-38b9aeff1c42, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0)
Oct 02 12:46:52 compute-0 systemd[1]: libpod-conmon-2e436edcfc84cd24e43a178e20d20d418f8a9a34ca9651b38379c5981f6f8406.scope: Deactivated successfully.
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.701 2 INFO nova.virt.libvirt.driver [-] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Instance destroyed successfully.
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.703 2 DEBUG nova.objects.instance [None req-fac2566c-31cb-4b2a-878c-cd1e31868f00 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lazy-loading 'resources' on Instance uuid 475567cc-a1f1-46b1-ae67-fb3b0ef2e230 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:46:52 compute-0 podman[252953]: 2025-10-02 12:46:52.712863506 +0000 UTC m=+0.043560171 container remove 2e436edcfc84cd24e43a178e20d20d418f8a9a34ca9651b38379c5981f6f8406 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-85e4aed1-4716-45af-bcd8-38b9aeff1c42, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0)
Oct 02 12:46:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:46:52.718 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6f2a3e0a-20b0-4e97-884c-277c06b57518]: (4, ('Thu Oct  2 12:46:52 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-85e4aed1-4716-45af-bcd8-38b9aeff1c42 (2e436edcfc84cd24e43a178e20d20d418f8a9a34ca9651b38379c5981f6f8406)\n2e436edcfc84cd24e43a178e20d20d418f8a9a34ca9651b38379c5981f6f8406\nThu Oct  2 12:46:52 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-85e4aed1-4716-45af-bcd8-38b9aeff1c42 (2e436edcfc84cd24e43a178e20d20d418f8a9a34ca9651b38379c5981f6f8406)\n2e436edcfc84cd24e43a178e20d20d418f8a9a34ca9651b38379c5981f6f8406\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:46:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:46:52.719 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[94c84170-26f6-4bd3-bb57-804081165f5c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:46:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:46:52.720 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap85e4aed1-40, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.721 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:52 compute-0 kernel: tap85e4aed1-40: left promiscuous mode
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.728 2 DEBUG nova.virt.libvirt.vif [None req-fac2566c-31cb-4b2a-878c-cd1e31868f00 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:45:45Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-474871786',display_name='tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-474871786',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-server-tempest-testsecuritygroupsbasicops-1020134341-ac',id=181,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBGsUKfvQsRFH/GldSVzED6JnM2R8DeZMSLqFM+7ZoEbCSUSgEpS2XwQTay0eRWx3t/E5S4rEWdCjCoc+0nrAH+n3s9z8s5WA+sL/sdupqrDO9IWm9qn8ROfjJ4EtbzYHtg==',key_name='tempest-TestSecurityGroupsBasicOps-880121214',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:45:54Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='575f3d227ab24f2daa62e65e14a4cd9c',ramdisk_id='',reservation_id='r-evajko44',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-TestSecurityGroupsBasicOps-1020134341',owner_user_name='tempest-TestSecurityGroupsBasicOps-1020134341-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:45:54Z,user_data=None,user_id='2d2b4a2da57543ef88e44ae28ad61647',uuid=475567cc-a1f1-46b1-ae67-fb3b0ef2e230,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "483b5333-614f-4867-bf07-c9c1e37d1ce4", "address": "fa:16:3e:70:a7:5e", "network": {"id": "85e4aed1-4716-45af-bcd8-38b9aeff1c42", "bridge": "br-int", "label": "tempest-network-smoke--13042790", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", 
"type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap483b5333-61", "ovs_interfaceid": "483b5333-614f-4867-bf07-c9c1e37d1ce4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.728 2 DEBUG nova.network.os_vif_util [None req-fac2566c-31cb-4b2a-878c-cd1e31868f00 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Converting VIF {"id": "483b5333-614f-4867-bf07-c9c1e37d1ce4", "address": "fa:16:3e:70:a7:5e", "network": {"id": "85e4aed1-4716-45af-bcd8-38b9aeff1c42", "bridge": "br-int", "label": "tempest-network-smoke--13042790", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap483b5333-61", "ovs_interfaceid": "483b5333-614f-4867-bf07-c9c1e37d1ce4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.729 2 DEBUG nova.network.os_vif_util [None req-fac2566c-31cb-4b2a-878c-cd1e31868f00 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:70:a7:5e,bridge_name='br-int',has_traffic_filtering=True,id=483b5333-614f-4867-bf07-c9c1e37d1ce4,network=Network(85e4aed1-4716-45af-bcd8-38b9aeff1c42),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap483b5333-61') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.729 2 DEBUG os_vif [None req-fac2566c-31cb-4b2a-878c-cd1e31868f00 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:70:a7:5e,bridge_name='br-int',has_traffic_filtering=True,id=483b5333-614f-4867-bf07-c9c1e37d1ce4,network=Network(85e4aed1-4716-45af-bcd8-38b9aeff1c42),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap483b5333-61') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.731 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.732 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap483b5333-61, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.733 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.735 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.735 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.736 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.738 2 INFO os_vif [None req-fac2566c-31cb-4b2a-878c-cd1e31868f00 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:70:a7:5e,bridge_name='br-int',has_traffic_filtering=True,id=483b5333-614f-4867-bf07-c9c1e37d1ce4,network=Network(85e4aed1-4716-45af-bcd8-38b9aeff1c42),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tap483b5333-61')
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.738 2 INFO nova.virt.libvirt.driver [None req-fac2566c-31cb-4b2a-878c-cd1e31868f00 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Deleting instance files /var/lib/nova/instances/475567cc-a1f1-46b1-ae67-fb3b0ef2e230_del
Oct 02 12:46:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:46:52.738 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[5fd341c3-2ba1-44f6-8681-db3bb65926a5]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.739 2 INFO nova.virt.libvirt.driver [None req-fac2566c-31cb-4b2a-878c-cd1e31868f00 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Deletion of /var/lib/nova/instances/475567cc-a1f1-46b1-ae67-fb3b0ef2e230_del complete
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.769 2 DEBUG nova.compute.manager [req-10e431bf-8f0a-4f01-acf8-0c231f7b0fe8 req-ea959dcb-ce81-4b2b-8242-516f1a68f2f0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Received event network-vif-unplugged-483b5333-614f-4867-bf07-c9c1e37d1ce4 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.769 2 DEBUG oslo_concurrency.lockutils [req-10e431bf-8f0a-4f01-acf8-0c231f7b0fe8 req-ea959dcb-ce81-4b2b-8242-516f1a68f2f0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "475567cc-a1f1-46b1-ae67-fb3b0ef2e230-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.770 2 DEBUG oslo_concurrency.lockutils [req-10e431bf-8f0a-4f01-acf8-0c231f7b0fe8 req-ea959dcb-ce81-4b2b-8242-516f1a68f2f0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "475567cc-a1f1-46b1-ae67-fb3b0ef2e230-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.770 2 DEBUG oslo_concurrency.lockutils [req-10e431bf-8f0a-4f01-acf8-0c231f7b0fe8 req-ea959dcb-ce81-4b2b-8242-516f1a68f2f0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "475567cc-a1f1-46b1-ae67-fb3b0ef2e230-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.770 2 DEBUG nova.compute.manager [req-10e431bf-8f0a-4f01-acf8-0c231f7b0fe8 req-ea959dcb-ce81-4b2b-8242-516f1a68f2f0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] No waiting events found dispatching network-vif-unplugged-483b5333-614f-4867-bf07-c9c1e37d1ce4 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.770 2 DEBUG nova.compute.manager [req-10e431bf-8f0a-4f01-acf8-0c231f7b0fe8 req-ea959dcb-ce81-4b2b-8242-516f1a68f2f0 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Received event network-vif-unplugged-483b5333-614f-4867-bf07-c9c1e37d1ce4 for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:46:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:46:52.774 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[657e5a43-5b1e-4bb4-be4b-15e0c1502f76]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:46:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:46:52.775 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[6ec769ff-3b45-4882-93f4-98221e6267b1]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:46:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:46:52.789 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[4019c665-0501-4e46-8ee3-99875ceae6bf]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 715164, 'reachable_time': 39093, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 252984, 'error': None, 'target': 'ovnmeta-85e4aed1-4716-45af-bcd8-38b9aeff1c42', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:46:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:46:52.791 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-85e4aed1-4716-45af-bcd8-38b9aeff1c42 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:46:52 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:46:52.791 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[ebb450d1-a433-4a92-8f2a-63b38c879fd4]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:46:52 compute-0 systemd[1]: run-netns-ovnmeta\x2d85e4aed1\x2d4716\x2d45af\x2dbcd8\x2d38b9aeff1c42.mount: Deactivated successfully.
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.830 2 INFO nova.compute.manager [None req-fac2566c-31cb-4b2a-878c-cd1e31868f00 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Took 0.39 seconds to destroy the instance on the hypervisor.
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.830 2 DEBUG oslo.service.loopingcall [None req-fac2566c-31cb-4b2a-878c-cd1e31868f00 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.831 2 DEBUG nova.compute.manager [-] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:46:52 compute-0 nova_compute[192079]: 2025-10-02 12:46:52.831 2 DEBUG nova.network.neutron [-] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:46:53 compute-0 nova_compute[192079]: 2025-10-02 12:46:53.034 2 DEBUG nova.compute.manager [req-306f3d2b-1ffd-4ab0-af0d-283e90ab102d req-0cb45fbd-1d19-43c0-b22a-42bd76eb8119 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Received event network-changed-483b5333-614f-4867-bf07-c9c1e37d1ce4 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:46:53 compute-0 nova_compute[192079]: 2025-10-02 12:46:53.035 2 DEBUG nova.compute.manager [req-306f3d2b-1ffd-4ab0-af0d-283e90ab102d req-0cb45fbd-1d19-43c0-b22a-42bd76eb8119 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Refreshing instance network info cache due to event network-changed-483b5333-614f-4867-bf07-c9c1e37d1ce4. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:46:53 compute-0 nova_compute[192079]: 2025-10-02 12:46:53.035 2 DEBUG oslo_concurrency.lockutils [req-306f3d2b-1ffd-4ab0-af0d-283e90ab102d req-0cb45fbd-1d19-43c0-b22a-42bd76eb8119 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-475567cc-a1f1-46b1-ae67-fb3b0ef2e230" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:46:53 compute-0 nova_compute[192079]: 2025-10-02 12:46:53.035 2 DEBUG oslo_concurrency.lockutils [req-306f3d2b-1ffd-4ab0-af0d-283e90ab102d req-0cb45fbd-1d19-43c0-b22a-42bd76eb8119 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-475567cc-a1f1-46b1-ae67-fb3b0ef2e230" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:46:53 compute-0 nova_compute[192079]: 2025-10-02 12:46:53.035 2 DEBUG nova.network.neutron [req-306f3d2b-1ffd-4ab0-af0d-283e90ab102d req-0cb45fbd-1d19-43c0-b22a-42bd76eb8119 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Refreshing network info cache for port 483b5333-614f-4867-bf07-c9c1e37d1ce4 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:46:53 compute-0 nova_compute[192079]: 2025-10-02 12:46:53.464 2 DEBUG nova.network.neutron [-] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:46:53 compute-0 nova_compute[192079]: 2025-10-02 12:46:53.481 2 INFO nova.compute.manager [-] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Took 0.65 seconds to deallocate network for instance.
Oct 02 12:46:53 compute-0 nova_compute[192079]: 2025-10-02 12:46:53.650 2 DEBUG oslo_concurrency.lockutils [None req-fac2566c-31cb-4b2a-878c-cd1e31868f00 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:46:53 compute-0 nova_compute[192079]: 2025-10-02 12:46:53.651 2 DEBUG oslo_concurrency.lockutils [None req-fac2566c-31cb-4b2a-878c-cd1e31868f00 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:46:53 compute-0 nova_compute[192079]: 2025-10-02 12:46:53.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:46:53 compute-0 nova_compute[192079]: 2025-10-02 12:46:53.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:46:53 compute-0 nova_compute[192079]: 2025-10-02 12:46:53.664 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:46:53 compute-0 nova_compute[192079]: 2025-10-02 12:46:53.717 2 DEBUG nova.scheduler.client.report [None req-fac2566c-31cb-4b2a-878c-cd1e31868f00 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Refreshing inventories for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708 _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:804
Oct 02 12:46:53 compute-0 nova_compute[192079]: 2025-10-02 12:46:53.737 2 DEBUG nova.scheduler.client.report [None req-fac2566c-31cb-4b2a-878c-cd1e31868f00 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Updating ProviderTree inventory for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 from _refresh_and_get_inventory using data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} _refresh_and_get_inventory /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:768
Oct 02 12:46:53 compute-0 nova_compute[192079]: 2025-10-02 12:46:53.738 2 DEBUG nova.compute.provider_tree [None req-fac2566c-31cb-4b2a-878c-cd1e31868f00 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Updating inventory in ProviderTree for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 with inventory: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:176
Oct 02 12:46:53 compute-0 nova_compute[192079]: 2025-10-02 12:46:53.817 2 DEBUG nova.scheduler.client.report [None req-fac2566c-31cb-4b2a-878c-cd1e31868f00 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Refreshing aggregate associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, aggregates: None _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:813
Oct 02 12:46:53 compute-0 nova_compute[192079]: 2025-10-02 12:46:53.848 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:53 compute-0 nova_compute[192079]: 2025-10-02 12:46:53.862 2 DEBUG nova.scheduler.client.report [None req-fac2566c-31cb-4b2a-878c-cd1e31868f00 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Refreshing trait associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, traits: COMPUTE_SECURITY_UEFI_SECURE_BOOT,COMPUTE_VIOMMU_MODEL_VIRTIO,COMPUTE_VIOMMU_MODEL_AUTO,COMPUTE_IMAGE_TYPE_AKI,COMPUTE_GRAPHICS_MODEL_VIRTIO,COMPUTE_NET_VIF_MODEL_PCNET,HW_CPU_X86_SSE42,COMPUTE_RESCUE_BFV,COMPUTE_VOLUME_EXTEND,COMPUTE_IMAGE_TYPE_QCOW2,COMPUTE_TRUSTED_CERTS,COMPUTE_SOCKET_PCI_NUMA_AFFINITY,COMPUTE_GRAPHICS_MODEL_CIRRUS,HW_CPU_X86_MMX,COMPUTE_STORAGE_BUS_VIRTIO,COMPUTE_NET_ATTACH_INTERFACE_WITH_TAG,COMPUTE_STORAGE_BUS_FDC,COMPUTE_STORAGE_BUS_USB,COMPUTE_NODE,HW_CPU_X86_SSSE3,HW_CPU_X86_SSE2,COMPUTE_GRAPHICS_MODEL_BOCHS,COMPUTE_NET_VIF_MODEL_E1000E,COMPUTE_IMAGE_TYPE_RAW,COMPUTE_NET_VIF_MODEL_NE2K_PCI,COMPUTE_IMAGE_TYPE_AMI,COMPUTE_VIOMMU_MODEL_INTEL,COMPUTE_SECURITY_TPM_2_0,COMPUTE_STORAGE_BUS_SCSI,COMPUTE_IMAGE_TYPE_ARI,COMPUTE_NET_VIF_MODEL_VMXNET3,COMPUTE_SECURITY_TPM_1_2,COMPUTE_NET_VIF_MODEL_E1000,HW_CPU_X86_SSE,COMPUTE_VOLUME_MULTI_ATTACH,COMPUTE_STORAGE_BUS_IDE,COMPUTE_GRAPHICS_MODEL_NONE,COMPUTE_VOLUME_ATTACH_WITH_TAG,COMPUTE_NET_VIF_MODEL_VIRTIO,HW_CPU_X86_SSE41,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_DEVICE_TAGGING,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_ACCELERATORS,COMPUTE_NET_VIF_MODEL_RTL8139,COMPUTE_GRAPHICS_MODEL_VGA,COMPUTE_STORAGE_BUS_SATA,COMPUTE_NET_VIF_MODEL_SPAPR_VLAN _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:825
Oct 02 12:46:53 compute-0 nova_compute[192079]: 2025-10-02 12:46:53.899 2 DEBUG nova.compute.provider_tree [None req-fac2566c-31cb-4b2a-878c-cd1e31868f00 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:46:53 compute-0 nova_compute[192079]: 2025-10-02 12:46:53.916 2 DEBUG nova.scheduler.client.report [None req-fac2566c-31cb-4b2a-878c-cd1e31868f00 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:46:53 compute-0 nova_compute[192079]: 2025-10-02 12:46:53.937 2 DEBUG oslo_concurrency.lockutils [None req-fac2566c-31cb-4b2a-878c-cd1e31868f00 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.286s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:46:53 compute-0 nova_compute[192079]: 2025-10-02 12:46:53.961 2 INFO nova.scheduler.client.report [None req-fac2566c-31cb-4b2a-878c-cd1e31868f00 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Deleted allocations for instance 475567cc-a1f1-46b1-ae67-fb3b0ef2e230
Oct 02 12:46:54 compute-0 nova_compute[192079]: 2025-10-02 12:46:54.045 2 DEBUG oslo_concurrency.lockutils [None req-fac2566c-31cb-4b2a-878c-cd1e31868f00 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "475567cc-a1f1-46b1-ae67-fb3b0ef2e230" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.628s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:46:54 compute-0 podman[252985]: 2025-10-02 12:46:54.136714978 +0000 UTC m=+0.050947285 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:46:54 compute-0 podman[252986]: 2025-10-02 12:46:54.137606802 +0000 UTC m=+0.051214201 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, tcib_managed=true, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=iscsid, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001)
Oct 02 12:46:54 compute-0 nova_compute[192079]: 2025-10-02 12:46:54.610 2 DEBUG nova.network.neutron [req-306f3d2b-1ffd-4ab0-af0d-283e90ab102d req-0cb45fbd-1d19-43c0-b22a-42bd76eb8119 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Updated VIF entry in instance network info cache for port 483b5333-614f-4867-bf07-c9c1e37d1ce4. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:46:54 compute-0 nova_compute[192079]: 2025-10-02 12:46:54.610 2 DEBUG nova.network.neutron [req-306f3d2b-1ffd-4ab0-af0d-283e90ab102d req-0cb45fbd-1d19-43c0-b22a-42bd76eb8119 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Updating instance_info_cache with network_info: [{"id": "483b5333-614f-4867-bf07-c9c1e37d1ce4", "address": "fa:16:3e:70:a7:5e", "network": {"id": "85e4aed1-4716-45af-bcd8-38b9aeff1c42", "bridge": "br-int", "label": "tempest-network-smoke--13042790", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap483b5333-61", "ovs_interfaceid": "483b5333-614f-4867-bf07-c9c1e37d1ce4", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:46:54 compute-0 nova_compute[192079]: 2025-10-02 12:46:54.629 2 DEBUG oslo_concurrency.lockutils [req-306f3d2b-1ffd-4ab0-af0d-283e90ab102d req-0cb45fbd-1d19-43c0-b22a-42bd76eb8119 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-475567cc-a1f1-46b1-ae67-fb3b0ef2e230" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:46:54 compute-0 nova_compute[192079]: 2025-10-02 12:46:54.663 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:46:54 compute-0 nova_compute[192079]: 2025-10-02 12:46:54.680 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:46:54 compute-0 nova_compute[192079]: 2025-10-02 12:46:54.681 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:46:54 compute-0 nova_compute[192079]: 2025-10-02 12:46:54.681 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:46:54 compute-0 nova_compute[192079]: 2025-10-02 12:46:54.681 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:46:54 compute-0 nova_compute[192079]: 2025-10-02 12:46:54.824 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:46:54 compute-0 nova_compute[192079]: 2025-10-02 12:46:54.825 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5732MB free_disk=73.27225875854492GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:46:54 compute-0 nova_compute[192079]: 2025-10-02 12:46:54.825 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:46:54 compute-0 nova_compute[192079]: 2025-10-02 12:46:54.825 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:46:54 compute-0 nova_compute[192079]: 2025-10-02 12:46:54.857 2 DEBUG nova.compute.manager [req-ce5d3e4d-c806-4bda-a5e3-54e2b306ff59 req-aa29d003-3b8e-4b5c-8fd1-e62c371240b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Received event network-vif-plugged-483b5333-614f-4867-bf07-c9c1e37d1ce4 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:46:54 compute-0 nova_compute[192079]: 2025-10-02 12:46:54.858 2 DEBUG oslo_concurrency.lockutils [req-ce5d3e4d-c806-4bda-a5e3-54e2b306ff59 req-aa29d003-3b8e-4b5c-8fd1-e62c371240b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "475567cc-a1f1-46b1-ae67-fb3b0ef2e230-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:46:54 compute-0 nova_compute[192079]: 2025-10-02 12:46:54.858 2 DEBUG oslo_concurrency.lockutils [req-ce5d3e4d-c806-4bda-a5e3-54e2b306ff59 req-aa29d003-3b8e-4b5c-8fd1-e62c371240b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "475567cc-a1f1-46b1-ae67-fb3b0ef2e230-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:46:54 compute-0 nova_compute[192079]: 2025-10-02 12:46:54.858 2 DEBUG oslo_concurrency.lockutils [req-ce5d3e4d-c806-4bda-a5e3-54e2b306ff59 req-aa29d003-3b8e-4b5c-8fd1-e62c371240b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "475567cc-a1f1-46b1-ae67-fb3b0ef2e230-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:46:54 compute-0 nova_compute[192079]: 2025-10-02 12:46:54.859 2 DEBUG nova.compute.manager [req-ce5d3e4d-c806-4bda-a5e3-54e2b306ff59 req-aa29d003-3b8e-4b5c-8fd1-e62c371240b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] No waiting events found dispatching network-vif-plugged-483b5333-614f-4867-bf07-c9c1e37d1ce4 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:46:54 compute-0 nova_compute[192079]: 2025-10-02 12:46:54.859 2 WARNING nova.compute.manager [req-ce5d3e4d-c806-4bda-a5e3-54e2b306ff59 req-aa29d003-3b8e-4b5c-8fd1-e62c371240b2 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Received unexpected event network-vif-plugged-483b5333-614f-4867-bf07-c9c1e37d1ce4 for instance with vm_state deleted and task_state None.
Oct 02 12:46:54 compute-0 nova_compute[192079]: 2025-10-02 12:46:54.883 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:46:54 compute-0 nova_compute[192079]: 2025-10-02 12:46:54.884 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:46:54 compute-0 nova_compute[192079]: 2025-10-02 12:46:54.905 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:46:54 compute-0 nova_compute[192079]: 2025-10-02 12:46:54.921 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:46:54 compute-0 nova_compute[192079]: 2025-10-02 12:46:54.947 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:46:54 compute-0 nova_compute[192079]: 2025-10-02 12:46:54.948 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.123s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:46:55 compute-0 nova_compute[192079]: 2025-10-02 12:46:55.180 2 DEBUG nova.compute.manager [req-11728928-2cc9-4b08-b1e3-59b7d87bdf80 req-002ab095-2d98-415d-96e6-2b90f0f006cc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Received event network-vif-deleted-483b5333-614f-4867-bf07-c9c1e37d1ce4 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:46:55 compute-0 nova_compute[192079]: 2025-10-02 12:46:55.949 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:46:57 compute-0 nova_compute[192079]: 2025-10-02 12:46:57.733 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:57 compute-0 nova_compute[192079]: 2025-10-02 12:46:57.868 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:57 compute-0 nova_compute[192079]: 2025-10-02 12:46:57.938 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:46:58 compute-0 nova_compute[192079]: 2025-10-02 12:46:58.850 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:47:00 compute-0 nova_compute[192079]: 2025-10-02 12:47:00.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:47:00 compute-0 nova_compute[192079]: 2025-10-02 12:47:00.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:47:00 compute-0 nova_compute[192079]: 2025-10-02 12:47:00.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:47:00 compute-0 nova_compute[192079]: 2025-10-02 12:47:00.680 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:47:00 compute-0 nova_compute[192079]: 2025-10-02 12:47:00.681 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:47:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:47:02.253 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:47:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:47:02.253 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:47:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:47:02.253 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:47:02 compute-0 nova_compute[192079]: 2025-10-02 12:47:02.735 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:47:03 compute-0 podman[253031]: 2025-10-02 12:47:03.133928586 +0000 UTC m=+0.047361407 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, container_name=ovn_metadata_agent, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3)
Oct 02 12:47:03 compute-0 podman[253033]: 2025-10-02 12:47:03.171807812 +0000 UTC m=+0.078502378 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:47:03 compute-0 podman[253032]: 2025-10-02 12:47:03.208384272 +0000 UTC m=+0.120868606 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_id=ovn_controller, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, container_name=ovn_controller, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:47:03 compute-0 nova_compute[192079]: 2025-10-02 12:47:03.851 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:47:06 compute-0 nova_compute[192079]: 2025-10-02 12:47:06.677 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:47:07 compute-0 nova_compute[192079]: 2025-10-02 12:47:07.699 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759409212.698265, 475567cc-a1f1-46b1-ae67-fb3b0ef2e230 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:47:07 compute-0 nova_compute[192079]: 2025-10-02 12:47:07.700 2 INFO nova.compute.manager [-] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] VM Stopped (Lifecycle Event)
Oct 02 12:47:07 compute-0 nova_compute[192079]: 2025-10-02 12:47:07.737 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:47:07 compute-0 nova_compute[192079]: 2025-10-02 12:47:07.786 2 DEBUG nova.compute.manager [None req-5873e9ad-56e3-4326-b22b-a183aba22299 - - - - - -] [instance: 475567cc-a1f1-46b1-ae67-fb3b0ef2e230] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:47:08 compute-0 nova_compute[192079]: 2025-10-02 12:47:08.852 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:47:11 compute-0 podman[253099]: 2025-10-02 12:47:11.139044165 +0000 UTC m=+0.052902458 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm, container_name=ceilometer_agent_compute, managed_by=edpm_ansible, org.label-schema.build-date=20251001, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, 
org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:47:12 compute-0 nova_compute[192079]: 2025-10-02 12:47:12.738 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:47:13 compute-0 nova_compute[192079]: 2025-10-02 12:47:13.854 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:47:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:47:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:47:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:47:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:47:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:47:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.allocation, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:47:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:47:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:47:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:47:17.114 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:47:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:47:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:47:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:47:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:47:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:47:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:47:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:47:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:47:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:47:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:47:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:47:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.iops, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:47:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:47:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:47:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:47:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:47:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:47:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:47:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:47:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:47:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:47:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:47:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:47:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.capacity, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:47:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:47:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster cpu, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:47:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:47:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster memory.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:47:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:47:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:47:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:47:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:47:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:47:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:47:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:47:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:47:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:47:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:47:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:47:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:47:17 compute-0 nova_compute[192079]: 2025-10-02 12:47:17.739 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:47:18 compute-0 nova_compute[192079]: 2025-10-02 12:47:18.855 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:47:19 compute-0 podman[253119]: 2025-10-02 12:47:19.1382357 +0000 UTC m=+0.052259079 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., architecture=x86_64, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., maintainer=Red Hat, Inc., build-date=2025-08-20T13:12:41, config_id=edpm, io.openshift.tags=minimal rhel9, io.openshift.expose-services=, name=ubi9-minimal, url=https://catalog.redhat.com/en/search?searchType=containers, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, com.redhat.component=ubi9-minimal-container, container_name=openstack_network_exporter, vendor=Red Hat, Inc., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': 
'/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, vcs-type=git, io.buildah.version=1.33.7, distribution-scope=public, managed_by=edpm_ansible, release=1755695350, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, version=9.6)
Oct 02 12:47:19 compute-0 podman[253120]: 2025-10-02 12:47:19.145271713 +0000 UTC m=+0.055707074 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=multipathd, container_name=multipathd, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, org.label-schema.build-date=20251001)
Oct 02 12:47:21 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:47:21.747 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=53, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=52) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:47:21 compute-0 nova_compute[192079]: 2025-10-02 12:47:21.747 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:47:21 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:47:21.748 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 10 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:47:22 compute-0 nova_compute[192079]: 2025-10-02 12:47:22.741 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:47:23 compute-0 nova_compute[192079]: 2025-10-02 12:47:23.856 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:47:25 compute-0 podman[253161]: 2025-10-02 12:47:25.161403106 +0000 UTC m=+0.071919297 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, container_name=iscsid, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, managed_by=edpm_ansible, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, config_id=iscsid, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']})
Oct 02 12:47:25 compute-0 podman[253160]: 2025-10-02 12:47:25.163293988 +0000 UTC m=+0.077412828 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:47:27 compute-0 nova_compute[192079]: 2025-10-02 12:47:27.742 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:47:28 compute-0 nova_compute[192079]: 2025-10-02 12:47:28.859 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:47:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:47:31.749 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '53'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:47:32 compute-0 nova_compute[192079]: 2025-10-02 12:47:32.757 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:47:33 compute-0 nova_compute[192079]: 2025-10-02 12:47:33.861 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:47:34 compute-0 podman[253203]: 2025-10-02 12:47:34.17081712 +0000 UTC m=+0.084841451 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, container_name=ovn_metadata_agent, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_managed=true)
Oct 02 12:47:34 compute-0 podman[253210]: 2025-10-02 12:47:34.194114298 +0000 UTC m=+0.081412258 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>)
Oct 02 12:47:34 compute-0 podman[253204]: 2025-10-02 12:47:34.204931453 +0000 UTC m=+0.100321284 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, config_id=ovn_controller, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, container_name=ovn_controller, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2)
Oct 02 12:47:37 compute-0 nova_compute[192079]: 2025-10-02 12:47:37.763 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:47:38 compute-0 ovn_controller[94336]: 2025-10-02T12:47:38Z|00718|memory_trim|INFO|Detected inactivity (last active 30006 ms ago): trimming memory
Oct 02 12:47:38 compute-0 nova_compute[192079]: 2025-10-02 12:47:38.863 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:47:42 compute-0 podman[253274]: 2025-10-02 12:47:42.135735919 +0000 UTC m=+0.047058648 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, org.label-schema.build-date=20251001, 
tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, config_id=edpm)
Oct 02 12:47:42 compute-0 nova_compute[192079]: 2025-10-02 12:47:42.766 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:47:43 compute-0 nova_compute[192079]: 2025-10-02 12:47:43.865 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:47:47 compute-0 nova_compute[192079]: 2025-10-02 12:47:47.675 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:47:47 compute-0 nova_compute[192079]: 2025-10-02 12:47:47.770 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:47:48 compute-0 nova_compute[192079]: 2025-10-02 12:47:48.867 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:47:49 compute-0 nova_compute[192079]: 2025-10-02 12:47:49.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:47:50 compute-0 podman[253295]: 2025-10-02 12:47:50.165948553 +0000 UTC m=+0.076940904 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, tcib_managed=true, config_id=multipathd, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, container_name=multipathd, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001)
Oct 02 12:47:50 compute-0 podman[253294]: 2025-10-02 12:47:50.185510088 +0000 UTC m=+0.099934844 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, distribution-scope=public, maintainer=Red Hat, Inc., url=https://catalog.redhat.com/en/search?searchType=containers, com.redhat.component=ubi9-minimal-container, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., release=1755695350, architecture=x86_64, version=9.6, io.buildah.version=1.33.7, io.openshift.tags=minimal rhel9, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., io.openshift.expose-services=, name=ubi9-minimal, build-date=2025-08-20T13:12:41, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., managed_by=edpm_ansible, vcs-type=git, vendor=Red Hat, Inc., container_name=openstack_network_exporter)
Oct 02 12:47:50 compute-0 nova_compute[192079]: 2025-10-02 12:47:50.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:47:52 compute-0 nova_compute[192079]: 2025-10-02 12:47:52.773 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:47:53 compute-0 nova_compute[192079]: 2025-10-02 12:47:53.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:47:53 compute-0 nova_compute[192079]: 2025-10-02 12:47:53.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:47:53 compute-0 nova_compute[192079]: 2025-10-02 12:47:53.868 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:47:55 compute-0 nova_compute[192079]: 2025-10-02 12:47:55.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:47:55 compute-0 nova_compute[192079]: 2025-10-02 12:47:55.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:47:55 compute-0 nova_compute[192079]: 2025-10-02 12:47:55.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:47:55 compute-0 nova_compute[192079]: 2025-10-02 12:47:55.695 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:47:55 compute-0 nova_compute[192079]: 2025-10-02 12:47:55.696 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:47:55 compute-0 nova_compute[192079]: 2025-10-02 12:47:55.696 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:47:55 compute-0 nova_compute[192079]: 2025-10-02 12:47:55.697 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:47:55 compute-0 nova_compute[192079]: 2025-10-02 12:47:55.856 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:47:55 compute-0 nova_compute[192079]: 2025-10-02 12:47:55.857 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5731MB free_disk=73.27266693115234GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:47:55 compute-0 nova_compute[192079]: 2025-10-02 12:47:55.857 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:47:55 compute-0 nova_compute[192079]: 2025-10-02 12:47:55.857 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:47:55 compute-0 nova_compute[192079]: 2025-10-02 12:47:55.911 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:47:55 compute-0 nova_compute[192079]: 2025-10-02 12:47:55.912 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:47:55 compute-0 nova_compute[192079]: 2025-10-02 12:47:55.933 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:47:55 compute-0 nova_compute[192079]: 2025-10-02 12:47:55.951 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:47:55 compute-0 nova_compute[192079]: 2025-10-02 12:47:55.953 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:47:55 compute-0 nova_compute[192079]: 2025-10-02 12:47:55.953 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.096s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:47:56 compute-0 podman[253335]: 2025-10-02 12:47:56.1288213 +0000 UTC m=+0.047895170 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:47:56 compute-0 podman[253336]: 2025-10-02 12:47:56.143175523 +0000 UTC m=+0.057970027 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=iscsid, org.label-schema.schema-version=1.0, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid)
Oct 02 12:47:57 compute-0 nova_compute[192079]: 2025-10-02 12:47:57.775 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:47:58 compute-0 nova_compute[192079]: 2025-10-02 12:47:58.870 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:47:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:47:59.678 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=54, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=53) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:47:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:47:59.679 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 5 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:47:59 compute-0 nova_compute[192079]: 2025-10-02 12:47:59.680 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:48:00 compute-0 sshd-session[253376]: banner exchange: Connection from 20.29.23.77 port 41962: invalid format
Oct 02 12:48:01 compute-0 nova_compute[192079]: 2025-10-02 12:48:01.952 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:48:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:48:02.254 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:48:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:48:02.254 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:48:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:48:02.255 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:48:02 compute-0 nova_compute[192079]: 2025-10-02 12:48:02.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:48:02 compute-0 nova_compute[192079]: 2025-10-02 12:48:02.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:48:02 compute-0 nova_compute[192079]: 2025-10-02 12:48:02.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:48:02 compute-0 nova_compute[192079]: 2025-10-02 12:48:02.697 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:48:02 compute-0 nova_compute[192079]: 2025-10-02 12:48:02.778 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:48:03 compute-0 nova_compute[192079]: 2025-10-02 12:48:03.873 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:48:04 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:48:04.681 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '54'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:48:05 compute-0 podman[253379]: 2025-10-02 12:48:05.137578757 +0000 UTC m=+0.045034834 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 12:48:05 compute-0 podman[253377]: 2025-10-02 12:48:05.161940063 +0000 UTC m=+0.074457257 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, 
org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, container_name=ovn_metadata_agent, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:48:05 compute-0 podman[253378]: 2025-10-02 12:48:05.217811801 +0000 UTC m=+0.127991432 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, container_name=ovn_controller, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_id=ovn_controller, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.build-date=20251001, managed_by=edpm_ansible)
Oct 02 12:48:07 compute-0 nova_compute[192079]: 2025-10-02 12:48:07.781 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:48:08 compute-0 nova_compute[192079]: 2025-10-02 12:48:08.874 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:48:10 compute-0 sshd-session[253374]: Connection closed by 20.29.23.77 port 41956 [preauth]
Oct 02 12:48:12 compute-0 nova_compute[192079]: 2025-10-02 12:48:12.784 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:48:13 compute-0 podman[253444]: 2025-10-02 12:48:13.148470523 +0000 UTC m=+0.061898274 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=edpm, container_name=ceilometer_agent_compute, 
org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2)
Oct 02 12:48:13 compute-0 nova_compute[192079]: 2025-10-02 12:48:13.877 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:48:17 compute-0 nova_compute[192079]: 2025-10-02 12:48:17.786 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:48:18 compute-0 nova_compute[192079]: 2025-10-02 12:48:18.877 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:48:21 compute-0 podman[253463]: 2025-10-02 12:48:21.136826533 +0000 UTC m=+0.049183706 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, maintainer=Red Hat, Inc., com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., distribution-scope=public, release=1755695350, io.buildah.version=1.33.7, vendor=Red Hat, Inc., com.redhat.component=ubi9-minimal-container, url=https://catalog.redhat.com/en/search?searchType=containers, container_name=openstack_network_exporter, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., io.openshift.expose-services=, version=9.6, io.openshift.tags=minimal rhel9, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., config_id=edpm, architecture=x86_64, managed_by=edpm_ansible, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, build-date=2025-08-20T13:12:41, name=ubi9-minimal, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, vcs-type=git)
Oct 02 12:48:21 compute-0 podman[253464]: 2025-10-02 12:48:21.150335953 +0000 UTC m=+0.055593642 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, container_name=multipathd, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, config_id=multipathd)
Oct 02 12:48:22 compute-0 nova_compute[192079]: 2025-10-02 12:48:22.788 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:48:23 compute-0 nova_compute[192079]: 2025-10-02 12:48:23.879 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:48:27 compute-0 podman[253504]: 2025-10-02 12:48:27.153908171 +0000 UTC m=+0.065314548 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:48:27 compute-0 podman[253505]: 2025-10-02 12:48:27.160289006 +0000 UTC m=+0.063050786 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, container_name=iscsid, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid)
Oct 02 12:48:27 compute-0 nova_compute[192079]: 2025-10-02 12:48:27.792 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:48:28 compute-0 nova_compute[192079]: 2025-10-02 12:48:28.881 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:48:32 compute-0 nova_compute[192079]: 2025-10-02 12:48:32.797 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:48:33 compute-0 nova_compute[192079]: 2025-10-02 12:48:33.882 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:48:36 compute-0 podman[253546]: 2025-10-02 12:48:36.135829714 +0000 UTC m=+0.053376111 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_id=ovn_metadata_agent, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', 
'/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, tcib_managed=true)
Oct 02 12:48:36 compute-0 podman[253548]: 2025-10-02 12:48:36.146515976 +0000 UTC m=+0.052344903 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 12:48:36 compute-0 podman[253547]: 2025-10-02 12:48:36.171100598 +0000 UTC m=+0.081675625 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, container_name=ovn_controller, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true)
Oct 02 12:48:37 compute-0 nova_compute[192079]: 2025-10-02 12:48:37.801 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:48:38 compute-0 nova_compute[192079]: 2025-10-02 12:48:38.883 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:48:42 compute-0 nova_compute[192079]: 2025-10-02 12:48:42.803 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:48:43 compute-0 nova_compute[192079]: 2025-10-02 12:48:43.885 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:48:44 compute-0 podman[253611]: 2025-10-02 12:48:44.142809333 +0000 UTC m=+0.062020337 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, 
tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:48:47 compute-0 nova_compute[192079]: 2025-10-02 12:48:47.806 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:48:48 compute-0 nova_compute[192079]: 2025-10-02 12:48:48.693 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:48:48 compute-0 nova_compute[192079]: 2025-10-02 12:48:48.943 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:48:49 compute-0 nova_compute[192079]: 2025-10-02 12:48:49.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:48:50 compute-0 nova_compute[192079]: 2025-10-02 12:48:50.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:48:52 compute-0 podman[253634]: 2025-10-02 12:48:52.156841785 +0000 UTC m=+0.061444192 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, build-date=2025-08-20T13:12:41, io.openshift.expose-services=, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, config_id=edpm, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., name=ubi9-minimal, managed_by=edpm_ansible, vendor=Red Hat, Inc., com.redhat.component=ubi9-minimal-container, container_name=openstack_network_exporter, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., io.buildah.version=1.33.7, architecture=x86_64, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.openshift.tags=minimal rhel9, url=https://catalog.redhat.com/en/search?searchType=containers, vcs-type=git, release=1755695350, version=9.6, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, distribution-scope=public, maintainer=Red Hat, Inc., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal)
Oct 02 12:48:52 compute-0 podman[253635]: 2025-10-02 12:48:52.180406349 +0000 UTC m=+0.073628265 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, container_name=multipathd, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, config_id=multipathd, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:48:52 compute-0 nova_compute[192079]: 2025-10-02 12:48:52.809 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:48:53 compute-0 nova_compute[192079]: 2025-10-02 12:48:53.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:48:53 compute-0 nova_compute[192079]: 2025-10-02 12:48:53.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:48:53 compute-0 nova_compute[192079]: 2025-10-02 12:48:53.946 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:48:55 compute-0 nova_compute[192079]: 2025-10-02 12:48:55.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:48:55 compute-0 nova_compute[192079]: 2025-10-02 12:48:55.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:48:55 compute-0 nova_compute[192079]: 2025-10-02 12:48:55.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:48:55 compute-0 nova_compute[192079]: 2025-10-02 12:48:55.703 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:48:55 compute-0 nova_compute[192079]: 2025-10-02 12:48:55.703 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:48:55 compute-0 nova_compute[192079]: 2025-10-02 12:48:55.703 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:48:55 compute-0 nova_compute[192079]: 2025-10-02 12:48:55.704 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:48:55 compute-0 nova_compute[192079]: 2025-10-02 12:48:55.847 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:48:55 compute-0 nova_compute[192079]: 2025-10-02 12:48:55.848 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5728MB free_disk=73.27268600463867GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:48:55 compute-0 nova_compute[192079]: 2025-10-02 12:48:55.848 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:48:55 compute-0 nova_compute[192079]: 2025-10-02 12:48:55.848 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:48:55 compute-0 nova_compute[192079]: 2025-10-02 12:48:55.926 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:48:55 compute-0 nova_compute[192079]: 2025-10-02 12:48:55.927 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:48:55 compute-0 nova_compute[192079]: 2025-10-02 12:48:55.970 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:48:55 compute-0 nova_compute[192079]: 2025-10-02 12:48:55.986 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:48:55 compute-0 nova_compute[192079]: 2025-10-02 12:48:55.988 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:48:55 compute-0 nova_compute[192079]: 2025-10-02 12:48:55.988 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.140s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:48:57 compute-0 nova_compute[192079]: 2025-10-02 12:48:57.812 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:48:58 compute-0 podman[253676]: 2025-10-02 12:48:58.135801141 +0000 UTC m=+0.050789030 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:48:58 compute-0 podman[253677]: 2025-10-02 12:48:58.152475227 +0000 UTC m=+0.058136361 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=iscsid, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, config_id=iscsid, org.label-schema.vendor=CentOS)
Oct 02 12:48:58 compute-0 nova_compute[192079]: 2025-10-02 12:48:58.946 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:02.255 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:49:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:02.256 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:49:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:02.256 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:49:02 compute-0 nova_compute[192079]: 2025-10-02 12:49:02.814 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:02 compute-0 nova_compute[192079]: 2025-10-02 12:49:02.987 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:49:02 compute-0 nova_compute[192079]: 2025-10-02 12:49:02.987 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:49:02 compute-0 nova_compute[192079]: 2025-10-02 12:49:02.988 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:49:03 compute-0 nova_compute[192079]: 2025-10-02 12:49:03.714 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:49:03 compute-0 nova_compute[192079]: 2025-10-02 12:49:03.714 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:49:03 compute-0 nova_compute[192079]: 2025-10-02 12:49:03.948 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.035 2 DEBUG oslo_concurrency.lockutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "f47803ac-aae8-4d74-959f-2c47ab5f04ab" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.036 2 DEBUG oslo_concurrency.lockutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "f47803ac-aae8-4d74-959f-2c47ab5f04ab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.056 2 DEBUG nova.compute.manager [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.187 2 DEBUG oslo_concurrency.lockutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.187 2 DEBUG oslo_concurrency.lockutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.194 2 DEBUG nova.virt.hardware [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.194 2 INFO nova.compute.claims [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.335 2 DEBUG nova.compute.provider_tree [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.354 2 DEBUG nova.scheduler.client.report [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.378 2 DEBUG oslo_concurrency.lockutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.191s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.379 2 DEBUG nova.compute.manager [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.442 2 DEBUG nova.compute.manager [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.442 2 DEBUG nova.network.neutron [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.461 2 INFO nova.virt.libvirt.driver [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.483 2 DEBUG nova.compute.manager [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.615 2 DEBUG nova.compute.manager [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.617 2 DEBUG nova.virt.libvirt.driver [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.618 2 INFO nova.virt.libvirt.driver [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Creating image(s)
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.618 2 DEBUG oslo_concurrency.lockutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "/var/lib/nova/instances/f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.618 2 DEBUG oslo_concurrency.lockutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "/var/lib/nova/instances/f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.619 2 DEBUG oslo_concurrency.lockutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "/var/lib/nova/instances/f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.636 2 DEBUG oslo_concurrency.processutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.665 2 DEBUG nova.policy [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.730 2 DEBUG oslo_concurrency.processutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.095s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.731 2 DEBUG oslo_concurrency.lockutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.732 2 DEBUG oslo_concurrency.lockutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.745 2 DEBUG oslo_concurrency.processutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.833 2 DEBUG oslo_concurrency.processutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.088s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.834 2 DEBUG oslo_concurrency.processutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.876 2 DEBUG oslo_concurrency.processutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk 1073741824" returned: 0 in 0.042s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.877 2 DEBUG oslo_concurrency.lockutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.145s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.878 2 DEBUG oslo_concurrency.processutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.938 2 DEBUG oslo_concurrency.processutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.060s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.939 2 DEBUG nova.virt.disk.api [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Checking if we can resize image /var/lib/nova/instances/f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.939 2 DEBUG oslo_concurrency.processutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.995 2 DEBUG oslo_concurrency.processutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk --force-share --output=json" returned: 0 in 0.056s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.996 2 DEBUG nova.virt.disk.api [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Cannot resize image /var/lib/nova/instances/f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:49:04 compute-0 nova_compute[192079]: 2025-10-02 12:49:04.997 2 DEBUG nova.objects.instance [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lazy-loading 'migration_context' on Instance uuid f47803ac-aae8-4d74-959f-2c47ab5f04ab obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:49:05 compute-0 nova_compute[192079]: 2025-10-02 12:49:05.013 2 DEBUG nova.virt.libvirt.driver [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:49:05 compute-0 nova_compute[192079]: 2025-10-02 12:49:05.014 2 DEBUG nova.virt.libvirt.driver [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Ensure instance console log exists: /var/lib/nova/instances/f47803ac-aae8-4d74-959f-2c47ab5f04ab/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:49:05 compute-0 nova_compute[192079]: 2025-10-02 12:49:05.016 2 DEBUG oslo_concurrency.lockutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:49:05 compute-0 nova_compute[192079]: 2025-10-02 12:49:05.017 2 DEBUG oslo_concurrency.lockutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:49:05 compute-0 nova_compute[192079]: 2025-10-02 12:49:05.017 2 DEBUG oslo_concurrency.lockutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:49:05 compute-0 nova_compute[192079]: 2025-10-02 12:49:05.521 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:05 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:05.522 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=55, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=54) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:49:05 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:05.524 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 1 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:49:06 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:06.526 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '55'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:49:06 compute-0 nova_compute[192079]: 2025-10-02 12:49:06.655 2 DEBUG nova.network.neutron [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Successfully created port: b54bc2a5-53f3-444d-9953-22bbf1bcdc83 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:49:07 compute-0 podman[253733]: 2025-10-02 12:49:07.146526382 +0000 UTC m=+0.059503469 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_id=ovn_metadata_agent, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, container_name=ovn_metadata_agent, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:49:07 compute-0 podman[253735]: 2025-10-02 12:49:07.15521234 +0000 UTC m=+0.058735918 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']})
Oct 02 12:49:07 compute-0 podman[253734]: 2025-10-02 12:49:07.201872095 +0000 UTC m=+0.108854677 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller, container_name=ovn_controller, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS)
Oct 02 12:49:07 compute-0 nova_compute[192079]: 2025-10-02 12:49:07.815 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:08 compute-0 nova_compute[192079]: 2025-10-02 12:49:08.387 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:49:08 compute-0 nova_compute[192079]: 2025-10-02 12:49:08.951 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:09 compute-0 nova_compute[192079]: 2025-10-02 12:49:09.127 2 DEBUG nova.network.neutron [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Successfully updated port: b54bc2a5-53f3-444d-9953-22bbf1bcdc83 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:49:09 compute-0 nova_compute[192079]: 2025-10-02 12:49:09.143 2 DEBUG oslo_concurrency.lockutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "refresh_cache-f47803ac-aae8-4d74-959f-2c47ab5f04ab" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:49:09 compute-0 nova_compute[192079]: 2025-10-02 12:49:09.143 2 DEBUG oslo_concurrency.lockutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquired lock "refresh_cache-f47803ac-aae8-4d74-959f-2c47ab5f04ab" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:49:09 compute-0 nova_compute[192079]: 2025-10-02 12:49:09.143 2 DEBUG nova.network.neutron [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:49:09 compute-0 nova_compute[192079]: 2025-10-02 12:49:09.235 2 DEBUG nova.compute.manager [req-7ab2514f-5f9c-437c-8e63-0d1462be4024 req-f0ba8751-71fb-4bc4-8f21-d5b67ba76f38 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Received event network-changed-b54bc2a5-53f3-444d-9953-22bbf1bcdc83 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:49:09 compute-0 nova_compute[192079]: 2025-10-02 12:49:09.236 2 DEBUG nova.compute.manager [req-7ab2514f-5f9c-437c-8e63-0d1462be4024 req-f0ba8751-71fb-4bc4-8f21-d5b67ba76f38 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Refreshing instance network info cache due to event network-changed-b54bc2a5-53f3-444d-9953-22bbf1bcdc83. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:49:09 compute-0 nova_compute[192079]: 2025-10-02 12:49:09.236 2 DEBUG oslo_concurrency.lockutils [req-7ab2514f-5f9c-437c-8e63-0d1462be4024 req-f0ba8751-71fb-4bc4-8f21-d5b67ba76f38 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-f47803ac-aae8-4d74-959f-2c47ab5f04ab" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:49:09 compute-0 nova_compute[192079]: 2025-10-02 12:49:09.295 2 DEBUG nova.network.neutron [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.177 2 DEBUG nova.network.neutron [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Updating instance_info_cache with network_info: [{"id": "b54bc2a5-53f3-444d-9953-22bbf1bcdc83", "address": "fa:16:3e:57:c8:5b", "network": {"id": "3c776fa4-63c0-44fa-bf3f-04ad74974c2c", "bridge": "br-int", "label": "tempest-network-smoke--1497944835", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb54bc2a5-53", "ovs_interfaceid": "b54bc2a5-53f3-444d-9953-22bbf1bcdc83", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.194 2 DEBUG oslo_concurrency.lockutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Releasing lock "refresh_cache-f47803ac-aae8-4d74-959f-2c47ab5f04ab" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.195 2 DEBUG nova.compute.manager [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Instance network_info: |[{"id": "b54bc2a5-53f3-444d-9953-22bbf1bcdc83", "address": "fa:16:3e:57:c8:5b", "network": {"id": "3c776fa4-63c0-44fa-bf3f-04ad74974c2c", "bridge": "br-int", "label": "tempest-network-smoke--1497944835", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb54bc2a5-53", "ovs_interfaceid": "b54bc2a5-53f3-444d-9953-22bbf1bcdc83", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.195 2 DEBUG oslo_concurrency.lockutils [req-7ab2514f-5f9c-437c-8e63-0d1462be4024 req-f0ba8751-71fb-4bc4-8f21-d5b67ba76f38 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-f47803ac-aae8-4d74-959f-2c47ab5f04ab" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.195 2 DEBUG nova.network.neutron [req-7ab2514f-5f9c-437c-8e63-0d1462be4024 req-f0ba8751-71fb-4bc4-8f21-d5b67ba76f38 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Refreshing network info cache for port b54bc2a5-53f3-444d-9953-22bbf1bcdc83 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.198 2 DEBUG nova.virt.libvirt.driver [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Start _get_guest_xml network_info=[{"id": "b54bc2a5-53f3-444d-9953-22bbf1bcdc83", "address": "fa:16:3e:57:c8:5b", "network": {"id": "3c776fa4-63c0-44fa-bf3f-04ad74974c2c", "bridge": "br-int", "label": "tempest-network-smoke--1497944835", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb54bc2a5-53", "ovs_interfaceid": "b54bc2a5-53f3-444d-9953-22bbf1bcdc83", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.202 2 WARNING nova.virt.libvirt.driver [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.207 2 DEBUG nova.virt.libvirt.host [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.207 2 DEBUG nova.virt.libvirt.host [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.210 2 DEBUG nova.virt.libvirt.host [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.210 2 DEBUG nova.virt.libvirt.host [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.211 2 DEBUG nova.virt.libvirt.driver [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.212 2 DEBUG nova.virt.hardware [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.212 2 DEBUG nova.virt.hardware [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.212 2 DEBUG nova.virt.hardware [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.213 2 DEBUG nova.virt.hardware [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.213 2 DEBUG nova.virt.hardware [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.213 2 DEBUG nova.virt.hardware [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.214 2 DEBUG nova.virt.hardware [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.214 2 DEBUG nova.virt.hardware [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.214 2 DEBUG nova.virt.hardware [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.214 2 DEBUG nova.virt.hardware [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.215 2 DEBUG nova.virt.hardware [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.218 2 DEBUG nova.virt.libvirt.vif [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:49:00Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649',display_name='tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-server-tempest-testsecuritygroupsbasicops-1020134341-ge',id=185,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBI2KKxYqJuo+bm0uXO0va+WiltctIuUrNVSuyXKH60Q282vpKz7lkIUwo7YbhQgvFPQ6W6pvlS1MgI71IgsIlYiUsaPlzFVJnshPK84X/j2YUTiXwv4g5W08cDEUTRF7vw==',key_name='tempest-TestSecurityGroupsBasicOps-1976615750',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='575f3d227ab24f2daa62e65e14a4cd9c',ramdisk_id='',reservation_id='r-908mv8rz',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestSecurityGroupsBasicOps-1020134341',owner_user_name='tempest-TestSecurityGroupsBasicOps-1020134341-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:49:04Z,user_data=None,user_id='2d2b4a2da57543ef88e44ae28ad61647',uuid=f47803ac-aae8-4d74-959f-2c47ab5f04ab,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "b54bc2a5-53f3-444d-9953-22bbf1bcdc83", "address": "fa:16:3e:57:c8:5b", "network": {"id": "3c776fa4-63c0-44fa-bf3f-04ad74974c2c", "bridge": "br-int", "label": "tempest-network-smoke--1497944835", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": 
true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb54bc2a5-53", "ovs_interfaceid": "b54bc2a5-53f3-444d-9953-22bbf1bcdc83", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.218 2 DEBUG nova.network.os_vif_util [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Converting VIF {"id": "b54bc2a5-53f3-444d-9953-22bbf1bcdc83", "address": "fa:16:3e:57:c8:5b", "network": {"id": "3c776fa4-63c0-44fa-bf3f-04ad74974c2c", "bridge": "br-int", "label": "tempest-network-smoke--1497944835", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb54bc2a5-53", "ovs_interfaceid": "b54bc2a5-53f3-444d-9953-22bbf1bcdc83", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.219 2 DEBUG nova.network.os_vif_util [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:57:c8:5b,bridge_name='br-int',has_traffic_filtering=True,id=b54bc2a5-53f3-444d-9953-22bbf1bcdc83,network=Network(3c776fa4-63c0-44fa-bf3f-04ad74974c2c),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapb54bc2a5-53') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.220 2 DEBUG nova.objects.instance [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lazy-loading 'pci_devices' on Instance uuid f47803ac-aae8-4d74-959f-2c47ab5f04ab obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.232 2 DEBUG nova.virt.libvirt.driver [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:49:10 compute-0 nova_compute[192079]:   <uuid>f47803ac-aae8-4d74-959f-2c47ab5f04ab</uuid>
Oct 02 12:49:10 compute-0 nova_compute[192079]:   <name>instance-000000b9</name>
Oct 02 12:49:10 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:49:10 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:49:10 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:49:10 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:       <nova:name>tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649</nova:name>
Oct 02 12:49:10 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:49:10</nova:creationTime>
Oct 02 12:49:10 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:49:10 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:49:10 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:49:10 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:49:10 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:49:10 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:49:10 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:49:10 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:49:10 compute-0 nova_compute[192079]:         <nova:user uuid="2d2b4a2da57543ef88e44ae28ad61647">tempest-TestSecurityGroupsBasicOps-1020134341-project-member</nova:user>
Oct 02 12:49:10 compute-0 nova_compute[192079]:         <nova:project uuid="575f3d227ab24f2daa62e65e14a4cd9c">tempest-TestSecurityGroupsBasicOps-1020134341</nova:project>
Oct 02 12:49:10 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:49:10 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:49:10 compute-0 nova_compute[192079]:         <nova:port uuid="b54bc2a5-53f3-444d-9953-22bbf1bcdc83">
Oct 02 12:49:10 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.12" ipVersion="4"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:49:10 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:49:10 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:49:10 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <system>
Oct 02 12:49:10 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:49:10 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:49:10 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:49:10 compute-0 nova_compute[192079]:       <entry name="serial">f47803ac-aae8-4d74-959f-2c47ab5f04ab</entry>
Oct 02 12:49:10 compute-0 nova_compute[192079]:       <entry name="uuid">f47803ac-aae8-4d74-959f-2c47ab5f04ab</entry>
Oct 02 12:49:10 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     </system>
Oct 02 12:49:10 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:49:10 compute-0 nova_compute[192079]:   <os>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:   </os>
Oct 02 12:49:10 compute-0 nova_compute[192079]:   <features>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:   </features>
Oct 02 12:49:10 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:49:10 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:49:10 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:49:10 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:49:10 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk.config"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:49:10 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:57:c8:5b"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:       <target dev="tapb54bc2a5-53"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:49:10 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/f47803ac-aae8-4d74-959f-2c47ab5f04ab/console.log" append="off"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <video>
Oct 02 12:49:10 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     </video>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:49:10 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:49:10 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:49:10 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:49:10 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:49:10 compute-0 nova_compute[192079]: </domain>
Oct 02 12:49:10 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.233 2 DEBUG nova.compute.manager [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Preparing to wait for external event network-vif-plugged-b54bc2a5-53f3-444d-9953-22bbf1bcdc83 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.233 2 DEBUG oslo_concurrency.lockutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "f47803ac-aae8-4d74-959f-2c47ab5f04ab-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.234 2 DEBUG oslo_concurrency.lockutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "f47803ac-aae8-4d74-959f-2c47ab5f04ab-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.234 2 DEBUG oslo_concurrency.lockutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "f47803ac-aae8-4d74-959f-2c47ab5f04ab-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.235 2 DEBUG nova.virt.libvirt.vif [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:49:00Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649',display_name='tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-server-tempest-testsecuritygroupsbasicops-1020134341-ge',id=185,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBI2KKxYqJuo+bm0uXO0va+WiltctIuUrNVSuyXKH60Q282vpKz7lkIUwo7YbhQgvFPQ6W6pvlS1MgI71IgsIlYiUsaPlzFVJnshPK84X/j2YUTiXwv4g5W08cDEUTRF7vw==',key_name='tempest-TestSecurityGroupsBasicOps-1976615750',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='575f3d227ab24f2daa62e65e14a4cd9c',ramdisk_id='',reservation_id='r-908mv8rz',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestSecurityGroupsBasicOps-1020134341',owner_user_name='tempest-TestSecurityGroupsBasicOps-1020134341-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:49:04Z,user_data=None,user_id='2d2b4a2da57543ef88e44ae28ad61647',uuid=f47803ac-aae8-4d74-959f-2c47ab5f04ab,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "b54bc2a5-53f3-444d-9953-22bbf1bcdc83", "address": "fa:16:3e:57:c8:5b", "network": {"id": "3c776fa4-63c0-44fa-bf3f-04ad74974c2c", "bridge": "br-int", "label": "tempest-network-smoke--1497944835", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb54bc2a5-53", "ovs_interfaceid": "b54bc2a5-53f3-444d-9953-22bbf1bcdc83", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.235 2 DEBUG nova.network.os_vif_util [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Converting VIF {"id": "b54bc2a5-53f3-444d-9953-22bbf1bcdc83", "address": "fa:16:3e:57:c8:5b", "network": {"id": "3c776fa4-63c0-44fa-bf3f-04ad74974c2c", "bridge": "br-int", "label": "tempest-network-smoke--1497944835", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb54bc2a5-53", "ovs_interfaceid": "b54bc2a5-53f3-444d-9953-22bbf1bcdc83", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.236 2 DEBUG nova.network.os_vif_util [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:57:c8:5b,bridge_name='br-int',has_traffic_filtering=True,id=b54bc2a5-53f3-444d-9953-22bbf1bcdc83,network=Network(3c776fa4-63c0-44fa-bf3f-04ad74974c2c),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapb54bc2a5-53') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.236 2 DEBUG os_vif [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:57:c8:5b,bridge_name='br-int',has_traffic_filtering=True,id=b54bc2a5-53f3-444d-9953-22bbf1bcdc83,network=Network(3c776fa4-63c0-44fa-bf3f-04ad74974c2c),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapb54bc2a5-53') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.237 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.237 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.237 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.240 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.241 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapb54bc2a5-53, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.241 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapb54bc2a5-53, col_values=(('external_ids', {'iface-id': 'b54bc2a5-53f3-444d-9953-22bbf1bcdc83', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:57:c8:5b', 'vm-uuid': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.243 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:10 compute-0 NetworkManager[51160]: <info>  [1759409350.2441] manager: (tapb54bc2a5-53): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/351)
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.245 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.249 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.250 2 INFO os_vif [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:57:c8:5b,bridge_name='br-int',has_traffic_filtering=True,id=b54bc2a5-53f3-444d-9953-22bbf1bcdc83,network=Network(3c776fa4-63c0-44fa-bf3f-04ad74974c2c),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapb54bc2a5-53')
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.305 2 DEBUG nova.virt.libvirt.driver [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.306 2 DEBUG nova.virt.libvirt.driver [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.306 2 DEBUG nova.virt.libvirt.driver [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] No VIF found with MAC fa:16:3e:57:c8:5b, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.306 2 INFO nova.virt.libvirt.driver [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Using config drive
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.591 2 INFO nova.virt.libvirt.driver [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Creating config drive at /var/lib/nova/instances/f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk.config
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.595 2 DEBUG oslo_concurrency.processutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpwph5yxnj execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.724 2 DEBUG oslo_concurrency.processutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpwph5yxnj" returned: 0 in 0.129s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:49:10 compute-0 kernel: tapb54bc2a5-53: entered promiscuous mode
Oct 02 12:49:10 compute-0 ovn_controller[94336]: 2025-10-02T12:49:10Z|00719|binding|INFO|Claiming lport b54bc2a5-53f3-444d-9953-22bbf1bcdc83 for this chassis.
Oct 02 12:49:10 compute-0 NetworkManager[51160]: <info>  [1759409350.7819] manager: (tapb54bc2a5-53): new Tun device (/org/freedesktop/NetworkManager/Devices/352)
Oct 02 12:49:10 compute-0 ovn_controller[94336]: 2025-10-02T12:49:10Z|00720|binding|INFO|b54bc2a5-53f3-444d-9953-22bbf1bcdc83: Claiming fa:16:3e:57:c8:5b 10.100.0.12
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.782 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:10 compute-0 NetworkManager[51160]: <info>  [1759409350.7896] manager: (patch-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d-to-br-int): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/353)
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.788 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:10 compute-0 NetworkManager[51160]: <info>  [1759409350.7903] manager: (patch-br-int-to-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/354)
Oct 02 12:49:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:10.795 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:57:c8:5b 10.100.0.12'], port_security=['fa:16:3e:57:c8:5b 10.100.0.12'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.12/28', 'neutron:device_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-3c776fa4-63c0-44fa-bf3f-04ad74974c2c', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'neutron:revision_number': '2', 'neutron:security_group_ids': '3bf5c068-41c3-45ca-8822-72717311e7da', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=78978e88-15ad-4f25-bc19-feb08335ac33, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=b54bc2a5-53f3-444d-9953-22bbf1bcdc83) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:49:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:10.796 103294 INFO neutron.agent.ovn.metadata.agent [-] Port b54bc2a5-53f3-444d-9953-22bbf1bcdc83 in datapath 3c776fa4-63c0-44fa-bf3f-04ad74974c2c bound to our chassis
Oct 02 12:49:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:10.797 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network 3c776fa4-63c0-44fa-bf3f-04ad74974c2c
Oct 02 12:49:10 compute-0 systemd-udevd[253816]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:49:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:10.807 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[71cc02d4-f435-448a-bf44-354605a00ff2]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:49:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:10.808 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tap3c776fa4-61 in ovnmeta-3c776fa4-63c0-44fa-bf3f-04ad74974c2c namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:49:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:10.809 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tap3c776fa4-60 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:49:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:10.809 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[987fc55c-3428-455f-a7fb-6e034065fd71]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:49:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:10.810 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[347e5644-053a-4758-8d4f-b140d02b13c8]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:49:10 compute-0 NetworkManager[51160]: <info>  [1759409350.8218] device (tapb54bc2a5-53): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:49:10 compute-0 NetworkManager[51160]: <info>  [1759409350.8226] device (tapb54bc2a5-53): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:49:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:10.821 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[9e87bdbc-0a06-42d9-87f9-bc1e5eb870cf]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:49:10 compute-0 systemd-machined[152150]: New machine qemu-87-instance-000000b9.
Oct 02 12:49:10 compute-0 systemd[1]: Started Virtual Machine qemu-87-instance-000000b9.
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.860 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:10.860 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[3029fc57-e9b1-498a-9dde-40817af769cb]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.873 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:10 compute-0 ovn_controller[94336]: 2025-10-02T12:49:10Z|00721|binding|INFO|Setting lport b54bc2a5-53f3-444d-9953-22bbf1bcdc83 ovn-installed in OVS
Oct 02 12:49:10 compute-0 ovn_controller[94336]: 2025-10-02T12:49:10Z|00722|binding|INFO|Setting lport b54bc2a5-53f3-444d-9953-22bbf1bcdc83 up in Southbound
Oct 02 12:49:10 compute-0 nova_compute[192079]: 2025-10-02 12:49:10.881 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:10.887 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[5af7c669-faeb-470b-871f-d578940971ad]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:49:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:10.892 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[d496baa2-b6b0-4282-bf59-e6b52b383951]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:49:10 compute-0 NetworkManager[51160]: <info>  [1759409350.8934] manager: (tap3c776fa4-60): new Veth device (/org/freedesktop/NetworkManager/Devices/355)
Oct 02 12:49:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:10.922 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[14733763-2c1f-481f-9db5-513569c4a6a1]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:49:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:10.925 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[3444d940-32b2-4843-b3fe-126d3f292928]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:49:10 compute-0 NetworkManager[51160]: <info>  [1759409350.9467] device (tap3c776fa4-60): carrier: link connected
Oct 02 12:49:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:10.952 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[f62b9309-1fa0-46b4-be3c-c314e9c37c75]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:49:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:10.969 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7196305e-c01e-4cd5-93c6-9e8228e6de50]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap3c776fa4-61'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:c5:93:34'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 224], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 734857, 'reachable_time': 34068, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 253851, 'error': None, 'target': 'ovnmeta-3c776fa4-63c0-44fa-bf3f-04ad74974c2c', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:49:10 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:10.984 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f9128d70-5037-4171-8829-0eda40dd9526]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fec5:9334'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 734857, 'tstamp': 734857}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 253852, 'error': None, 'target': 'ovnmeta-3c776fa4-63c0-44fa-bf3f-04ad74974c2c', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:11.000 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[ca8f170f-bb1d-4f38-b6ad-89a0e893b1a7]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tap3c776fa4-61'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:c5:93:34'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 224], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 734857, 'reachable_time': 34068, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 253853, 'error': None, 'target': 'ovnmeta-3c776fa4-63c0-44fa-bf3f-04ad74974c2c', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:11.028 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[906b9dd9-34c4-47bb-af64-f32d98dfd1df]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:11.078 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[bad20b23-3d06-4c5d-88b1-0487cd7231f0]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:11.080 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap3c776fa4-60, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:11.080 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:11.080 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tap3c776fa4-60, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:49:11 compute-0 nova_compute[192079]: 2025-10-02 12:49:11.082 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:11 compute-0 NetworkManager[51160]: <info>  [1759409351.0829] manager: (tap3c776fa4-60): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/356)
Oct 02 12:49:11 compute-0 kernel: tap3c776fa4-60: entered promiscuous mode
Oct 02 12:49:11 compute-0 nova_compute[192079]: 2025-10-02 12:49:11.084 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:11.087 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tap3c776fa4-60, col_values=(('external_ids', {'iface-id': 'b354dca0-bf82-4ac8-ba2d-7afd74e436fa'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:49:11 compute-0 nova_compute[192079]: 2025-10-02 12:49:11.088 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:11 compute-0 ovn_controller[94336]: 2025-10-02T12:49:11Z|00723|binding|INFO|Releasing lport b354dca0-bf82-4ac8-ba2d-7afd74e436fa from this chassis (sb_readonly=0)
Oct 02 12:49:11 compute-0 nova_compute[192079]: 2025-10-02 12:49:11.089 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:11.091 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/3c776fa4-63c0-44fa-bf3f-04ad74974c2c.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/3c776fa4-63c0-44fa-bf3f-04ad74974c2c.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:11.092 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[85bc2870-e1f8-47f7-ae28-357ffe9e4ee2]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:11.093 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-3c776fa4-63c0-44fa-bf3f-04ad74974c2c
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/3c776fa4-63c0-44fa-bf3f-04ad74974c2c.pid.haproxy
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID 3c776fa4-63c0-44fa-bf3f-04ad74974c2c
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:49:11 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:11.094 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-3c776fa4-63c0-44fa-bf3f-04ad74974c2c', 'env', 'PROCESS_TAG=haproxy-3c776fa4-63c0-44fa-bf3f-04ad74974c2c', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/3c776fa4-63c0-44fa-bf3f-04ad74974c2c.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:49:11 compute-0 nova_compute[192079]: 2025-10-02 12:49:11.103 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:11 compute-0 podman[253885]: 2025-10-02 12:49:11.437885988 +0000 UTC m=+0.042019087 container create ff3b99ed92b4928f95b685c31239e997352b88fd33398301a01037f5a94f52a0 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-3c776fa4-63c0-44fa-bf3f-04ad74974c2c, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, io.buildah.version=1.41.3)
Oct 02 12:49:11 compute-0 systemd[1]: Started libpod-conmon-ff3b99ed92b4928f95b685c31239e997352b88fd33398301a01037f5a94f52a0.scope.
Oct 02 12:49:11 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:49:11 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/d5197312924dfae97ce70b23587c27fcbd0c1c1fe698f5a2175e830e4a4e3889/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:49:11 compute-0 podman[253885]: 2025-10-02 12:49:11.416482414 +0000 UTC m=+0.020615523 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:49:11 compute-0 podman[253885]: 2025-10-02 12:49:11.515901015 +0000 UTC m=+0.120034134 container init ff3b99ed92b4928f95b685c31239e997352b88fd33398301a01037f5a94f52a0 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-3c776fa4-63c0-44fa-bf3f-04ad74974c2c, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true)
Oct 02 12:49:11 compute-0 podman[253885]: 2025-10-02 12:49:11.52159613 +0000 UTC m=+0.125729229 container start ff3b99ed92b4928f95b685c31239e997352b88fd33398301a01037f5a94f52a0 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-3c776fa4-63c0-44fa-bf3f-04ad74974c2c, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 12:49:11 compute-0 neutron-haproxy-ovnmeta-3c776fa4-63c0-44fa-bf3f-04ad74974c2c[253901]: [NOTICE]   (253905) : New worker (253907) forked
Oct 02 12:49:11 compute-0 neutron-haproxy-ovnmeta-3c776fa4-63c0-44fa-bf3f-04ad74974c2c[253901]: [NOTICE]   (253905) : Loading success.
Oct 02 12:49:11 compute-0 nova_compute[192079]: 2025-10-02 12:49:11.823 2 DEBUG nova.compute.manager [req-93307f8b-b9dc-4133-9028-e96d3c73561d req-b04f0f39-a5e9-443d-ba74-b06ee93832c4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Received event network-vif-plugged-b54bc2a5-53f3-444d-9953-22bbf1bcdc83 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:49:11 compute-0 nova_compute[192079]: 2025-10-02 12:49:11.824 2 DEBUG oslo_concurrency.lockutils [req-93307f8b-b9dc-4133-9028-e96d3c73561d req-b04f0f39-a5e9-443d-ba74-b06ee93832c4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "f47803ac-aae8-4d74-959f-2c47ab5f04ab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:49:11 compute-0 nova_compute[192079]: 2025-10-02 12:49:11.825 2 DEBUG oslo_concurrency.lockutils [req-93307f8b-b9dc-4133-9028-e96d3c73561d req-b04f0f39-a5e9-443d-ba74-b06ee93832c4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "f47803ac-aae8-4d74-959f-2c47ab5f04ab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:49:11 compute-0 nova_compute[192079]: 2025-10-02 12:49:11.825 2 DEBUG oslo_concurrency.lockutils [req-93307f8b-b9dc-4133-9028-e96d3c73561d req-b04f0f39-a5e9-443d-ba74-b06ee93832c4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "f47803ac-aae8-4d74-959f-2c47ab5f04ab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:49:11 compute-0 nova_compute[192079]: 2025-10-02 12:49:11.825 2 DEBUG nova.compute.manager [req-93307f8b-b9dc-4133-9028-e96d3c73561d req-b04f0f39-a5e9-443d-ba74-b06ee93832c4 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Processing event network-vif-plugged-b54bc2a5-53f3-444d-9953-22bbf1bcdc83 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.182 2 DEBUG nova.compute.manager [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Instance event wait completed in 0 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.183 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759409352.1815214, f47803ac-aae8-4d74-959f-2c47ab5f04ab => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.183 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] VM Started (Lifecycle Event)
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.186 2 DEBUG nova.virt.libvirt.driver [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.189 2 INFO nova.virt.libvirt.driver [-] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Instance spawned successfully.
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.189 2 DEBUG nova.virt.libvirt.driver [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.228 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.233 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Synchronizing instance power state after lifecycle event "Started"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.236 2 DEBUG nova.virt.libvirt.driver [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.236 2 DEBUG nova.virt.libvirt.driver [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.237 2 DEBUG nova.virt.libvirt.driver [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.237 2 DEBUG nova.virt.libvirt.driver [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.238 2 DEBUG nova.virt.libvirt.driver [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.238 2 DEBUG nova.virt.libvirt.driver [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.284 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.285 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759409352.1825907, f47803ac-aae8-4d74-959f-2c47ab5f04ab => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.285 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] VM Paused (Lifecycle Event)
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.309 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.312 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759409352.185548, f47803ac-aae8-4d74-959f-2c47ab5f04ab => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.312 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] VM Resumed (Lifecycle Event)
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.341 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.342 2 INFO nova.compute.manager [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Took 7.73 seconds to spawn the instance on the hypervisor.
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.343 2 DEBUG nova.compute.manager [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.345 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.376 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.439 2 INFO nova.compute.manager [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Took 8.30 seconds to build instance.
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.457 2 DEBUG oslo_concurrency.lockutils [None req-765b24ad-b4f5-4ec7-88e7-d77a2cd7f43f 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "f47803ac-aae8-4d74-959f-2c47ab5f04ab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 8.421s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.535 2 DEBUG nova.network.neutron [req-7ab2514f-5f9c-437c-8e63-0d1462be4024 req-f0ba8751-71fb-4bc4-8f21-d5b67ba76f38 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Updated VIF entry in instance network info cache for port b54bc2a5-53f3-444d-9953-22bbf1bcdc83. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.535 2 DEBUG nova.network.neutron [req-7ab2514f-5f9c-437c-8e63-0d1462be4024 req-f0ba8751-71fb-4bc4-8f21-d5b67ba76f38 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Updating instance_info_cache with network_info: [{"id": "b54bc2a5-53f3-444d-9953-22bbf1bcdc83", "address": "fa:16:3e:57:c8:5b", "network": {"id": "3c776fa4-63c0-44fa-bf3f-04ad74974c2c", "bridge": "br-int", "label": "tempest-network-smoke--1497944835", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb54bc2a5-53", "ovs_interfaceid": "b54bc2a5-53f3-444d-9953-22bbf1bcdc83", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:49:12 compute-0 nova_compute[192079]: 2025-10-02 12:49:12.557 2 DEBUG oslo_concurrency.lockutils [req-7ab2514f-5f9c-437c-8e63-0d1462be4024 req-f0ba8751-71fb-4bc4-8f21-d5b67ba76f38 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-f47803ac-aae8-4d74-959f-2c47ab5f04ab" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:49:13 compute-0 nova_compute[192079]: 2025-10-02 12:49:13.953 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:14 compute-0 nova_compute[192079]: 2025-10-02 12:49:14.031 2 DEBUG nova.compute.manager [req-2b4aad21-7915-4754-a262-6e17f7d9227c req-33504b10-ee97-4a82-89ee-f4d1b43c8369 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Received event network-vif-plugged-b54bc2a5-53f3-444d-9953-22bbf1bcdc83 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:49:14 compute-0 nova_compute[192079]: 2025-10-02 12:49:14.031 2 DEBUG oslo_concurrency.lockutils [req-2b4aad21-7915-4754-a262-6e17f7d9227c req-33504b10-ee97-4a82-89ee-f4d1b43c8369 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "f47803ac-aae8-4d74-959f-2c47ab5f04ab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:49:14 compute-0 nova_compute[192079]: 2025-10-02 12:49:14.031 2 DEBUG oslo_concurrency.lockutils [req-2b4aad21-7915-4754-a262-6e17f7d9227c req-33504b10-ee97-4a82-89ee-f4d1b43c8369 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "f47803ac-aae8-4d74-959f-2c47ab5f04ab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:49:14 compute-0 nova_compute[192079]: 2025-10-02 12:49:14.031 2 DEBUG oslo_concurrency.lockutils [req-2b4aad21-7915-4754-a262-6e17f7d9227c req-33504b10-ee97-4a82-89ee-f4d1b43c8369 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "f47803ac-aae8-4d74-959f-2c47ab5f04ab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:49:14 compute-0 nova_compute[192079]: 2025-10-02 12:49:14.031 2 DEBUG nova.compute.manager [req-2b4aad21-7915-4754-a262-6e17f7d9227c req-33504b10-ee97-4a82-89ee-f4d1b43c8369 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] No waiting events found dispatching network-vif-plugged-b54bc2a5-53f3-444d-9953-22bbf1bcdc83 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:49:14 compute-0 nova_compute[192079]: 2025-10-02 12:49:14.032 2 WARNING nova.compute.manager [req-2b4aad21-7915-4754-a262-6e17f7d9227c req-33504b10-ee97-4a82-89ee-f4d1b43c8369 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Received unexpected event network-vif-plugged-b54bc2a5-53f3-444d-9953-22bbf1bcdc83 for instance with vm_state active and task_state None.
Oct 02 12:49:15 compute-0 podman[253923]: 2025-10-02 12:49:15.151962001 +0000 UTC m=+0.064850120 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, container_name=ceilometer_agent_compute, config_id=edpm, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, managed_by=edpm_ansible, 
org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_managed=true)
Oct 02 12:49:15 compute-0 nova_compute[192079]: 2025-10-02 12:49:15.243 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:15 compute-0 nova_compute[192079]: 2025-10-02 12:49:15.681 2 DEBUG nova.compute.manager [req-d62d14ae-0627-411b-aa96-5c643d6de055 req-f8bf5145-df68-435d-8e5d-00b42e650f42 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Received event network-changed-b54bc2a5-53f3-444d-9953-22bbf1bcdc83 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:49:15 compute-0 nova_compute[192079]: 2025-10-02 12:49:15.681 2 DEBUG nova.compute.manager [req-d62d14ae-0627-411b-aa96-5c643d6de055 req-f8bf5145-df68-435d-8e5d-00b42e650f42 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Refreshing instance network info cache due to event network-changed-b54bc2a5-53f3-444d-9953-22bbf1bcdc83. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:49:15 compute-0 nova_compute[192079]: 2025-10-02 12:49:15.681 2 DEBUG oslo_concurrency.lockutils [req-d62d14ae-0627-411b-aa96-5c643d6de055 req-f8bf5145-df68-435d-8e5d-00b42e650f42 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-f47803ac-aae8-4d74-959f-2c47ab5f04ab" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:49:15 compute-0 nova_compute[192079]: 2025-10-02 12:49:15.681 2 DEBUG oslo_concurrency.lockutils [req-d62d14ae-0627-411b-aa96-5c643d6de055 req-f8bf5145-df68-435d-8e5d-00b42e650f42 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-f47803ac-aae8-4d74-959f-2c47ab5f04ab" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:49:15 compute-0 nova_compute[192079]: 2025-10-02 12:49:15.681 2 DEBUG nova.network.neutron [req-d62d14ae-0627-411b-aa96-5c643d6de055 req-f8bf5145-df68-435d-8e5d-00b42e650f42 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Refreshing network info cache for port b54bc2a5-53f3-444d-9953-22bbf1bcdc83 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.116 12 DEBUG ceilometer.compute.discovery [-] instance data: {'id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'os_type': 'hvm', 'architecture': 'x86_64', 'OS-EXT-SRV-ATTR:instance_name': 'instance-000000b9', 'OS-EXT-SRV-ATTR:host': 'compute-0.ctlplane.example.com', 'OS-EXT-STS:vm_state': 'running', 'tenant_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'hostId': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'status': 'active', 'metadata': {}} discover_libvirt_polling /usr/lib/python3.9/site-packages/ceilometer/compute/discovery.py:228
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.116 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.latency in the context of pollsters
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.136 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk.device.write.latency volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.136 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk.device.write.latency volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '14d80c09-cfe6-40f0-b732-fce810e411ca', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 0, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab-vda', 'timestamp': '2025-10-02T12:49:17.116872', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'name': 'instance-000000b9', 'instance_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '34f8a224-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7354.803930848, 'message_signature': '672a841e6002a153ace94ccdb98ffaa823d833cb7ad793b400ca4b6da04e7376'}, {'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 0, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 
'project_name': None, 'resource_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab-sda', 'timestamp': '2025-10-02T12:49:17.116872', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'name': 'instance-000000b9', 'instance_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '34f8ade6-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7354.803930848, 'message_signature': '579f5342f7228e3ebd811c60b62a2979bdaf522fbd89e42219326da9963ea556'}]}, 'timestamp': '2025-10-02 12:49:17.137204', '_unique_id': '50129eb35c20421f9e05537f21711879'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.138 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.139 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.requests in the context of pollsters
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.139 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk.device.read.requests volume: 760 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.139 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk.device.read.requests volume: 1 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'aaa298cf-db86-48b3-934c-f54cb61a5f6e', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 760, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab-vda', 'timestamp': '2025-10-02T12:49:17.139421', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'name': 'instance-000000b9', 'instance_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '34f91010-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7354.803930848, 'message_signature': '1f2e39464b82e7f8143458bf1d128af95b0980a8e28bc326721d0ffde28af898'}, {'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 1, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': 
'575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab-sda', 'timestamp': '2025-10-02T12:49:17.139421', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'name': 'instance-000000b9', 'instance_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '34f91844-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7354.803930848, 'message_signature': 'e95ac72333d8df1e847d6c753242205c455e4beca56d33b48fe3d54fab793b1b'}]}, 'timestamp': '2025-10-02 12:49:17.139867', '_unique_id': '23796fabf90942bc95ddb21893d416b3'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.140 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.141 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.usage in the context of pollsters
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.156 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk.device.usage volume: 196624 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.157 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk.device.usage volume: 485376 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '1e7c4a09-ca49-4ee5-9819-6371653c3a57', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 196624, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab-vda', 'timestamp': '2025-10-02T12:49:17.141104', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'name': 'instance-000000b9', 'instance_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '34fbb572-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7354.828174189, 'message_signature': '477b5a807c9a63a79e5f55376e89a627ff142c645c2c594a3fd63cf9b4a2f9c3'}, {'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 485376, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': 
None, 'resource_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab-sda', 'timestamp': '2025-10-02T12:49:17.141104', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'name': 'instance-000000b9', 'instance_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '34fbc404-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7354.828174189, 'message_signature': '7c3550b503d3710a7d6375a2d25459446989c7ffe852e6efcfeda2a793c2415f'}]}, 'timestamp': '2025-10-02 12:49:17.157388', '_unique_id': '72db899a7f94413daad3707445ea4c2e'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.158 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.159 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.latency in the context of pollsters
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.159 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for PerDeviceDiskLatencyPollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.159 12 ERROR ceilometer.polling.manager [-] Prevent pollster disk.device.latency from polling [<NovaLikeServer: tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649>]
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.160 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.allocation in the context of pollsters
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.160 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk.device.allocation volume: 204800 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.160 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk.device.allocation volume: 487424 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'c7aaf619-bed5-4402-a814-14db76273c22', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 204800, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab-vda', 'timestamp': '2025-10-02T12:49:17.160194', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'name': 'instance-000000b9', 'instance_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '34fc3cd6-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7354.828174189, 'message_signature': 'ba9eccb2366dcb48d82d54674c14e778edb1eb382b6366a278f4b6a499832e77'}, {'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 487424, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 
'project_name': None, 'resource_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab-sda', 'timestamp': '2025-10-02T12:49:17.160194', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'name': 'instance-000000b9', 'instance_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '34fc4730-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7354.828174189, 'message_signature': 'e0b48b714b515ce141ce9ec902e88c18fd945770004cbc3d828e8092de8957cc'}]}, 'timestamp': '2025-10-02 12:49:17.160749', '_unique_id': '1be583fd008244bca862a2138c114583'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.161 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.162 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets.drop in the context of pollsters
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.164 12 DEBUG ceilometer.compute.virt.libvirt.inspector [-] No delta meter predecessor for f47803ac-aae8-4d74-959f-2c47ab5f04ab / tapb54bc2a5-53 inspect_vnics /usr/lib/python3.9/site-packages/ceilometer/compute/virt/libvirt/inspector.py:136
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.164 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/network.outgoing.packets.drop volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '59c26434-70cb-4eeb-a226-6e1d262e1cae', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets.drop', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'instance-000000b9-f47803ac-aae8-4d74-959f-2c47ab5f04ab-tapb54bc2a5-53', 'timestamp': '2025-10-02T12:49:17.162410', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'name': 'tapb54bc2a5-53', 'instance_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:57:c8:5b', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapb54bc2a5-53'}, 'message_id': '34fcf086-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7354.849488201, 'message_signature': '98d30ddabc56f74cc9b6bd86e7fc996dededd175eb2f34d6d980ff37b69bf6ee'}]}, 'timestamp': '2025-10-02 12:49:17.165129', '_unique_id': 'b872f64c05234c459afe331d542a2888'}: 
kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.166 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.167 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets.drop in the context of pollsters
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.167 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/network.incoming.packets.drop volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'a1c7f406-a9f3-4c48-9ead-56c90a916e33', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets.drop', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'instance-000000b9-f47803ac-aae8-4d74-959f-2c47ab5f04ab-tapb54bc2a5-53', 'timestamp': '2025-10-02T12:49:17.167421', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'name': 'tapb54bc2a5-53', 'instance_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:57:c8:5b', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapb54bc2a5-53'}, 'message_id': '34fd5850-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7354.849488201, 'message_signature': '771e7a3512484f408aae3d586aa73c2931e520a96e2c310fc0ac14730b479b6d'}]}, 'timestamp': '2025-10-02 12:49:17.167748', '_unique_id': '0cf87e4a14ce43c5b3b7bb4bf8062ca2'}: 
kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.168 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.169 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets in the context of pollsters
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.169 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/network.outgoing.packets volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '266605d2-1408-49e6-b23d-5dcfa3b3846e', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'instance-000000b9-f47803ac-aae8-4d74-959f-2c47ab5f04ab-tapb54bc2a5-53', 'timestamp': '2025-10-02T12:49:17.169562', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'name': 'tapb54bc2a5-53', 'instance_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:57:c8:5b', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapb54bc2a5-53'}, 'message_id': '34fdabfc-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7354.849488201, 'message_signature': 'e217b76b1f33094615c87c76621e744eb140ee45149cffda2c5e7556475588cd'}]}, 'timestamp': '2025-10-02 12:49:17.169922', '_unique_id': 'ddb75434fed04c26b05323a19ced1178'}: 
kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.170 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.171 12 INFO ceilometer.polling.manager [-] Polling pollster memory.usage in the context of pollsters
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.190 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/memory.usage volume: Unavailable _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.190 12 WARNING ceilometer.compute.pollsters [-] memory.usage statistic in not available for instance f47803ac-aae8-4d74-959f-2c47ab5f04ab: ceilometer.compute.pollsters.NoVolumeException
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.190 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes in the context of pollsters
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.191 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/network.incoming.bytes volume: 110 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '6a5ce2a3-36f7-4535-a682-72d5ea3f9d0b', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 110, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'instance-000000b9-f47803ac-aae8-4d74-959f-2c47ab5f04ab-tapb54bc2a5-53', 'timestamp': '2025-10-02T12:49:17.191023', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'name': 'tapb54bc2a5-53', 'instance_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:57:c8:5b', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapb54bc2a5-53'}, 'message_id': '3500f49c-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7354.849488201, 'message_signature': 'e6eef95138e1c755f72f606e603e966544781804e6d55348fe15b2124d5483b3'}]}, 'timestamp': '2025-10-02 12:49:17.191456', '_unique_id': '19789d84a0084218a9446e0314f60593'}: kombu.exceptions.OperationalError: 
[Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.192 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.193 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes.delta in the context of pollsters
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.193 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/network.incoming.bytes.delta volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '2274ba43-7536-4d13-8a87-fc158ac5b3dd', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.bytes.delta', 'counter_type': 'delta', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'instance-000000b9-f47803ac-aae8-4d74-959f-2c47ab5f04ab-tapb54bc2a5-53', 'timestamp': '2025-10-02T12:49:17.193330', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'name': 'tapb54bc2a5-53', 'instance_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:57:c8:5b', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapb54bc2a5-53'}, 'message_id': '35014ad2-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7354.849488201, 'message_signature': '6b457e404f9ff30622c30b12d6b548c3fa76c561d5d90055e67aedc61ed84c40'}]}, 'timestamp': '2025-10-02 12:49:17.193604', '_unique_id': 'f3651631bf6549c7a17065e3dbad6bde'}: kombu.exceptions.OperationalError: 
[Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.194 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.bytes in the context of pollsters
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.195 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk.device.read.bytes volume: 23775232 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.195 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk.device.read.bytes volume: 2048 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '4a8d1ae4-d977-40c0-8c56-4a9375075932', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 23775232, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab-vda', 'timestamp': '2025-10-02T12:49:17.195060', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'name': 'instance-000000b9', 'instance_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '35018e20-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7354.803930848, 'message_signature': '79ef1f2daa1e0d54070fde14b5528f80224ba8aab70ad872c76e76b3b0544cec'}, {'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 2048, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 
'project_name': None, 'resource_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab-sda', 'timestamp': '2025-10-02T12:49:17.195060', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'name': 'instance-000000b9', 'instance_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '35019794-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7354.803930848, 'message_signature': '11c35a64e8dfc4293f27cae6e007d46e480b4047bb172f07bf241979380112f0'}]}, 'timestamp': '2025-10-02 12:49:17.195555', '_unique_id': '1a023c4c211740489786908b490a5048'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.196 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes.rate in the context of pollsters
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.197 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for OutgoingBytesRatePollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.197 12 ERROR ceilometer.polling.manager [-] Prevent pollster network.outgoing.bytes.rate from polling [<NovaLikeServer: tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649>]
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.197 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes in the context of pollsters
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.197 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/network.outgoing.bytes volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '2dc8440d-ddb9-421b-955a-dc6d6d651f9a', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'instance-000000b9-f47803ac-aae8-4d74-959f-2c47ab5f04ab-tapb54bc2a5-53', 'timestamp': '2025-10-02T12:49:17.197530', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'name': 'tapb54bc2a5-53', 'instance_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:57:c8:5b', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapb54bc2a5-53'}, 'message_id': '3501eeb0-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7354.849488201, 'message_signature': '117300bd414b1c41e4bbad9a740bd50a18a3e1a2bf8894fd7fa24afda5ce7e2e'}]}, 'timestamp': '2025-10-02 12:49:17.197801', '_unique_id': '9cd3cc2309eb439d8470e02d41b9dd75'}: kombu.exceptions.OperationalError: 
[Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.198 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.199 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets.error in the context of pollsters
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.199 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/network.outgoing.packets.error volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '6f7111e5-e7a1-460e-bca3-0ba154a6b80b', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets.error', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'instance-000000b9-f47803ac-aae8-4d74-959f-2c47ab5f04ab-tapb54bc2a5-53', 'timestamp': '2025-10-02T12:49:17.199281', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'name': 'tapb54bc2a5-53', 'instance_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:57:c8:5b', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapb54bc2a5-53'}, 'message_id': '35023334-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7354.849488201, 'message_signature': '11d916105025934149444d8bf9c42ce8ca63160dd07c93f07b5193d791521481'}]}, 'timestamp': '2025-10-02 12:49:17.199552', '_unique_id': '4f617ee4a1514462822e34ee69376d46'}: 
kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.200 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets.error in the context of pollsters
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/network.incoming.packets.error volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'ec5c1c1c-248b-4f5d-a20f-ef912ce6e736', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets.error', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'instance-000000b9-f47803ac-aae8-4d74-959f-2c47ab5f04ab-tapb54bc2a5-53', 'timestamp': '2025-10-02T12:49:17.201002', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'name': 'tapb54bc2a5-53', 'instance_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:57:c8:5b', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapb54bc2a5-53'}, 'message_id': '3502768c-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7354.849488201, 'message_signature': '535a61902fed02b8be866820952ec7e6151a78cd20ac7646c7759b147319d8fa'}]}, 'timestamp': '2025-10-02 12:49:17.201277', '_unique_id': 'ad7669b855634cd98febe2274ad57f33'}: 
kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.201 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.202 12 INFO ceilometer.polling.manager [-] Polling pollster cpu in the context of pollsters
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.202 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/cpu volume: 4790000000 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'a0480c61-c85e-4f8d-b424-3db8de7f554b', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'cpu', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 4790000000, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'timestamp': '2025-10-02T12:49:17.202720', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'name': 'instance-000000b9', 'instance_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'cpu_number': 1}, 'message_id': '3502b96c-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7354.87733037, 'message_signature': '9b726725f327aaaa88737f9144a4540029340f6c9aa5d9920a53295142cbd249'}]}, 'timestamp': '2025-10-02 12:49:17.203011', '_unique_id': '13987e3719d44b28bfce7cee48363bb6'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.203 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.204 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.requests in the context of pollsters
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.204 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk.device.write.requests volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.204 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk.device.write.requests volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'dede47d3-519d-4ab8-b3e1-156735fa3af3', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 0, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab-vda', 'timestamp': '2025-10-02T12:49:17.204477', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'name': 'instance-000000b9', 'instance_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '3502fe04-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7354.803930848, 'message_signature': 'facca91d509486dbb858e07db5f0ad05675bf4594c59b21f0ab2bd273ba95410'}, {'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 0, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': 
'575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab-sda', 'timestamp': '2025-10-02T12:49:17.204477', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'name': 'instance-000000b9', 'instance_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '3503075a-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7354.803930848, 'message_signature': '93f104510253ff0bb35cf6c911a1676ac8d16763f45759d61d9685e50c5147da'}]}, 'timestamp': '2025-10-02 12:49:17.204971', '_unique_id': 'efbd625d99884943af7a1a198f679f07'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.205 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.206 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes.rate in the context of pollsters
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.206 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for IncomingBytesRatePollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.206 12 ERROR ceilometer.polling.manager [-] Prevent pollster network.incoming.bytes.rate from polling [<NovaLikeServer: tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649>]
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.206 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.bytes in the context of pollsters
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.206 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk.device.write.bytes volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.207 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk.device.write.bytes volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'f6ff250f-408d-4419-b5ba-71486c6516e6', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab-vda', 'timestamp': '2025-10-02T12:49:17.206873', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'name': 'instance-000000b9', 'instance_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '35035c28-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7354.803930848, 'message_signature': '2de8fd598f4552160e3def0abe96fdd6cba0c88657a56d7f654e366177098240'}, {'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 
'project_name': None, 'resource_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab-sda', 'timestamp': '2025-10-02T12:49:17.206873', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'name': 'instance-000000b9', 'instance_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '350365a6-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7354.803930848, 'message_signature': 'e16eadb3e3c7c1d40a0af46d9fb57adf4e4c7a5ad71ef12657ebc89d5550cf12'}]}, 'timestamp': '2025-10-02 12:49:17.207383', '_unique_id': '6bc58de18d81466b8196bd627fc02b1d'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.208 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets in the context of pollsters
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/network.incoming.packets volume: 1 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '0f716618-7330-4d86-a0a8-bcbf45c89364', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 1, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'instance-000000b9-f47803ac-aae8-4d74-959f-2c47ab5f04ab-tapb54bc2a5-53', 'timestamp': '2025-10-02T12:49:17.209066', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'name': 'tapb54bc2a5-53', 'instance_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:57:c8:5b', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapb54bc2a5-53'}, 'message_id': '3503b150-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7354.849488201, 'message_signature': 'e6b00750496be38688effd9ee2105f01a1ddb8228dd85850c24ba84113982052'}]}, 'timestamp': '2025-10-02 12:49:17.209333', '_unique_id': '51605d4763e44749a9193900a793a82b'}: 
kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.209 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.210 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.capacity in the context of pollsters
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.210 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk.device.capacity volume: 1073741824 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk.device.capacity volume: 485376 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'ed9529d8-b91b-4a62-bee0-74819ea36db4', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 1073741824, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab-vda', 'timestamp': '2025-10-02T12:49:17.210745', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'name': 'instance-000000b9', 'instance_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '3503f2dc-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7354.828174189, 'message_signature': '60777dd1d34ce7e315e93aa97aea5e92e8ecdc1c610b9b0480c6edb9094b1069'}, {'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 485376, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 
'project_name': None, 'resource_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab-sda', 'timestamp': '2025-10-02T12:49:17.210745', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'name': 'instance-000000b9', 'instance_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '3503fd9a-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7354.828174189, 'message_signature': 'dde6990397176dce57aebf55afd6484107745ab1d5baed31d83eeef9123cb45a'}]}, 'timestamp': '2025-10-02 12:49:17.211276', '_unique_id': 'b621155d93cb40fb817589fce957b7b1'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.211 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.212 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes.delta in the context of pollsters
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.212 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/network.outgoing.bytes.delta volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '095fe010-f63a-4057-b5d1-6d3816ea1c47', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.bytes.delta', 'counter_type': 'delta', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'instance-000000b9-f47803ac-aae8-4d74-959f-2c47ab5f04ab-tapb54bc2a5-53', 'timestamp': '2025-10-02T12:49:17.212857', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'name': 'tapb54bc2a5-53', 'instance_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:57:c8:5b', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapb54bc2a5-53'}, 'message_id': '350445e8-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7354.849488201, 'message_signature': '9d1bf6d8351688c04d487c6706c3dfd90df309a40e2d9a3d450708820a587aad'}]}, 'timestamp': '2025-10-02 12:49:17.213138', '_unique_id': 'c17498d2d37e4fe696cae9f38134091e'}: kombu.exceptions.OperationalError: 
[Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.213 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.214 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.latency in the context of pollsters
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.214 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk.device.read.latency volume: 428577452 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.214 12 DEBUG ceilometer.compute.pollsters [-] f47803ac-aae8-4d74-959f-2c47ab5f04ab/disk.device.read.latency volume: 4208634 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'c7cd95cc-1822-4888-acc5-0bd8a0bf0930', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 428577452, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab-vda', 'timestamp': '2025-10-02T12:49:17.214572', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'name': 'instance-000000b9', 'instance_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '35048850-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7354.803930848, 'message_signature': '99b58bc733c5890263408c258ce88412f77568f3bfc5bdaea04809d3590c140d'}, {'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 4208634, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': 
'575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab-sda', 'timestamp': '2025-10-02T12:49:17.214572', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649', 'name': 'instance-000000b9', 'instance_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '350491b0-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7354.803930848, 'message_signature': '031751b996a0002437446be6b2fc7d7514b49f610c30c547c1a5608e8e191c11'}]}, 'timestamp': '2025-10-02 12:49:17.215085', '_unique_id': '7c11601af86a4696a55a59a84b09cdcd'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.215 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.216 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.iops in the context of pollsters
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.216 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for PerDeviceDiskIOPSPollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:49:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:49:17.216 12 ERROR ceilometer.polling.manager [-] Prevent pollster disk.device.iops from polling [<NovaLikeServer: tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649>]
Oct 02 12:49:17 compute-0 nova_compute[192079]: 2025-10-02 12:49:17.891 2 DEBUG nova.compute.manager [req-d4d9c04f-3daa-4549-8d15-1b6d15ee2464 req-12ee3347-8e34-4cfd-85c4-8f9a633e8181 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Received event network-changed-b54bc2a5-53f3-444d-9953-22bbf1bcdc83 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:49:17 compute-0 nova_compute[192079]: 2025-10-02 12:49:17.893 2 DEBUG nova.compute.manager [req-d4d9c04f-3daa-4549-8d15-1b6d15ee2464 req-12ee3347-8e34-4cfd-85c4-8f9a633e8181 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Refreshing instance network info cache due to event network-changed-b54bc2a5-53f3-444d-9953-22bbf1bcdc83. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:49:17 compute-0 nova_compute[192079]: 2025-10-02 12:49:17.894 2 DEBUG oslo_concurrency.lockutils [req-d4d9c04f-3daa-4549-8d15-1b6d15ee2464 req-12ee3347-8e34-4cfd-85c4-8f9a633e8181 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-f47803ac-aae8-4d74-959f-2c47ab5f04ab" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:49:18 compute-0 nova_compute[192079]: 2025-10-02 12:49:18.956 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:19 compute-0 nova_compute[192079]: 2025-10-02 12:49:19.687 2 DEBUG nova.network.neutron [req-d62d14ae-0627-411b-aa96-5c643d6de055 req-f8bf5145-df68-435d-8e5d-00b42e650f42 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Updated VIF entry in instance network info cache for port b54bc2a5-53f3-444d-9953-22bbf1bcdc83. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:49:19 compute-0 nova_compute[192079]: 2025-10-02 12:49:19.688 2 DEBUG nova.network.neutron [req-d62d14ae-0627-411b-aa96-5c643d6de055 req-f8bf5145-df68-435d-8e5d-00b42e650f42 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Updating instance_info_cache with network_info: [{"id": "b54bc2a5-53f3-444d-9953-22bbf1bcdc83", "address": "fa:16:3e:57:c8:5b", "network": {"id": "3c776fa4-63c0-44fa-bf3f-04ad74974c2c", "bridge": "br-int", "label": "tempest-network-smoke--1497944835", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb54bc2a5-53", "ovs_interfaceid": "b54bc2a5-53f3-444d-9953-22bbf1bcdc83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:49:19 compute-0 nova_compute[192079]: 2025-10-02 12:49:19.710 2 DEBUG oslo_concurrency.lockutils [req-d62d14ae-0627-411b-aa96-5c643d6de055 req-f8bf5145-df68-435d-8e5d-00b42e650f42 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-f47803ac-aae8-4d74-959f-2c47ab5f04ab" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:49:19 compute-0 nova_compute[192079]: 2025-10-02 12:49:19.710 2 DEBUG oslo_concurrency.lockutils [req-d4d9c04f-3daa-4549-8d15-1b6d15ee2464 req-12ee3347-8e34-4cfd-85c4-8f9a633e8181 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-f47803ac-aae8-4d74-959f-2c47ab5f04ab" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:49:19 compute-0 nova_compute[192079]: 2025-10-02 12:49:19.711 2 DEBUG nova.network.neutron [req-d4d9c04f-3daa-4549-8d15-1b6d15ee2464 req-12ee3347-8e34-4cfd-85c4-8f9a633e8181 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Refreshing network info cache for port b54bc2a5-53f3-444d-9953-22bbf1bcdc83 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:49:20 compute-0 nova_compute[192079]: 2025-10-02 12:49:20.381 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:21 compute-0 nova_compute[192079]: 2025-10-02 12:49:21.748 2 DEBUG nova.network.neutron [req-d4d9c04f-3daa-4549-8d15-1b6d15ee2464 req-12ee3347-8e34-4cfd-85c4-8f9a633e8181 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Updated VIF entry in instance network info cache for port b54bc2a5-53f3-444d-9953-22bbf1bcdc83. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:49:21 compute-0 nova_compute[192079]: 2025-10-02 12:49:21.749 2 DEBUG nova.network.neutron [req-d4d9c04f-3daa-4549-8d15-1b6d15ee2464 req-12ee3347-8e34-4cfd-85c4-8f9a633e8181 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Updating instance_info_cache with network_info: [{"id": "b54bc2a5-53f3-444d-9953-22bbf1bcdc83", "address": "fa:16:3e:57:c8:5b", "network": {"id": "3c776fa4-63c0-44fa-bf3f-04ad74974c2c", "bridge": "br-int", "label": "tempest-network-smoke--1497944835", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb54bc2a5-53", "ovs_interfaceid": "b54bc2a5-53f3-444d-9953-22bbf1bcdc83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:49:21 compute-0 nova_compute[192079]: 2025-10-02 12:49:21.768 2 DEBUG oslo_concurrency.lockutils [req-d4d9c04f-3daa-4549-8d15-1b6d15ee2464 req-12ee3347-8e34-4cfd-85c4-8f9a633e8181 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-f47803ac-aae8-4d74-959f-2c47ab5f04ab" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:49:23 compute-0 podman[253946]: 2025-10-02 12:49:23.152939334 +0000 UTC m=+0.062666680 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, version=9.6, io.buildah.version=1.33.7, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, release=1755695350, config_id=edpm, vcs-type=git, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., architecture=x86_64, com.redhat.component=ubi9-minimal-container, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, managed_by=edpm_ansible, maintainer=Red Hat, Inc., io.openshift.expose-services=, container_name=openstack_network_exporter, build-date=2025-08-20T13:12:41, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', 
'/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, distribution-scope=public, io.openshift.tags=minimal rhel9, vendor=Red Hat, Inc., description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., url=https://catalog.redhat.com/en/search?searchType=containers, name=ubi9-minimal, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal)
Oct 02 12:49:23 compute-0 podman[253947]: 2025-10-02 12:49:23.153530061 +0000 UTC m=+0.063430661 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, container_name=multipathd, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_managed=true, config_id=multipathd)
Oct 02 12:49:23 compute-0 nova_compute[192079]: 2025-10-02 12:49:23.957 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:24 compute-0 ovn_controller[94336]: 2025-10-02T12:49:24Z|00085|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:57:c8:5b 10.100.0.12
Oct 02 12:49:24 compute-0 ovn_controller[94336]: 2025-10-02T12:49:24Z|00086|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:57:c8:5b 10.100.0.12
Oct 02 12:49:25 compute-0 nova_compute[192079]: 2025-10-02 12:49:25.385 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:28 compute-0 nova_compute[192079]: 2025-10-02 12:49:28.961 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:29 compute-0 podman[254005]: 2025-10-02 12:49:29.137448783 +0000 UTC m=+0.053832850 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:49:29 compute-0 podman[254006]: 2025-10-02 12:49:29.137832024 +0000 UTC m=+0.051970519 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, config_id=iscsid, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=iscsid, managed_by=edpm_ansible, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:49:30 compute-0 nova_compute[192079]: 2025-10-02 12:49:30.389 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:30 compute-0 nova_compute[192079]: 2025-10-02 12:49:30.847 2 DEBUG oslo_concurrency.lockutils [None req-57420654-9402-44b7-8eed-24e29de9e895 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "f47803ac-aae8-4d74-959f-2c47ab5f04ab" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:49:30 compute-0 nova_compute[192079]: 2025-10-02 12:49:30.848 2 DEBUG oslo_concurrency.lockutils [None req-57420654-9402-44b7-8eed-24e29de9e895 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "f47803ac-aae8-4d74-959f-2c47ab5f04ab" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:49:30 compute-0 nova_compute[192079]: 2025-10-02 12:49:30.848 2 DEBUG oslo_concurrency.lockutils [None req-57420654-9402-44b7-8eed-24e29de9e895 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "f47803ac-aae8-4d74-959f-2c47ab5f04ab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:49:30 compute-0 nova_compute[192079]: 2025-10-02 12:49:30.848 2 DEBUG oslo_concurrency.lockutils [None req-57420654-9402-44b7-8eed-24e29de9e895 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "f47803ac-aae8-4d74-959f-2c47ab5f04ab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:49:30 compute-0 nova_compute[192079]: 2025-10-02 12:49:30.848 2 DEBUG oslo_concurrency.lockutils [None req-57420654-9402-44b7-8eed-24e29de9e895 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "f47803ac-aae8-4d74-959f-2c47ab5f04ab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:49:30 compute-0 nova_compute[192079]: 2025-10-02 12:49:30.882 2 INFO nova.compute.manager [None req-57420654-9402-44b7-8eed-24e29de9e895 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Terminating instance
Oct 02 12:49:30 compute-0 nova_compute[192079]: 2025-10-02 12:49:30.940 2 DEBUG nova.compute.manager [None req-57420654-9402-44b7-8eed-24e29de9e895 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:49:30 compute-0 kernel: tapb54bc2a5-53 (unregistering): left promiscuous mode
Oct 02 12:49:30 compute-0 NetworkManager[51160]: <info>  [1759409370.9666] device (tapb54bc2a5-53): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:49:30 compute-0 nova_compute[192079]: 2025-10-02 12:49:30.978 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:30 compute-0 ovn_controller[94336]: 2025-10-02T12:49:30Z|00724|binding|INFO|Releasing lport b54bc2a5-53f3-444d-9953-22bbf1bcdc83 from this chassis (sb_readonly=0)
Oct 02 12:49:30 compute-0 ovn_controller[94336]: 2025-10-02T12:49:30Z|00725|binding|INFO|Setting lport b54bc2a5-53f3-444d-9953-22bbf1bcdc83 down in Southbound
Oct 02 12:49:30 compute-0 ovn_controller[94336]: 2025-10-02T12:49:30Z|00726|binding|INFO|Removing iface tapb54bc2a5-53 ovn-installed in OVS
Oct 02 12:49:30 compute-0 nova_compute[192079]: 2025-10-02 12:49:30.980 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:31 compute-0 nova_compute[192079]: 2025-10-02 12:49:31.003 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:31 compute-0 systemd[1]: machine-qemu\x2d87\x2dinstance\x2d000000b9.scope: Deactivated successfully.
Oct 02 12:49:31 compute-0 systemd[1]: machine-qemu\x2d87\x2dinstance\x2d000000b9.scope: Consumed 13.431s CPU time.
Oct 02 12:49:31 compute-0 systemd-machined[152150]: Machine qemu-87-instance-000000b9 terminated.
Oct 02 12:49:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:31.033 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:57:c8:5b 10.100.0.12', 'unknown'], port_security=[], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.12/28', 'neutron:device_id': 'f47803ac-aae8-4d74-959f-2c47ab5f04ab', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-3c776fa4-63c0-44fa-bf3f-04ad74974c2c', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'neutron:revision_number': '6', 'neutron:security_group_ids': '', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=78978e88-15ad-4f25-bc19-feb08335ac33, chassis=[], tunnel_key=4, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=b54bc2a5-53f3-444d-9953-22bbf1bcdc83) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:49:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:31.035 103294 INFO neutron.agent.ovn.metadata.agent [-] Port b54bc2a5-53f3-444d-9953-22bbf1bcdc83 in datapath 3c776fa4-63c0-44fa-bf3f-04ad74974c2c unbound from our chassis
Oct 02 12:49:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:31.037 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network 3c776fa4-63c0-44fa-bf3f-04ad74974c2c, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:49:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:31.039 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[363710e3-05bb-4326-836d-a3fced6e7719]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:49:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:31.039 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-3c776fa4-63c0-44fa-bf3f-04ad74974c2c namespace which is not needed anymore
Oct 02 12:49:31 compute-0 neutron-haproxy-ovnmeta-3c776fa4-63c0-44fa-bf3f-04ad74974c2c[253901]: [NOTICE]   (253905) : haproxy version is 2.8.14-c23fe91
Oct 02 12:49:31 compute-0 neutron-haproxy-ovnmeta-3c776fa4-63c0-44fa-bf3f-04ad74974c2c[253901]: [NOTICE]   (253905) : path to executable is /usr/sbin/haproxy
Oct 02 12:49:31 compute-0 neutron-haproxy-ovnmeta-3c776fa4-63c0-44fa-bf3f-04ad74974c2c[253901]: [WARNING]  (253905) : Exiting Master process...
Oct 02 12:49:31 compute-0 neutron-haproxy-ovnmeta-3c776fa4-63c0-44fa-bf3f-04ad74974c2c[253901]: [ALERT]    (253905) : Current worker (253907) exited with code 143 (Terminated)
Oct 02 12:49:31 compute-0 neutron-haproxy-ovnmeta-3c776fa4-63c0-44fa-bf3f-04ad74974c2c[253901]: [WARNING]  (253905) : All workers exited. Exiting... (0)
Oct 02 12:49:31 compute-0 systemd[1]: libpod-ff3b99ed92b4928f95b685c31239e997352b88fd33398301a01037f5a94f52a0.scope: Deactivated successfully.
Oct 02 12:49:31 compute-0 podman[254072]: 2025-10-02 12:49:31.201378142 +0000 UTC m=+0.043377363 container died ff3b99ed92b4928f95b685c31239e997352b88fd33398301a01037f5a94f52a0 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-3c776fa4-63c0-44fa-bf3f-04ad74974c2c, org.label-schema.license=GPLv2, tcib_managed=true, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:49:31 compute-0 nova_compute[192079]: 2025-10-02 12:49:31.201 2 INFO nova.virt.libvirt.driver [-] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Instance destroyed successfully.
Oct 02 12:49:31 compute-0 nova_compute[192079]: 2025-10-02 12:49:31.202 2 DEBUG nova.objects.instance [None req-57420654-9402-44b7-8eed-24e29de9e895 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lazy-loading 'resources' on Instance uuid f47803ac-aae8-4d74-959f-2c47ab5f04ab obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:49:31 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-ff3b99ed92b4928f95b685c31239e997352b88fd33398301a01037f5a94f52a0-userdata-shm.mount: Deactivated successfully.
Oct 02 12:49:31 compute-0 systemd[1]: var-lib-containers-storage-overlay-d5197312924dfae97ce70b23587c27fcbd0c1c1fe698f5a2175e830e4a4e3889-merged.mount: Deactivated successfully.
Oct 02 12:49:31 compute-0 podman[254072]: 2025-10-02 12:49:31.236595883 +0000 UTC m=+0.078595104 container cleanup ff3b99ed92b4928f95b685c31239e997352b88fd33398301a01037f5a94f52a0 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-3c776fa4-63c0-44fa-bf3f-04ad74974c2c, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0)
Oct 02 12:49:31 compute-0 systemd[1]: libpod-conmon-ff3b99ed92b4928f95b685c31239e997352b88fd33398301a01037f5a94f52a0.scope: Deactivated successfully.
Oct 02 12:49:31 compute-0 nova_compute[192079]: 2025-10-02 12:49:31.247 2 DEBUG nova.virt.libvirt.vif [None req-57420654-9402-44b7-8eed-24e29de9e895 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:49:00Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649',display_name='tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-gen-1-2097647649',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-server-tempest-testsecuritygroupsbasicops-1020134341-ge',id=185,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBI2KKxYqJuo+bm0uXO0va+WiltctIuUrNVSuyXKH60Q282vpKz7lkIUwo7YbhQgvFPQ6W6pvlS1MgI71IgsIlYiUsaPlzFVJnshPK84X/j2YUTiXwv4g5W08cDEUTRF7vw==',key_name='tempest-TestSecurityGroupsBasicOps-1976615750',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:49:12Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='575f3d227ab24f2daa62e65e14a4cd9c',ramdisk_id='',reservation_id='r-908mv8rz',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-TestSecurityGroupsBasicOps-1020134341',owner_user_name='tempest-TestSecurityGroupsBasicOps-1020134341-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:49:12Z,user_data=None,user_id='2d2b4a2da57543ef88e44ae28ad61647',uuid=f47803ac-aae8-4d74-959f-2c47ab5f04ab,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "b54bc2a5-53f3-444d-9953-22bbf1bcdc83", "address": "fa:16:3e:57:c8:5b", "network": {"id": "3c776fa4-63c0-44fa-bf3f-04ad74974c2c", "bridge": "br-int", "label": "tempest-network-smoke--1497944835", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.12", 
"type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb54bc2a5-53", "ovs_interfaceid": "b54bc2a5-53f3-444d-9953-22bbf1bcdc83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:49:31 compute-0 nova_compute[192079]: 2025-10-02 12:49:31.247 2 DEBUG nova.network.os_vif_util [None req-57420654-9402-44b7-8eed-24e29de9e895 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Converting VIF {"id": "b54bc2a5-53f3-444d-9953-22bbf1bcdc83", "address": "fa:16:3e:57:c8:5b", "network": {"id": "3c776fa4-63c0-44fa-bf3f-04ad74974c2c", "bridge": "br-int", "label": "tempest-network-smoke--1497944835", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapb54bc2a5-53", "ovs_interfaceid": "b54bc2a5-53f3-444d-9953-22bbf1bcdc83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:49:31 compute-0 nova_compute[192079]: 2025-10-02 12:49:31.248 2 DEBUG nova.network.os_vif_util [None req-57420654-9402-44b7-8eed-24e29de9e895 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:57:c8:5b,bridge_name='br-int',has_traffic_filtering=True,id=b54bc2a5-53f3-444d-9953-22bbf1bcdc83,network=Network(3c776fa4-63c0-44fa-bf3f-04ad74974c2c),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapb54bc2a5-53') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:49:31 compute-0 nova_compute[192079]: 2025-10-02 12:49:31.248 2 DEBUG os_vif [None req-57420654-9402-44b7-8eed-24e29de9e895 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:57:c8:5b,bridge_name='br-int',has_traffic_filtering=True,id=b54bc2a5-53f3-444d-9953-22bbf1bcdc83,network=Network(3c776fa4-63c0-44fa-bf3f-04ad74974c2c),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapb54bc2a5-53') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:49:31 compute-0 nova_compute[192079]: 2025-10-02 12:49:31.250 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:31 compute-0 nova_compute[192079]: 2025-10-02 12:49:31.250 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapb54bc2a5-53, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:49:31 compute-0 nova_compute[192079]: 2025-10-02 12:49:31.251 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:31 compute-0 nova_compute[192079]: 2025-10-02 12:49:31.254 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:49:31 compute-0 nova_compute[192079]: 2025-10-02 12:49:31.255 2 INFO os_vif [None req-57420654-9402-44b7-8eed-24e29de9e895 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:57:c8:5b,bridge_name='br-int',has_traffic_filtering=True,id=b54bc2a5-53f3-444d-9953-22bbf1bcdc83,network=Network(3c776fa4-63c0-44fa-bf3f-04ad74974c2c),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapb54bc2a5-53')
Oct 02 12:49:31 compute-0 nova_compute[192079]: 2025-10-02 12:49:31.256 2 INFO nova.virt.libvirt.driver [None req-57420654-9402-44b7-8eed-24e29de9e895 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Deleting instance files /var/lib/nova/instances/f47803ac-aae8-4d74-959f-2c47ab5f04ab_del
Oct 02 12:49:31 compute-0 nova_compute[192079]: 2025-10-02 12:49:31.256 2 INFO nova.virt.libvirt.driver [None req-57420654-9402-44b7-8eed-24e29de9e895 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Deletion of /var/lib/nova/instances/f47803ac-aae8-4d74-959f-2c47ab5f04ab_del complete
Oct 02 12:49:31 compute-0 podman[254117]: 2025-10-02 12:49:31.301873484 +0000 UTC m=+0.044382012 container remove ff3b99ed92b4928f95b685c31239e997352b88fd33398301a01037f5a94f52a0 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-3c776fa4-63c0-44fa-bf3f-04ad74974c2c, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2)
Oct 02 12:49:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:31.306 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[72ffd340-8f4d-41da-a2a3-fc9809b33d12]: (4, ('Thu Oct  2 12:49:31 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-3c776fa4-63c0-44fa-bf3f-04ad74974c2c (ff3b99ed92b4928f95b685c31239e997352b88fd33398301a01037f5a94f52a0)\nff3b99ed92b4928f95b685c31239e997352b88fd33398301a01037f5a94f52a0\nThu Oct  2 12:49:31 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-3c776fa4-63c0-44fa-bf3f-04ad74974c2c (ff3b99ed92b4928f95b685c31239e997352b88fd33398301a01037f5a94f52a0)\nff3b99ed92b4928f95b685c31239e997352b88fd33398301a01037f5a94f52a0\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:49:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:31.308 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[080baea8-bd16-40cb-a01c-6af59e020758]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:49:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:31.309 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tap3c776fa4-60, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:49:31 compute-0 nova_compute[192079]: 2025-10-02 12:49:31.310 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:31 compute-0 kernel: tap3c776fa4-60: left promiscuous mode
Oct 02 12:49:31 compute-0 nova_compute[192079]: 2025-10-02 12:49:31.322 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:31.325 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e8a0ba70-e836-4a54-acbb-25a646a60e0d]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:49:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:31.357 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[2629aa56-9b10-4cd9-b5da-59f639d6175a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:49:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:31.358 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[7f8a9a29-5f7f-4e8a-9ed1-2c3682bb395b]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:49:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:31.372 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0763d541-51ed-45e3-b5a8-b6591dee9720]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 734851, 'reachable_time': 33780, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 254132, 'error': None, 'target': 'ovnmeta-3c776fa4-63c0-44fa-bf3f-04ad74974c2c', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:49:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:31.374 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-3c776fa4-63c0-44fa-bf3f-04ad74974c2c deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:49:31 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:49:31.374 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[c27d856a-cde1-4795-baeb-b83d7594097b]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:49:31 compute-0 systemd[1]: run-netns-ovnmeta\x2d3c776fa4\x2d63c0\x2d44fa\x2dbf3f\x2d04ad74974c2c.mount: Deactivated successfully.
Oct 02 12:49:31 compute-0 nova_compute[192079]: 2025-10-02 12:49:31.529 2 INFO nova.compute.manager [None req-57420654-9402-44b7-8eed-24e29de9e895 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Took 0.59 seconds to destroy the instance on the hypervisor.
Oct 02 12:49:31 compute-0 nova_compute[192079]: 2025-10-02 12:49:31.530 2 DEBUG oslo.service.loopingcall [None req-57420654-9402-44b7-8eed-24e29de9e895 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:49:31 compute-0 nova_compute[192079]: 2025-10-02 12:49:31.530 2 DEBUG nova.compute.manager [-] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:49:31 compute-0 nova_compute[192079]: 2025-10-02 12:49:31.531 2 DEBUG nova.network.neutron [-] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:49:31 compute-0 nova_compute[192079]: 2025-10-02 12:49:31.986 2 DEBUG nova.compute.manager [req-d41c26a8-7b31-43b8-bb46-d78272861944 req-3246541c-11ac-4312-b3f2-7cd7eedf1b29 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Received event network-vif-unplugged-b54bc2a5-53f3-444d-9953-22bbf1bcdc83 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:49:31 compute-0 nova_compute[192079]: 2025-10-02 12:49:31.986 2 DEBUG oslo_concurrency.lockutils [req-d41c26a8-7b31-43b8-bb46-d78272861944 req-3246541c-11ac-4312-b3f2-7cd7eedf1b29 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "f47803ac-aae8-4d74-959f-2c47ab5f04ab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:49:31 compute-0 nova_compute[192079]: 2025-10-02 12:49:31.987 2 DEBUG oslo_concurrency.lockutils [req-d41c26a8-7b31-43b8-bb46-d78272861944 req-3246541c-11ac-4312-b3f2-7cd7eedf1b29 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "f47803ac-aae8-4d74-959f-2c47ab5f04ab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:49:31 compute-0 nova_compute[192079]: 2025-10-02 12:49:31.987 2 DEBUG oslo_concurrency.lockutils [req-d41c26a8-7b31-43b8-bb46-d78272861944 req-3246541c-11ac-4312-b3f2-7cd7eedf1b29 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "f47803ac-aae8-4d74-959f-2c47ab5f04ab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:49:31 compute-0 nova_compute[192079]: 2025-10-02 12:49:31.987 2 DEBUG nova.compute.manager [req-d41c26a8-7b31-43b8-bb46-d78272861944 req-3246541c-11ac-4312-b3f2-7cd7eedf1b29 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] No waiting events found dispatching network-vif-unplugged-b54bc2a5-53f3-444d-9953-22bbf1bcdc83 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:49:31 compute-0 nova_compute[192079]: 2025-10-02 12:49:31.988 2 DEBUG nova.compute.manager [req-d41c26a8-7b31-43b8-bb46-d78272861944 req-3246541c-11ac-4312-b3f2-7cd7eedf1b29 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Received event network-vif-unplugged-b54bc2a5-53f3-444d-9953-22bbf1bcdc83 for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:49:32 compute-0 nova_compute[192079]: 2025-10-02 12:49:32.445 2 DEBUG nova.network.neutron [-] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:49:32 compute-0 nova_compute[192079]: 2025-10-02 12:49:32.461 2 INFO nova.compute.manager [-] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Took 0.93 seconds to deallocate network for instance.
Oct 02 12:49:32 compute-0 nova_compute[192079]: 2025-10-02 12:49:32.552 2 DEBUG oslo_concurrency.lockutils [None req-57420654-9402-44b7-8eed-24e29de9e895 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:49:32 compute-0 nova_compute[192079]: 2025-10-02 12:49:32.552 2 DEBUG oslo_concurrency.lockutils [None req-57420654-9402-44b7-8eed-24e29de9e895 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:49:32 compute-0 nova_compute[192079]: 2025-10-02 12:49:32.565 2 DEBUG nova.compute.manager [req-efc7f13c-940a-4109-9832-604743ee0880 req-0b4dead8-7903-4223-994e-eca60719bc56 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Received event network-vif-deleted-b54bc2a5-53f3-444d-9953-22bbf1bcdc83 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:49:32 compute-0 nova_compute[192079]: 2025-10-02 12:49:32.605 2 DEBUG nova.compute.provider_tree [None req-57420654-9402-44b7-8eed-24e29de9e895 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:49:32 compute-0 nova_compute[192079]: 2025-10-02 12:49:32.620 2 DEBUG nova.scheduler.client.report [None req-57420654-9402-44b7-8eed-24e29de9e895 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:49:32 compute-0 nova_compute[192079]: 2025-10-02 12:49:32.640 2 DEBUG oslo_concurrency.lockutils [None req-57420654-9402-44b7-8eed-24e29de9e895 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.087s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:49:32 compute-0 nova_compute[192079]: 2025-10-02 12:49:32.667 2 INFO nova.scheduler.client.report [None req-57420654-9402-44b7-8eed-24e29de9e895 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Deleted allocations for instance f47803ac-aae8-4d74-959f-2c47ab5f04ab
Oct 02 12:49:32 compute-0 nova_compute[192079]: 2025-10-02 12:49:32.735 2 DEBUG oslo_concurrency.lockutils [None req-57420654-9402-44b7-8eed-24e29de9e895 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "f47803ac-aae8-4d74-959f-2c47ab5f04ab" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.888s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:49:33 compute-0 nova_compute[192079]: 2025-10-02 12:49:33.964 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:34 compute-0 nova_compute[192079]: 2025-10-02 12:49:34.108 2 DEBUG nova.compute.manager [req-bb81d7e3-0932-4a2f-9c55-d8d6db0614a4 req-0eb8df95-32d2-48e9-a26d-352dad5c66cc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Received event network-vif-plugged-b54bc2a5-53f3-444d-9953-22bbf1bcdc83 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:49:34 compute-0 nova_compute[192079]: 2025-10-02 12:49:34.109 2 DEBUG oslo_concurrency.lockutils [req-bb81d7e3-0932-4a2f-9c55-d8d6db0614a4 req-0eb8df95-32d2-48e9-a26d-352dad5c66cc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "f47803ac-aae8-4d74-959f-2c47ab5f04ab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:49:34 compute-0 nova_compute[192079]: 2025-10-02 12:49:34.109 2 DEBUG oslo_concurrency.lockutils [req-bb81d7e3-0932-4a2f-9c55-d8d6db0614a4 req-0eb8df95-32d2-48e9-a26d-352dad5c66cc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "f47803ac-aae8-4d74-959f-2c47ab5f04ab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:49:34 compute-0 nova_compute[192079]: 2025-10-02 12:49:34.110 2 DEBUG oslo_concurrency.lockutils [req-bb81d7e3-0932-4a2f-9c55-d8d6db0614a4 req-0eb8df95-32d2-48e9-a26d-352dad5c66cc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "f47803ac-aae8-4d74-959f-2c47ab5f04ab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:49:34 compute-0 nova_compute[192079]: 2025-10-02 12:49:34.111 2 DEBUG nova.compute.manager [req-bb81d7e3-0932-4a2f-9c55-d8d6db0614a4 req-0eb8df95-32d2-48e9-a26d-352dad5c66cc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] No waiting events found dispatching network-vif-plugged-b54bc2a5-53f3-444d-9953-22bbf1bcdc83 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:49:34 compute-0 nova_compute[192079]: 2025-10-02 12:49:34.111 2 WARNING nova.compute.manager [req-bb81d7e3-0932-4a2f-9c55-d8d6db0614a4 req-0eb8df95-32d2-48e9-a26d-352dad5c66cc 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Received unexpected event network-vif-plugged-b54bc2a5-53f3-444d-9953-22bbf1bcdc83 for instance with vm_state deleted and task_state None.
Oct 02 12:49:36 compute-0 nova_compute[192079]: 2025-10-02 12:49:36.254 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:38 compute-0 podman[254134]: 2025-10-02 12:49:38.130103957 +0000 UTC m=+0.046259144 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_id=ovn_metadata_agent, org.label-schema.build-date=20251001, container_name=ovn_metadata_agent, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']})
Oct 02 12:49:38 compute-0 podman[254136]: 2025-10-02 12:49:38.149911787 +0000 UTC m=+0.058482117 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:49:38 compute-0 podman[254135]: 2025-10-02 12:49:38.182756823 +0000 UTC m=+0.092035752 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, container_name=ovn_controller, managed_by=edpm_ansible, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_managed=true, config_id=ovn_controller, org.label-schema.schema-version=1.0)
Oct 02 12:49:38 compute-0 nova_compute[192079]: 2025-10-02 12:49:38.966 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:41 compute-0 nova_compute[192079]: 2025-10-02 12:49:41.258 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:43 compute-0 nova_compute[192079]: 2025-10-02 12:49:43.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_incomplete_migrations run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:49:43 compute-0 nova_compute[192079]: 2025-10-02 12:49:43.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Cleaning up deleted instances with incomplete migration  _cleanup_incomplete_migrations /usr/lib/python3.9/site-packages/nova/compute/manager.py:11183
Oct 02 12:49:43 compute-0 nova_compute[192079]: 2025-10-02 12:49:43.967 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:44 compute-0 nova_compute[192079]: 2025-10-02 12:49:44.993 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:45 compute-0 nova_compute[192079]: 2025-10-02 12:49:45.065 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:46 compute-0 podman[254198]: 2025-10-02 12:49:46.143098346 +0000 UTC m=+0.059673459 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, config_id=edpm, managed_by=edpm_ansible, org.label-schema.license=GPLv2, container_name=ceilometer_agent_compute, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, io.buildah.version=1.41.3, 
maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001)
Oct 02 12:49:46 compute-0 nova_compute[192079]: 2025-10-02 12:49:46.200 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759409371.1992784, f47803ac-aae8-4d74-959f-2c47ab5f04ab => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:49:46 compute-0 nova_compute[192079]: 2025-10-02 12:49:46.200 2 INFO nova.compute.manager [-] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] VM Stopped (Lifecycle Event)
Oct 02 12:49:46 compute-0 nova_compute[192079]: 2025-10-02 12:49:46.260 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:46 compute-0 nova_compute[192079]: 2025-10-02 12:49:46.687 2 DEBUG nova.compute.manager [None req-55e59915-1803-4dae-ba1b-c9849061b540 - - - - - -] [instance: f47803ac-aae8-4d74-959f-2c47ab5f04ab] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:49:49 compute-0 nova_compute[192079]: 2025-10-02 12:49:49.002 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:49 compute-0 nova_compute[192079]: 2025-10-02 12:49:49.974 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:49:51 compute-0 nova_compute[192079]: 2025-10-02 12:49:51.263 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:51 compute-0 nova_compute[192079]: 2025-10-02 12:49:51.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:49:52 compute-0 nova_compute[192079]: 2025-10-02 12:49:52.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:49:54 compute-0 nova_compute[192079]: 2025-10-02 12:49:54.004 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:54 compute-0 podman[254219]: 2025-10-02 12:49:54.13236436 +0000 UTC m=+0.049034559 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, config_id=multipathd, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, container_name=multipathd, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']})
Oct 02 12:49:54 compute-0 podman[254218]: 2025-10-02 12:49:54.132021571 +0000 UTC m=+0.051873486 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.openshift.tags=minimal rhel9, io.buildah.version=1.33.7, release=1755695350, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., distribution-scope=public, version=9.6, maintainer=Red Hat, Inc., io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., com.redhat.component=ubi9-minimal-container, config_id=edpm, url=https://catalog.redhat.com/en/search?searchType=containers, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., architecture=x86_64, name=ubi9-minimal, container_name=openstack_network_exporter, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, io.openshift.expose-services=, build-date=2025-08-20T13:12:41, managed_by=edpm_ansible, vendor=Red Hat, Inc., vcs-type=git, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']})
Oct 02 12:49:55 compute-0 nova_compute[192079]: 2025-10-02 12:49:55.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:49:55 compute-0 nova_compute[192079]: 2025-10-02 12:49:55.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:49:55 compute-0 nova_compute[192079]: 2025-10-02 12:49:55.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:49:55 compute-0 nova_compute[192079]: 2025-10-02 12:49:55.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:49:56 compute-0 nova_compute[192079]: 2025-10-02 12:49:56.267 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:49:57 compute-0 nova_compute[192079]: 2025-10-02 12:49:57.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:49:57 compute-0 nova_compute[192079]: 2025-10-02 12:49:57.701 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:49:57 compute-0 nova_compute[192079]: 2025-10-02 12:49:57.701 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:49:57 compute-0 nova_compute[192079]: 2025-10-02 12:49:57.702 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:49:57 compute-0 nova_compute[192079]: 2025-10-02 12:49:57.702 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:49:57 compute-0 nova_compute[192079]: 2025-10-02 12:49:57.868 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:49:57 compute-0 nova_compute[192079]: 2025-10-02 12:49:57.869 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5712MB free_disk=73.27172088623047GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:49:57 compute-0 nova_compute[192079]: 2025-10-02 12:49:57.869 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:49:57 compute-0 nova_compute[192079]: 2025-10-02 12:49:57.869 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:49:57 compute-0 nova_compute[192079]: 2025-10-02 12:49:57.967 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:49:57 compute-0 nova_compute[192079]: 2025-10-02 12:49:57.968 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:49:58 compute-0 nova_compute[192079]: 2025-10-02 12:49:58.005 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:49:58 compute-0 nova_compute[192079]: 2025-10-02 12:49:58.021 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:49:58 compute-0 nova_compute[192079]: 2025-10-02 12:49:58.048 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:49:58 compute-0 nova_compute[192079]: 2025-10-02 12:49:58.049 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.180s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:49:59 compute-0 nova_compute[192079]: 2025-10-02 12:49:59.046 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:00 compute-0 podman[254256]: 2025-10-02 12:50:00.154946967 +0000 UTC m=+0.066346131 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter)
Oct 02 12:50:00 compute-0 podman[254257]: 2025-10-02 12:50:00.179782265 +0000 UTC m=+0.091009665 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=iscsid, container_name=iscsid, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:50:01 compute-0 nova_compute[192079]: 2025-10-02 12:50:01.305 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:02.257 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:50:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:02.257 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:50:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:02.257 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:50:03 compute-0 nova_compute[192079]: 2025-10-02 12:50:03.049 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:50:03 compute-0 nova_compute[192079]: 2025-10-02 12:50:03.050 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:50:03 compute-0 nova_compute[192079]: 2025-10-02 12:50:03.050 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:50:03 compute-0 nova_compute[192079]: 2025-10-02 12:50:03.067 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:50:04 compute-0 nova_compute[192079]: 2025-10-02 12:50:04.048 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:04 compute-0 nova_compute[192079]: 2025-10-02 12:50:04.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:50:06 compute-0 nova_compute[192079]: 2025-10-02 12:50:06.309 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:09 compute-0 nova_compute[192079]: 2025-10-02 12:50:09.050 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:09 compute-0 podman[254302]: 2025-10-02 12:50:09.144539461 +0000 UTC m=+0.047829656 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:50:09 compute-0 podman[254300]: 2025-10-02 12:50:09.158376068 +0000 UTC m=+0.063276117 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, container_name=ovn_metadata_agent, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2)
Oct 02 12:50:09 compute-0 podman[254301]: 2025-10-02 12:50:09.227750911 +0000 UTC m=+0.135349884 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_id=ovn_controller, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, container_name=ovn_controller, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']})
Oct 02 12:50:10 compute-0 nova_compute[192079]: 2025-10-02 12:50:10.438 2 DEBUG oslo_concurrency.lockutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "d891f4b6-930a-42b0-a95f-a5383e4f5827" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:50:10 compute-0 nova_compute[192079]: 2025-10-02 12:50:10.439 2 DEBUG oslo_concurrency.lockutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "d891f4b6-930a-42b0-a95f-a5383e4f5827" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:50:10 compute-0 nova_compute[192079]: 2025-10-02 12:50:10.463 2 DEBUG nova.compute.manager [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Starting instance... _do_build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2402
Oct 02 12:50:10 compute-0 nova_compute[192079]: 2025-10-02 12:50:10.615 2 DEBUG oslo_concurrency.lockutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:50:10 compute-0 nova_compute[192079]: 2025-10-02 12:50:10.615 2 DEBUG oslo_concurrency.lockutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:50:10 compute-0 nova_compute[192079]: 2025-10-02 12:50:10.626 2 DEBUG nova.virt.hardware [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Require both a host and instance NUMA topology to fit instance on host. numa_fit_instance_to_host /usr/lib/python3.9/site-packages/nova/virt/hardware.py:2368
Oct 02 12:50:10 compute-0 nova_compute[192079]: 2025-10-02 12:50:10.626 2 INFO nova.compute.claims [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Claim successful on node compute-0.ctlplane.example.com
Oct 02 12:50:10 compute-0 nova_compute[192079]: 2025-10-02 12:50:10.776 2 DEBUG nova.compute.provider_tree [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:50:10 compute-0 nova_compute[192079]: 2025-10-02 12:50:10.828 2 DEBUG nova.scheduler.client.report [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:50:10 compute-0 nova_compute[192079]: 2025-10-02 12:50:10.984 2 DEBUG oslo_concurrency.lockutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.369s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:50:10 compute-0 nova_compute[192079]: 2025-10-02 12:50:10.985 2 DEBUG nova.compute.manager [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Start building networks asynchronously for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2799
Oct 02 12:50:11 compute-0 nova_compute[192079]: 2025-10-02 12:50:11.202 2 DEBUG nova.compute.manager [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Allocating IP information in the background. _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1952
Oct 02 12:50:11 compute-0 nova_compute[192079]: 2025-10-02 12:50:11.202 2 DEBUG nova.network.neutron [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] allocate_for_instance() allocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1156
Oct 02 12:50:11 compute-0 nova_compute[192079]: 2025-10-02 12:50:11.227 2 INFO nova.virt.libvirt.driver [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Ignoring supplied device name: /dev/vda. Libvirt can't honour user-supplied dev names
Oct 02 12:50:11 compute-0 nova_compute[192079]: 2025-10-02 12:50:11.256 2 DEBUG nova.compute.manager [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Start building block device mappings for instance. _build_resources /usr/lib/python3.9/site-packages/nova/compute/manager.py:2834
Oct 02 12:50:11 compute-0 nova_compute[192079]: 2025-10-02 12:50:11.355 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:11 compute-0 nova_compute[192079]: 2025-10-02 12:50:11.454 2 DEBUG nova.compute.manager [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Start spawning the instance on the hypervisor. _build_and_run_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:2608
Oct 02 12:50:11 compute-0 nova_compute[192079]: 2025-10-02 12:50:11.455 2 DEBUG nova.virt.libvirt.driver [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Creating instance directory _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4723
Oct 02 12:50:11 compute-0 nova_compute[192079]: 2025-10-02 12:50:11.456 2 INFO nova.virt.libvirt.driver [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Creating image(s)
Oct 02 12:50:11 compute-0 nova_compute[192079]: 2025-10-02 12:50:11.457 2 DEBUG oslo_concurrency.lockutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "/var/lib/nova/instances/d891f4b6-930a-42b0-a95f-a5383e4f5827/disk.info" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:50:11 compute-0 nova_compute[192079]: 2025-10-02 12:50:11.457 2 DEBUG oslo_concurrency.lockutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "/var/lib/nova/instances/d891f4b6-930a-42b0-a95f-a5383e4f5827/disk.info" acquired by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:50:11 compute-0 nova_compute[192079]: 2025-10-02 12:50:11.458 2 DEBUG oslo_concurrency.lockutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "/var/lib/nova/instances/d891f4b6-930a-42b0-a95f-a5383e4f5827/disk.info" "released" by "nova.virt.libvirt.imagebackend.Image.resolve_driver_format.<locals>.write_to_disk_info_file" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:50:11 compute-0 nova_compute[192079]: 2025-10-02 12:50:11.482 2 DEBUG oslo_concurrency.processutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:50:11 compute-0 nova_compute[192079]: 2025-10-02 12:50:11.558 2 DEBUG oslo_concurrency.processutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.076s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:50:11 compute-0 nova_compute[192079]: 2025-10-02 12:50:11.559 2 DEBUG oslo_concurrency.lockutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "068b233e8d7f49e215e2900dde7d25b776cad955" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:50:11 compute-0 nova_compute[192079]: 2025-10-02 12:50:11.560 2 DEBUG oslo_concurrency.lockutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" acquired by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:50:11 compute-0 nova_compute[192079]: 2025-10-02 12:50:11.572 2 DEBUG oslo_concurrency.processutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:50:11 compute-0 nova_compute[192079]: 2025-10-02 12:50:11.642 2 DEBUG oslo_concurrency.processutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.070s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:50:11 compute-0 nova_compute[192079]: 2025-10-02 12:50:11.643 2 DEBUG oslo_concurrency.processutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Running cmd (subprocess): env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/d891f4b6-930a-42b0-a95f-a5383e4f5827/disk 1073741824 execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:50:11 compute-0 nova_compute[192079]: 2025-10-02 12:50:11.692 2 DEBUG oslo_concurrency.processutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] CMD "env LC_ALL=C LANG=C qemu-img create -f qcow2 -o backing_file=/var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955,backing_fmt=raw /var/lib/nova/instances/d891f4b6-930a-42b0-a95f-a5383e4f5827/disk 1073741824" returned: 0 in 0.049s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:50:11 compute-0 nova_compute[192079]: 2025-10-02 12:50:11.693 2 DEBUG oslo_concurrency.lockutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "068b233e8d7f49e215e2900dde7d25b776cad955" "released" by "nova.virt.libvirt.imagebackend.Qcow2.create_image.<locals>.create_qcow2_image" :: held 0.133s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:50:11 compute-0 nova_compute[192079]: 2025-10-02 12:50:11.693 2 DEBUG oslo_concurrency.processutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:50:11 compute-0 nova_compute[192079]: 2025-10-02 12:50:11.759 2 DEBUG nova.policy [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} authorize /usr/lib/python3.9/site-packages/nova/policy.py:203
Oct 02 12:50:11 compute-0 nova_compute[192079]: 2025-10-02 12:50:11.765 2 DEBUG oslo_concurrency.processutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/_base/068b233e8d7f49e215e2900dde7d25b776cad955 --force-share --output=json" returned: 0 in 0.071s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:50:11 compute-0 nova_compute[192079]: 2025-10-02 12:50:11.765 2 DEBUG nova.virt.disk.api [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Checking if we can resize image /var/lib/nova/instances/d891f4b6-930a-42b0-a95f-a5383e4f5827/disk. size=1073741824 can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:166
Oct 02 12:50:11 compute-0 nova_compute[192079]: 2025-10-02 12:50:11.765 2 DEBUG oslo_concurrency.processutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/d891f4b6-930a-42b0-a95f-a5383e4f5827/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:50:11 compute-0 nova_compute[192079]: 2025-10-02 12:50:11.850 2 DEBUG oslo_concurrency.processutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/d891f4b6-930a-42b0-a95f-a5383e4f5827/disk --force-share --output=json" returned: 0 in 0.085s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:50:11 compute-0 nova_compute[192079]: 2025-10-02 12:50:11.851 2 DEBUG nova.virt.disk.api [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Cannot resize image /var/lib/nova/instances/d891f4b6-930a-42b0-a95f-a5383e4f5827/disk to a smaller size. can_resize_image /usr/lib/python3.9/site-packages/nova/virt/disk/api.py:172
Oct 02 12:50:11 compute-0 nova_compute[192079]: 2025-10-02 12:50:11.852 2 DEBUG nova.objects.instance [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lazy-loading 'migration_context' on Instance uuid d891f4b6-930a-42b0-a95f-a5383e4f5827 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:50:12 compute-0 nova_compute[192079]: 2025-10-02 12:50:12.248 2 DEBUG nova.virt.libvirt.driver [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Created local disks _create_image /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4857
Oct 02 12:50:12 compute-0 nova_compute[192079]: 2025-10-02 12:50:12.248 2 DEBUG nova.virt.libvirt.driver [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Ensure instance console log exists: /var/lib/nova/instances/d891f4b6-930a-42b0-a95f-a5383e4f5827/console.log _ensure_console_log_for_instance /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4609
Oct 02 12:50:12 compute-0 nova_compute[192079]: 2025-10-02 12:50:12.249 2 DEBUG oslo_concurrency.lockutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "vgpu_resources" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:50:12 compute-0 nova_compute[192079]: 2025-10-02 12:50:12.249 2 DEBUG oslo_concurrency.lockutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "vgpu_resources" acquired by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:50:12 compute-0 nova_compute[192079]: 2025-10-02 12:50:12.249 2 DEBUG oslo_concurrency.lockutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "vgpu_resources" "released" by "nova.virt.libvirt.driver.LibvirtDriver._allocate_mdevs" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:50:14 compute-0 nova_compute[192079]: 2025-10-02 12:50:14.052 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:14.438 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=56, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=55) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:50:14 compute-0 nova_compute[192079]: 2025-10-02 12:50:14.438 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:14 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:14.440 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 6 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:50:14 compute-0 nova_compute[192079]: 2025-10-02 12:50:14.874 2 DEBUG nova.network.neutron [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Successfully created port: a47e0808-6910-4418-ad25-108c7168bf02 _create_port_minimal /usr/lib/python3.9/site-packages/nova/network/neutron.py:548
Oct 02 12:50:16 compute-0 nova_compute[192079]: 2025-10-02 12:50:16.356 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:16 compute-0 nova_compute[192079]: 2025-10-02 12:50:16.802 2 DEBUG nova.network.neutron [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Successfully updated port: a47e0808-6910-4418-ad25-108c7168bf02 _update_port /usr/lib/python3.9/site-packages/nova/network/neutron.py:586
Oct 02 12:50:16 compute-0 nova_compute[192079]: 2025-10-02 12:50:16.939 2 DEBUG oslo_concurrency.lockutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "refresh_cache-d891f4b6-930a-42b0-a95f-a5383e4f5827" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:50:16 compute-0 nova_compute[192079]: 2025-10-02 12:50:16.939 2 DEBUG oslo_concurrency.lockutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquired lock "refresh_cache-d891f4b6-930a-42b0-a95f-a5383e4f5827" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:50:16 compute-0 nova_compute[192079]: 2025-10-02 12:50:16.939 2 DEBUG nova.network.neutron [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Building network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2010
Oct 02 12:50:16 compute-0 nova_compute[192079]: 2025-10-02 12:50:16.988 2 DEBUG nova.compute.manager [req-3cace2c1-9d5d-4bd1-a52c-bb8a8c2eb3da req-bd061b61-00b6-4507-be81-809d1625cb22 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Received event network-changed-a47e0808-6910-4418-ad25-108c7168bf02 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:50:16 compute-0 nova_compute[192079]: 2025-10-02 12:50:16.989 2 DEBUG nova.compute.manager [req-3cace2c1-9d5d-4bd1-a52c-bb8a8c2eb3da req-bd061b61-00b6-4507-be81-809d1625cb22 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Refreshing instance network info cache due to event network-changed-a47e0808-6910-4418-ad25-108c7168bf02. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:50:16 compute-0 nova_compute[192079]: 2025-10-02 12:50:16.989 2 DEBUG oslo_concurrency.lockutils [req-3cace2c1-9d5d-4bd1-a52c-bb8a8c2eb3da req-bd061b61-00b6-4507-be81-809d1625cb22 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-d891f4b6-930a-42b0-a95f-a5383e4f5827" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:50:17 compute-0 podman[254381]: 2025-10-02 12:50:17.147962841 +0000 UTC m=+0.060916463 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, managed_by=edpm_ansible, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=edpm, container_name=ceilometer_agent_compute, org.label-schema.name=CentOS Stream 9 Base Image, 
org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:50:17 compute-0 nova_compute[192079]: 2025-10-02 12:50:17.344 2 DEBUG nova.network.neutron [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Instance cache missing network info. _get_preexisting_port_ids /usr/lib/python3.9/site-packages/nova/network/neutron.py:3323
Oct 02 12:50:19 compute-0 nova_compute[192079]: 2025-10-02 12:50:19.088 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:20 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:20.442 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '56'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:50:20 compute-0 nova_compute[192079]: 2025-10-02 12:50:20.760 2 DEBUG nova.network.neutron [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Updating instance_info_cache with network_info: [{"id": "a47e0808-6910-4418-ad25-108c7168bf02", "address": "fa:16:3e:a5:a1:66", "network": {"id": "c56f578e-f013-4483-b9f2-ee1459896133", "bridge": "br-int", "label": "tempest-network-smoke--1680080003", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa47e0808-69", "ovs_interfaceid": "a47e0808-6910-4418-ad25-108c7168bf02", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.359 2 DEBUG oslo_concurrency.lockutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Releasing lock "refresh_cache-d891f4b6-930a-42b0-a95f-a5383e4f5827" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.360 2 DEBUG nova.compute.manager [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Instance network_info: |[{"id": "a47e0808-6910-4418-ad25-108c7168bf02", "address": "fa:16:3e:a5:a1:66", "network": {"id": "c56f578e-f013-4483-b9f2-ee1459896133", "bridge": "br-int", "label": "tempest-network-smoke--1680080003", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa47e0808-69", "ovs_interfaceid": "a47e0808-6910-4418-ad25-108c7168bf02", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| _allocate_network_async /usr/lib/python3.9/site-packages/nova/compute/manager.py:1967
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.360 2 DEBUG oslo_concurrency.lockutils [req-3cace2c1-9d5d-4bd1-a52c-bb8a8c2eb3da req-bd061b61-00b6-4507-be81-809d1625cb22 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-d891f4b6-930a-42b0-a95f-a5383e4f5827" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.361 2 DEBUG nova.network.neutron [req-3cace2c1-9d5d-4bd1-a52c-bb8a8c2eb3da req-bd061b61-00b6-4507-be81-809d1625cb22 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Refreshing network info cache for port a47e0808-6910-4418-ad25-108c7168bf02 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.365 2 DEBUG nova.virt.libvirt.driver [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Start _get_guest_xml network_info=[{"id": "a47e0808-6910-4418-ad25-108c7168bf02", "address": "fa:16:3e:a5:a1:66", "network": {"id": "c56f578e-f013-4483-b9f2-ee1459896133", "bridge": "br-int", "label": "tempest-network-smoke--1680080003", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa47e0808-69", "ovs_interfaceid": "a47e0808-6910-4418-ad25-108c7168bf02", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] disk_info={'disk_bus': 'virtio', 'cdrom_bus': 'sata', 'mapping': {'root': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk': {'bus': 'virtio', 'dev': 'vda', 'type': 'disk', 'boot_index': '1'}, 'disk.config': {'bus': 'sata', 'dev': 'sda', 'type': 'cdrom'}}} 
image_meta=ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>) rescue=None block_device_info={'root_device_name': '/dev/vda', 'image': [{'guest_format': None, 'size': 0, 'encrypted': False, 'device_name': '/dev/vda', 'encryption_format': None, 'disk_bus': 'virtio', 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'image_id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}], 'ephemerals': [], 'block_device_mapping': [], 'swap': None} _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7549
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.406 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.410 2 WARNING nova.virt.libvirt.driver [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.420 2 DEBUG nova.virt.libvirt.host [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V1... _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1653
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.420 2 DEBUG nova.virt.libvirt.host [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] CPU controller missing on host. _has_cgroupsv1_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1663
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.424 2 DEBUG nova.virt.libvirt.host [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Searching host: 'compute-0.ctlplane.example.com' for CPU controller through CGroups V2... _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1672
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.424 2 DEBUG nova.virt.libvirt.host [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] CPU controller found on host. _has_cgroupsv2_cpu_controller /usr/lib/python3.9/site-packages/nova/virt/libvirt/host.py:1679
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.425 2 DEBUG nova.virt.libvirt.driver [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] CPU mode 'custom' models 'Nehalem' was chosen, with extra flags: '' _get_guest_cpu_model_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:5396
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.425 2 DEBUG nova.virt.hardware [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Getting desirable topologies for flavor Flavor(created_at=2025-10-02T11:59:24Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='9ac83da7-f31e-4467-8569-d28002f6aeed',id=1,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c8fc807773e5354afe61636071771906',container_format='bare',created_at=2025-10-02T11:59:26Z,direct_url=<?>,disk_format='qcow2',id=cf60d86d-f1d5-4be4-976e-7488dbdcf0b2,min_disk=0,min_ram=0,name='cirros-0.6.2-x86_64-disk.img',owner='c543175414e2485bb476e4dfce01c394',properties=ImageMetaProps,protected=<?>,size=21430272,status='active',tags=<?>,updated_at=2025-10-02T11:59:27Z,virtual_size=<?>,visibility=<?>), allow threads: True _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:563
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.426 2 DEBUG nova.virt.hardware [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Flavor limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:348
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.426 2 DEBUG nova.virt.hardware [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Image limits 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:352
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.426 2 DEBUG nova.virt.hardware [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Flavor pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:388
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.426 2 DEBUG nova.virt.hardware [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Image pref 0:0:0 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:392
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.426 2 DEBUG nova.virt.hardware [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 get_cpu_topology_constraints /usr/lib/python3.9/site-packages/nova/virt/hardware.py:430
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.426 2 DEBUG nova.virt.hardware [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:569
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.427 2 DEBUG nova.virt.hardware [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Build topologies for 1 vcpu(s) 1:1:1 _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:471
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.427 2 DEBUG nova.virt.hardware [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Got 1 possible topologies _get_possible_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:501
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.427 2 DEBUG nova.virt.hardware [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:575
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.427 2 DEBUG nova.virt.hardware [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] _get_desirable_cpu_topologies /usr/lib/python3.9/site-packages/nova/virt/hardware.py:577
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.431 2 DEBUG nova.virt.libvirt.vif [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:50:08Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549',display_name='tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-server-tempest-testsecuritygroupsbasicops-1020134341-ac',id=186,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBPtdLgRziYi/gtQwh2c90NnE9jWcSnkXXhVGvo+TNtzW3MSE83NoyumTXAaB/UU4ExIeaKr77+vb5N+WSXOcyyn7dDdXMPaG0pk4M0kXpEwbShNs9Jn1NVnaa85coBBWBQ==',key_name='tempest-TestSecurityGroupsBasicOps-1574692784',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='575f3d227ab24f2daa62e65e14a4cd9c',ramdisk_id='',reservation_id='r-dtj46nao',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestSecurityGroupsBasicOps-1020134341',owner_user_name='tempest-TestSecurityGroupsBasicOps-1020134341-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:50:11Z,user_data=None,user_id='2d2b4a2da57543ef88e44ae28ad61647',uuid=d891f4b6-930a-42b0-a95f-a5383e4f5827,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "a47e0808-6910-4418-ad25-108c7168bf02", "address": "fa:16:3e:a5:a1:66", "network": {"id": "c56f578e-f013-4483-b9f2-ee1459896133", "bridge": "br-int", "label": "tempest-network-smoke--1680080003", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": 
true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa47e0808-69", "ovs_interfaceid": "a47e0808-6910-4418-ad25-108c7168bf02", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} virt_type=kvm get_config /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:563
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.431 2 DEBUG nova.network.os_vif_util [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Converting VIF {"id": "a47e0808-6910-4418-ad25-108c7168bf02", "address": "fa:16:3e:a5:a1:66", "network": {"id": "c56f578e-f013-4483-b9f2-ee1459896133", "bridge": "br-int", "label": "tempest-network-smoke--1680080003", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa47e0808-69", "ovs_interfaceid": "a47e0808-6910-4418-ad25-108c7168bf02", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.432 2 DEBUG nova.network.os_vif_util [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:a5:a1:66,bridge_name='br-int',has_traffic_filtering=True,id=a47e0808-6910-4418-ad25-108c7168bf02,network=Network(c56f578e-f013-4483-b9f2-ee1459896133),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapa47e0808-69') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.433 2 DEBUG nova.objects.instance [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lazy-loading 'pci_devices' on Instance uuid d891f4b6-930a-42b0-a95f-a5383e4f5827 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.555 2 DEBUG nova.virt.libvirt.driver [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] End _get_guest_xml xml=<domain type="kvm">
Oct 02 12:50:21 compute-0 nova_compute[192079]:   <uuid>d891f4b6-930a-42b0-a95f-a5383e4f5827</uuid>
Oct 02 12:50:21 compute-0 nova_compute[192079]:   <name>instance-000000ba</name>
Oct 02 12:50:21 compute-0 nova_compute[192079]:   <memory>131072</memory>
Oct 02 12:50:21 compute-0 nova_compute[192079]:   <vcpu>1</vcpu>
Oct 02 12:50:21 compute-0 nova_compute[192079]:   <metadata>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <nova:instance xmlns:nova="http://openstack.org/xmlns/libvirt/nova/1.1">
Oct 02 12:50:21 compute-0 nova_compute[192079]:       <nova:package version="27.5.2-0.20250829104910.6f8decf.el9"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:       <nova:name>tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549</nova:name>
Oct 02 12:50:21 compute-0 nova_compute[192079]:       <nova:creationTime>2025-10-02 12:50:21</nova:creationTime>
Oct 02 12:50:21 compute-0 nova_compute[192079]:       <nova:flavor name="m1.nano">
Oct 02 12:50:21 compute-0 nova_compute[192079]:         <nova:memory>128</nova:memory>
Oct 02 12:50:21 compute-0 nova_compute[192079]:         <nova:disk>1</nova:disk>
Oct 02 12:50:21 compute-0 nova_compute[192079]:         <nova:swap>0</nova:swap>
Oct 02 12:50:21 compute-0 nova_compute[192079]:         <nova:ephemeral>0</nova:ephemeral>
Oct 02 12:50:21 compute-0 nova_compute[192079]:         <nova:vcpus>1</nova:vcpus>
Oct 02 12:50:21 compute-0 nova_compute[192079]:       </nova:flavor>
Oct 02 12:50:21 compute-0 nova_compute[192079]:       <nova:owner>
Oct 02 12:50:21 compute-0 nova_compute[192079]:         <nova:user uuid="2d2b4a2da57543ef88e44ae28ad61647">tempest-TestSecurityGroupsBasicOps-1020134341-project-member</nova:user>
Oct 02 12:50:21 compute-0 nova_compute[192079]:         <nova:project uuid="575f3d227ab24f2daa62e65e14a4cd9c">tempest-TestSecurityGroupsBasicOps-1020134341</nova:project>
Oct 02 12:50:21 compute-0 nova_compute[192079]:       </nova:owner>
Oct 02 12:50:21 compute-0 nova_compute[192079]:       <nova:root type="image" uuid="cf60d86d-f1d5-4be4-976e-7488dbdcf0b2"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:       <nova:ports>
Oct 02 12:50:21 compute-0 nova_compute[192079]:         <nova:port uuid="a47e0808-6910-4418-ad25-108c7168bf02">
Oct 02 12:50:21 compute-0 nova_compute[192079]:           <nova:ip type="fixed" address="10.100.0.6" ipVersion="4"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:         </nova:port>
Oct 02 12:50:21 compute-0 nova_compute[192079]:       </nova:ports>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     </nova:instance>
Oct 02 12:50:21 compute-0 nova_compute[192079]:   </metadata>
Oct 02 12:50:21 compute-0 nova_compute[192079]:   <sysinfo type="smbios">
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <system>
Oct 02 12:50:21 compute-0 nova_compute[192079]:       <entry name="manufacturer">RDO</entry>
Oct 02 12:50:21 compute-0 nova_compute[192079]:       <entry name="product">OpenStack Compute</entry>
Oct 02 12:50:21 compute-0 nova_compute[192079]:       <entry name="version">27.5.2-0.20250829104910.6f8decf.el9</entry>
Oct 02 12:50:21 compute-0 nova_compute[192079]:       <entry name="serial">d891f4b6-930a-42b0-a95f-a5383e4f5827</entry>
Oct 02 12:50:21 compute-0 nova_compute[192079]:       <entry name="uuid">d891f4b6-930a-42b0-a95f-a5383e4f5827</entry>
Oct 02 12:50:21 compute-0 nova_compute[192079]:       <entry name="family">Virtual Machine</entry>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     </system>
Oct 02 12:50:21 compute-0 nova_compute[192079]:   </sysinfo>
Oct 02 12:50:21 compute-0 nova_compute[192079]:   <os>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <type arch="x86_64" machine="q35">hvm</type>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <boot dev="hd"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <smbios mode="sysinfo"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:   </os>
Oct 02 12:50:21 compute-0 nova_compute[192079]:   <features>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <acpi/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <apic/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <vmcoreinfo/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:   </features>
Oct 02 12:50:21 compute-0 nova_compute[192079]:   <clock offset="utc">
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <timer name="pit" tickpolicy="delay"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <timer name="rtc" tickpolicy="catchup"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <timer name="hpet" present="no"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:   </clock>
Oct 02 12:50:21 compute-0 nova_compute[192079]:   <cpu mode="custom" match="exact">
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <model>Nehalem</model>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <topology sockets="1" cores="1" threads="1"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:   </cpu>
Oct 02 12:50:21 compute-0 nova_compute[192079]:   <devices>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <disk type="file" device="disk">
Oct 02 12:50:21 compute-0 nova_compute[192079]:       <driver name="qemu" type="qcow2" cache="none"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/d891f4b6-930a-42b0-a95f-a5383e4f5827/disk"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:       <target dev="vda" bus="virtio"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <disk type="file" device="cdrom">
Oct 02 12:50:21 compute-0 nova_compute[192079]:       <driver name="qemu" type="raw" cache="none"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:       <source file="/var/lib/nova/instances/d891f4b6-930a-42b0-a95f-a5383e4f5827/disk.config"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:       <target dev="sda" bus="sata"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     </disk>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <interface type="ethernet">
Oct 02 12:50:21 compute-0 nova_compute[192079]:       <mac address="fa:16:3e:a5:a1:66"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:       <driver name="vhost" rx_queue_size="512"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:       <mtu size="1442"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:       <target dev="tapa47e0808-69"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     </interface>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <serial type="pty">
Oct 02 12:50:21 compute-0 nova_compute[192079]:       <log file="/var/lib/nova/instances/d891f4b6-930a-42b0-a95f-a5383e4f5827/console.log" append="off"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     </serial>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <graphics type="vnc" autoport="yes" listen="::0"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <video>
Oct 02 12:50:21 compute-0 nova_compute[192079]:       <model type="virtio"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     </video>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <input type="tablet" bus="usb"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <rng model="virtio">
Oct 02 12:50:21 compute-0 nova_compute[192079]:       <backend model="random">/dev/urandom</backend>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     </rng>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <controller type="pci" model="pcie-root-port"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <controller type="usb" index="0"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     <memballoon model="virtio">
Oct 02 12:50:21 compute-0 nova_compute[192079]:       <stats period="10"/>
Oct 02 12:50:21 compute-0 nova_compute[192079]:     </memballoon>
Oct 02 12:50:21 compute-0 nova_compute[192079]:   </devices>
Oct 02 12:50:21 compute-0 nova_compute[192079]: </domain>
Oct 02 12:50:21 compute-0 nova_compute[192079]:  _get_guest_xml /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:7555
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.556 2 DEBUG nova.compute.manager [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Preparing to wait for external event network-vif-plugged-a47e0808-6910-4418-ad25-108c7168bf02 prepare_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:283
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.557 2 DEBUG oslo_concurrency.lockutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "d891f4b6-930a-42b0-a95f-a5383e4f5827-events" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.557 2 DEBUG oslo_concurrency.lockutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "d891f4b6-930a-42b0-a95f-a5383e4f5827-events" acquired by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.557 2 DEBUG oslo_concurrency.lockutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "d891f4b6-930a-42b0-a95f-a5383e4f5827-events" "released" by "nova.compute.manager.InstanceEvents.prepare_for_instance_event.<locals>._create_or_get_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.558 2 DEBUG nova.virt.libvirt.vif [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='',created_at=2025-10-02T12:50:08Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=None,disable_terminate=False,display_description='tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549',display_name='tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549',ec2_ids=EC2Ids,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-server-tempest-testsecuritygroupsbasicops-1020134341-ac',id=186,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBPtdLgRziYi/gtQwh2c90NnE9jWcSnkXXhVGvo+TNtzW3MSE83NoyumTXAaB/UU4ExIeaKr77+vb5N+WSXOcyyn7dDdXMPaG0pk4M0kXpEwbShNs9Jn1NVnaa85coBBWBQ==',key_name='tempest-TestSecurityGroupsBasicOps-1574692784',keypairs=KeyPairList,launch_index=0,launched_at=None,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=None,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=PciDeviceList,pci_requests=InstancePCIRequests,power_state=0,progress=0,project_id='575f3d227ab24f2daa62e65e14a4cd9c',ramdisk_id='',reservation_id='r-dtj46nao',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_machine_type='q35',image_hw_rng_model='virtio',image_min_disk='1',image_min_ram='0',network_allocated='True',owner_project_name='tempest-TestSecurityGroupsBasicOps-1020134341',owner_user_name='tempest-TestSecurityGroupsBasicOps-1020134341-project-member'},tags=TagList,task_state='spawning',terminated_at=None,trusted_certs=None,updated_at=2025-10-02T12:50:11Z,user_data=None,user_id='2d2b4a2da57543ef88e44ae28ad61647',uuid=d891f4b6-930a-42b0-a95f-a5383e4f5827,vcpu_model=VirtCPUModel,vcpus=1,vm_mode=None,vm_state='building') vif={"id": "a47e0808-6910-4418-ad25-108c7168bf02", "address": "fa:16:3e:a5:a1:66", "network": {"id": "c56f578e-f013-4483-b9f2-ee1459896133", "bridge": "br-int", "label": "tempest-network-smoke--1680080003", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa47e0808-69", "ovs_interfaceid": "a47e0808-6910-4418-ad25-108c7168bf02", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} plug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:710
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.558 2 DEBUG nova.network.os_vif_util [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Converting VIF {"id": "a47e0808-6910-4418-ad25-108c7168bf02", "address": "fa:16:3e:a5:a1:66", "network": {"id": "c56f578e-f013-4483-b9f2-ee1459896133", "bridge": "br-int", "label": "tempest-network-smoke--1680080003", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa47e0808-69", "ovs_interfaceid": "a47e0808-6910-4418-ad25-108c7168bf02", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.559 2 DEBUG nova.network.os_vif_util [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Converted object VIFOpenVSwitch(active=False,address=fa:16:3e:a5:a1:66,bridge_name='br-int',has_traffic_filtering=True,id=a47e0808-6910-4418-ad25-108c7168bf02,network=Network(c56f578e-f013-4483-b9f2-ee1459896133),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapa47e0808-69') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.560 2 DEBUG os_vif [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Plugging vif VIFOpenVSwitch(active=False,address=fa:16:3e:a5:a1:66,bridge_name='br-int',has_traffic_filtering=True,id=a47e0808-6910-4418-ad25-108c7168bf02,network=Network(c56f578e-f013-4483-b9f2-ee1459896133),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapa47e0808-69') plug /usr/lib/python3.9/site-packages/os_vif/__init__.py:76
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.560 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.560 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddBridgeCommand(_result=None, name=br-int, may_exist=True, datapath_type=system) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.561 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.563 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.563 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapa47e0808-69, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.564 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=1): DbSetCommand(_result=None, table=Interface, record=tapa47e0808-69, col_values=(('external_ids', {'iface-id': 'a47e0808-6910-4418-ad25-108c7168bf02', 'iface-status': 'active', 'attached-mac': 'fa:16:3e:a5:a1:66', 'vm-uuid': 'd891f4b6-930a-42b0-a95f-a5383e4f5827'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.565 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:21 compute-0 NetworkManager[51160]: <info>  [1759409421.5662] manager: (tapa47e0808-69): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/357)
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.568 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.573 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:21 compute-0 nova_compute[192079]: 2025-10-02 12:50:21.574 2 INFO os_vif [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Successfully plugged vif VIFOpenVSwitch(active=False,address=fa:16:3e:a5:a1:66,bridge_name='br-int',has_traffic_filtering=True,id=a47e0808-6910-4418-ad25-108c7168bf02,network=Network(c56f578e-f013-4483-b9f2-ee1459896133),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapa47e0808-69')
Oct 02 12:50:22 compute-0 nova_compute[192079]: 2025-10-02 12:50:22.122 2 DEBUG nova.virt.libvirt.driver [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] No BDM found with device name vda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:50:22 compute-0 nova_compute[192079]: 2025-10-02 12:50:22.123 2 DEBUG nova.virt.libvirt.driver [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] No BDM found with device name sda, not building metadata. _build_disk_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12116
Oct 02 12:50:22 compute-0 nova_compute[192079]: 2025-10-02 12:50:22.123 2 DEBUG nova.virt.libvirt.driver [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] No VIF found with MAC fa:16:3e:a5:a1:66, not building metadata _build_interface_metadata /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:12092
Oct 02 12:50:22 compute-0 nova_compute[192079]: 2025-10-02 12:50:22.123 2 INFO nova.virt.libvirt.driver [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Using config drive
Oct 02 12:50:22 compute-0 nova_compute[192079]: 2025-10-02 12:50:22.930 2 INFO nova.virt.libvirt.driver [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Creating config drive at /var/lib/nova/instances/d891f4b6-930a-42b0-a95f-a5383e4f5827/disk.config
Oct 02 12:50:22 compute-0 nova_compute[192079]: 2025-10-02 12:50:22.935 2 DEBUG oslo_concurrency.processutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Running cmd (subprocess): /usr/bin/mkisofs -o /var/lib/nova/instances/d891f4b6-930a-42b0-a95f-a5383e4f5827/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpbbbgkxmb execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:50:23 compute-0 nova_compute[192079]: 2025-10-02 12:50:23.059 2 DEBUG oslo_concurrency.processutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] CMD "/usr/bin/mkisofs -o /var/lib/nova/instances/d891f4b6-930a-42b0-a95f-a5383e4f5827/disk.config -ldots -allow-lowercase -allow-multidot -l -publisher OpenStack Compute 27.5.2-0.20250829104910.6f8decf.el9 -quiet -J -r -V config-2 /tmp/tmpbbbgkxmb" returned: 0 in 0.124s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:50:23 compute-0 kernel: tapa47e0808-69: entered promiscuous mode
Oct 02 12:50:23 compute-0 NetworkManager[51160]: <info>  [1759409423.1318] manager: (tapa47e0808-69): new Tun device (/org/freedesktop/NetworkManager/Devices/358)
Oct 02 12:50:23 compute-0 nova_compute[192079]: 2025-10-02 12:50:23.131 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:23 compute-0 ovn_controller[94336]: 2025-10-02T12:50:23Z|00727|binding|INFO|Claiming lport a47e0808-6910-4418-ad25-108c7168bf02 for this chassis.
Oct 02 12:50:23 compute-0 ovn_controller[94336]: 2025-10-02T12:50:23Z|00728|binding|INFO|a47e0808-6910-4418-ad25-108c7168bf02: Claiming fa:16:3e:a5:a1:66 10.100.0.6
Oct 02 12:50:23 compute-0 nova_compute[192079]: 2025-10-02 12:50:23.135 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:23 compute-0 nova_compute[192079]: 2025-10-02 12:50:23.140 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:23 compute-0 nova_compute[192079]: 2025-10-02 12:50:23.145 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:23 compute-0 systemd-udevd[254420]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:50:23 compute-0 systemd-machined[152150]: New machine qemu-88-instance-000000ba.
Oct 02 12:50:23 compute-0 NetworkManager[51160]: <info>  [1759409423.1818] device (tapa47e0808-69): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Oct 02 12:50:23 compute-0 NetworkManager[51160]: <info>  [1759409423.1834] device (tapa47e0808-69): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Oct 02 12:50:23 compute-0 systemd[1]: Started Virtual Machine qemu-88-instance-000000ba.
Oct 02 12:50:23 compute-0 ovn_controller[94336]: 2025-10-02T12:50:23Z|00729|binding|INFO|Setting lport a47e0808-6910-4418-ad25-108c7168bf02 ovn-installed in OVS
Oct 02 12:50:23 compute-0 nova_compute[192079]: 2025-10-02 12:50:23.200 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:23 compute-0 nova_compute[192079]: 2025-10-02 12:50:23.375 2 DEBUG nova.network.neutron [req-3cace2c1-9d5d-4bd1-a52c-bb8a8c2eb3da req-bd061b61-00b6-4507-be81-809d1625cb22 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Updated VIF entry in instance network info cache for port a47e0808-6910-4418-ad25-108c7168bf02. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:50:23 compute-0 nova_compute[192079]: 2025-10-02 12:50:23.376 2 DEBUG nova.network.neutron [req-3cace2c1-9d5d-4bd1-a52c-bb8a8c2eb3da req-bd061b61-00b6-4507-be81-809d1625cb22 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Updating instance_info_cache with network_info: [{"id": "a47e0808-6910-4418-ad25-108c7168bf02", "address": "fa:16:3e:a5:a1:66", "network": {"id": "c56f578e-f013-4483-b9f2-ee1459896133", "bridge": "br-int", "label": "tempest-network-smoke--1680080003", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa47e0808-69", "ovs_interfaceid": "a47e0808-6910-4418-ad25-108c7168bf02", "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:50:23 compute-0 ovn_controller[94336]: 2025-10-02T12:50:23Z|00730|binding|INFO|Setting lport a47e0808-6910-4418-ad25-108c7168bf02 up in Southbound
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:23.543 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:a5:a1:66 10.100.0.6'], port_security=['fa:16:3e:a5:a1:66 10.100.0.6'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.6/28', 'neutron:device_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-c56f578e-f013-4483-b9f2-ee1459896133', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'neutron:revision_number': '2', 'neutron:security_group_ids': '48a842a9-048b-49fa-aad1-710802b3266f fa097138-2672-47b1-9f70-62839b3b8fff', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=50529df0-c539-4067-a62b-3ef6d48b20aa, chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=a47e0808-6910-4418-ad25-108c7168bf02) old=Port_Binding(chassis=[]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:23.544 103294 INFO neutron.agent.ovn.metadata.agent [-] Port a47e0808-6910-4418-ad25-108c7168bf02 in datapath c56f578e-f013-4483-b9f2-ee1459896133 bound to our chassis
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:23.545 103294 INFO neutron.agent.ovn.metadata.agent [-] Provisioning metadata for network c56f578e-f013-4483-b9f2-ee1459896133
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:23.561 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[445f686b-50f9-4c42-883d-971d6906f6f4]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:23.562 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Creating VETH tapc56f578e-f1 in ovnmeta-c56f578e-f013-4483-b9f2-ee1459896133 namespace provision_datapath /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:665
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:23.564 219793 DEBUG neutron.privileged.agent.linux.ip_lib [-] Interface tapc56f578e-f0 not found in namespace None get_link_id /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:204
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:23.564 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[fdf9c7d0-3748-4278-bbfb-7e2641eef257]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:23.565 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[dbd8e798-b911-4bc9-9fd0-7e1b91188a80]: (4, False) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:23.579 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[76e53942-06b5-4c95-97a2-15031def7089]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:23.603 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f4c1cde4-b479-4fe0-9ae1-84db9ef8d43a]: (4, ('net.ipv4.conf.all.promote_secondaries = 1\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:23.630 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[a2e4d49b-256a-4538-aafa-28049bbbd0f7]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:50:23 compute-0 nova_compute[192079]: 2025-10-02 12:50:23.630 2 DEBUG oslo_concurrency.lockutils [req-3cace2c1-9d5d-4bd1-a52c-bb8a8c2eb3da req-bd061b61-00b6-4507-be81-809d1625cb22 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-d891f4b6-930a-42b0-a95f-a5383e4f5827" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:50:23 compute-0 NetworkManager[51160]: <info>  [1759409423.6391] manager: (tapc56f578e-f0): new Veth device (/org/freedesktop/NetworkManager/Devices/359)
Oct 02 12:50:23 compute-0 systemd-udevd[254423]: Network interface NamePolicy= disabled on kernel command line.
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:23.641 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[45770a0b-c136-4339-bd2f-fa46e732d0ed]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:23.683 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[23218117-40fa-4b38-81bf-6842f1021af4]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:23.687 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[08c207ab-8e75-42af-a354-199724f569fe]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:50:23 compute-0 NetworkManager[51160]: <info>  [1759409423.7196] device (tapc56f578e-f0): carrier: link connected
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:23.727 219859 DEBUG oslo.privsep.daemon [-] privsep: reply[7a25b03a-b222-41e9-8c9e-8c56f58af728]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:23.746 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0da66765-97e0-4f97-8650-593b76a11ebb]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapc56f578e-f1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:6b:73:63'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 227], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 742134, 'reachable_time': 42755, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 254454, 'error': None, 'target': 'ovnmeta-c56f578e-f013-4483-b9f2-ee1459896133', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:23.766 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1d167302-d25d-479b-b16b-3140d1b05108]: (4, ({'family': 10, 'prefixlen': 64, 'flags': 192, 'scope': 253, 'index': 2, 'attrs': [['IFA_ADDRESS', 'fe80::f816:3eff:fe6b:7363'], ['IFA_CACHEINFO', {'ifa_preferred': 4294967295, 'ifa_valid': 4294967295, 'cstamp': 742134, 'tstamp': 742134}], ['IFA_FLAGS', 192]], 'header': {'length': 72, 'type': 20, 'flags': 2, 'sequence_number': 255, 'pid': 254455, 'error': None, 'target': 'ovnmeta-c56f578e-f013-4483-b9f2-ee1459896133', 'stats': (0, 0, 0)}, 'event': 'RTM_NEWADDR'},)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:23.785 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[f1da9e7e-cf9d-4dbf-aa13-d3d96ed02970]: (4, [{'family': 0, '__align': (), 'ifi_type': 1, 'index': 2, 'flags': 69699, 'change': 0, 'attrs': [['IFLA_IFNAME', 'tapc56f578e-f1'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UP'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 1500], ['IFLA_MIN_MTU', 68], ['IFLA_MAX_MTU', 65535], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 8], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 8], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 2], ['IFLA_CARRIER_UP_COUNT', 1], ['IFLA_CARRIER_DOWN_COUNT', 1], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', 'fa:16:3e:6b:73:63'], ['IFLA_BROADCAST', 'ff:ff:ff:ff:ff:ff'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 110, 'tx_bytes': 90, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_LINKINFO', {'attrs': [['IFLA_INFO_KIND', 'veth']]}], ['IFLA_LINK_NETNSID', 0], ['IFLA_LINK', 227], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 0, 'nopolicy': 0, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 742134, 'reachable_time': 42755, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 1500, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 0, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 1, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 1, 'inoctets': 96, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 1, 'outoctets': 76, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 
'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 1, 'outmcastpkts': 1, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 96, 'outmcastoctets': 76, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 1, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 1, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1448, 'type': 16, 'flags': 0, 'sequence_number': 255, 'pid': 254456, 'error': None, 'target': 'ovnmeta-c56f578e-f013-4483-b9f2-ee1459896133', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:23.818 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[9a7817aa-71ae-4378-9a4b-cc94119ed71a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:23.875 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[c3e9c576-164a-439d-9123-43e132e692d4]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:23.876 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapc56f578e-f0, bridge=br-ex, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:23.877 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Transaction caused no change do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:129
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:23.877 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): AddPortCommand(_result=None, bridge=br-int, port=tapc56f578e-f0, may_exist=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:50:23 compute-0 kernel: tapc56f578e-f0: entered promiscuous mode
Oct 02 12:50:23 compute-0 NetworkManager[51160]: <info>  [1759409423.8796] manager: (tapc56f578e-f0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/360)
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:23.881 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Interface, record=tapc56f578e-f0, col_values=(('external_ids', {'iface-id': '14c8642a-f433-48b2-a9ce-dc24a1a84079'}),)) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:50:23 compute-0 ovn_controller[94336]: 2025-10-02T12:50:23Z|00731|binding|INFO|Releasing lport 14c8642a-f433-48b2-a9ce-dc24a1a84079 from this chassis (sb_readonly=0)
Oct 02 12:50:23 compute-0 nova_compute[192079]: 2025-10-02 12:50:23.879 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:23 compute-0 nova_compute[192079]: 2025-10-02 12:50:23.882 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:23 compute-0 nova_compute[192079]: 2025-10-02 12:50:23.894 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:23.894 103294 DEBUG neutron.agent.linux.utils [-] Unable to access /var/lib/neutron/external/pids/c56f578e-f013-4483-b9f2-ee1459896133.pid.haproxy; Error: [Errno 2] No such file or directory: '/var/lib/neutron/external/pids/c56f578e-f013-4483-b9f2-ee1459896133.pid.haproxy' get_value_from_file /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:252
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:23.895 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b7a25b46-f70c-4c76-822b-7f4c444ead07]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:23.896 103294 DEBUG neutron.agent.ovn.metadata.driver [-] haproxy_cfg = 
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: global
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]:     log         /dev/log local0 debug
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]:     log-tag     haproxy-metadata-proxy-c56f578e-f013-4483-b9f2-ee1459896133
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]:     user        root
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]:     group       root
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]:     maxconn     1024
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]:     pidfile     /var/lib/neutron/external/pids/c56f578e-f013-4483-b9f2-ee1459896133.pid.haproxy
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]:     daemon
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: defaults
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]:     log global
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]:     mode http
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]:     option httplog
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]:     option dontlognull
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]:     option http-server-close
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]:     option forwardfor
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]:     retries                 3
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]:     timeout http-request    30s
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]:     timeout connect         30s
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]:     timeout client          32s
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]:     timeout server          32s
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]:     timeout http-keep-alive 30s
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: listen listener
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]:     bind 169.254.169.254:80
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]:     server metadata /var/lib/neutron/metadata_proxy
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]:     http-request add-header X-OVN-Network-ID c56f578e-f013-4483-b9f2-ee1459896133
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]:  create_config_file /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/driver.py:107
Oct 02 12:50:23 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:50:23.897 103294 DEBUG neutron.agent.linux.utils [-] Running command: ['sudo', 'neutron-rootwrap', '/etc/neutron/rootwrap.conf', 'ip', 'netns', 'exec', 'ovnmeta-c56f578e-f013-4483-b9f2-ee1459896133', 'env', 'PROCESS_TAG=haproxy-c56f578e-f013-4483-b9f2-ee1459896133', 'haproxy', '-f', '/var/lib/neutron/ovn-metadata-proxy/c56f578e-f013-4483-b9f2-ee1459896133.conf'] create_process /usr/lib/python3.9/site-packages/neutron/agent/linux/utils.py:84
Oct 02 12:50:24 compute-0 nova_compute[192079]: 2025-10-02 12:50:24.089 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:24 compute-0 podman[254494]: 2025-10-02 12:50:24.313514617 +0000 UTC m=+0.099565288 container create 4795c3a08209c2dd904c52069e126e3251352ae61c0fec6cff675e6a90e7818d (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-c56f578e-f013-4483-b9f2-ee1459896133, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3)
Oct 02 12:50:24 compute-0 podman[254494]: 2025-10-02 12:50:24.238385157 +0000 UTC m=+0.024435828 image pull df4949fbbe269ec91c503c0c2a01f0407aa671cfac804c078bc791d1efed5574 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Oct 02 12:50:24 compute-0 systemd[1]: Started libpod-conmon-4795c3a08209c2dd904c52069e126e3251352ae61c0fec6cff675e6a90e7818d.scope.
Oct 02 12:50:24 compute-0 systemd[1]: Started libcrun container.
Oct 02 12:50:24 compute-0 kernel: xfs filesystem being remounted at /var/lib/containers/storage/overlay/769a952cbbcaee7cb12a1679004257196ee275021cdf6b200922ba961974763c/merged/var/lib/neutron supports timestamps until 2038 (0x7fffffff)
Oct 02 12:50:24 compute-0 podman[254494]: 2025-10-02 12:50:24.41150479 +0000 UTC m=+0.197555471 container init 4795c3a08209c2dd904c52069e126e3251352ae61c0fec6cff675e6a90e7818d (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-c56f578e-f013-4483-b9f2-ee1459896133, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true)
Oct 02 12:50:24 compute-0 podman[254508]: 2025-10-02 12:50:24.415380785 +0000 UTC m=+0.056245785 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=multipathd, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.name=CentOS Stream 9 Base Image, config_id=multipathd, managed_by=edpm_ansible, org.label-schema.vendor=CentOS)
Oct 02 12:50:24 compute-0 podman[254505]: 2025-10-02 12:50:24.420603868 +0000 UTC m=+0.064994454 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.openshift.expose-services=, release=1755695350, com.redhat.component=ubi9-minimal-container, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, maintainer=Red Hat, Inc., io.openshift.tags=minimal rhel9, managed_by=edpm_ansible, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., config_id=edpm, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, name=ubi9-minimal, url=https://catalog.redhat.com/en/search?searchType=containers, architecture=x86_64, build-date=2025-08-20T13:12:41, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., version=9.6, container_name=openstack_network_exporter, distribution-scope=public, io.buildah.version=1.33.7, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, vcs-type=git, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., vendor=Red Hat, Inc., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']})
Oct 02 12:50:24 compute-0 podman[254494]: 2025-10-02 12:50:24.419549529 +0000 UTC m=+0.205600190 container start 4795c3a08209c2dd904c52069e126e3251352ae61c0fec6cff675e6a90e7818d (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-c56f578e-f013-4483-b9f2-ee1459896133, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, org.label-schema.vendor=CentOS)
Oct 02 12:50:24 compute-0 neutron-haproxy-ovnmeta-c56f578e-f013-4483-b9f2-ee1459896133[254519]: [NOTICE]   (254554) : New worker (254556) forked
Oct 02 12:50:24 compute-0 neutron-haproxy-ovnmeta-c56f578e-f013-4483-b9f2-ee1459896133[254519]: [NOTICE]   (254554) : Loading success.
Oct 02 12:50:24 compute-0 nova_compute[192079]: 2025-10-02 12:50:24.452 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759409424.4513438, d891f4b6-930a-42b0-a95f-a5383e4f5827 => Started> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:50:24 compute-0 nova_compute[192079]: 2025-10-02 12:50:24.452 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] VM Started (Lifecycle Event)
Oct 02 12:50:25 compute-0 nova_compute[192079]: 2025-10-02 12:50:25.111 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:50:25 compute-0 nova_compute[192079]: 2025-10-02 12:50:25.116 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759409424.4517965, d891f4b6-930a-42b0-a95f-a5383e4f5827 => Paused> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:50:25 compute-0 nova_compute[192079]: 2025-10-02 12:50:25.117 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] VM Paused (Lifecycle Event)
Oct 02 12:50:25 compute-0 nova_compute[192079]: 2025-10-02 12:50:25.301 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:50:25 compute-0 nova_compute[192079]: 2025-10-02 12:50:25.305 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Synchronizing instance power state after lifecycle event "Paused"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 3 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:50:25 compute-0 nova_compute[192079]: 2025-10-02 12:50:25.565 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:50:25 compute-0 nova_compute[192079]: 2025-10-02 12:50:25.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:50:26 compute-0 nova_compute[192079]: 2025-10-02 12:50:26.568 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:26 compute-0 nova_compute[192079]: 2025-10-02 12:50:26.918 2 DEBUG nova.compute.manager [req-a995a8d5-e613-4c90-b42e-f8f45f6c425a req-bca3bc98-b159-4ea5-8369-2b2be6b86d46 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Received event network-vif-plugged-a47e0808-6910-4418-ad25-108c7168bf02 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:50:26 compute-0 nova_compute[192079]: 2025-10-02 12:50:26.918 2 DEBUG oslo_concurrency.lockutils [req-a995a8d5-e613-4c90-b42e-f8f45f6c425a req-bca3bc98-b159-4ea5-8369-2b2be6b86d46 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "d891f4b6-930a-42b0-a95f-a5383e4f5827-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:50:26 compute-0 nova_compute[192079]: 2025-10-02 12:50:26.919 2 DEBUG oslo_concurrency.lockutils [req-a995a8d5-e613-4c90-b42e-f8f45f6c425a req-bca3bc98-b159-4ea5-8369-2b2be6b86d46 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d891f4b6-930a-42b0-a95f-a5383e4f5827-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:50:26 compute-0 nova_compute[192079]: 2025-10-02 12:50:26.920 2 DEBUG oslo_concurrency.lockutils [req-a995a8d5-e613-4c90-b42e-f8f45f6c425a req-bca3bc98-b159-4ea5-8369-2b2be6b86d46 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d891f4b6-930a-42b0-a95f-a5383e4f5827-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:50:26 compute-0 nova_compute[192079]: 2025-10-02 12:50:26.920 2 DEBUG nova.compute.manager [req-a995a8d5-e613-4c90-b42e-f8f45f6c425a req-bca3bc98-b159-4ea5-8369-2b2be6b86d46 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Processing event network-vif-plugged-a47e0808-6910-4418-ad25-108c7168bf02 _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10808
Oct 02 12:50:26 compute-0 nova_compute[192079]: 2025-10-02 12:50:26.921 2 DEBUG nova.compute.manager [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Instance event wait completed in 2 seconds for network-vif-plugged wait_for_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:577
Oct 02 12:50:26 compute-0 nova_compute[192079]: 2025-10-02 12:50:26.927 2 DEBUG nova.virt.libvirt.driver [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Guest created on hypervisor spawn /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:4417
Oct 02 12:50:26 compute-0 nova_compute[192079]: 2025-10-02 12:50:26.928 2 DEBUG nova.virt.driver [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] Emitting event <LifecycleEvent: 1759409426.9274428, d891f4b6-930a-42b0-a95f-a5383e4f5827 => Resumed> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:50:26 compute-0 nova_compute[192079]: 2025-10-02 12:50:26.928 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] VM Resumed (Lifecycle Event)
Oct 02 12:50:26 compute-0 nova_compute[192079]: 2025-10-02 12:50:26.933 2 INFO nova.virt.libvirt.driver [-] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Instance spawned successfully.
Oct 02 12:50:26 compute-0 nova_compute[192079]: 2025-10-02 12:50:26.934 2 DEBUG nova.virt.libvirt.driver [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Attempting to register defaults for the following image properties: ['hw_cdrom_bus', 'hw_disk_bus', 'hw_input_bus', 'hw_pointer_model', 'hw_video_model', 'hw_vif_model'] _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:917
Oct 02 12:50:26 compute-0 nova_compute[192079]: 2025-10-02 12:50:26.954 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:50:26 compute-0 nova_compute[192079]: 2025-10-02 12:50:26.959 2 DEBUG nova.virt.libvirt.driver [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Found default for hw_cdrom_bus of sata _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:50:26 compute-0 nova_compute[192079]: 2025-10-02 12:50:26.959 2 DEBUG nova.virt.libvirt.driver [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Found default for hw_disk_bus of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:50:26 compute-0 nova_compute[192079]: 2025-10-02 12:50:26.960 2 DEBUG nova.virt.libvirt.driver [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Found default for hw_input_bus of usb _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:50:26 compute-0 nova_compute[192079]: 2025-10-02 12:50:26.960 2 DEBUG nova.virt.libvirt.driver [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Found default for hw_pointer_model of usbtablet _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:50:26 compute-0 nova_compute[192079]: 2025-10-02 12:50:26.960 2 DEBUG nova.virt.libvirt.driver [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Found default for hw_video_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:50:26 compute-0 nova_compute[192079]: 2025-10-02 12:50:26.961 2 DEBUG nova.virt.libvirt.driver [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Found default for hw_vif_model of virtio _register_undefined_instance_details /usr/lib/python3.9/site-packages/nova/virt/libvirt/driver.py:946
Oct 02 12:50:26 compute-0 nova_compute[192079]: 2025-10-02 12:50:26.965 2 DEBUG nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Synchronizing instance power state after lifecycle event "Resumed"; current vm_state: building, current task_state: spawning, current DB power_state: 0, VM power_state: 1 handle_lifecycle_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:1396
Oct 02 12:50:27 compute-0 nova_compute[192079]: 2025-10-02 12:50:27.003 2 INFO nova.compute.manager [None req-64fa6adf-21ac-4719-bd13-d6d663e62047 - - - - - -] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] During sync_power_state the instance has a pending task (spawning). Skip.
Oct 02 12:50:27 compute-0 nova_compute[192079]: 2025-10-02 12:50:27.039 2 INFO nova.compute.manager [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Took 15.58 seconds to spawn the instance on the hypervisor.
Oct 02 12:50:27 compute-0 nova_compute[192079]: 2025-10-02 12:50:27.040 2 DEBUG nova.compute.manager [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:50:27 compute-0 nova_compute[192079]: 2025-10-02 12:50:27.163 2 INFO nova.compute.manager [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Took 16.64 seconds to build instance.
Oct 02 12:50:27 compute-0 nova_compute[192079]: 2025-10-02 12:50:27.330 2 DEBUG oslo_concurrency.lockutils [None req-05e1be2b-c3b5-44c5-8ea8-c7c8438f8b86 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "d891f4b6-930a-42b0-a95f-a5383e4f5827" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 16.892s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:50:29 compute-0 nova_compute[192079]: 2025-10-02 12:50:29.092 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:29 compute-0 nova_compute[192079]: 2025-10-02 12:50:29.109 2 DEBUG nova.compute.manager [req-becd53bf-4da3-4063-84c4-10cafa933bab req-a2b4480c-4746-4a89-9f34-d6da4012493e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Received event network-vif-plugged-a47e0808-6910-4418-ad25-108c7168bf02 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:50:29 compute-0 nova_compute[192079]: 2025-10-02 12:50:29.110 2 DEBUG oslo_concurrency.lockutils [req-becd53bf-4da3-4063-84c4-10cafa933bab req-a2b4480c-4746-4a89-9f34-d6da4012493e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "d891f4b6-930a-42b0-a95f-a5383e4f5827-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:50:29 compute-0 nova_compute[192079]: 2025-10-02 12:50:29.110 2 DEBUG oslo_concurrency.lockutils [req-becd53bf-4da3-4063-84c4-10cafa933bab req-a2b4480c-4746-4a89-9f34-d6da4012493e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d891f4b6-930a-42b0-a95f-a5383e4f5827-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:50:29 compute-0 nova_compute[192079]: 2025-10-02 12:50:29.110 2 DEBUG oslo_concurrency.lockutils [req-becd53bf-4da3-4063-84c4-10cafa933bab req-a2b4480c-4746-4a89-9f34-d6da4012493e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d891f4b6-930a-42b0-a95f-a5383e4f5827-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:50:29 compute-0 nova_compute[192079]: 2025-10-02 12:50:29.111 2 DEBUG nova.compute.manager [req-becd53bf-4da3-4063-84c4-10cafa933bab req-a2b4480c-4746-4a89-9f34-d6da4012493e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] No waiting events found dispatching network-vif-plugged-a47e0808-6910-4418-ad25-108c7168bf02 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:50:29 compute-0 nova_compute[192079]: 2025-10-02 12:50:29.111 2 WARNING nova.compute.manager [req-becd53bf-4da3-4063-84c4-10cafa933bab req-a2b4480c-4746-4a89-9f34-d6da4012493e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Received unexpected event network-vif-plugged-a47e0808-6910-4418-ad25-108c7168bf02 for instance with vm_state active and task_state None.
Oct 02 12:50:31 compute-0 podman[254565]: 2025-10-02 12:50:31.135852048 +0000 UTC m=+0.050686753 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter)
Oct 02 12:50:31 compute-0 podman[254566]: 2025-10-02 12:50:31.142772927 +0000 UTC m=+0.052831292 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, config_id=iscsid, container_name=iscsid, org.label-schema.build-date=20251001, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:50:31 compute-0 nova_compute[192079]: 2025-10-02 12:50:31.574 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:32 compute-0 NetworkManager[51160]: <info>  [1759409432.1657] manager: (patch-br-int-to-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/361)
Oct 02 12:50:32 compute-0 NetworkManager[51160]: <info>  [1759409432.1668] manager: (patch-provnet-9e5e5ef3-dd6f-48b8-9d1f-8f15cf85bb3d-to-br-int): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/362)
Oct 02 12:50:32 compute-0 ovn_controller[94336]: 2025-10-02T12:50:32Z|00732|binding|INFO|Releasing lport 14c8642a-f433-48b2-a9ce-dc24a1a84079 from this chassis (sb_readonly=0)
Oct 02 12:50:32 compute-0 nova_compute[192079]: 2025-10-02 12:50:32.171 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:32 compute-0 ovn_controller[94336]: 2025-10-02T12:50:32Z|00733|binding|INFO|Releasing lport 14c8642a-f433-48b2-a9ce-dc24a1a84079 from this chassis (sb_readonly=0)
Oct 02 12:50:32 compute-0 nova_compute[192079]: 2025-10-02 12:50:32.213 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:32 compute-0 nova_compute[192079]: 2025-10-02 12:50:32.989 2 DEBUG nova.compute.manager [req-ec204446-a157-485d-a368-d43a6b2344b9 req-c725ca84-c50c-4d25-bc04-ab1101925fa5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Received event network-changed-a47e0808-6910-4418-ad25-108c7168bf02 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:50:32 compute-0 nova_compute[192079]: 2025-10-02 12:50:32.989 2 DEBUG nova.compute.manager [req-ec204446-a157-485d-a368-d43a6b2344b9 req-c725ca84-c50c-4d25-bc04-ab1101925fa5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Refreshing instance network info cache due to event network-changed-a47e0808-6910-4418-ad25-108c7168bf02. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:50:32 compute-0 nova_compute[192079]: 2025-10-02 12:50:32.989 2 DEBUG oslo_concurrency.lockutils [req-ec204446-a157-485d-a368-d43a6b2344b9 req-c725ca84-c50c-4d25-bc04-ab1101925fa5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-d891f4b6-930a-42b0-a95f-a5383e4f5827" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:50:32 compute-0 nova_compute[192079]: 2025-10-02 12:50:32.989 2 DEBUG oslo_concurrency.lockutils [req-ec204446-a157-485d-a368-d43a6b2344b9 req-c725ca84-c50c-4d25-bc04-ab1101925fa5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-d891f4b6-930a-42b0-a95f-a5383e4f5827" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:50:32 compute-0 nova_compute[192079]: 2025-10-02 12:50:32.990 2 DEBUG nova.network.neutron [req-ec204446-a157-485d-a368-d43a6b2344b9 req-c725ca84-c50c-4d25-bc04-ab1101925fa5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Refreshing network info cache for port a47e0808-6910-4418-ad25-108c7168bf02 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:50:33 compute-0 nova_compute[192079]: 2025-10-02 12:50:33.811 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._run_pending_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:50:33 compute-0 nova_compute[192079]: 2025-10-02 12:50:33.811 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Cleaning up deleted instances _run_pending_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:11145
Oct 02 12:50:33 compute-0 nova_compute[192079]: 2025-10-02 12:50:33.834 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] There are 0 instances to clean _run_pending_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:11154
Oct 02 12:50:34 compute-0 nova_compute[192079]: 2025-10-02 12:50:34.096 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:34 compute-0 nova_compute[192079]: 2025-10-02 12:50:34.464 2 DEBUG nova.network.neutron [req-ec204446-a157-485d-a368-d43a6b2344b9 req-c725ca84-c50c-4d25-bc04-ab1101925fa5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Updated VIF entry in instance network info cache for port a47e0808-6910-4418-ad25-108c7168bf02. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:50:34 compute-0 nova_compute[192079]: 2025-10-02 12:50:34.465 2 DEBUG nova.network.neutron [req-ec204446-a157-485d-a368-d43a6b2344b9 req-c725ca84-c50c-4d25-bc04-ab1101925fa5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Updating instance_info_cache with network_info: [{"id": "a47e0808-6910-4418-ad25-108c7168bf02", "address": "fa:16:3e:a5:a1:66", "network": {"id": "c56f578e-f013-4483-b9f2-ee1459896133", "bridge": "br-int", "label": "tempest-network-smoke--1680080003", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.214", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa47e0808-69", "ovs_interfaceid": "a47e0808-6910-4418-ad25-108c7168bf02", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:50:34 compute-0 nova_compute[192079]: 2025-10-02 12:50:34.486 2 DEBUG oslo_concurrency.lockutils [req-ec204446-a157-485d-a368-d43a6b2344b9 req-c725ca84-c50c-4d25-bc04-ab1101925fa5 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-d891f4b6-930a-42b0-a95f-a5383e4f5827" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:50:36 compute-0 nova_compute[192079]: 2025-10-02 12:50:36.580 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:39 compute-0 nova_compute[192079]: 2025-10-02 12:50:39.098 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:40 compute-0 podman[254622]: 2025-10-02 12:50:40.150896724 +0000 UTC m=+0.061208141 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, config_id=ovn_metadata_agent, container_name=ovn_metadata_agent, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']})
Oct 02 12:50:40 compute-0 podman[254624]: 2025-10-02 12:50:40.152335533 +0000 UTC m=+0.055540026 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 12:50:40 compute-0 podman[254623]: 2025-10-02 12:50:40.200028314 +0000 UTC m=+0.098098327 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:50:40 compute-0 ovn_controller[94336]: 2025-10-02T12:50:40Z|00087|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:a5:a1:66 10.100.0.6
Oct 02 12:50:40 compute-0 ovn_controller[94336]: 2025-10-02T12:50:40Z|00088|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:a5:a1:66 10.100.0.6
Oct 02 12:50:41 compute-0 nova_compute[192079]: 2025-10-02 12:50:41.583 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:44 compute-0 nova_compute[192079]: 2025-10-02 12:50:44.100 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:46 compute-0 nova_compute[192079]: 2025-10-02 12:50:46.587 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:48 compute-0 podman[254692]: 2025-10-02 12:50:48.130565426 +0000 UTC m=+0.048437603 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, config_id=edpm, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, org.label-schema.build-date=20251001, 
org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:50:49 compute-0 nova_compute[192079]: 2025-10-02 12:50:49.103 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:51 compute-0 nova_compute[192079]: 2025-10-02 12:50:51.592 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:51 compute-0 nova_compute[192079]: 2025-10-02 12:50:51.683 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:50:52 compute-0 nova_compute[192079]: 2025-10-02 12:50:52.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:50:54 compute-0 nova_compute[192079]: 2025-10-02 12:50:54.104 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:54 compute-0 nova_compute[192079]: 2025-10-02 12:50:54.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:50:55 compute-0 podman[254713]: 2025-10-02 12:50:55.136119323 +0000 UTC m=+0.052830082 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, container_name=multipathd, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=multipathd, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2)
Oct 02 12:50:55 compute-0 podman[254712]: 2025-10-02 12:50:55.171101368 +0000 UTC m=+0.087694994 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, build-date=2025-08-20T13:12:41, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., version=9.6, container_name=openstack_network_exporter, com.redhat.component=ubi9-minimal-container, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, maintainer=Red Hat, Inc., release=1755695350, architecture=x86_64, distribution-scope=public, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', 
'/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, vendor=Red Hat, Inc., config_id=edpm, managed_by=edpm_ansible, name=ubi9-minimal, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.expose-services=, io.buildah.version=1.33.7, io.openshift.tags=minimal rhel9, url=https://catalog.redhat.com/en/search?searchType=containers, vcs-type=git)
Oct 02 12:50:55 compute-0 nova_compute[192079]: 2025-10-02 12:50:55.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:50:55 compute-0 nova_compute[192079]: 2025-10-02 12:50:55.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:50:55 compute-0 nova_compute[192079]: 2025-10-02 12:50:55.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:50:56 compute-0 nova_compute[192079]: 2025-10-02 12:50:56.596 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:56 compute-0 nova_compute[192079]: 2025-10-02 12:50:56.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:50:59 compute-0 nova_compute[192079]: 2025-10-02 12:50:59.157 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:50:59 compute-0 nova_compute[192079]: 2025-10-02 12:50:59.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:50:59 compute-0 nova_compute[192079]: 2025-10-02 12:50:59.709 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:50:59 compute-0 nova_compute[192079]: 2025-10-02 12:50:59.709 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:50:59 compute-0 nova_compute[192079]: 2025-10-02 12:50:59.709 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:50:59 compute-0 nova_compute[192079]: 2025-10-02 12:50:59.710 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:51:01 compute-0 nova_compute[192079]: 2025-10-02 12:51:01.600 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:02 compute-0 podman[254751]: 2025-10-02 12:51:02.137043907 +0000 UTC m=+0.049377087 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:51:02 compute-0 podman[254752]: 2025-10-02 12:51:02.137178321 +0000 UTC m=+0.048503654 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=iscsid, io.buildah.version=1.41.3, org.label-schema.build-date=20251001)
Oct 02 12:51:02 compute-0 ovn_controller[94336]: 2025-10-02T12:51:02Z|00734|memory_trim|INFO|Detected inactivity (last active 30006 ms ago): trimming memory
Oct 02 12:51:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:51:02.259 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:51:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:51:02.259 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:51:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:51:02.260 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:51:04 compute-0 nova_compute[192079]: 2025-10-02 12:51:04.038 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/d891f4b6-930a-42b0-a95f-a5383e4f5827/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:51:04 compute-0 nova_compute[192079]: 2025-10-02 12:51:04.106 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/d891f4b6-930a-42b0-a95f-a5383e4f5827/disk --force-share --output=json" returned: 0 in 0.067s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:51:04 compute-0 nova_compute[192079]: 2025-10-02 12:51:04.107 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running cmd (subprocess): /usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/d891f4b6-930a-42b0-a95f-a5383e4f5827/disk --force-share --output=json execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:51:04 compute-0 nova_compute[192079]: 2025-10-02 12:51:04.159 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:04 compute-0 nova_compute[192079]: 2025-10-02 12:51:04.166 2 DEBUG oslo_concurrency.processutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CMD "/usr/bin/python3 -m oslo_concurrency.prlimit --as=1073741824 --cpu=30 -- env LC_ALL=C LANG=C qemu-img info /var/lib/nova/instances/d891f4b6-930a-42b0-a95f-a5383e4f5827/disk --force-share --output=json" returned: 0 in 0.060s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:51:04 compute-0 nova_compute[192079]: 2025-10-02 12:51:04.318 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:51:04 compute-0 nova_compute[192079]: 2025-10-02 12:51:04.319 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5564MB free_disk=73.24244689941406GB free_vcpus=7 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:51:04 compute-0 nova_compute[192079]: 2025-10-02 12:51:04.319 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:51:04 compute-0 nova_compute[192079]: 2025-10-02 12:51:04.319 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:51:04 compute-0 nova_compute[192079]: 2025-10-02 12:51:04.515 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Instance d891f4b6-930a-42b0-a95f-a5383e4f5827 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. _remove_deleted_instances_allocations /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1635
Oct 02 12:51:04 compute-0 nova_compute[192079]: 2025-10-02 12:51:04.515 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 1 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:51:04 compute-0 nova_compute[192079]: 2025-10-02 12:51:04.516 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=640MB phys_disk=79GB used_disk=1GB total_vcpus=8 used_vcpus=1 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:51:04 compute-0 nova_compute[192079]: 2025-10-02 12:51:04.561 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:51:04 compute-0 nova_compute[192079]: 2025-10-02 12:51:04.657 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:51:04 compute-0 nova_compute[192079]: 2025-10-02 12:51:04.930 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:51:04 compute-0 nova_compute[192079]: 2025-10-02 12:51:04.931 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.611s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:51:06 compute-0 nova_compute[192079]: 2025-10-02 12:51:06.602 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:09 compute-0 nova_compute[192079]: 2025-10-02 12:51:09.161 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:09 compute-0 nova_compute[192079]: 2025-10-02 12:51:09.932 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:51:10 compute-0 nova_compute[192079]: 2025-10-02 12:51:10.178 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:51:10 compute-0 nova_compute[192079]: 2025-10-02 12:51:10.179 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:51:10 compute-0 nova_compute[192079]: 2025-10-02 12:51:10.179 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:51:10 compute-0 nova_compute[192079]: 2025-10-02 12:51:10.451 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "refresh_cache-d891f4b6-930a-42b0-a95f-a5383e4f5827" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:51:10 compute-0 nova_compute[192079]: 2025-10-02 12:51:10.451 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquired lock "refresh_cache-d891f4b6-930a-42b0-a95f-a5383e4f5827" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:51:10 compute-0 nova_compute[192079]: 2025-10-02 12:51:10.451 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Forcefully refreshing network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2004
Oct 02 12:51:10 compute-0 nova_compute[192079]: 2025-10-02 12:51:10.451 2 DEBUG nova.objects.instance [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lazy-loading 'info_cache' on Instance uuid d891f4b6-930a-42b0-a95f-a5383e4f5827 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:51:11 compute-0 podman[254800]: 2025-10-02 12:51:11.145778481 +0000 UTC m=+0.059703410 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, container_name=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, org.label-schema.license=GPLv2)
Oct 02 12:51:11 compute-0 podman[254802]: 2025-10-02 12:51:11.152099843 +0000 UTC m=+0.056906053 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:51:11 compute-0 podman[254801]: 2025-10-02 12:51:11.168408398 +0000 UTC m=+0.078753899 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, container_name=ovn_controller, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, config_id=ovn_controller, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:51:11 compute-0 nova_compute[192079]: 2025-10-02 12:51:11.605 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:12 compute-0 nova_compute[192079]: 2025-10-02 12:51:12.796 2 DEBUG nova.network.neutron [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Updating instance_info_cache with network_info: [{"id": "a47e0808-6910-4418-ad25-108c7168bf02", "address": "fa:16:3e:a5:a1:66", "network": {"id": "c56f578e-f013-4483-b9f2-ee1459896133", "bridge": "br-int", "label": "tempest-network-smoke--1680080003", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.214", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa47e0808-69", "ovs_interfaceid": "a47e0808-6910-4418-ad25-108c7168bf02", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:51:12 compute-0 nova_compute[192079]: 2025-10-02 12:51:12.875 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Releasing lock "refresh_cache-d891f4b6-930a-42b0-a95f-a5383e4f5827" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:51:12 compute-0 nova_compute[192079]: 2025-10-02 12:51:12.876 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Updated the network info_cache for instance _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9929
Oct 02 12:51:12 compute-0 nova_compute[192079]: 2025-10-02 12:51:12.876 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:51:14 compute-0 nova_compute[192079]: 2025-10-02 12:51:14.163 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:16 compute-0 nova_compute[192079]: 2025-10-02 12:51:16.609 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.116 12 DEBUG ceilometer.compute.discovery [-] instance data: {'id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'os_type': 'hvm', 'architecture': 'x86_64', 'OS-EXT-SRV-ATTR:instance_name': 'instance-000000ba', 'OS-EXT-SRV-ATTR:host': 'compute-0.ctlplane.example.com', 'OS-EXT-STS:vm_state': 'running', 'tenant_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'hostId': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'status': 'active', 'metadata': {}} discover_libvirt_polling /usr/lib/python3.9/site-packages/ceilometer/compute/discovery.py:228
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.117 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes.delta in the context of pollsters
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.120 12 DEBUG ceilometer.compute.virt.libvirt.inspector [-] No delta meter predecessor for d891f4b6-930a-42b0-a95f-a5383e4f5827 / tapa47e0808-69 inspect_vnics /usr/lib/python3.9/site-packages/ceilometer/compute/virt/libvirt/inspector.py:136
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.120 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/network.outgoing.bytes.delta volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '76ed7291-7022-47b9-bf71-b554b3df2ba5', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.bytes.delta', 'counter_type': 'delta', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'instance-000000ba-d891f4b6-930a-42b0-a95f-a5383e4f5827-tapa47e0808-69', 'timestamp': '2025-10-02T12:51:17.117464', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'tapa47e0808-69', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:a5:a1:66', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapa47e0808-69'}, 'message_id': '7c7cd426-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.804519346, 'message_signature': '2db6be7fffe70a76477b0ca02ab325490af5778b3bffd06b681fe66ee7b75962'}]}, 'timestamp': '2025-10-02 12:51:17.121634', '_unique_id': '93e9599b72a34845955cd7b0ef4ccf81'}: 
kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.123 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.125 12 INFO ceilometer.polling.manager [-] Polling pollster memory.usage in the context of pollsters
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.147 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/memory.usage volume: 46.75 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '6bf70d6c-c718-4c56-aca9-0b193aae968d', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'memory.usage', 'counter_type': 'gauge', 'counter_unit': 'MB', 'counter_volume': 46.75, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'timestamp': '2025-10-02T12:51:17.125852', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'instance-000000ba', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1}, 'message_id': '7c80d1ca-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.834091342, 'message_signature': 'e0a534beeff07ae95d0d4a2d7aa04022f9ea79ab94ac724a1ee2b8298b77f673'}]}, 'timestamp': '2025-10-02 12:51:17.147670', '_unique_id': 'e258d3e106f24429827ac5ab6be5bba7'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.148 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.149 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.capacity in the context of pollsters
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.163 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/disk.device.capacity volume: 1073741824 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.163 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/disk.device.capacity volume: 485376 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '0a35f337-fa78-4325-8691-cc706c24320b', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 1073741824, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827-vda', 'timestamp': '2025-10-02T12:51:17.149370', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'instance-000000ba', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '7c834dd8-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.836430896, 'message_signature': '2b3875db5e6c4be253f3e54a67c1d02d3d136f3e4fe758b00a5598712e916593'}, {'source': 'openstack', 'counter_name': 'disk.device.capacity', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 485376, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 
'project_name': None, 'resource_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827-sda', 'timestamp': '2025-10-02T12:51:17.149370', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'instance-000000ba', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '7c83583c-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.836430896, 'message_signature': 'e7898a6ba58a14f1355d3d6ed9d3fcc1af94d4ddaa5cad76d97c7db225f33ac3'}]}, 'timestamp': '2025-10-02 12:51:17.164151', '_unique_id': 'e7bb7fca0b76475eb008b7ee78b11cc1'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.164 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.165 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes in the context of pollsters
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.165 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/network.incoming.bytes volume: 12776 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '81c6f943-8719-442a-9055-25adb3a559ef', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 12776, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'instance-000000ba-d891f4b6-930a-42b0-a95f-a5383e4f5827-tapa47e0808-69', 'timestamp': '2025-10-02T12:51:17.165653', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'tapa47e0808-69', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:a5:a1:66', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapa47e0808-69'}, 'message_id': '7c839c52-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.804519346, 'message_signature': 'f5bc129accdb2564b2527ec59aee34a6d0a4a52b116a8522f2528dc4d381b9e7'}]}, 'timestamp': '2025-10-02 12:51:17.165891', '_unique_id': '7a3ef703f09f41c89fe1ba8833c4bbab'}: 
kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.166 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.167 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.latency in the context of pollsters
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.189 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/disk.device.write.latency volume: 3584689423 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.189 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/disk.device.write.latency volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '8c445a77-0660-49d9-82d7-4dc4fd30b7f0', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 3584689423, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827-vda', 'timestamp': '2025-10-02T12:51:17.167350', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'instance-000000ba', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '7c87428a-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.854416257, 'message_signature': '2266474314ec8ea1d377942e98f897887d833d949fe83e5031fc4304229e5da2'}, {'source': 'openstack', 'counter_name': 'disk.device.write.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 0, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': 
'575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827-sda', 'timestamp': '2025-10-02T12:51:17.167350', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'instance-000000ba', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '7c874d2a-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.854416257, 'message_signature': '29308ab6178af95695377245a01f05fd109bb40c6487d1878b3679bd4a258c54'}]}, 'timestamp': '2025-10-02 12:51:17.190104', '_unique_id': 'd207dacad2ea4cca933afb2bb0054c9d'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.190 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.191 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.requests in the context of pollsters
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.191 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/disk.device.read.requests volume: 1114 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.191 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/disk.device.read.requests volume: 108 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'efacbfe2-e948-42c5-a774-dfc03c76726b', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 1114, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827-vda', 'timestamp': '2025-10-02T12:51:17.191637', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'instance-000000ba', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '7c879406-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.854416257, 'message_signature': '460c1cd855258791a6d4a4b6f5d39f847b0eb55939e6688dca5d67b3df4a626e'}, {'source': 'openstack', 'counter_name': 'disk.device.read.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 108, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': 
'575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827-sda', 'timestamp': '2025-10-02T12:51:17.191637', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'instance-000000ba', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '7c879f78-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.854416257, 'message_signature': '6808dbda658b1d1c25ae3682fe5e3fdab387a89b0da47c9f1ac6989fe10c25c1'}]}, 'timestamp': '2025-10-02 12:51:17.192209', '_unique_id': '5ce3e57e61954517b6ad82982fa7366a'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.192 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.193 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets in the context of pollsters
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.193 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/network.incoming.packets volume: 67 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'a987447e-2944-4460-bc5e-5ce4b5b35781', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 67, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'instance-000000ba-d891f4b6-930a-42b0-a95f-a5383e4f5827-tapa47e0808-69', 'timestamp': '2025-10-02T12:51:17.193429', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'tapa47e0808-69', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:a5:a1:66', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapa47e0808-69'}, 'message_id': '7c87d9a2-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.804519346, 'message_signature': 'c96feb6d516f65e5feaef945aaf85331c820f8abe5a9f3b7d2e470b2883ed885'}]}, 'timestamp': '2025-10-02 12:51:17.193682', '_unique_id': '1e0f8e83ae15476996566c66ed317102'}: 
kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.latency in the context of pollsters
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/disk.device.read.latency volume: 789296289 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.194 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/disk.device.read.latency volume: 178090097 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'b9b5f34d-e0ef-40b1-8ca8-d14a300105bd', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 789296289, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827-vda', 'timestamp': '2025-10-02T12:51:17.194729', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'instance-000000ba', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '7c880ba2-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.854416257, 'message_signature': '05750adbad3d00098206991d82025e576aefb9f62afca4d4ac2637f0a9d610fe'}, {'source': 'openstack', 'counter_name': 'disk.device.read.latency', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 178090097, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': 
'575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827-sda', 'timestamp': '2025-10-02T12:51:17.194729', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'instance-000000ba', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '7c88141c-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.854416257, 'message_signature': '1828abd9298928b461a4b7b09d006054394724576cac4236bf96c0023a7b6013'}]}, 'timestamp': '2025-10-02 12:51:17.195174', '_unique_id': 'ac5bb4cde72549e79e7234181268be99'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.195 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.196 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes.delta in the context of pollsters
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.196 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/network.incoming.bytes.delta volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '1d24e776-ddfd-4319-bb07-8c19e4b0c5e1', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.bytes.delta', 'counter_type': 'delta', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'instance-000000ba-d891f4b6-930a-42b0-a95f-a5383e4f5827-tapa47e0808-69', 'timestamp': '2025-10-02T12:51:17.196337', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'tapa47e0808-69', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:a5:a1:66', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapa47e0808-69'}, 'message_id': '7c884aa4-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.804519346, 'message_signature': 'bbdcc774362643dae7dee67c4c3aefc7893075e5ca9f715b9c517cecaea97a30'}]}, 'timestamp': '2025-10-02 12:51:17.196610', '_unique_id': 'a87298561f93435f862b3316a9f166ff'}: 
kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.requests in the context of pollsters
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.197 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/disk.device.write.requests volume: 308 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/disk.device.write.requests volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '6eac89d9-0c2c-4d7c-b00a-bcc59d8d71d3', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 308, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827-vda', 'timestamp': '2025-10-02T12:51:17.197840', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'instance-000000ba', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '7c88851e-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.854416257, 'message_signature': 'c9514e93cc05ff2b61fdfb456ce3d3f55efec73dc7ea8d02a6ae06d7559445da'}, {'source': 'openstack', 'counter_name': 'disk.device.write.requests', 'counter_type': 'cumulative', 'counter_unit': 'request', 'counter_volume': 0, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': 
'575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827-sda', 'timestamp': '2025-10-02T12:51:17.197840', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'instance-000000ba', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '7c888dc0-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.854416257, 'message_signature': '94a8e0057050aee5fde8cae93ddd61b9a3080ccd7157a1ad10dfea04b08c565a'}]}, 'timestamp': '2025-10-02 12:51:17.198269', '_unique_id': '34b6de9b9b9946cb9257547e57e76e04'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.198 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.199 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets.drop in the context of pollsters
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.199 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/network.outgoing.packets.drop volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'be2f3ed6-639a-416f-be13-5391eba680ad', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets.drop', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'instance-000000ba-d891f4b6-930a-42b0-a95f-a5383e4f5827-tapa47e0808-69', 'timestamp': '2025-10-02T12:51:17.199330', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'tapa47e0808-69', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:a5:a1:66', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapa47e0808-69'}, 'message_id': '7c88bf70-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.804519346, 'message_signature': 'f2d05fc91a8278ef76c14655b4a091a23e0d5b4a5673288704b602b7d73d3923'}]}, 'timestamp': '2025-10-02 12:51:17.199556', '_unique_id': '52bcb6d855ba490bb33126701b52438c'}: 
kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets in the context of pollsters
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.200 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/network.outgoing.packets volume: 69 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '4b80d770-05e1-450d-a2cc-327d263b59c4', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 69, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'instance-000000ba-d891f4b6-930a-42b0-a95f-a5383e4f5827-tapa47e0808-69', 'timestamp': '2025-10-02T12:51:17.200603', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'tapa47e0808-69', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:a5:a1:66', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapa47e0808-69'}, 'message_id': '7c88f10c-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.804519346, 'message_signature': 'bc560a2480aba8966a7fa72ca43ee5716d855cc4a54cc651f8de2a1b1bef2b3b'}]}, 'timestamp': '2025-10-02 12:51:17.200825', '_unique_id': '77386200be894343a77666e2a8e9dbe1'}: 
kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets.drop in the context of pollsters
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.201 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/network.incoming.packets.drop volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '8ead7669-bdfb-4998-ab52-c938f3127913', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets.drop', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'instance-000000ba-d891f4b6-930a-42b0-a95f-a5383e4f5827-tapa47e0808-69', 'timestamp': '2025-10-02T12:51:17.201864', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'tapa47e0808-69', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:a5:a1:66', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapa47e0808-69'}, 'message_id': '7c892244-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.804519346, 'message_signature': '19e72f10677d01f997942674c35d3171ecf0adb0d1f173d9d1fd6780760cb131'}]}, 'timestamp': '2025-10-02 12:51:17.202105', '_unique_id': '3b6eabea8e5545dba24934b5dcaa6a2c'}: 
kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.202 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 INFO ceilometer.polling.manager [-] Polling pollster cpu in the context of pollsters
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/cpu volume: 11970000000 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'f182a6f0-fde6-4547-b63c-89b72e30c6d6', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'cpu', 'counter_type': 'cumulative', 'counter_unit': 'ns', 'counter_volume': 11970000000, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'timestamp': '2025-10-02T12:51:17.203211', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'instance-000000ba', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'cpu_number': 1}, 'message_id': '7c8956ec-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.834091342, 'message_signature': '59670d7f3ae87be0ad3d66b95ee1586de21516c5bc32899999a981f0783cd81c'}]}, 'timestamp': '2025-10-02 12:51:17.203455', '_unique_id': '4ba1c15c43c44d9fabe88aab21ecf787'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.203 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.204 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes in the context of pollsters
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.204 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/network.outgoing.bytes volume: 9590 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'ca6fb288-7bcf-445f-ade7-b9c7cbe44ded', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 9590, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'instance-000000ba-d891f4b6-930a-42b0-a95f-a5383e4f5827-tapa47e0808-69', 'timestamp': '2025-10-02T12:51:17.204607', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'tapa47e0808-69', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:a5:a1:66', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapa47e0808-69'}, 'message_id': '7c898d88-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.804519346, 'message_signature': 'eb52e2ee3c4549a214ffa1cc16b8d5859fe771eb2fde47c9fe3099b0fa1ed5e6'}]}, 'timestamp': '2025-10-02 12:51:17.204831', '_unique_id': '4fb0f04e1bf1418ca2f498d16712008f'}: 
kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.latency in the context of pollsters
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.205 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for PerDeviceDiskLatencyPollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.206 12 ERROR ceilometer.polling.manager [-] Prevent pollster disk.device.latency from polling [<NovaLikeServer: tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549>]
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.206 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.allocation in the context of pollsters
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.206 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/disk.device.allocation volume: 30679040 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.206 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/disk.device.allocation volume: 487424 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '9f965ac8-c12c-4e7d-893e-0504571f1b1c', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 30679040, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827-vda', 'timestamp': '2025-10-02T12:51:17.206233', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'instance-000000ba', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '7c89cd0c-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.836430896, 'message_signature': 'bf9df150a9f1e59922d14652a37cd3a166ccd3ba4421656eaefeb5e207439667'}, {'source': 'openstack', 'counter_name': 'disk.device.allocation', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 487424, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 
'project_name': None, 'resource_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827-sda', 'timestamp': '2025-10-02T12:51:17.206233', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'instance-000000ba', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '7c89d4c8-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.836430896, 'message_signature': '5f191e0326cd9111e2ba8d365080725deb887f6c521a01e2b8733e863e3c7f32'}]}, 'timestamp': '2025-10-02 12:51:17.206642', '_unique_id': 'eb5060adb0bb479e86a204f1aef0bea8'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.bytes.rate in the context of pollsters
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for IncomingBytesRatePollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.207 12 ERROR ceilometer.polling.manager [-] Prevent pollster network.incoming.bytes.rate from polling [<NovaLikeServer: tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549>]
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.208 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.iops in the context of pollsters
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.208 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for PerDeviceDiskIOPSPollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.208 12 ERROR ceilometer.polling.manager [-] Prevent pollster disk.device.iops from polling [<NovaLikeServer: tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549>]
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.208 12 INFO ceilometer.polling.manager [-] Polling pollster network.incoming.packets.error in the context of pollsters
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.208 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/network.incoming.packets.error volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '4854b01b-04d9-41d6-88f9-b57c24d7a53e', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.incoming.packets.error', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'instance-000000ba-d891f4b6-930a-42b0-a95f-a5383e4f5827-tapa47e0808-69', 'timestamp': '2025-10-02T12:51:17.208619', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'tapa47e0808-69', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:a5:a1:66', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapa47e0808-69'}, 'message_id': '7c8a2a72-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.804519346, 'message_signature': '36e7cd4e4732175b669ee46e186bbe8fdca714a15e18195412f7154da52b6de3'}]}, 'timestamp': '2025-10-02 12:51:17.208847', '_unique_id': '9ac6f8ef62014fd09701e766d8faa7c3'}: 
kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.usage in the context of pollsters
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.209 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/disk.device.usage volume: 30015488 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/disk.device.usage volume: 485376 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '02d2cb1b-7742-4565-860a-bd574d6f03c9', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 30015488, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827-vda', 'timestamp': '2025-10-02T12:51:17.209870', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'instance-000000ba', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '7c8a5ae2-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.836430896, 'message_signature': 'ef9225d1a5d57ea728ce5877108e604387dfdafcf4858e2ffc60efc050250329'}, {'source': 'openstack', 'counter_name': 'disk.device.usage', 'counter_type': 'gauge', 'counter_unit': 'B', 'counter_volume': 485376, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 
'project_name': None, 'resource_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827-sda', 'timestamp': '2025-10-02T12:51:17.209870', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'instance-000000ba', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '7c8a63f2-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.836430896, 'message_signature': '84ef235ee3fcb918cf7bde004fc24965d71d6727743c47dbb1992e6607a9e450'}]}, 'timestamp': '2025-10-02 12:51:17.210313', '_unique_id': '29e136426daf4bf7b3aa7d01a8d4238c'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.210 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.211 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.read.bytes in the context of pollsters
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.211 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/disk.device.read.bytes volume: 30820864 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.211 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/disk.device.read.bytes volume: 274750 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'e2b31cf0-2b52-4cef-9557-4ca46b769bea', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 30820864, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827-vda', 'timestamp': '2025-10-02T12:51:17.211539', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'instance-000000ba', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '7c8a9c28-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.854416257, 'message_signature': 'bcac9b41c3c0e5ac7842c11b866162e1821b39fceefbc26652c0fb05a9981158'}, {'source': 'openstack', 'counter_name': 'disk.device.read.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 274750, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': 
'575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827-sda', 'timestamp': '2025-10-02T12:51:17.211539', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'instance-000000ba', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '7c8aa498-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.854416257, 'message_signature': 'ec1f9943a22661c24ddbfe5db1f469de8a3e3237b209a0b032745ea8148975bc'}]}, 'timestamp': '2025-10-02 12:51:17.211967', '_unique_id': 'a152aa9770de41a8844e10ca95c3fdea'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.212 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.packets.error in the context of pollsters
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/network.outgoing.packets.error volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': '6174e9f6-1574-425d-8d86-25fea9667595', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'network.outgoing.packets.error', 'counter_type': 'cumulative', 'counter_unit': 'packet', 'counter_volume': 0, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'instance-000000ba-d891f4b6-930a-42b0-a95f-a5383e4f5827-tapa47e0808-69', 'timestamp': '2025-10-02T12:51:17.213105', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'tapa47e0808-69', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'mac': 'fa:16:3e:a5:a1:66', 'fref': None, 'parameters': {'interfaceid': None, 'bridge': None}, 'vnic_name': 'tapa47e0808-69'}, 'message_id': '7c8ada30-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.804519346, 'message_signature': '0c991b3dfdf582b9fd4651a83f3a709a640696935d5a494ed8645fdf77abf69d'}]}, 'timestamp': '2025-10-02 12:51:17.213347', '_unique_id': 'aa68121f6613445885959d63f5ee42f5'}: 
kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.213 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.214 12 INFO ceilometer.polling.manager [-] Polling pollster disk.device.write.bytes in the context of pollsters
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.214 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/disk.device.write.bytes volume: 72994816 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.214 12 DEBUG ceilometer.compute.pollsters [-] d891f4b6-930a-42b0-a95f-a5383e4f5827/disk.device.write.bytes volume: 0 _stats_to_sample /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:108
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging [-] Could not send notification to notifications. Payload={'message_id': 'e7995add-da0b-401d-b5fd-4bd8af00d0de', 'publisher_id': 'ceilometer.polling', 'event_type': 'telemetry.polling', 'priority': 'SAMPLE', 'payload': {'samples': [{'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 72994816, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827-vda', 'timestamp': '2025-10-02T12:51:17.214412', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'instance-000000ba', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'vda'}, 'message_id': '7c8b0c62-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.854416257, 'message_signature': '0f5fdacb52667c652b324c8b08a20d09ffb688c297a0ebacce303f1f5821c7d1'}, {'source': 'openstack', 'counter_name': 'disk.device.write.bytes', 'counter_type': 'cumulative', 'counter_unit': 'B', 'counter_volume': 0, 'user_id': '2d2b4a2da57543ef88e44ae28ad61647', 'user_name': None, 'project_id': 
'575f3d227ab24f2daa62e65e14a4cd9c', 'project_name': None, 'resource_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827-sda', 'timestamp': '2025-10-02T12:51:17.214412', 'resource_metadata': {'display_name': 'tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549', 'name': 'instance-000000ba', 'instance_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'instance_type': 'm1.nano', 'host': 'c8719e0ed6521fbac8ac703b97d00c2c9a9a0a93333201216490086f', 'instance_host': 'compute-0.ctlplane.example.com', 'flavor': {'id': '9ac83da7-f31e-4467-8569-d28002f6aeed', 'name': 'm1.nano', 'vcpus': 1, 'ram': 128, 'disk': 1, 'ephemeral': 0, 'swap': 0}, 'status': 'active', 'state': 'running', 'task_state': '', 'image': {'id': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2'}, 'image_ref': 'cf60d86d-f1d5-4be4-976e-7488dbdcf0b2', 'image_ref_url': None, 'architecture': 'x86_64', 'os_type': 'hvm', 'vcpus': 1, 'memory_mb': 128, 'disk_gb': 1, 'ephemeral_gb': 0, 'root_gb': 1, 'disk_name': 'sda'}, 'message_id': '7c8b13e2-9f8e-11f0-af18-fa163efc5e78', 'monotonic_time': 7474.854416257, 'message_signature': '06fcb7638baf7b45766e41f506b764d167749aec40067295e6b99bae87771ef2'}]}, 'timestamp': '2025-10-02 12:51:17.214808', '_unique_id': 'b03e5305de564a629a763946d6d82c10'}: kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 446, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging     yield
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/utils/functional.py", line 312, in retry_over_time
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging     return fun(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 877, in _connection_factory
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging     self._connection = self._establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 812, in _establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging     conn = self.transport.establish_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/transport/pyamqp.py", line 201, in establish_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging     conn.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/connection.py", line 323, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging     self.transport.connect()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 129, in connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging     self._connect(self.host, self.port, self.connect_timeout)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/amqp/transport.py", line 184, in _connect
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging     self.sock.connect(sa)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging ConnectionRefusedError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging The above exception was the direct cause of the following exception:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging Traceback (most recent call last):
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/notify/messaging.py", line 78, in notify
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging     self.transport._send_notification(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/transport.py", line 134, in _send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging     self._driver.send_notification(target, ctxt, message, version,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 694, in send_notification
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging     return self._send(target, ctxt, message,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 653, in _send
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging     with self._get_connection(rpc_common.PURPOSE_SEND, retry) as conn:
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 605, in _get_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging     return rpc_common.ConnectionContext(self._connection_pool,
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/common.py", line 423, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging     self.connection = connection_pool.get(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 98, in get
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging     return self.create(retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/pool.py", line 135, in create
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging     return self.connection_cls(self.conf, self.url, purpose, retry=retry)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 826, in __init__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging     self.ensure_connection()
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/oslo_messaging/_drivers/impl_rabbit.py", line 957, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging     self.connection.ensure_connection(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 ERROR oslo_messaging.notify.messaging 
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 INFO ceilometer.polling.manager [-] Polling pollster network.outgoing.bytes.rate in the context of pollsters
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.215 12 DEBUG ceilometer.compute.pollsters [-] LibvirtInspector does not provide data for OutgoingBytesRatePollster get_samples /usr/lib/python3.9/site-packages/ceilometer/compute/pollsters/__init__.py:163
Oct 02 12:51:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:51:17.216 12 ERROR ceilometer.polling.manager [-] Prevent pollster network.outgoing.bytes.rate from polling [<NovaLikeServer: tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549>] on source pollsters anymore!: ceilometer.polling.plugin_base.PollsterPermanentError: [<NovaLikeServer: tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549>]
Oct 02 12:51:19 compute-0 nova_compute[192079]: 2025-10-02 12:51:19.165 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:19 compute-0 podman[254867]: 2025-10-02 12:51:19.176707621 +0000 UTC m=+0.076507587 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, container_name=ceilometer_agent_compute, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes 
Operator team, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 12:51:21 compute-0 nova_compute[192079]: 2025-10-02 12:51:21.612 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:24 compute-0 nova_compute[192079]: 2025-10-02 12:51:24.169 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:26 compute-0 podman[254888]: 2025-10-02 12:51:26.154546905 +0000 UTC m=+0.064151581 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, distribution-scope=public, io.buildah.version=1.33.7, url=https://catalog.redhat.com/en/search?searchType=containers, vcs-type=git, vendor=Red Hat, Inc., version=9.6, io.openshift.tags=minimal rhel9, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, managed_by=edpm_ansible, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, architecture=x86_64, com.redhat.component=ubi9-minimal-container, container_name=openstack_network_exporter, io.openshift.expose-services=, maintainer=Red Hat, Inc., config_id=edpm, release=1755695350, build-date=2025-08-20T13:12:41, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., name=ubi9-minimal)
Oct 02 12:51:26 compute-0 podman[254889]: 2025-10-02 12:51:26.162931174 +0000 UTC m=+0.069206719 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, container_name=multipathd, org.label-schema.vendor=CentOS, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:51:26 compute-0 nova_compute[192079]: 2025-10-02 12:51:26.615 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:29 compute-0 nova_compute[192079]: 2025-10-02 12:51:29.171 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:31 compute-0 nova_compute[192079]: 2025-10-02 12:51:31.617 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:33 compute-0 podman[254939]: 2025-10-02 12:51:33.135825292 +0000 UTC m=+0.050136309 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>)
Oct 02 12:51:33 compute-0 podman[254940]: 2025-10-02 12:51:33.151497129 +0000 UTC m=+0.056270696 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, config_id=iscsid, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, container_name=iscsid, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, tcib_managed=true)
Oct 02 12:51:33 compute-0 ovn_controller[94336]: 2025-10-02T12:51:33Z|00735|memory_trim|INFO|Detected inactivity (last active 30001 ms ago): trimming memory
Oct 02 12:51:34 compute-0 nova_compute[192079]: 2025-10-02 12:51:34.173 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:36 compute-0 nova_compute[192079]: 2025-10-02 12:51:36.667 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:51:37.276 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=57, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=56) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:51:37 compute-0 nova_compute[192079]: 2025-10-02 12:51:37.278 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:37 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:51:37.278 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 5 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:51:39 compute-0 nova_compute[192079]: 2025-10-02 12:51:39.175 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:41 compute-0 nova_compute[192079]: 2025-10-02 12:51:41.671 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:42 compute-0 podman[254984]: 2025-10-02 12:51:42.147927219 +0000 UTC m=+0.050123059 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_metadata_agent, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.vendor=CentOS, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, config_id=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:51:42 compute-0 podman[254986]: 2025-10-02 12:51:42.177754903 +0000 UTC m=+0.070033462 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 12:51:42 compute-0 podman[254985]: 2025-10-02 12:51:42.22093295 +0000 UTC m=+0.112674424 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, container_name=ovn_controller, managed_by=edpm_ansible, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 12:51:42 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:51:42.280 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '57'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:51:43 compute-0 nova_compute[192079]: 2025-10-02 12:51:43.262 2 DEBUG nova.compute.manager [req-9acbc39a-475c-4d9a-88c6-81f9cbf4d36a req-2f56a4f4-f938-4f7a-a054-7e88fa6d8b7e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Received event network-changed-a47e0808-6910-4418-ad25-108c7168bf02 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:51:43 compute-0 nova_compute[192079]: 2025-10-02 12:51:43.263 2 DEBUG nova.compute.manager [req-9acbc39a-475c-4d9a-88c6-81f9cbf4d36a req-2f56a4f4-f938-4f7a-a054-7e88fa6d8b7e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Refreshing instance network info cache due to event network-changed-a47e0808-6910-4418-ad25-108c7168bf02. external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11053
Oct 02 12:51:43 compute-0 nova_compute[192079]: 2025-10-02 12:51:43.263 2 DEBUG oslo_concurrency.lockutils [req-9acbc39a-475c-4d9a-88c6-81f9cbf4d36a req-2f56a4f4-f938-4f7a-a054-7e88fa6d8b7e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "refresh_cache-d891f4b6-930a-42b0-a95f-a5383e4f5827" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:312
Oct 02 12:51:43 compute-0 nova_compute[192079]: 2025-10-02 12:51:43.263 2 DEBUG oslo_concurrency.lockutils [req-9acbc39a-475c-4d9a-88c6-81f9cbf4d36a req-2f56a4f4-f938-4f7a-a054-7e88fa6d8b7e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquired lock "refresh_cache-d891f4b6-930a-42b0-a95f-a5383e4f5827" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315
Oct 02 12:51:43 compute-0 nova_compute[192079]: 2025-10-02 12:51:43.263 2 DEBUG nova.network.neutron [req-9acbc39a-475c-4d9a-88c6-81f9cbf4d36a req-2f56a4f4-f938-4f7a-a054-7e88fa6d8b7e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Refreshing network info cache for port a47e0808-6910-4418-ad25-108c7168bf02 _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2007
Oct 02 12:51:43 compute-0 nova_compute[192079]: 2025-10-02 12:51:43.841 2 DEBUG oslo_concurrency.lockutils [None req-057e3751-c95f-4dec-bf0e-e5e97f0ddd59 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "d891f4b6-930a-42b0-a95f-a5383e4f5827" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:51:43 compute-0 nova_compute[192079]: 2025-10-02 12:51:43.842 2 DEBUG oslo_concurrency.lockutils [None req-057e3751-c95f-4dec-bf0e-e5e97f0ddd59 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "d891f4b6-930a-42b0-a95f-a5383e4f5827" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:51:43 compute-0 nova_compute[192079]: 2025-10-02 12:51:43.842 2 DEBUG oslo_concurrency.lockutils [None req-057e3751-c95f-4dec-bf0e-e5e97f0ddd59 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "d891f4b6-930a-42b0-a95f-a5383e4f5827-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:51:43 compute-0 nova_compute[192079]: 2025-10-02 12:51:43.842 2 DEBUG oslo_concurrency.lockutils [None req-057e3751-c95f-4dec-bf0e-e5e97f0ddd59 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "d891f4b6-930a-42b0-a95f-a5383e4f5827-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:51:43 compute-0 nova_compute[192079]: 2025-10-02 12:51:43.842 2 DEBUG oslo_concurrency.lockutils [None req-057e3751-c95f-4dec-bf0e-e5e97f0ddd59 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "d891f4b6-930a-42b0-a95f-a5383e4f5827-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:51:44 compute-0 nova_compute[192079]: 2025-10-02 12:51:44.177 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:44 compute-0 nova_compute[192079]: 2025-10-02 12:51:44.186 2 INFO nova.compute.manager [None req-057e3751-c95f-4dec-bf0e-e5e97f0ddd59 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Terminating instance
Oct 02 12:51:44 compute-0 nova_compute[192079]: 2025-10-02 12:51:44.458 2 DEBUG nova.compute.manager [None req-057e3751-c95f-4dec-bf0e-e5e97f0ddd59 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Start destroying the instance on the hypervisor. _shutdown_instance /usr/lib/python3.9/site-packages/nova/compute/manager.py:3120
Oct 02 12:51:44 compute-0 kernel: tapa47e0808-69 (unregistering): left promiscuous mode
Oct 02 12:51:44 compute-0 NetworkManager[51160]: <info>  [1759409504.4775] device (tapa47e0808-69): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Oct 02 12:51:44 compute-0 ovn_controller[94336]: 2025-10-02T12:51:44Z|00736|binding|INFO|Releasing lport a47e0808-6910-4418-ad25-108c7168bf02 from this chassis (sb_readonly=0)
Oct 02 12:51:44 compute-0 ovn_controller[94336]: 2025-10-02T12:51:44Z|00737|binding|INFO|Setting lport a47e0808-6910-4418-ad25-108c7168bf02 down in Southbound
Oct 02 12:51:44 compute-0 ovn_controller[94336]: 2025-10-02T12:51:44Z|00738|binding|INFO|Removing iface tapa47e0808-69 ovn-installed in OVS
Oct 02 12:51:44 compute-0 nova_compute[192079]: 2025-10-02 12:51:44.491 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:44 compute-0 nova_compute[192079]: 2025-10-02 12:51:44.507 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:51:44.539 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: PortBindingUpdatedEvent(events=('update',), table='Port_Binding', conditions=None, old_conditions=None), priority=20 to row=Port_Binding(mac=['fa:16:3e:a5:a1:66 10.100.0.6'], port_security=['fa:16:3e:a5:a1:66 10.100.0.6'], type=, nat_addresses=[], virtual_parent=[], up=[False], options={'requested-chassis': 'compute-0.ctlplane.example.com'}, parent_port=[], requested_additional_chassis=[], ha_chassis_group=[], external_ids={'neutron:cidrs': '10.100.0.6/28', 'neutron:device_id': 'd891f4b6-930a-42b0-a95f-a5383e4f5827', 'neutron:device_owner': 'compute:nova', 'neutron:mtu': '', 'neutron:network_name': 'neutron-c56f578e-f013-4483-b9f2-ee1459896133', 'neutron:port_capabilities': '', 'neutron:port_name': '', 'neutron:project_id': '575f3d227ab24f2daa62e65e14a4cd9c', 'neutron:revision_number': '4', 'neutron:security_group_ids': '48a842a9-048b-49fa-aad1-710802b3266f fa097138-2672-47b1-9f70-62839b3b8fff', 'neutron:subnet_pool_addr_scope4': '', 'neutron:subnet_pool_addr_scope6': '', 'neutron:vnic_type': 'normal', 'neutron:host_id': 'compute-0.ctlplane.example.com'}, additional_chassis=[], tag=[], additional_encap=[], encap=[], mirror_rules=[], datapath=50529df0-c539-4067-a62b-3ef6d48b20aa, chassis=[], tunnel_key=3, gateway_chassis=[], requested_chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>], logical_port=a47e0808-6910-4418-ad25-108c7168bf02) old=Port_Binding(up=[True], chassis=[<ovs.db.idl.Row object at 0x7ff20b14e910>]) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:51:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:51:44.541 103294 INFO neutron.agent.ovn.metadata.agent [-] Port a47e0808-6910-4418-ad25-108c7168bf02 in datapath c56f578e-f013-4483-b9f2-ee1459896133 unbound from our chassis
Oct 02 12:51:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:51:44.542 103294 DEBUG neutron.agent.ovn.metadata.agent [-] No valid VIF ports were found for network c56f578e-f013-4483-b9f2-ee1459896133, tearing the namespace down if needed _get_provision_params /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:628
Oct 02 12:51:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:51:44.543 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[b4f65a66-607f-478a-b046-072704c7915a]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:51:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:51:44.544 103294 INFO neutron.agent.ovn.metadata.agent [-] Cleaning up ovnmeta-c56f578e-f013-4483-b9f2-ee1459896133 namespace which is not needed anymore
Oct 02 12:51:44 compute-0 systemd[1]: machine-qemu\x2d88\x2dinstance\x2d000000ba.scope: Deactivated successfully.
Oct 02 12:51:44 compute-0 systemd[1]: machine-qemu\x2d88\x2dinstance\x2d000000ba.scope: Consumed 16.459s CPU time.
Oct 02 12:51:44 compute-0 systemd-machined[152150]: Machine qemu-88-instance-000000ba terminated.
Oct 02 12:51:44 compute-0 neutron-haproxy-ovnmeta-c56f578e-f013-4483-b9f2-ee1459896133[254519]: [NOTICE]   (254554) : haproxy version is 2.8.14-c23fe91
Oct 02 12:51:44 compute-0 neutron-haproxy-ovnmeta-c56f578e-f013-4483-b9f2-ee1459896133[254519]: [NOTICE]   (254554) : path to executable is /usr/sbin/haproxy
Oct 02 12:51:44 compute-0 neutron-haproxy-ovnmeta-c56f578e-f013-4483-b9f2-ee1459896133[254519]: [WARNING]  (254554) : Exiting Master process...
Oct 02 12:51:44 compute-0 neutron-haproxy-ovnmeta-c56f578e-f013-4483-b9f2-ee1459896133[254519]: [ALERT]    (254554) : Current worker (254556) exited with code 143 (Terminated)
Oct 02 12:51:44 compute-0 neutron-haproxy-ovnmeta-c56f578e-f013-4483-b9f2-ee1459896133[254519]: [WARNING]  (254554) : All workers exited. Exiting... (0)
Oct 02 12:51:44 compute-0 systemd[1]: libpod-4795c3a08209c2dd904c52069e126e3251352ae61c0fec6cff675e6a90e7818d.scope: Deactivated successfully.
Oct 02 12:51:44 compute-0 podman[255076]: 2025-10-02 12:51:44.674021736 +0000 UTC m=+0.043763595 container died 4795c3a08209c2dd904c52069e126e3251352ae61c0fec6cff675e6a90e7818d (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-c56f578e-f013-4483-b9f2-ee1459896133, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS)
Oct 02 12:51:44 compute-0 nova_compute[192079]: 2025-10-02 12:51:44.675 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:44 compute-0 nova_compute[192079]: 2025-10-02 12:51:44.680 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:44 compute-0 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-4795c3a08209c2dd904c52069e126e3251352ae61c0fec6cff675e6a90e7818d-userdata-shm.mount: Deactivated successfully.
Oct 02 12:51:44 compute-0 systemd[1]: var-lib-containers-storage-overlay-769a952cbbcaee7cb12a1679004257196ee275021cdf6b200922ba961974763c-merged.mount: Deactivated successfully.
Oct 02 12:51:44 compute-0 podman[255076]: 2025-10-02 12:51:44.714145651 +0000 UTC m=+0.083887500 container cleanup 4795c3a08209c2dd904c52069e126e3251352ae61c0fec6cff675e6a90e7818d (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-c56f578e-f013-4483-b9f2-ee1459896133, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:51:44 compute-0 systemd[1]: libpod-conmon-4795c3a08209c2dd904c52069e126e3251352ae61c0fec6cff675e6a90e7818d.scope: Deactivated successfully.
Oct 02 12:51:44 compute-0 nova_compute[192079]: 2025-10-02 12:51:44.721 2 INFO nova.virt.libvirt.driver [-] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Instance destroyed successfully.
Oct 02 12:51:44 compute-0 nova_compute[192079]: 2025-10-02 12:51:44.722 2 DEBUG nova.objects.instance [None req-057e3751-c95f-4dec-bf0e-e5e97f0ddd59 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lazy-loading 'resources' on Instance uuid d891f4b6-930a-42b0-a95f-a5383e4f5827 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105
Oct 02 12:51:44 compute-0 podman[255120]: 2025-10-02 12:51:44.777138938 +0000 UTC m=+0.040003842 container remove 4795c3a08209c2dd904c52069e126e3251352ae61c0fec6cff675e6a90e7818d (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-c56f578e-f013-4483-b9f2-ee1459896133, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:51:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:51:44.787 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[504e7cf1-1198-4aef-8342-52fbfdfa9958]: (4, ('Thu Oct  2 12:51:44 PM UTC 2025 Stopping container neutron-haproxy-ovnmeta-c56f578e-f013-4483-b9f2-ee1459896133 (4795c3a08209c2dd904c52069e126e3251352ae61c0fec6cff675e6a90e7818d)\n4795c3a08209c2dd904c52069e126e3251352ae61c0fec6cff675e6a90e7818d\nThu Oct  2 12:51:44 PM UTC 2025 Deleting container neutron-haproxy-ovnmeta-c56f578e-f013-4483-b9f2-ee1459896133 (4795c3a08209c2dd904c52069e126e3251352ae61c0fec6cff675e6a90e7818d)\n4795c3a08209c2dd904c52069e126e3251352ae61c0fec6cff675e6a90e7818d\n', '', 0)) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:51:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:51:44.789 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[e3b72cb6-0d81-42c2-a173-16077321dfb0]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:51:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:51:44.790 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapc56f578e-f0, bridge=None, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:51:44 compute-0 nova_compute[192079]: 2025-10-02 12:51:44.792 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:44 compute-0 kernel: tapc56f578e-f0: left promiscuous mode
Oct 02 12:51:44 compute-0 nova_compute[192079]: 2025-10-02 12:51:44.807 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:51:44.810 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[73daec2a-97c8-4cd9-bea4-3e518a8a3ea1]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:51:44 compute-0 nova_compute[192079]: 2025-10-02 12:51:44.816 2 DEBUG nova.virt.libvirt.vif [None req-057e3751-c95f-4dec-bf0e-e5e97f0ddd59 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] vif_type=ovs instance=Instance(access_ip_v4=None,access_ip_v6=None,architecture=None,auto_disk_config=False,availability_zone='nova',cell_name=None,cleaned=False,config_drive='True',created_at=2025-10-02T12:50:08Z,default_ephemeral_device=None,default_swap_device=None,deleted=False,deleted_at=None,device_metadata=<?>,disable_terminate=False,display_description='tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549',display_name='tempest-server-tempest-TestSecurityGroupsBasicOps-1020134341-access_point-1089190549',ec2_ids=<?>,ephemeral_gb=0,ephemeral_key_uuid=None,fault=<?>,flavor=Flavor(1),hidden=False,host='compute-0.ctlplane.example.com',hostname='tempest-server-tempest-testsecuritygroupsbasicops-1020134341-ac',id=186,image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',info_cache=InstanceInfoCache,instance_type_id=1,kernel_id='',key_data='ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBPtdLgRziYi/gtQwh2c90NnE9jWcSnkXXhVGvo+TNtzW3MSE83NoyumTXAaB/UU4ExIeaKr77+vb5N+WSXOcyyn7dDdXMPaG0pk4M0kXpEwbShNs9Jn1NVnaa85coBBWBQ==',key_name='tempest-TestSecurityGroupsBasicOps-1574692784',keypairs=<?>,launch_index=0,launched_at=2025-10-02T12:50:27Z,launched_on='compute-0.ctlplane.example.com',locked=False,locked_by=None,memory_mb=128,metadata={},migration_context=<?>,new_flavor=None,node='compute-0.ctlplane.example.com',numa_topology=None,old_flavor=None,os_type=None,pci_devices=<?>,pci_requests=<?>,power_state=1,progress=0,project_id='575f3d227ab24f2daa62e65e14a4cd9c',ramdisk_id='',reservation_id='r-dtj46nao',resources=None,root_device_name='/dev/vda',root_gb=1,security_groups=SecurityGroupList,services=<?>,shutdown_terminate=False,system_metadata={boot_roles='reader,member',image_base_image_ref='cf60d86d-f1d5-4be4-976e-7488dbdcf0b2',image_container_format='bare',image_disk_format='qcow2',image_hw_cdrom_bus='sata',image_hw_disk_bus='virtio',image_hw_input_bus='usb',image_hw_machine_type='q35',image_hw_pointer_model='usbtablet',image_hw_rng_model='virtio',image_hw_video_model='virtio',image_hw_vif_model='virtio',image_min_disk='1',image_min_ram='0',owner_project_name='tempest-TestSecurityGroupsBasicOps-1020134341',owner_user_name='tempest-TestSecurityGroupsBasicOps-1020134341-project-member'},tags=<?>,task_state='deleting',terminated_at=None,trusted_certs=<?>,updated_at=2025-10-02T12:50:27Z,user_data=None,user_id='2d2b4a2da57543ef88e44ae28ad61647',uuid=d891f4b6-930a-42b0-a95f-a5383e4f5827,vcpu_model=<?>,vcpus=1,vm_mode=None,vm_state='active') vif={"id": "a47e0808-6910-4418-ad25-108c7168bf02", "address": "fa:16:3e:a5:a1:66", "network": {"id": "c56f578e-f013-4483-b9f2-ee1459896133", "bridge": "br-int", "label": "tempest-network-smoke--1680080003", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", 
"type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.214", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa47e0808-69", "ovs_interfaceid": "a47e0808-6910-4418-ad25-108c7168bf02", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} unplug /usr/lib/python3.9/site-packages/nova/virt/libvirt/vif.py:828
Oct 02 12:51:44 compute-0 nova_compute[192079]: 2025-10-02 12:51:44.817 2 DEBUG nova.network.os_vif_util [None req-057e3751-c95f-4dec-bf0e-e5e97f0ddd59 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Converting VIF {"id": "a47e0808-6910-4418-ad25-108c7168bf02", "address": "fa:16:3e:a5:a1:66", "network": {"id": "c56f578e-f013-4483-b9f2-ee1459896133", "bridge": "br-int", "label": "tempest-network-smoke--1680080003", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.214", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa47e0808-69", "ovs_interfaceid": "a47e0808-6910-4418-ad25-108c7168bf02", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}} nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:511
Oct 02 12:51:44 compute-0 nova_compute[192079]: 2025-10-02 12:51:44.818 2 DEBUG nova.network.os_vif_util [None req-057e3751-c95f-4dec-bf0e-e5e97f0ddd59 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Converted object VIFOpenVSwitch(active=True,address=fa:16:3e:a5:a1:66,bridge_name='br-int',has_traffic_filtering=True,id=a47e0808-6910-4418-ad25-108c7168bf02,network=Network(c56f578e-f013-4483-b9f2-ee1459896133),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapa47e0808-69') nova_to_osvif_vif /usr/lib/python3.9/site-packages/nova/network/os_vif_util.py:548
Oct 02 12:51:44 compute-0 nova_compute[192079]: 2025-10-02 12:51:44.819 2 DEBUG os_vif [None req-057e3751-c95f-4dec-bf0e-e5e97f0ddd59 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Unplugging vif VIFOpenVSwitch(active=True,address=fa:16:3e:a5:a1:66,bridge_name='br-int',has_traffic_filtering=True,id=a47e0808-6910-4418-ad25-108c7168bf02,network=Network(c56f578e-f013-4483-b9f2-ee1459896133),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapa47e0808-69') unplug /usr/lib/python3.9/site-packages/os_vif/__init__.py:109
Oct 02 12:51:44 compute-0 nova_compute[192079]: 2025-10-02 12:51:44.821 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:44 compute-0 nova_compute[192079]: 2025-10-02 12:51:44.821 2 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DelPortCommand(_result=None, port=tapa47e0808-69, bridge=br-int, if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:51:44 compute-0 nova_compute[192079]: 2025-10-02 12:51:44.825 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 12:51:44 compute-0 nova_compute[192079]: 2025-10-02 12:51:44.829 2 INFO os_vif [None req-057e3751-c95f-4dec-bf0e-e5e97f0ddd59 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Successfully unplugged vif VIFOpenVSwitch(active=True,address=fa:16:3e:a5:a1:66,bridge_name='br-int',has_traffic_filtering=True,id=a47e0808-6910-4418-ad25-108c7168bf02,network=Network(c56f578e-f013-4483-b9f2-ee1459896133),plugin='ovs',port_profile=VIFPortProfileOpenVSwitch,preserve_on_delete=False,vif_name='tapa47e0808-69')
Oct 02 12:51:44 compute-0 nova_compute[192079]: 2025-10-02 12:51:44.829 2 INFO nova.virt.libvirt.driver [None req-057e3751-c95f-4dec-bf0e-e5e97f0ddd59 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Deleting instance files /var/lib/nova/instances/d891f4b6-930a-42b0-a95f-a5383e4f5827_del
Oct 02 12:51:44 compute-0 nova_compute[192079]: 2025-10-02 12:51:44.830 2 INFO nova.virt.libvirt.driver [None req-057e3751-c95f-4dec-bf0e-e5e97f0ddd59 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Deletion of /var/lib/nova/instances/d891f4b6-930a-42b0-a95f-a5383e4f5827_del complete
Oct 02 12:51:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:51:44.855 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[0f20d496-a047-4b1d-b74b-427a722ba53f]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:51:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:51:44.856 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[1f299df4-7d46-40ae-af11-f7f0f9e32a24]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:51:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:51:44.874 219793 DEBUG oslo.privsep.daemon [-] privsep: reply[82fc0dfe-cf11-490b-8c3d-10e972c8f709]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 742125, 'reachable_time': 22448, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 255138, 'error': None, 'target': 'ovnmeta-c56f578e-f013-4483-b9f2-ee1459896133', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:51:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:51:44.877 103404 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-c56f578e-f013-4483-b9f2-ee1459896133 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607
Oct 02 12:51:44 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:51:44.877 103404 DEBUG oslo.privsep.daemon [-] privsep: reply[b1f8dd8f-7c6d-4eec-b68a-6d874ad7a33a]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501
Oct 02 12:51:44 compute-0 systemd[1]: run-netns-ovnmeta\x2dc56f578e\x2df013\x2d4483\x2db9f2\x2dee1459896133.mount: Deactivated successfully.
Oct 02 12:51:45 compute-0 nova_compute[192079]: 2025-10-02 12:51:45.057 2 DEBUG nova.compute.manager [req-2d831fe2-fcfc-46e3-85b3-9a3cc6926a17 req-5db181b1-2aab-48c1-b034-29f7333ef6ec 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Received event network-vif-unplugged-a47e0808-6910-4418-ad25-108c7168bf02 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:51:45 compute-0 nova_compute[192079]: 2025-10-02 12:51:45.057 2 DEBUG oslo_concurrency.lockutils [req-2d831fe2-fcfc-46e3-85b3-9a3cc6926a17 req-5db181b1-2aab-48c1-b034-29f7333ef6ec 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "d891f4b6-930a-42b0-a95f-a5383e4f5827-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:51:45 compute-0 nova_compute[192079]: 2025-10-02 12:51:45.058 2 DEBUG oslo_concurrency.lockutils [req-2d831fe2-fcfc-46e3-85b3-9a3cc6926a17 req-5db181b1-2aab-48c1-b034-29f7333ef6ec 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d891f4b6-930a-42b0-a95f-a5383e4f5827-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:51:45 compute-0 nova_compute[192079]: 2025-10-02 12:51:45.058 2 DEBUG oslo_concurrency.lockutils [req-2d831fe2-fcfc-46e3-85b3-9a3cc6926a17 req-5db181b1-2aab-48c1-b034-29f7333ef6ec 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d891f4b6-930a-42b0-a95f-a5383e4f5827-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:51:45 compute-0 nova_compute[192079]: 2025-10-02 12:51:45.058 2 DEBUG nova.compute.manager [req-2d831fe2-fcfc-46e3-85b3-9a3cc6926a17 req-5db181b1-2aab-48c1-b034-29f7333ef6ec 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] No waiting events found dispatching network-vif-unplugged-a47e0808-6910-4418-ad25-108c7168bf02 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:51:45 compute-0 nova_compute[192079]: 2025-10-02 12:51:45.059 2 DEBUG nova.compute.manager [req-2d831fe2-fcfc-46e3-85b3-9a3cc6926a17 req-5db181b1-2aab-48c1-b034-29f7333ef6ec 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Received event network-vif-unplugged-a47e0808-6910-4418-ad25-108c7168bf02 for instance with task_state deleting. _process_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10826
Oct 02 12:51:45 compute-0 nova_compute[192079]: 2025-10-02 12:51:45.352 2 INFO nova.compute.manager [None req-057e3751-c95f-4dec-bf0e-e5e97f0ddd59 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Took 0.89 seconds to destroy the instance on the hypervisor.
Oct 02 12:51:45 compute-0 nova_compute[192079]: 2025-10-02 12:51:45.353 2 DEBUG oslo.service.loopingcall [None req-057e3751-c95f-4dec-bf0e-e5e97f0ddd59 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. func /usr/lib/python3.9/site-packages/oslo_service/loopingcall.py:435
Oct 02 12:51:45 compute-0 nova_compute[192079]: 2025-10-02 12:51:45.353 2 DEBUG nova.compute.manager [-] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Deallocating network for instance _deallocate_network /usr/lib/python3.9/site-packages/nova/compute/manager.py:2259
Oct 02 12:51:45 compute-0 nova_compute[192079]: 2025-10-02 12:51:45.353 2 DEBUG nova.network.neutron [-] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] deallocate_for_instance() deallocate_for_instance /usr/lib/python3.9/site-packages/nova/network/neutron.py:1803
Oct 02 12:51:47 compute-0 nova_compute[192079]: 2025-10-02 12:51:47.009 2 DEBUG nova.network.neutron [req-9acbc39a-475c-4d9a-88c6-81f9cbf4d36a req-2f56a4f4-f938-4f7a-a054-7e88fa6d8b7e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Updated VIF entry in instance network info cache for port a47e0808-6910-4418-ad25-108c7168bf02. _build_network_info_model /usr/lib/python3.9/site-packages/nova/network/neutron.py:3482
Oct 02 12:51:47 compute-0 nova_compute[192079]: 2025-10-02 12:51:47.009 2 DEBUG nova.network.neutron [req-9acbc39a-475c-4d9a-88c6-81f9cbf4d36a req-2f56a4f4-f938-4f7a-a054-7e88fa6d8b7e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Updating instance_info_cache with network_info: [{"id": "a47e0808-6910-4418-ad25-108c7168bf02", "address": "fa:16:3e:a5:a1:66", "network": {"id": "c56f578e-f013-4483-b9f2-ee1459896133", "bridge": "br-int", "label": "tempest-network-smoke--1680080003", "subnets": [{"cidr": "10.100.0.0/28", "dns": [], "gateway": {"address": "10.100.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "10.100.0.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "575f3d227ab24f2daa62e65e14a4cd9c", "mtu": 1442, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapa47e0808-69", "ovs_interfaceid": "a47e0808-6910-4418-ad25-108c7168bf02", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:51:47 compute-0 nova_compute[192079]: 2025-10-02 12:51:47.157 2 DEBUG oslo_concurrency.lockutils [req-9acbc39a-475c-4d9a-88c6-81f9cbf4d36a req-2f56a4f4-f938-4f7a-a054-7e88fa6d8b7e 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Releasing lock "refresh_cache-d891f4b6-930a-42b0-a95f-a5383e4f5827" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333
Oct 02 12:51:47 compute-0 nova_compute[192079]: 2025-10-02 12:51:47.225 2 DEBUG nova.compute.manager [req-5e570ac4-7364-495b-8b81-41bfb20109c8 req-91e0de6b-7e6c-4108-93f8-10a8dff14fbd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Received event network-vif-plugged-a47e0808-6910-4418-ad25-108c7168bf02 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:51:47 compute-0 nova_compute[192079]: 2025-10-02 12:51:47.226 2 DEBUG oslo_concurrency.lockutils [req-5e570ac4-7364-495b-8b81-41bfb20109c8 req-91e0de6b-7e6c-4108-93f8-10a8dff14fbd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Acquiring lock "d891f4b6-930a-42b0-a95f-a5383e4f5827-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:51:47 compute-0 nova_compute[192079]: 2025-10-02 12:51:47.226 2 DEBUG oslo_concurrency.lockutils [req-5e570ac4-7364-495b-8b81-41bfb20109c8 req-91e0de6b-7e6c-4108-93f8-10a8dff14fbd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d891f4b6-930a-42b0-a95f-a5383e4f5827-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:51:47 compute-0 nova_compute[192079]: 2025-10-02 12:51:47.227 2 DEBUG oslo_concurrency.lockutils [req-5e570ac4-7364-495b-8b81-41bfb20109c8 req-91e0de6b-7e6c-4108-93f8-10a8dff14fbd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] Lock "d891f4b6-930a-42b0-a95f-a5383e4f5827-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:51:47 compute-0 nova_compute[192079]: 2025-10-02 12:51:47.227 2 DEBUG nova.compute.manager [req-5e570ac4-7364-495b-8b81-41bfb20109c8 req-91e0de6b-7e6c-4108-93f8-10a8dff14fbd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] No waiting events found dispatching network-vif-plugged-a47e0808-6910-4418-ad25-108c7168bf02 pop_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:320
Oct 02 12:51:47 compute-0 nova_compute[192079]: 2025-10-02 12:51:47.227 2 WARNING nova.compute.manager [req-5e570ac4-7364-495b-8b81-41bfb20109c8 req-91e0de6b-7e6c-4108-93f8-10a8dff14fbd 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Received unexpected event network-vif-plugged-a47e0808-6910-4418-ad25-108c7168bf02 for instance with vm_state active and task_state deleting.
Oct 02 12:51:48 compute-0 nova_compute[192079]: 2025-10-02 12:51:48.037 2 DEBUG nova.network.neutron [-] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:51:48 compute-0 nova_compute[192079]: 2025-10-02 12:51:48.130 2 DEBUG nova.compute.manager [req-02bccd6c-da62-4fb7-8756-99229d3ffbe3 req-a58dc375-19eb-4f59-b3a6-ea71795e1339 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Received event network-vif-deleted-a47e0808-6910-4418-ad25-108c7168bf02 external_instance_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:11048
Oct 02 12:51:48 compute-0 nova_compute[192079]: 2025-10-02 12:51:48.131 2 INFO nova.compute.manager [req-02bccd6c-da62-4fb7-8756-99229d3ffbe3 req-a58dc375-19eb-4f59-b3a6-ea71795e1339 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Neutron deleted interface a47e0808-6910-4418-ad25-108c7168bf02; detaching it from the instance and deleting it from the info cache
Oct 02 12:51:48 compute-0 nova_compute[192079]: 2025-10-02 12:51:48.131 2 DEBUG nova.network.neutron [req-02bccd6c-da62-4fb7-8756-99229d3ffbe3 req-a58dc375-19eb-4f59-b3a6-ea71795e1339 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Updating instance_info_cache with network_info: [] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116
Oct 02 12:51:48 compute-0 nova_compute[192079]: 2025-10-02 12:51:48.135 2 INFO nova.compute.manager [-] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Took 2.78 seconds to deallocate network for instance.
Oct 02 12:51:48 compute-0 nova_compute[192079]: 2025-10-02 12:51:48.205 2 DEBUG nova.compute.manager [req-02bccd6c-da62-4fb7-8756-99229d3ffbe3 req-a58dc375-19eb-4f59-b3a6-ea71795e1339 0d72f6c58d6b4ef89c0d1d75b420b96f bffbc2e8eeb448dcbb34a8b5bc72922e - - default default] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Detach interface failed, port_id=a47e0808-6910-4418-ad25-108c7168bf02, reason: Instance d891f4b6-930a-42b0-a95f-a5383e4f5827 could not be found. _process_instance_vif_deleted_event /usr/lib/python3.9/site-packages/nova/compute/manager.py:10882
Oct 02 12:51:48 compute-0 nova_compute[192079]: 2025-10-02 12:51:48.674 2 DEBUG oslo_concurrency.lockutils [None req-057e3751-c95f-4dec-bf0e-e5e97f0ddd59 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:51:48 compute-0 nova_compute[192079]: 2025-10-02 12:51:48.674 2 DEBUG oslo_concurrency.lockutils [None req-057e3751-c95f-4dec-bf0e-e5e97f0ddd59 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:51:48 compute-0 nova_compute[192079]: 2025-10-02 12:51:48.762 2 DEBUG nova.compute.provider_tree [None req-057e3751-c95f-4dec-bf0e-e5e97f0ddd59 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:51:48 compute-0 nova_compute[192079]: 2025-10-02 12:51:48.871 2 DEBUG nova.scheduler.client.report [None req-057e3751-c95f-4dec-bf0e-e5e97f0ddd59 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:51:48 compute-0 nova_compute[192079]: 2025-10-02 12:51:48.953 2 DEBUG oslo_concurrency.lockutils [None req-057e3751-c95f-4dec-bf0e-e5e97f0ddd59 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.279s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:51:49 compute-0 nova_compute[192079]: 2025-10-02 12:51:49.039 2 INFO nova.scheduler.client.report [None req-057e3751-c95f-4dec-bf0e-e5e97f0ddd59 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Deleted allocations for instance d891f4b6-930a-42b0-a95f-a5383e4f5827
Oct 02 12:51:49 compute-0 nova_compute[192079]: 2025-10-02 12:51:49.179 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:49 compute-0 nova_compute[192079]: 2025-10-02 12:51:49.828 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:50 compute-0 podman[255139]: 2025-10-02 12:51:50.150051173 +0000 UTC m=+0.064802898 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_id=edpm, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, container_name=ceilometer_agent_compute, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, io.buildah.version=1.41.3, 
org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:51:50 compute-0 nova_compute[192079]: 2025-10-02 12:51:50.330 2 DEBUG oslo_concurrency.lockutils [None req-057e3751-c95f-4dec-bf0e-e5e97f0ddd59 2d2b4a2da57543ef88e44ae28ad61647 575f3d227ab24f2daa62e65e14a4cd9c - - default default] Lock "d891f4b6-930a-42b0-a95f-a5383e4f5827" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.488s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:51:52 compute-0 nova_compute[192079]: 2025-10-02 12:51:52.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:51:52 compute-0 nova_compute[192079]: 2025-10-02 12:51:52.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:51:54 compute-0 nova_compute[192079]: 2025-10-02 12:51:54.220 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:54 compute-0 nova_compute[192079]: 2025-10-02 12:51:54.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:51:54 compute-0 nova_compute[192079]: 2025-10-02 12:51:54.829 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:57 compute-0 podman[255162]: 2025-10-02 12:51:57.147124954 +0000 UTC m=+0.056017979 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=multipathd, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_managed=true, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.license=GPLv2)
Oct 02 12:51:57 compute-0 podman[255161]: 2025-10-02 12:51:57.147631077 +0000 UTC m=+0.057806167 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, name=ubi9-minimal, vendor=Red Hat, Inc., architecture=x86_64, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., config_id=edpm, distribution-scope=public, com.redhat.component=ubi9-minimal-container, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., maintainer=Red Hat, Inc., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.k8s.display-name=Red Hat Universal 
Base Image 9 Minimal, release=1755695350, version=9.6, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., managed_by=edpm_ansible, build-date=2025-08-20T13:12:41, url=https://catalog.redhat.com/en/search?searchType=containers, container_name=openstack_network_exporter, io.buildah.version=1.33.7, vcs-type=git, io.openshift.expose-services=, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.openshift.tags=minimal rhel9)
Oct 02 12:51:57 compute-0 nova_compute[192079]: 2025-10-02 12:51:57.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:51:57 compute-0 nova_compute[192079]: 2025-10-02 12:51:57.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:51:57 compute-0 nova_compute[192079]: 2025-10-02 12:51:57.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:51:58 compute-0 nova_compute[192079]: 2025-10-02 12:51:58.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:51:59 compute-0 nova_compute[192079]: 2025-10-02 12:51:59.254 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:51:59 compute-0 nova_compute[192079]: 2025-10-02 12:51:59.720 2 DEBUG nova.virt.driver [-] Emitting event <LifecycleEvent: 1759409504.7184768, d891f4b6-930a-42b0-a95f-a5383e4f5827 => Stopped> emit_event /usr/lib/python3.9/site-packages/nova/virt/driver.py:1653
Oct 02 12:51:59 compute-0 nova_compute[192079]: 2025-10-02 12:51:59.720 2 INFO nova.compute.manager [-] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] VM Stopped (Lifecycle Event)
Oct 02 12:51:59 compute-0 nova_compute[192079]: 2025-10-02 12:51:59.760 2 DEBUG nova.compute.manager [None req-5ebf662f-ce2c-4df7-a48a-48511bc086b7 - - - - - -] [instance: d891f4b6-930a-42b0-a95f-a5383e4f5827] Checking state _get_power_state /usr/lib/python3.9/site-packages/nova/compute/manager.py:1762
Oct 02 12:51:59 compute-0 nova_compute[192079]: 2025-10-02 12:51:59.830 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:52:01 compute-0 nova_compute[192079]: 2025-10-02 12:52:01.045 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:52:01 compute-0 nova_compute[192079]: 2025-10-02 12:52:01.113 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:52:01 compute-0 nova_compute[192079]: 2025-10-02 12:52:01.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:52:02 compute-0 nova_compute[192079]: 2025-10-02 12:52:02.002 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:52:02 compute-0 nova_compute[192079]: 2025-10-02 12:52:02.003 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:52:02 compute-0 nova_compute[192079]: 2025-10-02 12:52:02.003 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:52:02 compute-0 nova_compute[192079]: 2025-10-02 12:52:02.003 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:52:02 compute-0 nova_compute[192079]: 2025-10-02 12:52:02.196 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:52:02 compute-0 nova_compute[192079]: 2025-10-02 12:52:02.197 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5732MB free_disk=73.27163314819336GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:52:02 compute-0 nova_compute[192079]: 2025-10-02 12:52:02.198 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:52:02 compute-0 nova_compute[192079]: 2025-10-02 12:52:02.198 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:52:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:52:02.260 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:52:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:52:02.261 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:52:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:52:02.261 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:52:02 compute-0 nova_compute[192079]: 2025-10-02 12:52:02.500 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:52:02 compute-0 nova_compute[192079]: 2025-10-02 12:52:02.500 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:52:02 compute-0 nova_compute[192079]: 2025-10-02 12:52:02.599 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing inventories for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708 _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:804
Oct 02 12:52:02 compute-0 nova_compute[192079]: 2025-10-02 12:52:02.703 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Updating ProviderTree inventory for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 from _refresh_and_get_inventory using data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} _refresh_and_get_inventory /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:768
Oct 02 12:52:02 compute-0 nova_compute[192079]: 2025-10-02 12:52:02.703 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Updating inventory in ProviderTree for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 with inventory: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:176
Oct 02 12:52:02 compute-0 nova_compute[192079]: 2025-10-02 12:52:02.722 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing aggregate associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, aggregates: None _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:813
Oct 02 12:52:02 compute-0 nova_compute[192079]: 2025-10-02 12:52:02.747 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing trait associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, traits: COMPUTE_SECURITY_UEFI_SECURE_BOOT,COMPUTE_VIOMMU_MODEL_VIRTIO,COMPUTE_VIOMMU_MODEL_AUTO,COMPUTE_IMAGE_TYPE_AKI,COMPUTE_GRAPHICS_MODEL_VIRTIO,COMPUTE_NET_VIF_MODEL_PCNET,HW_CPU_X86_SSE42,COMPUTE_RESCUE_BFV,COMPUTE_VOLUME_EXTEND,COMPUTE_IMAGE_TYPE_QCOW2,COMPUTE_TRUSTED_CERTS,COMPUTE_SOCKET_PCI_NUMA_AFFINITY,COMPUTE_GRAPHICS_MODEL_CIRRUS,HW_CPU_X86_MMX,COMPUTE_STORAGE_BUS_VIRTIO,COMPUTE_NET_ATTACH_INTERFACE_WITH_TAG,COMPUTE_STORAGE_BUS_FDC,COMPUTE_STORAGE_BUS_USB,COMPUTE_NODE,HW_CPU_X86_SSSE3,HW_CPU_X86_SSE2,COMPUTE_GRAPHICS_MODEL_BOCHS,COMPUTE_NET_VIF_MODEL_E1000E,COMPUTE_IMAGE_TYPE_RAW,COMPUTE_NET_VIF_MODEL_NE2K_PCI,COMPUTE_IMAGE_TYPE_AMI,COMPUTE_VIOMMU_MODEL_INTEL,COMPUTE_SECURITY_TPM_2_0,COMPUTE_STORAGE_BUS_SCSI,COMPUTE_IMAGE_TYPE_ARI,COMPUTE_NET_VIF_MODEL_VMXNET3,COMPUTE_SECURITY_TPM_1_2,COMPUTE_NET_VIF_MODEL_E1000,HW_CPU_X86_SSE,COMPUTE_VOLUME_MULTI_ATTACH,COMPUTE_STORAGE_BUS_IDE,COMPUTE_GRAPHICS_MODEL_NONE,COMPUTE_VOLUME_ATTACH_WITH_TAG,COMPUTE_NET_VIF_MODEL_VIRTIO,HW_CPU_X86_SSE41,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_DEVICE_TAGGING,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_ACCELERATORS,COMPUTE_NET_VIF_MODEL_RTL8139,COMPUTE_GRAPHICS_MODEL_VGA,COMPUTE_STORAGE_BUS_SATA,COMPUTE_NET_VIF_MODEL_SPAPR_VLAN _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:825
Oct 02 12:52:02 compute-0 nova_compute[192079]: 2025-10-02 12:52:02.780 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:52:02 compute-0 nova_compute[192079]: 2025-10-02 12:52:02.873 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:52:03 compute-0 nova_compute[192079]: 2025-10-02 12:52:03.257 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:52:03 compute-0 nova_compute[192079]: 2025-10-02 12:52:03.258 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.060s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:52:04 compute-0 podman[255205]: 2025-10-02 12:52:04.146447134 +0000 UTC m=+0.059386941 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:52:04 compute-0 podman[255206]: 2025-10-02 12:52:04.200248832 +0000 UTC m=+0.102556349 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_managed=true, container_name=iscsid, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=iscsid, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS)
Oct 02 12:52:04 compute-0 nova_compute[192079]: 2025-10-02 12:52:04.255 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:52:04 compute-0 nova_compute[192079]: 2025-10-02 12:52:04.831 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:52:07 compute-0 nova_compute[192079]: 2025-10-02 12:52:07.259 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:52:07 compute-0 nova_compute[192079]: 2025-10-02 12:52:07.259 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:52:07 compute-0 nova_compute[192079]: 2025-10-02 12:52:07.259 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:52:07 compute-0 nova_compute[192079]: 2025-10-02 12:52:07.282 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:52:07 compute-0 nova_compute[192079]: 2025-10-02 12:52:07.282 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:52:09 compute-0 nova_compute[192079]: 2025-10-02 12:52:09.256 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:52:09 compute-0 nova_compute[192079]: 2025-10-02 12:52:09.833 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:52:13 compute-0 podman[255248]: 2025-10-02 12:52:13.140984299 +0000 UTC m=+0.051758713 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, 
org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_metadata_agent, container_name=ovn_metadata_agent, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.license=GPLv2)
Oct 02 12:52:13 compute-0 podman[255250]: 2025-10-02 12:52:13.152633287 +0000 UTC m=+0.055158636 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:52:13 compute-0 podman[255249]: 2025-10-02 12:52:13.19014437 +0000 UTC m=+0.099924467 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_controller, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0)
Oct 02 12:52:14 compute-0 nova_compute[192079]: 2025-10-02 12:52:14.289 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:52:14 compute-0 nova_compute[192079]: 2025-10-02 12:52:14.836 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:52:19 compute-0 nova_compute[192079]: 2025-10-02 12:52:19.292 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:52:19 compute-0 nova_compute[192079]: 2025-10-02 12:52:19.838 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:52:21 compute-0 podman[255313]: 2025-10-02 12:52:21.145269753 +0000 UTC m=+0.063790882 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=edpm, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.license=GPLv2, container_name=ceilometer_agent_compute, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, 
org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:52:24 compute-0 nova_compute[192079]: 2025-10-02 12:52:24.293 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:52:24 compute-0 nova_compute[192079]: 2025-10-02 12:52:24.841 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:52:25 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:52:25.484 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=58, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=57) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:52:25 compute-0 nova_compute[192079]: 2025-10-02 12:52:25.484 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:52:25 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:52:25.486 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 8 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:52:28 compute-0 podman[255334]: 2025-10-02 12:52:28.161164934 +0000 UTC m=+0.061768136 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, version=9.6, url=https://catalog.redhat.com/en/search?searchType=containers, container_name=openstack_network_exporter, maintainer=Red Hat, Inc., architecture=x86_64, com.redhat.component=ubi9-minimal-container, io.openshift.expose-services=, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.openshift.tags=minimal rhel9, io.buildah.version=1.33.7, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., build-date=2025-08-20T13:12:41, config_id=edpm, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, name=ubi9-minimal, distribution-scope=public, managed_by=edpm_ansible, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, release=1755695350, vendor=Red Hat, Inc., description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., vcs-type=git, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal)
Oct 02 12:52:28 compute-0 podman[255335]: 2025-10-02 12:52:28.192788577 +0000 UTC m=+0.094119749 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, config_id=multipathd, container_name=multipathd, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:52:29 compute-0 nova_compute[192079]: 2025-10-02 12:52:29.295 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:52:29 compute-0 nova_compute[192079]: 2025-10-02 12:52:29.842 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:52:33 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:52:33.487 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '58'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:52:34 compute-0 nova_compute[192079]: 2025-10-02 12:52:34.297 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:52:34 compute-0 nova_compute[192079]: 2025-10-02 12:52:34.844 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:52:35 compute-0 podman[255376]: 2025-10-02 12:52:35.130837726 +0000 UTC m=+0.047271771 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']})
Oct 02 12:52:35 compute-0 podman[255377]: 2025-10-02 12:52:35.142811972 +0000 UTC m=+0.057706245 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS)
Oct 02 12:52:39 compute-0 nova_compute[192079]: 2025-10-02 12:52:39.326 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:52:39 compute-0 nova_compute[192079]: 2025-10-02 12:52:39.847 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:52:44 compute-0 podman[255419]: 2025-10-02 12:52:44.146808925 +0000 UTC m=+0.051034444 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, container_name=ovn_metadata_agent, 
managed_by=edpm_ansible, org.label-schema.vendor=CentOS, config_id=ovn_metadata_agent, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:52:44 compute-0 podman[255421]: 2025-10-02 12:52:44.147523345 +0000 UTC m=+0.045690278 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:52:44 compute-0 podman[255420]: 2025-10-02 12:52:44.17779186 +0000 UTC m=+0.077810433 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, container_name=ovn_controller, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, managed_by=edpm_ansible, tcib_managed=true)
Oct 02 12:52:44 compute-0 nova_compute[192079]: 2025-10-02 12:52:44.328 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:52:44 compute-0 nova_compute[192079]: 2025-10-02 12:52:44.848 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:52:49 compute-0 nova_compute[192079]: 2025-10-02 12:52:49.328 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:52:49 compute-0 nova_compute[192079]: 2025-10-02 12:52:49.849 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:52:52 compute-0 podman[255485]: 2025-10-02 12:52:52.151743455 +0000 UTC m=+0.056639987 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, config_id=edpm, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', 
'/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']})
Oct 02 12:52:52 compute-0 nova_compute[192079]: 2025-10-02 12:52:52.684 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:52:53 compute-0 nova_compute[192079]: 2025-10-02 12:52:53.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:52:54 compute-0 nova_compute[192079]: 2025-10-02 12:52:54.332 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:52:54 compute-0 nova_compute[192079]: 2025-10-02 12:52:54.852 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:52:56 compute-0 ovn_controller[94336]: 2025-10-02T12:52:56Z|00739|memory_trim|INFO|Detected inactivity (last active 30012 ms ago): trimming memory
Oct 02 12:52:56 compute-0 nova_compute[192079]: 2025-10-02 12:52:56.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:52:58 compute-0 nova_compute[192079]: 2025-10-02 12:52:58.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:52:58 compute-0 nova_compute[192079]: 2025-10-02 12:52:58.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:52:58 compute-0 nova_compute[192079]: 2025-10-02 12:52:58.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:52:58 compute-0 nova_compute[192079]: 2025-10-02 12:52:58.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:52:59 compute-0 podman[255507]: 2025-10-02 12:52:59.129255082 +0000 UTC m=+0.047065285 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.openshift.expose-services=, managed_by=edpm_ansible, release=1755695350, build-date=2025-08-20T13:12:41, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, container_name=openstack_network_exporter, distribution-scope=public, name=ubi9-minimal, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, vendor=Red Hat, Inc., architecture=x86_64, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., version=9.6, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vcs-type=git, io.buildah.version=1.33.7, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, com.redhat.component=ubi9-minimal-container, config_id=edpm, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.tags=minimal rhel9, maintainer=Red Hat, Inc., url=https://catalog.redhat.com/en/search?searchType=containers)
Oct 02 12:52:59 compute-0 podman[255508]: 2025-10-02 12:52:59.144689582 +0000 UTC m=+0.056001138 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, container_name=multipathd, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_id=multipathd, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001)
Oct 02 12:52:59 compute-0 nova_compute[192079]: 2025-10-02 12:52:59.390 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:52:59 compute-0 nova_compute[192079]: 2025-10-02 12:52:59.852 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:53:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:53:02.262 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:53:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:53:02.262 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:53:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:53:02.262 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:53:03 compute-0 nova_compute[192079]: 2025-10-02 12:53:03.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:53:03 compute-0 nova_compute[192079]: 2025-10-02 12:53:03.722 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:53:03 compute-0 nova_compute[192079]: 2025-10-02 12:53:03.723 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:53:03 compute-0 nova_compute[192079]: 2025-10-02 12:53:03.723 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:53:03 compute-0 nova_compute[192079]: 2025-10-02 12:53:03.723 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:53:03 compute-0 nova_compute[192079]: 2025-10-02 12:53:03.857 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:53:03 compute-0 nova_compute[192079]: 2025-10-02 12:53:03.857 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5737MB free_disk=73.27163314819336GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:53:03 compute-0 nova_compute[192079]: 2025-10-02 12:53:03.858 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:53:03 compute-0 nova_compute[192079]: 2025-10-02 12:53:03.858 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:53:04 compute-0 nova_compute[192079]: 2025-10-02 12:53:04.308 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:53:04 compute-0 nova_compute[192079]: 2025-10-02 12:53:04.309 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:53:04 compute-0 nova_compute[192079]: 2025-10-02 12:53:04.370 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:53:04 compute-0 nova_compute[192079]: 2025-10-02 12:53:04.392 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:53:04 compute-0 nova_compute[192079]: 2025-10-02 12:53:04.402 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:53:04 compute-0 nova_compute[192079]: 2025-10-02 12:53:04.403 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:53:04 compute-0 nova_compute[192079]: 2025-10-02 12:53:04.404 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.546s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:53:04 compute-0 nova_compute[192079]: 2025-10-02 12:53:04.854 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:53:06 compute-0 podman[255546]: 2025-10-02 12:53:06.1339804 +0000 UTC m=+0.050055687 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:53:06 compute-0 podman[255547]: 2025-10-02 12:53:06.138825692 +0000 UTC m=+0.050065587 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid, container_name=iscsid)
Oct 02 12:53:07 compute-0 nova_compute[192079]: 2025-10-02 12:53:07.406 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:53:07 compute-0 nova_compute[192079]: 2025-10-02 12:53:07.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:53:07 compute-0 nova_compute[192079]: 2025-10-02 12:53:07.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:53:07 compute-0 nova_compute[192079]: 2025-10-02 12:53:07.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:53:07 compute-0 nova_compute[192079]: 2025-10-02 12:53:07.706 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:53:08 compute-0 nova_compute[192079]: 2025-10-02 12:53:08.701 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:53:09 compute-0 nova_compute[192079]: 2025-10-02 12:53:09.392 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:53:09 compute-0 nova_compute[192079]: 2025-10-02 12:53:09.855 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:53:14 compute-0 nova_compute[192079]: 2025-10-02 12:53:14.392 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:53:14 compute-0 nova_compute[192079]: 2025-10-02 12:53:14.856 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:53:15 compute-0 podman[255588]: 2025-10-02 12:53:15.144048659 +0000 UTC m=+0.058170118 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, 
maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, managed_by=edpm_ansible, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.build-date=20251001)
Oct 02 12:53:15 compute-0 podman[255595]: 2025-10-02 12:53:15.149470487 +0000 UTC m=+0.050655513 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>)
Oct 02 12:53:15 compute-0 podman[255589]: 2025-10-02 12:53:15.17559784 +0000 UTC m=+0.082871972 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, config_id=ovn_controller, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, container_name=ovn_controller, managed_by=edpm_ansible)
Oct 02 12:53:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:53:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:53:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:53:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:53:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:53:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster cpu, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:53:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:53:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:53:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:53:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:53:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:53:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.allocation, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:53:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:53:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:53:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:53:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:53:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:53:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.iops, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:53:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:53:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:53:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:53:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:53:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:53:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:53:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:53:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:53:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:53:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.capacity, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:53:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:53:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:53:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:53:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:53:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:53:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:53:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:53:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:53:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:53:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:53:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:53:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:53:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:53:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:53:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:53:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:53:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:53:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:53:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:53:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster memory.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:53:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:53:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:53:19 compute-0 nova_compute[192079]: 2025-10-02 12:53:19.393 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:53:19 compute-0 nova_compute[192079]: 2025-10-02 12:53:19.858 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:53:23 compute-0 podman[255652]: 2025-10-02 12:53:23.13174338 +0000 UTC m=+0.050477518 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, container_name=ceilometer_agent_compute, 
io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible)
Oct 02 12:53:24 compute-0 nova_compute[192079]: 2025-10-02 12:53:24.394 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:53:24 compute-0 nova_compute[192079]: 2025-10-02 12:53:24.859 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:53:29 compute-0 nova_compute[192079]: 2025-10-02 12:53:29.396 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:53:29 compute-0 nova_compute[192079]: 2025-10-02 12:53:29.860 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:53:30 compute-0 podman[255672]: 2025-10-02 12:53:30.148957038 +0000 UTC m=+0.066819494 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, io.openshift.expose-services=, maintainer=Red Hat, Inc., managed_by=edpm_ansible, architecture=x86_64, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, vcs-type=git, config_id=edpm, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., version=9.6, com.redhat.component=ubi9-minimal-container, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, 
url=https://catalog.redhat.com/en/search?searchType=containers, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, release=1755695350, name=ubi9-minimal, build-date=2025-08-20T13:12:41, container_name=openstack_network_exporter, distribution-scope=public, io.openshift.tags=minimal rhel9, vendor=Red Hat, Inc., description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.buildah.version=1.33.7)
Oct 02 12:53:30 compute-0 podman[255673]: 2025-10-02 12:53:30.169240111 +0000 UTC m=+0.066613038 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, config_id=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, tcib_managed=true, container_name=multipathd, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']})
Oct 02 12:53:34 compute-0 nova_compute[192079]: 2025-10-02 12:53:34.396 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:53:34 compute-0 nova_compute[192079]: 2025-10-02 12:53:34.862 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:53:37 compute-0 podman[255714]: 2025-10-02 12:53:37.129878276 +0000 UTC m=+0.048124064 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>)
Oct 02 12:53:37 compute-0 podman[255715]: 2025-10-02 12:53:37.13881624 +0000 UTC m=+0.052328659 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, tcib_managed=true, container_name=iscsid, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=iscsid, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:53:39 compute-0 nova_compute[192079]: 2025-10-02 12:53:39.397 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:53:39 compute-0 nova_compute[192079]: 2025-10-02 12:53:39.863 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:53:44 compute-0 nova_compute[192079]: 2025-10-02 12:53:44.398 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:53:44 compute-0 nova_compute[192079]: 2025-10-02 12:53:44.907 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:53:46 compute-0 podman[255759]: 2025-10-02 12:53:46.152886507 +0000 UTC m=+0.062570827 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, config_id=ovn_metadata_agent, 
tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, container_name=ovn_metadata_agent, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, tcib_managed=true)
Oct 02 12:53:46 compute-0 podman[255761]: 2025-10-02 12:53:46.169839531 +0000 UTC m=+0.067075741 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>)
Oct 02 12:53:46 compute-0 podman[255760]: 2025-10-02 12:53:46.183688418 +0000 UTC m=+0.088619858 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_id=ovn_controller, container_name=ovn_controller, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3)
Oct 02 12:53:49 compute-0 nova_compute[192079]: 2025-10-02 12:53:49.399 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:53:49 compute-0 nova_compute[192079]: 2025-10-02 12:53:49.908 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:53:52 compute-0 nova_compute[192079]: 2025-10-02 12:53:52.694 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:53:54 compute-0 podman[255828]: 2025-10-02 12:53:54.159928857 +0000 UTC m=+0.079794588 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm, container_name=ceilometer_agent_compute, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, managed_by=edpm_ansible, 
org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0)
Oct 02 12:53:54 compute-0 nova_compute[192079]: 2025-10-02 12:53:54.443 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:53:54 compute-0 nova_compute[192079]: 2025-10-02 12:53:54.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:53:54 compute-0 nova_compute[192079]: 2025-10-02 12:53:54.909 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:53:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:53:56.077 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=59, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=58) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 12:53:56 compute-0 nova_compute[192079]: 2025-10-02 12:53:56.077 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:53:56 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:53:56.078 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 3 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 12:53:57 compute-0 nova_compute[192079]: 2025-10-02 12:53:57.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:53:58 compute-0 nova_compute[192079]: 2025-10-02 12:53:58.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:53:58 compute-0 nova_compute[192079]: 2025-10-02 12:53:58.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:53:58 compute-0 nova_compute[192079]: 2025-10-02 12:53:58.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:53:59 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:53:59.080 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '59'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 12:53:59 compute-0 nova_compute[192079]: 2025-10-02 12:53:59.445 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:53:59 compute-0 nova_compute[192079]: 2025-10-02 12:53:59.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:53:59 compute-0 nova_compute[192079]: 2025-10-02 12:53:59.910 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:54:01 compute-0 podman[255850]: 2025-10-02 12:54:01.149948233 +0000 UTC m=+0.060647636 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_id=multipathd, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:54:01 compute-0 podman[255849]: 2025-10-02 12:54:01.162039653 +0000 UTC m=+0.078370770 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, version=9.6, maintainer=Red Hat, Inc., architecture=x86_64, io.openshift.expose-services=, name=ubi9-minimal, build-date=2025-08-20T13:12:41, config_id=edpm, container_name=openstack_network_exporter, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., distribution-scope=public, managed_by=edpm_ansible, io.openshift.tags=minimal rhel9, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., io.buildah.version=1.33.7, vendor=Red Hat, Inc., io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., com.redhat.component=ubi9-minimal-container, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, url=https://catalog.redhat.com/en/search?searchType=containers, release=1755695350, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, vcs-type=git)
Oct 02 12:54:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:54:02.263 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:54:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:54:02.263 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:54:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:54:02.263 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:54:02 compute-0 nova_compute[192079]: 2025-10-02 12:54:02.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_shelved_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:54:03 compute-0 nova_compute[192079]: 2025-10-02 12:54:03.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:54:03 compute-0 nova_compute[192079]: 2025-10-02 12:54:03.693 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:54:03 compute-0 nova_compute[192079]: 2025-10-02 12:54:03.693 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:54:03 compute-0 nova_compute[192079]: 2025-10-02 12:54:03.694 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:54:03 compute-0 nova_compute[192079]: 2025-10-02 12:54:03.694 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:54:03 compute-0 nova_compute[192079]: 2025-10-02 12:54:03.842 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:54:03 compute-0 nova_compute[192079]: 2025-10-02 12:54:03.842 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5739MB free_disk=73.27163314819336GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:54:03 compute-0 nova_compute[192079]: 2025-10-02 12:54:03.843 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:54:03 compute-0 nova_compute[192079]: 2025-10-02 12:54:03.843 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:54:03 compute-0 nova_compute[192079]: 2025-10-02 12:54:03.964 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:54:03 compute-0 nova_compute[192079]: 2025-10-02 12:54:03.964 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:54:03 compute-0 nova_compute[192079]: 2025-10-02 12:54:03.990 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:54:04 compute-0 nova_compute[192079]: 2025-10-02 12:54:04.006 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:54:04 compute-0 nova_compute[192079]: 2025-10-02 12:54:04.008 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:54:04 compute-0 nova_compute[192079]: 2025-10-02 12:54:04.008 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.165s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:54:04 compute-0 nova_compute[192079]: 2025-10-02 12:54:04.447 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:54:04 compute-0 nova_compute[192079]: 2025-10-02 12:54:04.971 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:54:08 compute-0 nova_compute[192079]: 2025-10-02 12:54:08.007 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:54:08 compute-0 podman[255890]: 2025-10-02 12:54:08.135656652 +0000 UTC m=+0.054864548 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:54:08 compute-0 podman[255891]: 2025-10-02 12:54:08.151183865 +0000 UTC m=+0.061034106 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, container_name=iscsid, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, tcib_managed=true, config_id=iscsid, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']})
Oct 02 12:54:08 compute-0 nova_compute[192079]: 2025-10-02 12:54:08.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:54:08 compute-0 nova_compute[192079]: 2025-10-02 12:54:08.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:54:08 compute-0 nova_compute[192079]: 2025-10-02 12:54:08.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:54:08 compute-0 nova_compute[192079]: 2025-10-02 12:54:08.695 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:54:09 compute-0 nova_compute[192079]: 2025-10-02 12:54:09.449 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:54:09 compute-0 nova_compute[192079]: 2025-10-02 12:54:09.973 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:54:14 compute-0 nova_compute[192079]: 2025-10-02 12:54:14.451 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:54:14 compute-0 nova_compute[192079]: 2025-10-02 12:54:14.975 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:54:17 compute-0 podman[255935]: 2025-10-02 12:54:17.13723623 +0000 UTC m=+0.046982742 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 12:54:17 compute-0 podman[255933]: 2025-10-02 12:54:17.140310654 +0000 UTC m=+0.055652539 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, 
org.label-schema.vendor=CentOS, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_managed=true, config_id=ovn_metadata_agent, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2)
Oct 02 12:54:17 compute-0 podman[255934]: 2025-10-02 12:54:17.176793629 +0000 UTC m=+0.087221860 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, container_name=ovn_controller, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:54:19 compute-0 nova_compute[192079]: 2025-10-02 12:54:19.453 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:54:20 compute-0 nova_compute[192079]: 2025-10-02 12:54:20.003 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:54:24 compute-0 nova_compute[192079]: 2025-10-02 12:54:24.455 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:54:25 compute-0 nova_compute[192079]: 2025-10-02 12:54:25.005 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:54:25 compute-0 podman[256000]: 2025-10-02 12:54:25.153183772 +0000 UTC m=+0.073326571 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm, 
container_name=ceilometer_agent_compute, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:54:29 compute-0 nova_compute[192079]: 2025-10-02 12:54:29.456 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:54:30 compute-0 nova_compute[192079]: 2025-10-02 12:54:30.008 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:54:32 compute-0 podman[256021]: 2025-10-02 12:54:32.144851463 +0000 UTC m=+0.057669645 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, container_name=multipathd, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, config_id=multipathd, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:54:32 compute-0 podman[256020]: 2025-10-02 12:54:32.151728211 +0000 UTC m=+0.068019027 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Red Hat, Inc., vcs-type=git, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, container_name=openstack_network_exporter, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.tags=minimal rhel9, name=ubi9-minimal, distribution-scope=public, io.buildah.version=1.33.7, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, architecture=x86_64, config_id=edpm, io.openshift.expose-services=, release=1755695350, url=https://catalog.redhat.com/en/search?searchType=containers, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, version=9.6, build-date=2025-08-20T13:12:41, managed_by=edpm_ansible, com.redhat.component=ubi9-minimal-container, vendor=Red Hat, Inc.)
Oct 02 12:54:34 compute-0 nova_compute[192079]: 2025-10-02 12:54:34.457 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:54:35 compute-0 nova_compute[192079]: 2025-10-02 12:54:35.010 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:54:39 compute-0 podman[256060]: 2025-10-02 12:54:39.138534849 +0000 UTC m=+0.050500309 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']})
Oct 02 12:54:39 compute-0 podman[256061]: 2025-10-02 12:54:39.157450424 +0000 UTC m=+0.064631804 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, tcib_managed=true, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=iscsid, container_name=iscsid, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0)
Oct 02 12:54:39 compute-0 nova_compute[192079]: 2025-10-02 12:54:39.458 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:54:40 compute-0 nova_compute[192079]: 2025-10-02 12:54:40.012 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:54:43 compute-0 nova_compute[192079]: 2025-10-02 12:54:43.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_incomplete_migrations run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:54:43 compute-0 nova_compute[192079]: 2025-10-02 12:54:43.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Cleaning up deleted instances with incomplete migration  _cleanup_incomplete_migrations /usr/lib/python3.9/site-packages/nova/compute/manager.py:11183
Oct 02 12:54:44 compute-0 nova_compute[192079]: 2025-10-02 12:54:44.460 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:54:45 compute-0 nova_compute[192079]: 2025-10-02 12:54:45.013 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:54:48 compute-0 podman[256106]: 2025-10-02 12:54:48.164000257 +0000 UTC m=+0.070018532 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, 
container_name=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=ovn_metadata_agent, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001)
Oct 02 12:54:48 compute-0 podman[256108]: 2025-10-02 12:54:48.176771886 +0000 UTC m=+0.085727941 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>)
Oct 02 12:54:48 compute-0 podman[256107]: 2025-10-02 12:54:48.187857518 +0000 UTC m=+0.100661447 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, config_id=ovn_controller, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_managed=true, container_name=ovn_controller, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']})
Oct 02 12:54:49 compute-0 nova_compute[192079]: 2025-10-02 12:54:49.463 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:54:49 compute-0 nova_compute[192079]: 2025-10-02 12:54:49.928 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_running_deleted_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:54:50 compute-0 nova_compute[192079]: 2025-10-02 12:54:50.016 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:54:54 compute-0 nova_compute[192079]: 2025-10-02 12:54:54.463 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:54:54 compute-0 nova_compute[192079]: 2025-10-02 12:54:54.662 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:54:55 compute-0 nova_compute[192079]: 2025-10-02 12:54:55.018 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:54:56 compute-0 podman[256173]: 2025-10-02 12:54:56.132164577 +0000 UTC m=+0.050707495 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=ceilometer_agent_compute, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', 
'/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, io.buildah.version=1.41.3, managed_by=edpm_ansible)
Oct 02 12:54:56 compute-0 nova_compute[192079]: 2025-10-02 12:54:56.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:54:58 compute-0 nova_compute[192079]: 2025-10-02 12:54:58.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:54:59 compute-0 nova_compute[192079]: 2025-10-02 12:54:59.469 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:55:00 compute-0 nova_compute[192079]: 2025-10-02 12:55:00.020 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:55:00 compute-0 nova_compute[192079]: 2025-10-02 12:55:00.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:55:00 compute-0 nova_compute[192079]: 2025-10-02 12:55:00.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:55:00 compute-0 nova_compute[192079]: 2025-10-02 12:55:00.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:55:00 compute-0 nova_compute[192079]: 2025-10-02 12:55:00.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:55:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:55:02.266 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:55:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:55:02.266 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:55:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:55:02.266 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:55:03 compute-0 podman[256193]: 2025-10-02 12:55:03.133249055 +0000 UTC m=+0.051924028 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, distribution-scope=public, architecture=x86_64, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, version=9.6, config_id=edpm, name=ubi9-minimal, vcs-type=git, com.redhat.component=ubi9-minimal-container, maintainer=Red Hat, Inc., release=1755695350, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.expose-services=, vendor=Red Hat, Inc., io.openshift.tags=minimal rhel9, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, container_name=openstack_network_exporter, 
managed_by=edpm_ansible, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., build-date=2025-08-20T13:12:41, io.buildah.version=1.33.7, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., url=https://catalog.redhat.com/en/search?searchType=containers)
Oct 02 12:55:03 compute-0 podman[256194]: 2025-10-02 12:55:03.161930857 +0000 UTC m=+0.077555797 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=multipathd, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, config_id=multipathd, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS)
Oct 02 12:55:04 compute-0 nova_compute[192079]: 2025-10-02 12:55:04.472 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:55:04 compute-0 nova_compute[192079]: 2025-10-02 12:55:04.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:55:04 compute-0 nova_compute[192079]: 2025-10-02 12:55:04.728 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:55:04 compute-0 nova_compute[192079]: 2025-10-02 12:55:04.729 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:55:04 compute-0 nova_compute[192079]: 2025-10-02 12:55:04.729 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:55:04 compute-0 nova_compute[192079]: 2025-10-02 12:55:04.729 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:55:04 compute-0 nova_compute[192079]: 2025-10-02 12:55:04.869 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:55:04 compute-0 nova_compute[192079]: 2025-10-02 12:55:04.871 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5738MB free_disk=73.27256774902344GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:55:04 compute-0 nova_compute[192079]: 2025-10-02 12:55:04.871 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:55:04 compute-0 nova_compute[192079]: 2025-10-02 12:55:04.871 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:55:04 compute-0 nova_compute[192079]: 2025-10-02 12:55:04.993 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:55:04 compute-0 nova_compute[192079]: 2025-10-02 12:55:04.994 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:55:05 compute-0 nova_compute[192079]: 2025-10-02 12:55:05.022 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:55:05 compute-0 nova_compute[192079]: 2025-10-02 12:55:05.034 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:55:05 compute-0 nova_compute[192079]: 2025-10-02 12:55:05.154 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:55:05 compute-0 nova_compute[192079]: 2025-10-02 12:55:05.156 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:55:05 compute-0 nova_compute[192079]: 2025-10-02 12:55:05.156 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.285s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:55:09 compute-0 nova_compute[192079]: 2025-10-02 12:55:09.158 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:55:09 compute-0 nova_compute[192079]: 2025-10-02 12:55:09.474 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:55:10 compute-0 nova_compute[192079]: 2025-10-02 12:55:10.024 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:55:10 compute-0 podman[256233]: 2025-10-02 12:55:10.140763856 +0000 UTC m=+0.053202873 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:55:10 compute-0 podman[256234]: 2025-10-02 12:55:10.149624428 +0000 UTC m=+0.057947512 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, container_name=iscsid, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_managed=true, config_id=iscsid, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:55:10 compute-0 nova_compute[192079]: 2025-10-02 12:55:10.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:55:10 compute-0 nova_compute[192079]: 2025-10-02 12:55:10.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:55:10 compute-0 nova_compute[192079]: 2025-10-02 12:55:10.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:55:10 compute-0 nova_compute[192079]: 2025-10-02 12:55:10.704 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:55:13 compute-0 nova_compute[192079]: 2025-10-02 12:55:13.699 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:55:14 compute-0 nova_compute[192079]: 2025-10-02 12:55:14.477 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:55:15 compute-0 nova_compute[192079]: 2025-10-02 12:55:15.026 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:55:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:55:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster memory.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:55:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:55:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:55:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:55:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:55:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:55:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.iops, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:55:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:55:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.capacity, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:55:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:55:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:55:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:55:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:55:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:55:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:55:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:55:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:55:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.allocation, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:55:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:55:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:55:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:55:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:55:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:55:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:55:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster cpu, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:55:19 compute-0 podman[256277]: 2025-10-02 12:55:19.135788176 +0000 UTC m=+0.049348127 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:55:19 compute-0 podman[256275]: 2025-10-02 12:55:19.150729754 +0000 UTC m=+0.070790973 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, org.label-schema.license=GPLv2, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_metadata_agent, container_name=ovn_metadata_agent)
Oct 02 12:55:19 compute-0 podman[256276]: 2025-10-02 12:55:19.185122142 +0000 UTC m=+0.100158774 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_managed=true, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:55:19 compute-0 nova_compute[192079]: 2025-10-02 12:55:19.478 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:55:19 compute-0 sshd-session[256342]: Accepted publickey for zuul from 192.168.122.10 port 51138 ssh2: ECDSA SHA256:tAEsFSop2MjuMQQ/UqKU2uCkxqWXGfsM5crdhd6tlI4
Oct 02 12:55:19 compute-0 systemd-logind[827]: New session 76 of user zuul.
Oct 02 12:55:19 compute-0 systemd[1]: Started Session 76 of User zuul.
Oct 02 12:55:19 compute-0 sshd-session[256342]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Oct 02 12:55:19 compute-0 sudo[256346]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/bash -c 'rm -rf /var/tmp/sos-osp && mkdir /var/tmp/sos-osp && sos report --batch --all-logs --tmp-dir=/var/tmp/sos-osp -p container,openstack_edpm,system,storage,virt'
Oct 02 12:55:19 compute-0 sudo[256346]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 12:55:20 compute-0 nova_compute[192079]: 2025-10-02 12:55:20.028 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:55:24 compute-0 ovs-vsctl[256516]: ovs|00001|db_ctl_base|ERR|no key "dpdk-init" in Open_vSwitch record "." column other_config
Oct 02 12:55:24 compute-0 nova_compute[192079]: 2025-10-02 12:55:24.474 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_power_states run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:55:24 compute-0 nova_compute[192079]: 2025-10-02 12:55:24.479 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:55:24 compute-0 systemd[1]: proc-sys-fs-binfmt_misc.automount: Got automount request for /proc/sys/fs/binfmt_misc, triggered by 256370 (sos)
Oct 02 12:55:24 compute-0 systemd[1]: Mounting Arbitrary Executable File Formats File System...
Oct 02 12:55:24 compute-0 systemd[1]: Mounted Arbitrary Executable File Formats File System.
Oct 02 12:55:25 compute-0 nova_compute[192079]: 2025-10-02 12:55:25.030 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:55:25 compute-0 virtqemud[191807]: Failed to connect socket to '/var/run/libvirt/virtnetworkd-sock-ro': No such file or directory
Oct 02 12:55:25 compute-0 virtqemud[191807]: Failed to connect socket to '/var/run/libvirt/virtnwfilterd-sock-ro': No such file or directory
Oct 02 12:55:25 compute-0 virtqemud[191807]: Failed to connect socket to '/var/run/libvirt/virtstoraged-sock-ro': No such file or directory
Oct 02 12:55:25 compute-0 kernel: block vda: the capability attribute has been deprecated.
Oct 02 12:55:26 compute-0 crontab[256944]: (root) LIST (root)
Oct 02 12:55:27 compute-0 podman[257003]: 2025-10-02 12:55:27.148765767 +0000 UTC m=+0.060287356 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm, container_name=ceilometer_agent_compute, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, 
org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3)
Oct 02 12:55:28 compute-0 systemd[1]: Starting Hostname Service...
Oct 02 12:55:28 compute-0 systemd[1]: Started Hostname Service.
Oct 02 12:55:29 compute-0 nova_compute[192079]: 2025-10-02 12:55:29.480 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:55:30 compute-0 nova_compute[192079]: 2025-10-02 12:55:30.033 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:55:34 compute-0 podman[257678]: 2025-10-02 12:55:34.007624015 +0000 UTC m=+0.066225478 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=multipathd, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:55:34 compute-0 podman[257676]: 2025-10-02 12:55:34.035922277 +0000 UTC m=+0.094571561 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, vcs-type=git, com.redhat.component=ubi9-minimal-container, architecture=x86_64, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., version=9.6, build-date=2025-08-20T13:12:41, url=https://catalog.redhat.com/en/search?searchType=containers, distribution-scope=public, managed_by=edpm_ansible, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.buildah.version=1.33.7, name=ubi9-minimal, release=1755695350, maintainer=Red Hat, Inc., vendor=Red Hat, Inc., io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. 
This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, io.openshift.expose-services=, io.openshift.tags=minimal rhel9, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., config_id=edpm, container_name=openstack_network_exporter)
Oct 02 12:55:34 compute-0 nova_compute[192079]: 2025-10-02 12:55:34.481 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:55:34 compute-0 ovs-appctl[258021]: ovs|00001|daemon_unix|WARN|/var/run/openvswitch/ovs-monitor-ipsec.pid: open: No such file or directory
Oct 02 12:55:34 compute-0 ovs-appctl[258025]: ovs|00001|daemon_unix|WARN|/var/run/openvswitch/ovs-monitor-ipsec.pid: open: No such file or directory
Oct 02 12:55:34 compute-0 ovs-appctl[258029]: ovs|00001|daemon_unix|WARN|/var/run/openvswitch/ovs-monitor-ipsec.pid: open: No such file or directory
Oct 02 12:55:35 compute-0 nova_compute[192079]: 2025-10-02 12:55:35.034 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:55:39 compute-0 podman[206352]: time="2025-10-02T12:55:39Z" level=info msg="List containers: received `last` parameter - overwriting `limit`"
Oct 02 12:55:39 compute-0 podman[206352]: @ - - [02/Oct/2025:12:55:39 +0000] "GET /v4.9.3/libpod/containers/json?all=true&external=false&last=0&namespace=false&size=true&sync=false HTTP/1.1" 200 25331 "" "Go-http-client/1.1"
Oct 02 12:55:39 compute-0 nova_compute[192079]: 2025-10-02 12:55:39.483 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:55:39 compute-0 nova_compute[192079]: 2025-10-02 12:55:39.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:55:40 compute-0 nova_compute[192079]: 2025-10-02 12:55:40.036 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:55:40 compute-0 podman[259249]: 2025-10-02 12:55:40.694821881 +0000 UTC m=+0.063800412 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter)
Oct 02 12:55:40 compute-0 podman[259250]: 2025-10-02 12:55:40.743840329 +0000 UTC m=+0.104449431 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=iscsid, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.schema-version=1.0)
Oct 02 12:55:42 compute-0 virtqemud[191807]: Failed to connect socket to '/var/run/libvirt/virtstoraged-sock-ro': No such file or directory
Oct 02 12:55:44 compute-0 systemd[1]: Starting Time & Date Service...
Oct 02 12:55:44 compute-0 systemd[1]: Started Time & Date Service.
Oct 02 12:55:44 compute-0 nova_compute[192079]: 2025-10-02 12:55:44.486 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:55:45 compute-0 nova_compute[192079]: 2025-10-02 12:55:45.038 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:55:45 compute-0 nova_compute[192079]: 2025-10-02 12:55:45.709 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._run_pending_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:55:45 compute-0 nova_compute[192079]: 2025-10-02 12:55:45.710 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Cleaning up deleted instances _run_pending_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:11145
Oct 02 12:55:45 compute-0 nova_compute[192079]: 2025-10-02 12:55:45.918 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] There are 0 instances to clean _run_pending_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:11154
Oct 02 12:55:49 compute-0 podman[259778]: 2025-10-02 12:55:49.279901548 +0000 UTC m=+0.056410200 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, maintainer=OpenStack 
Kubernetes Operator team, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_metadata_agent, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:55:49 compute-0 podman[259780]: 2025-10-02 12:55:49.285737647 +0000 UTC m=+0.061051817 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:55:49 compute-0 podman[259779]: 2025-10-02 12:55:49.305000302 +0000 UTC m=+0.081670739 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, config_id=ovn_controller, container_name=ovn_controller, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:55:49 compute-0 nova_compute[192079]: 2025-10-02 12:55:49.524 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:55:50 compute-0 nova_compute[192079]: 2025-10-02 12:55:50.039 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:55:54 compute-0 nova_compute[192079]: 2025-10-02 12:55:54.527 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:55:55 compute-0 nova_compute[192079]: 2025-10-02 12:55:55.041 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:55:55 compute-0 nova_compute[192079]: 2025-10-02 12:55:55.868 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:55:57 compute-0 podman[259844]: 2025-10-02 12:55:57.274161627 +0000 UTC m=+0.060116700 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=edpm, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', 
'/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute)
Oct 02 12:55:58 compute-0 nova_compute[192079]: 2025-10-02 12:55:58.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:55:59 compute-0 nova_compute[192079]: 2025-10-02 12:55:59.530 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:56:00 compute-0 nova_compute[192079]: 2025-10-02 12:56:00.044 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:56:00 compute-0 nova_compute[192079]: 2025-10-02 12:56:00.663 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:56:00 compute-0 nova_compute[192079]: 2025-10-02 12:56:00.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:56:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:56:02.267 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:56:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:56:02.267 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:56:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:56:02.267 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:56:02 compute-0 nova_compute[192079]: 2025-10-02 12:56:02.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:56:02 compute-0 nova_compute[192079]: 2025-10-02 12:56:02.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:56:02 compute-0 nova_compute[192079]: 2025-10-02 12:56:02.664 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:56:04 compute-0 podman[259866]: 2025-10-02 12:56:04.140385337 +0000 UTC m=+0.051019653 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, container_name=multipathd, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=multipathd, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.license=GPLv2)
Oct 02 12:56:04 compute-0 podman[259865]: 2025-10-02 12:56:04.165506262 +0000 UTC m=+0.079703575 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.openshift.expose-services=, vendor=Red Hat, Inc., io.openshift.tags=minimal rhel9, com.redhat.component=ubi9-minimal-container, release=1755695350, vcs-type=git, architecture=x86_64, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., url=https://catalog.redhat.com/en/search?searchType=containers, version=9.6, container_name=openstack_network_exporter, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, maintainer=Red Hat, Inc., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', 
'/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, distribution-scope=public, io.buildah.version=1.33.7, name=ubi9-minimal, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, build-date=2025-08-20T13:12:41, managed_by=edpm_ansible, config_id=edpm)
Oct 02 12:56:04 compute-0 nova_compute[192079]: 2025-10-02 12:56:04.531 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:56:05 compute-0 nova_compute[192079]: 2025-10-02 12:56:05.044 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:56:05 compute-0 nova_compute[192079]: 2025-10-02 12:56:05.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:56:05 compute-0 nova_compute[192079]: 2025-10-02 12:56:05.708 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:56:05 compute-0 nova_compute[192079]: 2025-10-02 12:56:05.709 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:56:05 compute-0 nova_compute[192079]: 2025-10-02 12:56:05.709 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:56:05 compute-0 nova_compute[192079]: 2025-10-02 12:56:05.709 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:56:05 compute-0 nova_compute[192079]: 2025-10-02 12:56:05.845 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:56:05 compute-0 nova_compute[192079]: 2025-10-02 12:56:05.845 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5455MB free_disk=72.9134521484375GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:56:05 compute-0 nova_compute[192079]: 2025-10-02 12:56:05.846 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:56:05 compute-0 nova_compute[192079]: 2025-10-02 12:56:05.846 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:56:06 compute-0 nova_compute[192079]: 2025-10-02 12:56:06.006 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:56:06 compute-0 nova_compute[192079]: 2025-10-02 12:56:06.007 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:56:06 compute-0 nova_compute[192079]: 2025-10-02 12:56:06.041 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:56:06 compute-0 nova_compute[192079]: 2025-10-02 12:56:06.080 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:56:06 compute-0 nova_compute[192079]: 2025-10-02 12:56:06.100 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:56:06 compute-0 nova_compute[192079]: 2025-10-02 12:56:06.101 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.255s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:56:09 compute-0 sudo[256346]: pam_unix(sudo:session): session closed for user root
Oct 02 12:56:09 compute-0 sshd-session[256345]: Received disconnect from 192.168.122.10 port 51138:11: disconnected by user
Oct 02 12:56:09 compute-0 sshd-session[256345]: Disconnected from user zuul 192.168.122.10 port 51138
Oct 02 12:56:09 compute-0 sshd-session[256342]: pam_unix(sshd:session): session closed for user zuul
Oct 02 12:56:09 compute-0 systemd[1]: session-76.scope: Deactivated successfully.
Oct 02 12:56:09 compute-0 systemd[1]: session-76.scope: Consumed 1min 19.836s CPU time, 642.5M memory peak, read 162.9M from disk, written 21.8M to disk.
Oct 02 12:56:09 compute-0 systemd-logind[827]: Session 76 logged out. Waiting for processes to exit.
Oct 02 12:56:09 compute-0 systemd-logind[827]: Removed session 76.
Oct 02 12:56:09 compute-0 sshd-session[259905]: Accepted publickey for zuul from 192.168.122.10 port 51640 ssh2: ECDSA SHA256:tAEsFSop2MjuMQQ/UqKU2uCkxqWXGfsM5crdhd6tlI4
Oct 02 12:56:09 compute-0 systemd-logind[827]: New session 77 of user zuul.
Oct 02 12:56:09 compute-0 systemd[1]: Started Session 77 of User zuul.
Oct 02 12:56:09 compute-0 sshd-session[259905]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Oct 02 12:56:09 compute-0 sudo[259909]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/cat /var/tmp/sos-osp/sosreport-compute-0-2025-10-02-qecnupz.tar.xz
Oct 02 12:56:09 compute-0 sudo[259909]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 12:56:09 compute-0 sudo[259909]: pam_unix(sudo:session): session closed for user root
Oct 02 12:56:09 compute-0 sshd-session[259908]: Received disconnect from 192.168.122.10 port 51640:11: disconnected by user
Oct 02 12:56:09 compute-0 sshd-session[259908]: Disconnected from user zuul 192.168.122.10 port 51640
Oct 02 12:56:09 compute-0 nova_compute[192079]: 2025-10-02 12:56:09.531 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:56:09 compute-0 sshd-session[259905]: pam_unix(sshd:session): session closed for user zuul
Oct 02 12:56:09 compute-0 systemd[1]: session-77.scope: Deactivated successfully.
Oct 02 12:56:09 compute-0 systemd-logind[827]: Session 77 logged out. Waiting for processes to exit.
Oct 02 12:56:09 compute-0 systemd-logind[827]: Removed session 77.
Oct 02 12:56:09 compute-0 sshd-session[259934]: Accepted publickey for zuul from 192.168.122.10 port 51644 ssh2: ECDSA SHA256:tAEsFSop2MjuMQQ/UqKU2uCkxqWXGfsM5crdhd6tlI4
Oct 02 12:56:09 compute-0 systemd-logind[827]: New session 78 of user zuul.
Oct 02 12:56:09 compute-0 systemd[1]: Started Session 78 of User zuul.
Oct 02 12:56:09 compute-0 sshd-session[259934]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Oct 02 12:56:09 compute-0 sudo[259938]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/rm -rf /var/tmp/sos-osp
Oct 02 12:56:09 compute-0 sudo[259938]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 12:56:09 compute-0 sudo[259938]: pam_unix(sudo:session): session closed for user root
Oct 02 12:56:09 compute-0 sshd-session[259937]: Received disconnect from 192.168.122.10 port 51644:11: disconnected by user
Oct 02 12:56:09 compute-0 sshd-session[259937]: Disconnected from user zuul 192.168.122.10 port 51644
Oct 02 12:56:09 compute-0 sshd-session[259934]: pam_unix(sshd:session): session closed for user zuul
Oct 02 12:56:09 compute-0 systemd[1]: session-78.scope: Deactivated successfully.
Oct 02 12:56:09 compute-0 systemd-logind[827]: Session 78 logged out. Waiting for processes to exit.
Oct 02 12:56:09 compute-0 systemd-logind[827]: Removed session 78.
Oct 02 12:56:10 compute-0 nova_compute[192079]: 2025-10-02 12:56:10.045 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:56:11 compute-0 nova_compute[192079]: 2025-10-02 12:56:11.102 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:56:11 compute-0 podman[259963]: 2025-10-02 12:56:11.140815357 +0000 UTC m=+0.055087423 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']})
Oct 02 12:56:11 compute-0 podman[259964]: 2025-10-02 12:56:11.140810117 +0000 UTC m=+0.057170960 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid, managed_by=edpm_ansible, container_name=iscsid, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001)
Oct 02 12:56:11 compute-0 nova_compute[192079]: 2025-10-02 12:56:11.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:56:11 compute-0 nova_compute[192079]: 2025-10-02 12:56:11.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:56:11 compute-0 nova_compute[192079]: 2025-10-02 12:56:11.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:56:11 compute-0 nova_compute[192079]: 2025-10-02 12:56:11.771 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:56:14 compute-0 systemd[1]: systemd-timedated.service: Deactivated successfully.
Oct 02 12:56:14 compute-0 systemd[1]: systemd-hostnamed.service: Deactivated successfully.
Oct 02 12:56:14 compute-0 nova_compute[192079]: 2025-10-02 12:56:14.533 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:56:15 compute-0 nova_compute[192079]: 2025-10-02 12:56:15.046 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:56:19 compute-0 nova_compute[192079]: 2025-10-02 12:56:19.534 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:56:20 compute-0 nova_compute[192079]: 2025-10-02 12:56:20.048 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:56:20 compute-0 podman[260010]: 2025-10-02 12:56:20.144302019 +0000 UTC m=+0.052890964 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, container_name=ovn_metadata_agent, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=ovn_metadata_agent, 
org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, managed_by=edpm_ansible, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001)
Oct 02 12:56:20 compute-0 podman[260012]: 2025-10-02 12:56:20.168447318 +0000 UTC m=+0.071575664 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 12:56:20 compute-0 podman[260011]: 2025-10-02 12:56:20.174946735 +0000 UTC m=+0.080535208 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_managed=true, config_id=ovn_controller, container_name=ovn_controller, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible)
Oct 02 12:56:24 compute-0 nova_compute[192079]: 2025-10-02 12:56:24.537 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:56:25 compute-0 nova_compute[192079]: 2025-10-02 12:56:25.051 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:56:28 compute-0 podman[260077]: 2025-10-02 12:56:28.167100377 +0000 UTC m=+0.075575723 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, tcib_managed=true, container_name=ceilometer_agent_compute, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', 
'/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:56:29 compute-0 nova_compute[192079]: 2025-10-02 12:56:29.537 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:56:30 compute-0 nova_compute[192079]: 2025-10-02 12:56:30.052 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:56:34 compute-0 nova_compute[192079]: 2025-10-02 12:56:34.537 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:56:35 compute-0 nova_compute[192079]: 2025-10-02 12:56:35.054 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:56:35 compute-0 podman[260097]: 2025-10-02 12:56:35.146068992 +0000 UTC m=+0.054562119 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., version=9.6, name=ubi9-minimal, url=https://catalog.redhat.com/en/search?searchType=containers, vendor=Red Hat, Inc., architecture=x86_64, release=1755695350, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., maintainer=Red Hat, Inc., com.redhat.component=ubi9-minimal-container, io.openshift.expose-services=, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.buildah.version=1.33.7, container_name=openstack_network_exporter, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, config_id=edpm, io.openshift.tags=minimal rhel9, managed_by=edpm_ansible, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, vcs-type=git, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, build-date=2025-08-20T13:12:41, distribution-scope=public)
Oct 02 12:56:35 compute-0 podman[260098]: 2025-10-02 12:56:35.162683025 +0000 UTC m=+0.063466232 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_id=multipathd, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:56:39 compute-0 nova_compute[192079]: 2025-10-02 12:56:39.538 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:56:40 compute-0 nova_compute[192079]: 2025-10-02 12:56:40.055 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:56:42 compute-0 podman[260139]: 2025-10-02 12:56:42.135929713 +0000 UTC m=+0.049509102 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter)
Oct 02 12:56:42 compute-0 podman[260140]: 2025-10-02 12:56:42.153024351 +0000 UTC m=+0.062786896 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid, io.buildah.version=1.41.3, tcib_managed=true, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, container_name=iscsid, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:56:44 compute-0 nova_compute[192079]: 2025-10-02 12:56:44.540 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:56:45 compute-0 nova_compute[192079]: 2025-10-02 12:56:45.058 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:56:49 compute-0 nova_compute[192079]: 2025-10-02 12:56:49.542 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:56:50 compute-0 nova_compute[192079]: 2025-10-02 12:56:50.059 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:56:51 compute-0 podman[260183]: 2025-10-02 12:56:51.143892826 +0000 UTC m=+0.054104396 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>)
Oct 02 12:56:51 compute-0 podman[260181]: 2025-10-02 12:56:51.165904977 +0000 UTC m=+0.078483432 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, config_id=ovn_metadata_agent, maintainer=OpenStack Kubernetes 
Operator team, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_metadata_agent)
Oct 02 12:56:51 compute-0 podman[260182]: 2025-10-02 12:56:51.167281744 +0000 UTC m=+0.080410474 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_controller, io.buildah.version=1.41.3, tcib_managed=true, managed_by=edpm_ansible, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_id=ovn_controller)
Oct 02 12:56:54 compute-0 nova_compute[192079]: 2025-10-02 12:56:54.543 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:56:55 compute-0 nova_compute[192079]: 2025-10-02 12:56:55.060 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:56:56 compute-0 nova_compute[192079]: 2025-10-02 12:56:56.766 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:56:58 compute-0 nova_compute[192079]: 2025-10-02 12:56:58.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:56:59 compute-0 podman[260251]: 2025-10-02 12:56:59.139914984 +0000 UTC m=+0.059369541 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.license=GPLv2, config_id=edpm, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, 
container_name=ceilometer_agent_compute, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible)
Oct 02 12:56:59 compute-0 nova_compute[192079]: 2025-10-02 12:56:59.547 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:57:00 compute-0 nova_compute[192079]: 2025-10-02 12:57:00.062 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:57:00 compute-0 nova_compute[192079]: 2025-10-02 12:57:00.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:57:01 compute-0 nova_compute[192079]: 2025-10-02 12:57:01.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:57:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:57:02.268 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:57:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:57:02.268 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:57:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:57:02.268 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:57:03 compute-0 nova_compute[192079]: 2025-10-02 12:57:03.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:57:04 compute-0 nova_compute[192079]: 2025-10-02 12:57:04.568 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:57:04 compute-0 nova_compute[192079]: 2025-10-02 12:57:04.663 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:57:04 compute-0 nova_compute[192079]: 2025-10-02 12:57:04.664 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:57:05 compute-0 nova_compute[192079]: 2025-10-02 12:57:05.064 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:57:06 compute-0 podman[260271]: 2025-10-02 12:57:06.133684743 +0000 UTC m=+0.049883333 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_managed=true, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=multipathd)
Oct 02 12:57:06 compute-0 podman[260270]: 2025-10-02 12:57:06.134451843 +0000 UTC m=+0.053452449 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, architecture=x86_64, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.buildah.version=1.33.7, com.redhat.component=ubi9-minimal-container, maintainer=Red Hat, Inc., summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, distribution-scope=public, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, name=ubi9-minimal, release=1755695350, config_id=edpm, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, 
io.openshift.expose-services=, managed_by=edpm_ansible, version=9.6, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.tags=minimal rhel9, build-date=2025-08-20T13:12:41, container_name=openstack_network_exporter, url=https://catalog.redhat.com/en/search?searchType=containers, vendor=Red Hat, Inc., vcs-type=git)
Oct 02 12:57:07 compute-0 nova_compute[192079]: 2025-10-02 12:57:07.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:57:07 compute-0 nova_compute[192079]: 2025-10-02 12:57:07.705 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:57:07 compute-0 nova_compute[192079]: 2025-10-02 12:57:07.705 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:57:07 compute-0 nova_compute[192079]: 2025-10-02 12:57:07.705 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:57:07 compute-0 nova_compute[192079]: 2025-10-02 12:57:07.706 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:57:07 compute-0 nova_compute[192079]: 2025-10-02 12:57:07.852 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:57:07 compute-0 nova_compute[192079]: 2025-10-02 12:57:07.853 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5689MB free_disk=73.27230834960938GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:57:07 compute-0 nova_compute[192079]: 2025-10-02 12:57:07.853 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:57:07 compute-0 nova_compute[192079]: 2025-10-02 12:57:07.854 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:57:07 compute-0 nova_compute[192079]: 2025-10-02 12:57:07.930 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:57:07 compute-0 nova_compute[192079]: 2025-10-02 12:57:07.931 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:57:07 compute-0 nova_compute[192079]: 2025-10-02 12:57:07.951 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing inventories for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708 _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:804
Oct 02 12:57:07 compute-0 nova_compute[192079]: 2025-10-02 12:57:07.986 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Updating ProviderTree inventory for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 from _refresh_and_get_inventory using data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} _refresh_and_get_inventory /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:768
Oct 02 12:57:07 compute-0 nova_compute[192079]: 2025-10-02 12:57:07.986 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Updating inventory in ProviderTree for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 with inventory: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:176
Oct 02 12:57:08 compute-0 nova_compute[192079]: 2025-10-02 12:57:08.003 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing aggregate associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, aggregates: None _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:813
Oct 02 12:57:08 compute-0 nova_compute[192079]: 2025-10-02 12:57:08.039 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing trait associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, traits: COMPUTE_SECURITY_UEFI_SECURE_BOOT,COMPUTE_VIOMMU_MODEL_VIRTIO,COMPUTE_VIOMMU_MODEL_AUTO,COMPUTE_IMAGE_TYPE_AKI,COMPUTE_GRAPHICS_MODEL_VIRTIO,COMPUTE_NET_VIF_MODEL_PCNET,HW_CPU_X86_SSE42,COMPUTE_RESCUE_BFV,COMPUTE_VOLUME_EXTEND,COMPUTE_IMAGE_TYPE_QCOW2,COMPUTE_TRUSTED_CERTS,COMPUTE_SOCKET_PCI_NUMA_AFFINITY,COMPUTE_GRAPHICS_MODEL_CIRRUS,HW_CPU_X86_MMX,COMPUTE_STORAGE_BUS_VIRTIO,COMPUTE_NET_ATTACH_INTERFACE_WITH_TAG,COMPUTE_STORAGE_BUS_FDC,COMPUTE_STORAGE_BUS_USB,COMPUTE_NODE,HW_CPU_X86_SSSE3,HW_CPU_X86_SSE2,COMPUTE_GRAPHICS_MODEL_BOCHS,COMPUTE_NET_VIF_MODEL_E1000E,COMPUTE_IMAGE_TYPE_RAW,COMPUTE_NET_VIF_MODEL_NE2K_PCI,COMPUTE_IMAGE_TYPE_AMI,COMPUTE_VIOMMU_MODEL_INTEL,COMPUTE_SECURITY_TPM_2_0,COMPUTE_STORAGE_BUS_SCSI,COMPUTE_IMAGE_TYPE_ARI,COMPUTE_NET_VIF_MODEL_VMXNET3,COMPUTE_SECURITY_TPM_1_2,COMPUTE_NET_VIF_MODEL_E1000,HW_CPU_X86_SSE,COMPUTE_VOLUME_MULTI_ATTACH,COMPUTE_STORAGE_BUS_IDE,COMPUTE_GRAPHICS_MODEL_NONE,COMPUTE_VOLUME_ATTACH_WITH_TAG,COMPUTE_NET_VIF_MODEL_VIRTIO,HW_CPU_X86_SSE41,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_DEVICE_TAGGING,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_ACCELERATORS,COMPUTE_NET_VIF_MODEL_RTL8139,COMPUTE_GRAPHICS_MODEL_VGA,COMPUTE_STORAGE_BUS_SATA,COMPUTE_NET_VIF_MODEL_SPAPR_VLAN _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:825
Oct 02 12:57:08 compute-0 nova_compute[192079]: 2025-10-02 12:57:08.067 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:57:08 compute-0 nova_compute[192079]: 2025-10-02 12:57:08.121 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:57:08 compute-0 nova_compute[192079]: 2025-10-02 12:57:08.167 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:57:08 compute-0 nova_compute[192079]: 2025-10-02 12:57:08.167 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.313s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:57:09 compute-0 nova_compute[192079]: 2025-10-02 12:57:09.569 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:57:10 compute-0 nova_compute[192079]: 2025-10-02 12:57:10.066 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:57:11 compute-0 nova_compute[192079]: 2025-10-02 12:57:11.168 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:57:11 compute-0 nova_compute[192079]: 2025-10-02 12:57:11.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:57:11 compute-0 nova_compute[192079]: 2025-10-02 12:57:11.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:57:11 compute-0 nova_compute[192079]: 2025-10-02 12:57:11.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:57:11 compute-0 nova_compute[192079]: 2025-10-02 12:57:11.706 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:57:13 compute-0 podman[260307]: 2025-10-02 12:57:13.142351387 +0000 UTC m=+0.056965665 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter)
Oct 02 12:57:13 compute-0 podman[260308]: 2025-10-02 12:57:13.149477071 +0000 UTC m=+0.056128142 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, io.buildah.version=1.41.3, managed_by=edpm_ansible, container_name=iscsid, config_id=iscsid, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 12:57:13 compute-0 nova_compute[192079]: 2025-10-02 12:57:13.701 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:57:14 compute-0 nova_compute[192079]: 2025-10-02 12:57:14.572 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:57:15 compute-0 nova_compute[192079]: 2025-10-02 12:57:15.067 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:57:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:57:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:57:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.allocation, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:57:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.capacity, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:57:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:57:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:57:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster memory.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:57:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:57:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:57:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:57:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster cpu, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:57:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:57:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:57:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:57:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.iops, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:57:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:57:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:57:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:57:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:57:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:57:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:57:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:57:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:57:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:57:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:57:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:57:19 compute-0 nova_compute[192079]: 2025-10-02 12:57:19.574 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:57:20 compute-0 nova_compute[192079]: 2025-10-02 12:57:20.069 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:57:22 compute-0 podman[260354]: 2025-10-02 12:57:22.154612197 +0000 UTC m=+0.065832898 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=ovn_metadata_agent, io.buildah.version=1.41.3, container_name=ovn_metadata_agent, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 12:57:22 compute-0 podman[260356]: 2025-10-02 12:57:22.167566519 +0000 UTC m=+0.060986693 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 12:57:22 compute-0 podman[260355]: 2025-10-02 12:57:22.188783409 +0000 UTC m=+0.097093740 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, container_name=ovn_controller, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:57:24 compute-0 nova_compute[192079]: 2025-10-02 12:57:24.577 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:57:25 compute-0 nova_compute[192079]: 2025-10-02 12:57:25.070 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:57:29 compute-0 nova_compute[192079]: 2025-10-02 12:57:29.579 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:57:30 compute-0 nova_compute[192079]: 2025-10-02 12:57:30.071 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:57:30 compute-0 podman[260423]: 2025-10-02 12:57:30.139099411 +0000 UTC m=+0.054490537 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, container_name=ceilometer_agent_compute, managed_by=edpm_ansible, org.label-schema.build-date=20251001, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, 
tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=edpm)
Oct 02 12:57:34 compute-0 nova_compute[192079]: 2025-10-02 12:57:34.580 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:57:35 compute-0 nova_compute[192079]: 2025-10-02 12:57:35.073 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:57:37 compute-0 podman[260443]: 2025-10-02 12:57:37.127192994 +0000 UTC m=+0.046045698 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, com.redhat.component=ubi9-minimal-container, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, managed_by=edpm_ansible, architecture=x86_64, build-date=2025-08-20T13:12:41, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, vcs-type=git, config_id=edpm, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., distribution-scope=public, url=https://catalog.redhat.com/en/search?searchType=containers, vendor=Red Hat, Inc., container_name=openstack_network_exporter, maintainer=Red Hat, Inc., description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, io.openshift.expose-services=, version=9.6, release=1755695350, io.openshift.tags=minimal rhel9, io.buildah.version=1.33.7, name=ubi9-minimal)
Oct 02 12:57:37 compute-0 podman[260444]: 2025-10-02 12:57:37.131161562 +0000 UTC m=+0.046976983 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, config_id=multipathd, container_name=multipathd, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS)
Oct 02 12:57:39 compute-0 nova_compute[192079]: 2025-10-02 12:57:39.581 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:57:40 compute-0 nova_compute[192079]: 2025-10-02 12:57:40.075 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:57:44 compute-0 podman[260484]: 2025-10-02 12:57:44.124417207 +0000 UTC m=+0.043717893 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>)
Oct 02 12:57:44 compute-0 podman[260485]: 2025-10-02 12:57:44.140837345 +0000 UTC m=+0.055532156 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, config_id=iscsid)
Oct 02 12:57:44 compute-0 nova_compute[192079]: 2025-10-02 12:57:44.582 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:57:45 compute-0 nova_compute[192079]: 2025-10-02 12:57:45.077 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:57:49 compute-0 nova_compute[192079]: 2025-10-02 12:57:49.585 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:57:50 compute-0 nova_compute[192079]: 2025-10-02 12:57:50.077 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:57:53 compute-0 podman[260532]: 2025-10-02 12:57:53.143848028 +0000 UTC m=+0.055533954 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']})
Oct 02 12:57:53 compute-0 podman[260530]: 2025-10-02 12:57:53.151096026 +0000 UTC m=+0.059125142 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, 
config_id=ovn_metadata_agent, container_name=ovn_metadata_agent, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, tcib_managed=true)
Oct 02 12:57:53 compute-0 podman[260531]: 2025-10-02 12:57:53.170824708 +0000 UTC m=+0.084421837 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_managed=true, io.buildah.version=1.41.3, managed_by=edpm_ansible, config_id=ovn_controller, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:57:54 compute-0 nova_compute[192079]: 2025-10-02 12:57:54.586 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:57:55 compute-0 nova_compute[192079]: 2025-10-02 12:57:55.078 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:57:56 compute-0 nova_compute[192079]: 2025-10-02 12:57:56.677 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:57:59 compute-0 nova_compute[192079]: 2025-10-02 12:57:59.624 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:57:59 compute-0 nova_compute[192079]: 2025-10-02 12:57:59.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:58:00 compute-0 nova_compute[192079]: 2025-10-02 12:58:00.080 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:58:01 compute-0 podman[260596]: 2025-10-02 12:58:01.129455579 +0000 UTC m=+0.048028939 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, 
org.label-schema.vendor=CentOS, tcib_managed=true, config_id=edpm, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:58:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:58:02.268 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:58:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:58:02.269 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:58:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:58:02.269 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:58:02 compute-0 nova_compute[192079]: 2025-10-02 12:58:02.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:58:03 compute-0 nova_compute[192079]: 2025-10-02 12:58:03.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:58:03 compute-0 nova_compute[192079]: 2025-10-02 12:58:03.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:58:04 compute-0 nova_compute[192079]: 2025-10-02 12:58:04.627 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:58:04 compute-0 nova_compute[192079]: 2025-10-02 12:58:04.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:58:04 compute-0 nova_compute[192079]: 2025-10-02 12:58:04.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:58:05 compute-0 nova_compute[192079]: 2025-10-02 12:58:05.081 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:58:08 compute-0 podman[260617]: 2025-10-02 12:58:08.126028054 +0000 UTC m=+0.045116967 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=openstack_network_exporter, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., build-date=2025-08-20T13:12:41, vendor=Red Hat, Inc., architecture=x86_64, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, managed_by=edpm_ansible, com.redhat.component=ubi9-minimal-container, maintainer=Red Hat, Inc., distribution-scope=public, name=ubi9-minimal, version=9.6, 
url=https://catalog.redhat.com/en/search?searchType=containers, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.buildah.version=1.33.7, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, config_id=edpm, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.openshift.tags=minimal rhel9, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, release=1755695350, io.openshift.expose-services=, vcs-type=git)
Oct 02 12:58:08 compute-0 podman[260618]: 2025-10-02 12:58:08.157684822 +0000 UTC m=+0.072258651 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, container_name=multipathd, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, config_id=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true)
Oct 02 12:58:09 compute-0 nova_compute[192079]: 2025-10-02 12:58:09.634 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:58:09 compute-0 nova_compute[192079]: 2025-10-02 12:58:09.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:58:09 compute-0 nova_compute[192079]: 2025-10-02 12:58:09.697 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:58:09 compute-0 nova_compute[192079]: 2025-10-02 12:58:09.697 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:58:09 compute-0 nova_compute[192079]: 2025-10-02 12:58:09.698 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:58:09 compute-0 nova_compute[192079]: 2025-10-02 12:58:09.698 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:58:09 compute-0 nova_compute[192079]: 2025-10-02 12:58:09.837 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:58:09 compute-0 nova_compute[192079]: 2025-10-02 12:58:09.839 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5706MB free_disk=73.27230834960938GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:58:09 compute-0 nova_compute[192079]: 2025-10-02 12:58:09.839 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:58:09 compute-0 nova_compute[192079]: 2025-10-02 12:58:09.839 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:58:09 compute-0 nova_compute[192079]: 2025-10-02 12:58:09.952 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:58:09 compute-0 nova_compute[192079]: 2025-10-02 12:58:09.953 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:58:09 compute-0 nova_compute[192079]: 2025-10-02 12:58:09.975 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:58:09 compute-0 nova_compute[192079]: 2025-10-02 12:58:09.989 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:58:09 compute-0 nova_compute[192079]: 2025-10-02 12:58:09.990 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:58:09 compute-0 nova_compute[192079]: 2025-10-02 12:58:09.991 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.151s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:58:10 compute-0 nova_compute[192079]: 2025-10-02 12:58:10.083 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:58:12 compute-0 nova_compute[192079]: 2025-10-02 12:58:12.991 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:58:13 compute-0 nova_compute[192079]: 2025-10-02 12:58:13.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:58:13 compute-0 nova_compute[192079]: 2025-10-02 12:58:13.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:58:13 compute-0 nova_compute[192079]: 2025-10-02 12:58:13.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:58:13 compute-0 nova_compute[192079]: 2025-10-02 12:58:13.682 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:58:14 compute-0 nova_compute[192079]: 2025-10-02 12:58:14.639 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:58:15 compute-0 nova_compute[192079]: 2025-10-02 12:58:15.085 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:58:15 compute-0 podman[260657]: 2025-10-02 12:58:15.129616442 +0000 UTC m=+0.047259237 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 12:58:15 compute-0 podman[260658]: 2025-10-02 12:58:15.136981904 +0000 UTC m=+0.048813130 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=iscsid, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid)
Oct 02 12:58:19 compute-0 nova_compute[192079]: 2025-10-02 12:58:19.642 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:58:20 compute-0 nova_compute[192079]: 2025-10-02 12:58:20.087 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:58:24 compute-0 podman[260702]: 2025-10-02 12:58:24.139105543 +0000 UTC m=+0.046361612 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_managed=true, config_id=ovn_metadata_agent, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_metadata_agent)
Oct 02 12:58:24 compute-0 podman[260704]: 2025-10-02 12:58:24.148885011 +0000 UTC m=+0.051445552 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:58:24 compute-0 podman[260703]: 2025-10-02 12:58:24.17586748 +0000 UTC m=+0.081256468 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, tcib_managed=true, config_id=ovn_controller, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, container_name=ovn_controller)
Oct 02 12:58:24 compute-0 nova_compute[192079]: 2025-10-02 12:58:24.643 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:58:25 compute-0 nova_compute[192079]: 2025-10-02 12:58:25.129 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:58:29 compute-0 nova_compute[192079]: 2025-10-02 12:58:29.645 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:58:30 compute-0 nova_compute[192079]: 2025-10-02 12:58:30.131 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:58:32 compute-0 podman[260768]: 2025-10-02 12:58:32.134831089 +0000 UTC m=+0.054708920 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, 
org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, config_id=edpm)
Oct 02 12:58:34 compute-0 nova_compute[192079]: 2025-10-02 12:58:34.648 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:58:35 compute-0 nova_compute[192079]: 2025-10-02 12:58:35.133 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:58:39 compute-0 podman[260785]: 2025-10-02 12:58:39.132810624 +0000 UTC m=+0.051073641 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, name=ubi9-minimal, config_id=edpm, vcs-type=git, vendor=Red Hat, Inc., release=1755695350, url=https://catalog.redhat.com/en/search?searchType=containers, com.redhat.component=ubi9-minimal-container, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., distribution-scope=public, io.buildah.version=1.33.7, io.openshift.expose-services=, maintainer=Red Hat, Inc., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', 
'/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, version=9.6, architecture=x86_64, container_name=openstack_network_exporter, managed_by=edpm_ansible, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., build-date=2025-08-20T13:12:41, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.tags=minimal rhel9)
Oct 02 12:58:39 compute-0 podman[260786]: 2025-10-02 12:58:39.162304483 +0000 UTC m=+0.078245666 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, container_name=multipathd, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=multipathd, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team)
Oct 02 12:58:39 compute-0 nova_compute[192079]: 2025-10-02 12:58:39.651 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:58:40 compute-0 nova_compute[192079]: 2025-10-02 12:58:40.135 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:58:44 compute-0 nova_compute[192079]: 2025-10-02 12:58:44.653 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:58:45 compute-0 nova_compute[192079]: 2025-10-02 12:58:45.137 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:58:46 compute-0 podman[260828]: 2025-10-02 12:58:46.135912969 +0000 UTC m=+0.048588113 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>)
Oct 02 12:58:46 compute-0 podman[260829]: 2025-10-02 12:58:46.139739225 +0000 UTC m=+0.049219861 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_id=iscsid, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001)
Oct 02 12:58:49 compute-0 nova_compute[192079]: 2025-10-02 12:58:49.656 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:58:50 compute-0 nova_compute[192079]: 2025-10-02 12:58:50.138 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:58:54 compute-0 nova_compute[192079]: 2025-10-02 12:58:54.656 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:58:55 compute-0 podman[260871]: 2025-10-02 12:58:55.128424235 +0000 UTC m=+0.045501618 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, config_id=ovn_metadata_agent, 
org.label-schema.vendor=CentOS, tcib_managed=true, container_name=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0)
Oct 02 12:58:55 compute-0 podman[260873]: 2025-10-02 12:58:55.132886638 +0000 UTC m=+0.043751541 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 12:58:55 compute-0 nova_compute[192079]: 2025-10-02 12:58:55.139 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:58:55 compute-0 podman[260872]: 2025-10-02 12:58:55.158717196 +0000 UTC m=+0.072981782 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_managed=true, container_name=ovn_controller, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller, managed_by=edpm_ansible)
Oct 02 12:58:57 compute-0 nova_compute[192079]: 2025-10-02 12:58:57.677 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:58:59 compute-0 nova_compute[192079]: 2025-10-02 12:58:59.657 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:59:00 compute-0 nova_compute[192079]: 2025-10-02 12:59:00.140 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:59:01 compute-0 nova_compute[192079]: 2025-10-02 12:59:01.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:59:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:59:02.269 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:59:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:59:02.269 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:59:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 12:59:02.269 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:59:03 compute-0 podman[260937]: 2025-10-02 12:59:03.132806468 +0000 UTC m=+0.052606513 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, config_id=edpm, managed_by=edpm_ansible, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, 
org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2)
Oct 02 12:59:03 compute-0 nova_compute[192079]: 2025-10-02 12:59:03.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:59:03 compute-0 nova_compute[192079]: 2025-10-02 12:59:03.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:59:04 compute-0 nova_compute[192079]: 2025-10-02 12:59:04.660 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:59:04 compute-0 nova_compute[192079]: 2025-10-02 12:59:04.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:59:04 compute-0 nova_compute[192079]: 2025-10-02 12:59:04.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:59:04 compute-0 nova_compute[192079]: 2025-10-02 12:59:04.664 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 12:59:05 compute-0 nova_compute[192079]: 2025-10-02 12:59:05.142 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:59:09 compute-0 nova_compute[192079]: 2025-10-02 12:59:09.663 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:59:09 compute-0 nova_compute[192079]: 2025-10-02 12:59:09.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:59:09 compute-0 nova_compute[192079]: 2025-10-02 12:59:09.694 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:59:09 compute-0 nova_compute[192079]: 2025-10-02 12:59:09.694 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:59:09 compute-0 nova_compute[192079]: 2025-10-02 12:59:09.694 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:59:09 compute-0 nova_compute[192079]: 2025-10-02 12:59:09.695 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 12:59:09 compute-0 nova_compute[192079]: 2025-10-02 12:59:09.853 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 12:59:09 compute-0 nova_compute[192079]: 2025-10-02 12:59:09.854 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5709MB free_disk=73.27230834960938GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 12:59:09 compute-0 nova_compute[192079]: 2025-10-02 12:59:09.854 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 12:59:09 compute-0 nova_compute[192079]: 2025-10-02 12:59:09.854 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 12:59:09 compute-0 nova_compute[192079]: 2025-10-02 12:59:09.928 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 12:59:09 compute-0 nova_compute[192079]: 2025-10-02 12:59:09.929 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 12:59:10 compute-0 nova_compute[192079]: 2025-10-02 12:59:10.111 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 12:59:10 compute-0 nova_compute[192079]: 2025-10-02 12:59:10.144 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 12:59:10 compute-0 nova_compute[192079]: 2025-10-02 12:59:10.146 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 12:59:10 compute-0 nova_compute[192079]: 2025-10-02 12:59:10.146 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.292s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 12:59:10 compute-0 nova_compute[192079]: 2025-10-02 12:59:10.146 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:59:10 compute-0 podman[260958]: 2025-10-02 12:59:10.146912535 +0000 UTC m=+0.058650729 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.openshift.tags=minimal rhel9, url=https://catalog.redhat.com/en/search?searchType=containers, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, vendor=Red Hat, Inc., io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., architecture=x86_64, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., name=ubi9-minimal, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, vcs-type=git, version=9.6, io.buildah.version=1.33.7, managed_by=edpm_ansible, com.redhat.component=ubi9-minimal-container, config_id=edpm, distribution-scope=public, build-date=2025-08-20T13:12:41, io.openshift.expose-services=, maintainer=Red Hat, Inc., release=1755695350, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., container_name=openstack_network_exporter)
Oct 02 12:59:10 compute-0 podman[260959]: 2025-10-02 12:59:10.156782216 +0000 UTC m=+0.060810688 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=multipathd, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, container_name=multipathd, io.buildah.version=1.41.3, tcib_managed=true, managed_by=edpm_ansible)
Oct 02 12:59:13 compute-0 nova_compute[192079]: 2025-10-02 12:59:13.148 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:59:14 compute-0 nova_compute[192079]: 2025-10-02 12:59:14.665 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:59:15 compute-0 nova_compute[192079]: 2025-10-02 12:59:15.181 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:59:15 compute-0 nova_compute[192079]: 2025-10-02 12:59:15.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:59:15 compute-0 nova_compute[192079]: 2025-10-02 12:59:15.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 12:59:15 compute-0 nova_compute[192079]: 2025-10-02 12:59:15.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 12:59:16 compute-0 nova_compute[192079]: 2025-10-02 12:59:16.128 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 12:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:59:17.115 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:59:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:59:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:59:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:59:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.allocation, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:59:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:59:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:59:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster memory.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:59:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:59:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:59:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:59:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:59:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:59:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:59:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:59:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.iops, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:59:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:59:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:59:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:59:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster cpu, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:59:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:59:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:59:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:59:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.capacity, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:59:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 12:59:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 12:59:17 compute-0 podman[260996]: 2025-10-02 12:59:17.134709602 +0000 UTC m=+0.050175867 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter)
Oct 02 12:59:17 compute-0 podman[260997]: 2025-10-02 12:59:17.141180119 +0000 UTC m=+0.054926177 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, config_id=iscsid, container_name=iscsid, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.license=GPLv2, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 12:59:19 compute-0 nova_compute[192079]: 2025-10-02 12:59:19.124 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:59:19 compute-0 nova_compute[192079]: 2025-10-02 12:59:19.668 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:59:20 compute-0 nova_compute[192079]: 2025-10-02 12:59:20.182 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:59:24 compute-0 nova_compute[192079]: 2025-10-02 12:59:24.669 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:59:25 compute-0 nova_compute[192079]: 2025-10-02 12:59:25.185 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:59:26 compute-0 podman[261039]: 2025-10-02 12:59:26.132294737 +0000 UTC m=+0.047361080 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=ovn_metadata_agent, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, io.buildah.version=1.41.3)
Oct 02 12:59:26 compute-0 podman[261041]: 2025-10-02 12:59:26.141880009 +0000 UTC m=+0.049689583 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>)
Oct 02 12:59:26 compute-0 podman[261040]: 2025-10-02 12:59:26.177731812 +0000 UTC m=+0.085374492 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, container_name=ovn_controller, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:59:29 compute-0 nova_compute[192079]: 2025-10-02 12:59:29.672 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:59:30 compute-0 nova_compute[192079]: 2025-10-02 12:59:30.187 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:59:34 compute-0 podman[261104]: 2025-10-02 12:59:34.145491332 +0000 UTC m=+0.055088092 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, tcib_managed=true, config_id=edpm, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, 
io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.license=GPLv2)
Oct 02 12:59:34 compute-0 nova_compute[192079]: 2025-10-02 12:59:34.673 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:59:35 compute-0 nova_compute[192079]: 2025-10-02 12:59:35.188 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:59:39 compute-0 nova_compute[192079]: 2025-10-02 12:59:39.675 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:59:40 compute-0 nova_compute[192079]: 2025-10-02 12:59:40.190 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:59:41 compute-0 podman[261125]: 2025-10-02 12:59:41.153317228 +0000 UTC m=+0.065375104 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., distribution-scope=public, io.buildah.version=1.33.7, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., url=https://catalog.redhat.com/en/search?searchType=containers, vcs-type=git, build-date=2025-08-20T13:12:41, container_name=openstack_network_exporter, io.openshift.expose-services=, name=ubi9-minimal, architecture=x86_64, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., maintainer=Red Hat, Inc., managed_by=edpm_ansible, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, version=9.6, com.redhat.component=ubi9-minimal-container, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.openshift.tags=minimal rhel9, release=1755695350, vendor=Red Hat, Inc., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 12:59:41 compute-0 podman[261126]: 2025-10-02 12:59:41.154829168 +0000 UTC m=+0.065682031 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_id=multipathd, container_name=multipathd, managed_by=edpm_ansible, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2)
Oct 02 12:59:44 compute-0 nova_compute[192079]: 2025-10-02 12:59:44.676 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:59:45 compute-0 nova_compute[192079]: 2025-10-02 12:59:45.195 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:59:48 compute-0 podman[261166]: 2025-10-02 12:59:48.152408231 +0000 UTC m=+0.064571921 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>)
Oct 02 12:59:48 compute-0 podman[261167]: 2025-10-02 12:59:48.157131011 +0000 UTC m=+0.067192053 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=iscsid, io.buildah.version=1.41.3, org.label-schema.license=GPLv2)
Oct 02 12:59:48 compute-0 nova_compute[192079]: 2025-10-02 12:59:48.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_incomplete_migrations run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 12:59:48 compute-0 nova_compute[192079]: 2025-10-02 12:59:48.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Cleaning up deleted instances with incomplete migration  _cleanup_incomplete_migrations /usr/lib/python3.9/site-packages/nova/compute/manager.py:11183
Oct 02 12:59:49 compute-0 nova_compute[192079]: 2025-10-02 12:59:49.687 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:59:50 compute-0 nova_compute[192079]: 2025-10-02 12:59:50.197 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:59:54 compute-0 nova_compute[192079]: 2025-10-02 12:59:54.178 2 DEBUG oslo_concurrency.processutils [None req-659d4632-c79e-4633-9620-923cb0652eb1 6f66e2b43c7641758f7c71dec37ebcb6 c543175414e2485bb476e4dfce01c394 - - default default] Running cmd (subprocess): env LANG=C uptime execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:384
Oct 02 12:59:54 compute-0 nova_compute[192079]: 2025-10-02 12:59:54.198 2 DEBUG oslo_concurrency.processutils [None req-659d4632-c79e-4633-9620-923cb0652eb1 6f66e2b43c7641758f7c71dec37ebcb6 c543175414e2485bb476e4dfce01c394 - - default default] CMD "env LANG=C uptime" returned: 0 in 0.021s execute /usr/lib/python3.9/site-packages/oslo_concurrency/processutils.py:422
Oct 02 12:59:54 compute-0 nova_compute[192079]: 2025-10-02 12:59:54.690 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:59:55 compute-0 nova_compute[192079]: 2025-10-02 12:59:55.198 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:59:57 compute-0 podman[261210]: 2025-10-02 12:59:57.135795248 +0000 UTC m=+0.045767866 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, 
maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, managed_by=edpm_ansible, org.label-schema.license=GPLv2, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 12:59:57 compute-0 podman[261212]: 2025-10-02 12:59:57.14173524 +0000 UTC m=+0.045118368 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>)
Oct 02 12:59:57 compute-0 podman[261211]: 2025-10-02 12:59:57.162662073 +0000 UTC m=+0.069377742 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.schema-version=1.0, container_name=ovn_controller, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=ovn_controller)
Oct 02 12:59:59 compute-0 nova_compute[192079]: 2025-10-02 12:59:59.692 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 12:59:59 compute-0 nova_compute[192079]: 2025-10-02 12:59:59.797 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:00:00 compute-0 nova_compute[192079]: 2025-10-02 13:00:00.200 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:00:01 compute-0 nova_compute[192079]: 2025-10-02 13:00:01.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:00:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 13:00:02.191 103294 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=60, options={'arp_ns_explicit_output': 'true', 'mac_prefix': 'c2:1e:eb', 'max_tunid': '16711680', 'northd_internal_version': '24.03.7-20.33.0-76.8', 'svc_monitor_mac': '76:55:7f:40:de:c3'}, ipsec=False) old=SB_Global(nb_cfg=59) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43
Oct 02 13:00:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 13:00:02.192 103294 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 3 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274
Oct 02 13:00:02 compute-0 nova_compute[192079]: 2025-10-02 13:00:02.236 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:00:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 13:00:02.270 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 13:00:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 13:00:02.270 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 13:00:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 13:00:02.271 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 13:00:04 compute-0 nova_compute[192079]: 2025-10-02 13:00:04.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:00:04 compute-0 nova_compute[192079]: 2025-10-02 13:00:04.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:00:04 compute-0 nova_compute[192079]: 2025-10-02 13:00:04.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 13:00:04 compute-0 nova_compute[192079]: 2025-10-02 13:00:04.694 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:00:05 compute-0 podman[261280]: 2025-10-02 13:00:05.131732711 +0000 UTC m=+0.048069159 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, 
org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, config_id=edpm)
Oct 02 13:00:05 compute-0 ovn_metadata_agent[103289]: 2025-10-02 13:00:05.193 103294 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=c9f3d658-5c7a-4803-9bbb-01adfb7e88ca, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '60'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89
Oct 02 13:00:05 compute-0 nova_compute[192079]: 2025-10-02 13:00:05.201 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:00:05 compute-0 nova_compute[192079]: 2025-10-02 13:00:05.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:00:05 compute-0 nova_compute[192079]: 2025-10-02 13:00:05.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:00:09 compute-0 nova_compute[192079]: 2025-10-02 13:00:09.696 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:00:10 compute-0 nova_compute[192079]: 2025-10-02 13:00:10.203 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:00:10 compute-0 nova_compute[192079]: 2025-10-02 13:00:10.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:00:12 compute-0 podman[261300]: 2025-10-02 13:00:12.127686207 +0000 UTC m=+0.046127985 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, vcs-type=git, com.redhat.component=ubi9-minimal-container, io.openshift.expose-services=, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., release=1755695350, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.openshift.tags=minimal rhel9, io.buildah.version=1.33.7, name=ubi9-minimal, container_name=openstack_network_exporter, architecture=x86_64, config_id=edpm, vendor=Red Hat, Inc., version=9.6, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., distribution-scope=public, managed_by=edpm_ansible, build-date=2025-08-20T13:12:41, url=https://catalog.redhat.com/en/search?searchType=containers, maintainer=Red Hat, Inc., summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal)
Oct 02 13:00:12 compute-0 podman[261301]: 2025-10-02 13:00:12.129924958 +0000 UTC m=+0.043627726 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=multipathd, io.buildah.version=1.41.3, managed_by=edpm_ansible, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, config_id=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001)
Oct 02 13:00:13 compute-0 nova_compute[192079]: 2025-10-02 13:00:13.139 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 13:00:13 compute-0 nova_compute[192079]: 2025-10-02 13:00:13.139 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 13:00:13 compute-0 nova_compute[192079]: 2025-10-02 13:00:13.139 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 13:00:13 compute-0 nova_compute[192079]: 2025-10-02 13:00:13.140 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 13:00:13 compute-0 nova_compute[192079]: 2025-10-02 13:00:13.285 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 13:00:13 compute-0 nova_compute[192079]: 2025-10-02 13:00:13.286 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5698MB free_disk=73.27244567871094GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 13:00:13 compute-0 nova_compute[192079]: 2025-10-02 13:00:13.286 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 13:00:13 compute-0 nova_compute[192079]: 2025-10-02 13:00:13.286 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 13:00:13 compute-0 nova_compute[192079]: 2025-10-02 13:00:13.364 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 13:00:13 compute-0 nova_compute[192079]: 2025-10-02 13:00:13.365 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 13:00:13 compute-0 nova_compute[192079]: 2025-10-02 13:00:13.439 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 13:00:13 compute-0 nova_compute[192079]: 2025-10-02 13:00:13.456 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 13:00:13 compute-0 nova_compute[192079]: 2025-10-02 13:00:13.457 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 13:00:13 compute-0 nova_compute[192079]: 2025-10-02 13:00:13.457 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.171s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 13:00:14 compute-0 nova_compute[192079]: 2025-10-02 13:00:14.699 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:00:15 compute-0 nova_compute[192079]: 2025-10-02 13:00:15.206 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:00:15 compute-0 nova_compute[192079]: 2025-10-02 13:00:15.458 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:00:17 compute-0 nova_compute[192079]: 2025-10-02 13:00:17.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:00:17 compute-0 nova_compute[192079]: 2025-10-02 13:00:17.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 13:00:17 compute-0 nova_compute[192079]: 2025-10-02 13:00:17.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 13:00:17 compute-0 nova_compute[192079]: 2025-10-02 13:00:17.686 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 13:00:19 compute-0 podman[261340]: 2025-10-02 13:00:19.138795651 +0000 UTC m=+0.048963473 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_managed=true, config_id=iscsid, container_name=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.schema-version=1.0)
Oct 02 13:00:19 compute-0 podman[261339]: 2025-10-02 13:00:19.138788421 +0000 UTC m=+0.049186879 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 13:00:19 compute-0 nova_compute[192079]: 2025-10-02 13:00:19.701 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:00:20 compute-0 nova_compute[192079]: 2025-10-02 13:00:20.213 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:00:24 compute-0 nova_compute[192079]: 2025-10-02 13:00:24.703 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:00:25 compute-0 nova_compute[192079]: 2025-10-02 13:00:25.214 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:00:28 compute-0 podman[261379]: 2025-10-02 13:00:28.135828783 +0000 UTC m=+0.053297392 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, 
tcib_managed=true, config_id=ovn_metadata_agent, container_name=ovn_metadata_agent, managed_by=edpm_ansible, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0)
Oct 02 13:00:28 compute-0 podman[261381]: 2025-10-02 13:00:28.163946154 +0000 UTC m=+0.074466703 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>)
Oct 02 13:00:28 compute-0 podman[261380]: 2025-10-02 13:00:28.163946184 +0000 UTC m=+0.077400004 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, container_name=ovn_controller)
Oct 02 13:00:29 compute-0 nova_compute[192079]: 2025-10-02 13:00:29.754 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:00:30 compute-0 nova_compute[192079]: 2025-10-02 13:00:30.217 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:00:34 compute-0 nova_compute[192079]: 2025-10-02 13:00:34.756 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:00:35 compute-0 nova_compute[192079]: 2025-10-02 13:00:35.217 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:00:36 compute-0 podman[261445]: 2025-10-02 13:00:36.127825299 +0000 UTC m=+0.043082863 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, config_id=edpm, org.label-schema.schema-version=1.0, tcib_managed=true, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base 
Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 13:00:39 compute-0 nova_compute[192079]: 2025-10-02 13:00:39.759 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:00:40 compute-0 nova_compute[192079]: 2025-10-02 13:00:40.218 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:00:43 compute-0 podman[261466]: 2025-10-02 13:00:43.157955832 +0000 UTC m=+0.071044608 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, name=ubi9-minimal, distribution-scope=public, io.buildah.version=1.33.7, io.openshift.expose-services=, com.redhat.component=ubi9-minimal-container, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, url=https://catalog.redhat.com/en/search?searchType=containers, vendor=Red Hat, Inc., managed_by=edpm_ansible, io.openshift.tags=minimal rhel9, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, build-date=2025-08-20T13:12:41, container_name=openstack_network_exporter, maintainer=Red Hat, Inc., summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vcs-type=git, version=9.6, architecture=x86_64, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, description=The Universal Base 
Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., release=1755695350, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, config_id=edpm, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly.)
Oct 02 13:00:43 compute-0 podman[261467]: 2025-10-02 13:00:43.165946151 +0000 UTC m=+0.069471535 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, container_name=multipathd, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, managed_by=edpm_ansible, org.label-schema.license=GPLv2, io.buildah.version=1.41.3)
Oct 02 13:00:44 compute-0 nova_compute[192079]: 2025-10-02 13:00:44.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:00:44 compute-0 nova_compute[192079]: 2025-10-02 13:00:44.759 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:00:45 compute-0 nova_compute[192079]: 2025-10-02 13:00:45.219 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:00:49 compute-0 nova_compute[192079]: 2025-10-02 13:00:49.687 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._run_pending_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:00:49 compute-0 nova_compute[192079]: 2025-10-02 13:00:49.687 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Cleaning up deleted instances _run_pending_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:11145
Oct 02 13:00:49 compute-0 nova_compute[192079]: 2025-10-02 13:00:49.725 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] There are 0 instances to clean _run_pending_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:11154
Oct 02 13:00:49 compute-0 nova_compute[192079]: 2025-10-02 13:00:49.761 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:00:50 compute-0 podman[261509]: 2025-10-02 13:00:50.140893976 +0000 UTC m=+0.049158389 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>)
Oct 02 13:00:50 compute-0 podman[261510]: 2025-10-02 13:00:50.148712731 +0000 UTC m=+0.053030405 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=iscsid, org.label-schema.build-date=20251001, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 13:00:50 compute-0 nova_compute[192079]: 2025-10-02 13:00:50.221 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:00:54 compute-0 nova_compute[192079]: 2025-10-02 13:00:54.764 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:00:55 compute-0 nova_compute[192079]: 2025-10-02 13:00:55.224 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:00:59 compute-0 podman[261555]: 2025-10-02 13:00:59.129789832 +0000 UTC m=+0.047914355 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_metadata_agent, io.buildah.version=1.41.3, 
maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, container_name=ovn_metadata_agent, org.label-schema.build-date=20251001)
Oct 02 13:00:59 compute-0 podman[261557]: 2025-10-02 13:00:59.135649903 +0000 UTC m=+0.047940605 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>)
Oct 02 13:00:59 compute-0 podman[261556]: 2025-10-02 13:00:59.185851459 +0000 UTC m=+0.099417837 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_controller, io.buildah.version=1.41.3)
Oct 02 13:00:59 compute-0 nova_compute[192079]: 2025-10-02 13:00:59.797 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:01:00 compute-0 nova_compute[192079]: 2025-10-02 13:01:00.226 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:01:00 compute-0 nova_compute[192079]: 2025-10-02 13:01:00.698 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:01:01 compute-0 CROND[261620]: (root) CMD (run-parts /etc/cron.hourly)
Oct 02 13:01:01 compute-0 run-parts[261623]: (/etc/cron.hourly) starting 0anacron
Oct 02 13:01:01 compute-0 run-parts[261629]: (/etc/cron.hourly) finished 0anacron
Oct 02 13:01:01 compute-0 CROND[261619]: (root) CMDEND (run-parts /etc/cron.hourly)
Oct 02 13:01:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 13:01:02.271 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 13:01:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 13:01:02.271 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 13:01:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 13:01:02.271 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 13:01:03 compute-0 nova_compute[192079]: 2025-10-02 13:01:03.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:01:04 compute-0 nova_compute[192079]: 2025-10-02 13:01:04.800 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:01:05 compute-0 nova_compute[192079]: 2025-10-02 13:01:05.227 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:01:05 compute-0 nova_compute[192079]: 2025-10-02 13:01:05.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:01:05 compute-0 nova_compute[192079]: 2025-10-02 13:01:05.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:01:05 compute-0 nova_compute[192079]: 2025-10-02 13:01:05.664 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 13:01:07 compute-0 podman[261630]: 2025-10-02 13:01:07.135400919 +0000 UTC m=+0.048335726 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_id=edpm, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, 
container_name=ceilometer_agent_compute, maintainer=OpenStack Kubernetes Operator team)
Oct 02 13:01:07 compute-0 nova_compute[192079]: 2025-10-02 13:01:07.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:01:07 compute-0 nova_compute[192079]: 2025-10-02 13:01:07.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:01:09 compute-0 nova_compute[192079]: 2025-10-02 13:01:09.803 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:01:10 compute-0 nova_compute[192079]: 2025-10-02 13:01:10.229 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:01:10 compute-0 nova_compute[192079]: 2025-10-02 13:01:10.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:01:10 compute-0 nova_compute[192079]: 2025-10-02 13:01:10.763 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 13:01:10 compute-0 nova_compute[192079]: 2025-10-02 13:01:10.764 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 13:01:10 compute-0 nova_compute[192079]: 2025-10-02 13:01:10.764 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 13:01:10 compute-0 nova_compute[192079]: 2025-10-02 13:01:10.764 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 13:01:10 compute-0 nova_compute[192079]: 2025-10-02 13:01:10.915 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 13:01:10 compute-0 nova_compute[192079]: 2025-10-02 13:01:10.917 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5702MB free_disk=73.27244567871094GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 13:01:10 compute-0 nova_compute[192079]: 2025-10-02 13:01:10.917 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 13:01:10 compute-0 nova_compute[192079]: 2025-10-02 13:01:10.918 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 13:01:11 compute-0 nova_compute[192079]: 2025-10-02 13:01:11.127 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 13:01:11 compute-0 nova_compute[192079]: 2025-10-02 13:01:11.127 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 13:01:11 compute-0 nova_compute[192079]: 2025-10-02 13:01:11.183 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 13:01:11 compute-0 nova_compute[192079]: 2025-10-02 13:01:11.275 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 13:01:11 compute-0 nova_compute[192079]: 2025-10-02 13:01:11.278 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 13:01:11 compute-0 nova_compute[192079]: 2025-10-02 13:01:11.279 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.361s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 13:01:13 compute-0 nova_compute[192079]: 2025-10-02 13:01:13.278 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:01:14 compute-0 podman[261650]: 2025-10-02 13:01:14.155267084 +0000 UTC m=+0.066643138 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, vendor=Red Hat, Inc., io.buildah.version=1.33.7, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, url=https://catalog.redhat.com/en/search?searchType=containers, architecture=x86_64, com.redhat.component=ubi9-minimal-container, build-date=2025-08-20T13:12:41, io.openshift.tags=minimal rhel9, release=1755695350, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', 
'/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, config_id=edpm, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., distribution-scope=public, vcs-type=git, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., managed_by=edpm_ansible, container_name=openstack_network_exporter, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, maintainer=Red Hat, Inc., version=9.6, io.openshift.expose-services=, name=ubi9-minimal)
Oct 02 13:01:14 compute-0 podman[261651]: 2025-10-02 13:01:14.191753585 +0000 UTC m=+0.093299729 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, config_id=multipathd, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, container_name=multipathd, tcib_managed=true, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 13:01:14 compute-0 nova_compute[192079]: 2025-10-02 13:01:14.806 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:01:15 compute-0 nova_compute[192079]: 2025-10-02 13:01:15.229 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:01:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:01:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.allocation, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:01:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:01:17.116 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.capacity, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:01:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:01:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:01:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:01:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:01:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:01:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:01:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:01:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:01:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:01:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:01:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:01:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:01:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:01:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:01:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:01:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.iops, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:01:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster cpu, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:01:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:01:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:01:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:01:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:01:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster memory.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:01:17 compute-0 nova_compute[192079]: 2025-10-02 13:01:17.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:01:17 compute-0 nova_compute[192079]: 2025-10-02 13:01:17.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 13:01:17 compute-0 nova_compute[192079]: 2025-10-02 13:01:17.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 13:01:17 compute-0 nova_compute[192079]: 2025-10-02 13:01:17.723 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 13:01:19 compute-0 nova_compute[192079]: 2025-10-02 13:01:19.808 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:01:20 compute-0 nova_compute[192079]: 2025-10-02 13:01:20.231 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:01:20 compute-0 nova_compute[192079]: 2025-10-02 13:01:20.718 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:01:21 compute-0 podman[261693]: 2025-10-02 13:01:21.162243865 +0000 UTC m=+0.065682122 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 13:01:21 compute-0 podman[261694]: 2025-10-02 13:01:21.162609026 +0000 UTC m=+0.070029771 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, config_id=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS)
Oct 02 13:01:24 compute-0 nova_compute[192079]: 2025-10-02 13:01:24.809 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:01:25 compute-0 nova_compute[192079]: 2025-10-02 13:01:25.232 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:01:29 compute-0 nova_compute[192079]: 2025-10-02 13:01:29.811 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:01:30 compute-0 podman[261736]: 2025-10-02 13:01:30.142173853 +0000 UTC m=+0.051379229 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, container_name=ovn_metadata_agent, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, 
tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=ovn_metadata_agent, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 13:01:30 compute-0 podman[261738]: 2025-10-02 13:01:30.148124147 +0000 UTC m=+0.050097745 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']})
Oct 02 13:01:30 compute-0 podman[261737]: 2025-10-02 13:01:30.182856748 +0000 UTC m=+0.087081758 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true, config_id=ovn_controller, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, container_name=ovn_controller, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 13:01:30 compute-0 nova_compute[192079]: 2025-10-02 13:01:30.233 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:01:34 compute-0 nova_compute[192079]: 2025-10-02 13:01:34.953 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:01:35 compute-0 nova_compute[192079]: 2025-10-02 13:01:35.234 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:01:38 compute-0 podman[261808]: 2025-10-02 13:01:38.134062269 +0000 UTC m=+0.053305193 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_id=edpm, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, 
org.label-schema.build-date=20251001, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 13:01:39 compute-0 nova_compute[192079]: 2025-10-02 13:01:39.954 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:01:40 compute-0 nova_compute[192079]: 2025-10-02 13:01:40.235 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:01:44 compute-0 nova_compute[192079]: 2025-10-02 13:01:44.956 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:01:45 compute-0 podman[261829]: 2025-10-02 13:01:45.134864518 +0000 UTC m=+0.045426386 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, org.label-schema.vendor=CentOS, tcib_managed=true, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible)
Oct 02 13:01:45 compute-0 podman[261828]: 2025-10-02 13:01:45.14880505 +0000 UTC m=+0.062466043 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., version=9.6, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.buildah.version=1.33.7, architecture=x86_64, io.openshift.expose-services=, build-date=2025-08-20T13:12:41, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. 
This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.tags=minimal rhel9, managed_by=edpm_ansible, container_name=openstack_network_exporter, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., maintainer=Red Hat, Inc., release=1755695350, url=https://catalog.redhat.com/en/search?searchType=containers, config_id=edpm, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, name=ubi9-minimal, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, vcs-type=git, vendor=Red Hat, Inc., com.redhat.component=ubi9-minimal-container, distribution-scope=public)
Oct 02 13:01:45 compute-0 nova_compute[192079]: 2025-10-02 13:01:45.237 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:01:49 compute-0 nova_compute[192079]: 2025-10-02 13:01:49.995 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:01:50 compute-0 nova_compute[192079]: 2025-10-02 13:01:50.239 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:01:52 compute-0 podman[261867]: 2025-10-02 13:01:52.148234655 +0000 UTC m=+0.058937907 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 13:01:52 compute-0 podman[261868]: 2025-10-02 13:01:52.149047158 +0000 UTC m=+0.055320168 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, config_id=iscsid, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, container_name=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, tcib_managed=true)
Oct 02 13:01:54 compute-0 nova_compute[192079]: 2025-10-02 13:01:54.998 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:01:55 compute-0 nova_compute[192079]: 2025-10-02 13:01:55.241 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:02:00 compute-0 nova_compute[192079]: 2025-10-02 13:02:00.001 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:02:00 compute-0 nova_compute[192079]: 2025-10-02 13:02:00.243 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:02:00 compute-0 nova_compute[192079]: 2025-10-02 13:02:00.677 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:02:01 compute-0 podman[261911]: 2025-10-02 13:02:01.147837045 +0000 UTC m=+0.057031974 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>)
Oct 02 13:02:01 compute-0 podman[261909]: 2025-10-02 13:02:01.158154629 +0000 UTC m=+0.073352173 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.license=GPLv2, tcib_managed=true, 
org.label-schema.vendor=CentOS, config_id=ovn_metadata_agent, org.label-schema.schema-version=1.0, container_name=ovn_metadata_agent, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 13:02:01 compute-0 podman[261910]: 2025-10-02 13:02:01.171461033 +0000 UTC m=+0.084860958 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, config_id=ovn_controller, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ovn_controller, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS)
Oct 02 13:02:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 13:02:02.272 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 13:02:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 13:02:02.273 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 13:02:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 13:02:02.273 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 13:02:04 compute-0 nova_compute[192079]: 2025-10-02 13:02:04.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:02:05 compute-0 nova_compute[192079]: 2025-10-02 13:02:05.003 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:02:05 compute-0 nova_compute[192079]: 2025-10-02 13:02:05.245 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:02:06 compute-0 nova_compute[192079]: 2025-10-02 13:02:06.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:02:06 compute-0 nova_compute[192079]: 2025-10-02 13:02:06.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 13:02:07 compute-0 nova_compute[192079]: 2025-10-02 13:02:07.666 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:02:08 compute-0 nova_compute[192079]: 2025-10-02 13:02:08.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:02:09 compute-0 podman[261978]: 2025-10-02 13:02:09.134848224 +0000 UTC m=+0.050920657 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, config_id=edpm, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, 
org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.build-date=20251001)
Oct 02 13:02:09 compute-0 nova_compute[192079]: 2025-10-02 13:02:09.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:02:10 compute-0 nova_compute[192079]: 2025-10-02 13:02:10.005 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:02:10 compute-0 nova_compute[192079]: 2025-10-02 13:02:10.246 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:02:10 compute-0 nova_compute[192079]: 2025-10-02 13:02:10.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:02:10 compute-0 nova_compute[192079]: 2025-10-02 13:02:10.720 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 13:02:10 compute-0 nova_compute[192079]: 2025-10-02 13:02:10.721 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 13:02:10 compute-0 nova_compute[192079]: 2025-10-02 13:02:10.721 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 13:02:10 compute-0 nova_compute[192079]: 2025-10-02 13:02:10.721 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 13:02:10 compute-0 nova_compute[192079]: 2025-10-02 13:02:10.854 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 13:02:10 compute-0 nova_compute[192079]: 2025-10-02 13:02:10.855 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5706MB free_disk=73.27244567871094GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 13:02:10 compute-0 nova_compute[192079]: 2025-10-02 13:02:10.856 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 13:02:10 compute-0 nova_compute[192079]: 2025-10-02 13:02:10.856 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 13:02:10 compute-0 nova_compute[192079]: 2025-10-02 13:02:10.916 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 13:02:10 compute-0 nova_compute[192079]: 2025-10-02 13:02:10.916 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 13:02:10 compute-0 nova_compute[192079]: 2025-10-02 13:02:10.930 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing inventories for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708 _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:804
Oct 02 13:02:10 compute-0 nova_compute[192079]: 2025-10-02 13:02:10.947 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Updating ProviderTree inventory for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 from _refresh_and_get_inventory using data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} _refresh_and_get_inventory /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:768
Oct 02 13:02:10 compute-0 nova_compute[192079]: 2025-10-02 13:02:10.947 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Updating inventory in ProviderTree for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 with inventory: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:176
Oct 02 13:02:10 compute-0 nova_compute[192079]: 2025-10-02 13:02:10.957 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing aggregate associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, aggregates: None _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:813
Oct 02 13:02:10 compute-0 nova_compute[192079]: 2025-10-02 13:02:10.976 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing trait associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, traits: COMPUTE_SECURITY_UEFI_SECURE_BOOT,COMPUTE_VIOMMU_MODEL_VIRTIO,COMPUTE_VIOMMU_MODEL_AUTO,COMPUTE_IMAGE_TYPE_AKI,COMPUTE_GRAPHICS_MODEL_VIRTIO,COMPUTE_NET_VIF_MODEL_PCNET,HW_CPU_X86_SSE42,COMPUTE_RESCUE_BFV,COMPUTE_VOLUME_EXTEND,COMPUTE_IMAGE_TYPE_QCOW2,COMPUTE_TRUSTED_CERTS,COMPUTE_SOCKET_PCI_NUMA_AFFINITY,COMPUTE_GRAPHICS_MODEL_CIRRUS,HW_CPU_X86_MMX,COMPUTE_STORAGE_BUS_VIRTIO,COMPUTE_NET_ATTACH_INTERFACE_WITH_TAG,COMPUTE_STORAGE_BUS_FDC,COMPUTE_STORAGE_BUS_USB,COMPUTE_NODE,HW_CPU_X86_SSSE3,HW_CPU_X86_SSE2,COMPUTE_GRAPHICS_MODEL_BOCHS,COMPUTE_NET_VIF_MODEL_E1000E,COMPUTE_IMAGE_TYPE_RAW,COMPUTE_NET_VIF_MODEL_NE2K_PCI,COMPUTE_IMAGE_TYPE_AMI,COMPUTE_VIOMMU_MODEL_INTEL,COMPUTE_SECURITY_TPM_2_0,COMPUTE_STORAGE_BUS_SCSI,COMPUTE_IMAGE_TYPE_ARI,COMPUTE_NET_VIF_MODEL_VMXNET3,COMPUTE_SECURITY_TPM_1_2,COMPUTE_NET_VIF_MODEL_E1000,HW_CPU_X86_SSE,COMPUTE_VOLUME_MULTI_ATTACH,COMPUTE_STORAGE_BUS_IDE,COMPUTE_GRAPHICS_MODEL_NONE,COMPUTE_VOLUME_ATTACH_WITH_TAG,COMPUTE_NET_VIF_MODEL_VIRTIO,HW_CPU_X86_SSE41,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_DEVICE_TAGGING,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_ACCELERATORS,COMPUTE_NET_VIF_MODEL_RTL8139,COMPUTE_GRAPHICS_MODEL_VGA,COMPUTE_STORAGE_BUS_SATA,COMPUTE_NET_VIF_MODEL_SPAPR_VLAN _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:825
Oct 02 13:02:11 compute-0 nova_compute[192079]: 2025-10-02 13:02:11.009 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 13:02:11 compute-0 nova_compute[192079]: 2025-10-02 13:02:11.026 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 13:02:11 compute-0 nova_compute[192079]: 2025-10-02 13:02:11.027 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 13:02:11 compute-0 nova_compute[192079]: 2025-10-02 13:02:11.027 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.171s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 13:02:13 compute-0 nova_compute[192079]: 2025-10-02 13:02:13.027 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:02:15 compute-0 nova_compute[192079]: 2025-10-02 13:02:15.007 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:02:15 compute-0 nova_compute[192079]: 2025-10-02 13:02:15.247 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:02:16 compute-0 podman[261999]: 2025-10-02 13:02:16.157766182 +0000 UTC m=+0.070906054 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, container_name=multipathd, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team)
Oct 02 13:02:16 compute-0 podman[261998]: 2025-10-02 13:02:16.161287619 +0000 UTC m=+0.069315042 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.expose-services=, managed_by=edpm_ansible, distribution-scope=public, vcs-type=git, com.redhat.component=ubi9-minimal-container, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, url=https://catalog.redhat.com/en/search?searchType=containers, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, maintainer=Red Hat, Inc., 
container_name=openstack_network_exporter, version=9.6, architecture=x86_64, io.openshift.tags=minimal rhel9, build-date=2025-08-20T13:12:41, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., config_id=edpm, vendor=Red Hat, Inc., description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, release=1755695350, name=ubi9-minimal, io.buildah.version=1.33.7)
Oct 02 13:02:19 compute-0 nova_compute[192079]: 2025-10-02 13:02:19.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:02:19 compute-0 nova_compute[192079]: 2025-10-02 13:02:19.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 13:02:19 compute-0 nova_compute[192079]: 2025-10-02 13:02:19.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 13:02:19 compute-0 nova_compute[192079]: 2025-10-02 13:02:19.688 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 13:02:20 compute-0 nova_compute[192079]: 2025-10-02 13:02:20.009 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:02:20 compute-0 nova_compute[192079]: 2025-10-02 13:02:20.249 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:02:23 compute-0 podman[262040]: 2025-10-02 13:02:23.157025141 +0000 UTC m=+0.071763618 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, container_name=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, config_id=iscsid, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, org.label-schema.build-date=20251001)
Oct 02 13:02:23 compute-0 podman[262039]: 2025-10-02 13:02:23.160921039 +0000 UTC m=+0.071137432 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>)
Oct 02 13:02:25 compute-0 nova_compute[192079]: 2025-10-02 13:02:25.010 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:02:25 compute-0 nova_compute[192079]: 2025-10-02 13:02:25.250 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:02:30 compute-0 nova_compute[192079]: 2025-10-02 13:02:30.057 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:02:30 compute-0 nova_compute[192079]: 2025-10-02 13:02:30.252 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:02:32 compute-0 podman[262086]: 2025-10-02 13:02:32.132033406 +0000 UTC m=+0.044554783 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 13:02:32 compute-0 podman[262084]: 2025-10-02 13:02:32.132267172 +0000 UTC m=+0.050026692 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, tcib_managed=true, managed_by=edpm_ansible, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, container_name=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team)
Oct 02 13:02:32 compute-0 podman[262085]: 2025-10-02 13:02:32.189385169 +0000 UTC m=+0.103035066 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.license=GPLv2, tcib_managed=true, container_name=ovn_controller, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0)
Oct 02 13:02:35 compute-0 nova_compute[192079]: 2025-10-02 13:02:35.059 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:02:35 compute-0 nova_compute[192079]: 2025-10-02 13:02:35.253 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:02:40 compute-0 nova_compute[192079]: 2025-10-02 13:02:40.061 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:02:40 compute-0 podman[262147]: 2025-10-02 13:02:40.151375033 +0000 UTC m=+0.056985213 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm, container_name=ceilometer_agent_compute, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, 
tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 13:02:40 compute-0 nova_compute[192079]: 2025-10-02 13:02:40.255 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:02:45 compute-0 nova_compute[192079]: 2025-10-02 13:02:45.063 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:02:45 compute-0 nova_compute[192079]: 2025-10-02 13:02:45.257 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:02:47 compute-0 podman[262167]: 2025-10-02 13:02:47.137854631 +0000 UTC m=+0.052530571 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, version=9.6, com.redhat.component=ubi9-minimal-container, container_name=openstack_network_exporter, build-date=2025-08-20T13:12:41, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., io.buildah.version=1.33.7, architecture=x86_64, release=1755695350, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, config_id=edpm, distribution-scope=public, io.openshift.tags=minimal rhel9, name=ubi9-minimal, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, vendor=Red Hat, Inc., url=https://catalog.redhat.com/en/search?searchType=containers, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.k8s.description=The Universal 
Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.expose-services=, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., maintainer=Red Hat, Inc., managed_by=edpm_ansible, vcs-type=git)
Oct 02 13:02:47 compute-0 podman[262168]: 2025-10-02 13:02:47.14692946 +0000 UTC m=+0.056042978 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, container_name=multipathd, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=multipathd, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team)
Oct 02 13:02:50 compute-0 nova_compute[192079]: 2025-10-02 13:02:50.064 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:02:50 compute-0 nova_compute[192079]: 2025-10-02 13:02:50.259 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:02:54 compute-0 podman[262209]: 2025-10-02 13:02:54.138347645 +0000 UTC m=+0.054493576 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=iscsid, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, tcib_managed=true)
Oct 02 13:02:54 compute-0 podman[262208]: 2025-10-02 13:02:54.139417304 +0000 UTC m=+0.055540314 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 13:02:55 compute-0 nova_compute[192079]: 2025-10-02 13:02:55.099 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:02:55 compute-0 nova_compute[192079]: 2025-10-02 13:02:55.261 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:03:00 compute-0 nova_compute[192079]: 2025-10-02 13:03:00.101 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:03:00 compute-0 nova_compute[192079]: 2025-10-02 13:03:00.262 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:03:00 compute-0 nova_compute[192079]: 2025-10-02 13:03:00.682 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:03:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 13:03:02.274 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 13:03:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 13:03:02.274 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 13:03:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 13:03:02.274 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 13:03:03 compute-0 podman[262254]: 2025-10-02 13:03:03.13923814 +0000 UTC m=+0.044563523 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, container_name=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, config_id=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 13:03:03 compute-0 podman[262256]: 2025-10-02 13:03:03.148168185 +0000 UTC m=+0.046720613 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>)
Oct 02 13:03:03 compute-0 podman[262255]: 2025-10-02 13:03:03.168147172 +0000 UTC m=+0.071163101 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, org.label-schema.license=GPLv2, tcib_managed=true, managed_by=edpm_ansible, org.label-schema.schema-version=1.0)
Oct 02 13:03:04 compute-0 nova_compute[192079]: 2025-10-02 13:03:04.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:03:05 compute-0 nova_compute[192079]: 2025-10-02 13:03:05.102 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:03:05 compute-0 nova_compute[192079]: 2025-10-02 13:03:05.263 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:03:07 compute-0 nova_compute[192079]: 2025-10-02 13:03:07.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:03:07 compute-0 nova_compute[192079]: 2025-10-02 13:03:07.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 13:03:08 compute-0 nova_compute[192079]: 2025-10-02 13:03:08.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:03:08 compute-0 nova_compute[192079]: 2025-10-02 13:03:08.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:03:10 compute-0 nova_compute[192079]: 2025-10-02 13:03:10.104 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:03:10 compute-0 nova_compute[192079]: 2025-10-02 13:03:10.264 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:03:11 compute-0 podman[262321]: 2025-10-02 13:03:11.14088939 +0000 UTC m=+0.055433601 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_managed=true, container_name=ceilometer_agent_compute, 
managed_by=edpm_ansible, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 13:03:11 compute-0 nova_compute[192079]: 2025-10-02 13:03:11.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:03:11 compute-0 nova_compute[192079]: 2025-10-02 13:03:11.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:03:11 compute-0 nova_compute[192079]: 2025-10-02 13:03:11.746 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 13:03:11 compute-0 nova_compute[192079]: 2025-10-02 13:03:11.747 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 13:03:11 compute-0 nova_compute[192079]: 2025-10-02 13:03:11.747 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 13:03:11 compute-0 nova_compute[192079]: 2025-10-02 13:03:11.747 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 13:03:11 compute-0 nova_compute[192079]: 2025-10-02 13:03:11.884 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 13:03:11 compute-0 nova_compute[192079]: 2025-10-02 13:03:11.885 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5700MB free_disk=73.27244567871094GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 13:03:11 compute-0 nova_compute[192079]: 2025-10-02 13:03:11.885 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 13:03:11 compute-0 nova_compute[192079]: 2025-10-02 13:03:11.886 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 13:03:12 compute-0 nova_compute[192079]: 2025-10-02 13:03:12.224 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 13:03:12 compute-0 nova_compute[192079]: 2025-10-02 13:03:12.224 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 13:03:12 compute-0 nova_compute[192079]: 2025-10-02 13:03:12.270 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 13:03:12 compute-0 nova_compute[192079]: 2025-10-02 13:03:12.324 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 13:03:12 compute-0 nova_compute[192079]: 2025-10-02 13:03:12.325 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 13:03:12 compute-0 nova_compute[192079]: 2025-10-02 13:03:12.326 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.440s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 13:03:15 compute-0 nova_compute[192079]: 2025-10-02 13:03:15.105 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:03:15 compute-0 nova_compute[192079]: 2025-10-02 13:03:15.265 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:03:15 compute-0 nova_compute[192079]: 2025-10-02 13:03:15.324 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:03:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster memory.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:03:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:03:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:03:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:03:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:03:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:03:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.capacity, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:03:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:03:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:03:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:03:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:03:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:03:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:03:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster cpu, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:03:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:03:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.allocation, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:03:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:03:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:03:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:03:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.iops, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:03:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:03:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:03:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:03:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:03:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:03:17.120 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:03:18 compute-0 podman[262342]: 2025-10-02 13:03:18.146785121 +0000 UTC m=+0.051324598 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.license=GPLv2, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, container_name=multipathd, config_id=multipathd, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3)
Oct 02 13:03:18 compute-0 podman[262341]: 2025-10-02 13:03:18.170973104 +0000 UTC m=+0.081744983 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, build-date=2025-08-20T13:12:41, io.openshift.tags=minimal rhel9, name=ubi9-minimal, version=9.6, distribution-scope=public, config_id=edpm, maintainer=Red Hat, Inc., io.openshift.expose-services=, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., url=https://catalog.redhat.com/en/search?searchType=containers, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.buildah.version=1.33.7, vendor=Red Hat, Inc., io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., managed_by=edpm_ansible, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., release=1755695350, container_name=openstack_network_exporter, com.redhat.component=ubi9-minimal-container, vcs-type=git, architecture=x86_64, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b)
Oct 02 13:03:20 compute-0 nova_compute[192079]: 2025-10-02 13:03:20.107 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:03:20 compute-0 nova_compute[192079]: 2025-10-02 13:03:20.267 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:03:21 compute-0 nova_compute[192079]: 2025-10-02 13:03:21.660 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:03:21 compute-0 nova_compute[192079]: 2025-10-02 13:03:21.702 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:03:21 compute-0 nova_compute[192079]: 2025-10-02 13:03:21.702 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 13:03:21 compute-0 nova_compute[192079]: 2025-10-02 13:03:21.703 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 13:03:21 compute-0 nova_compute[192079]: 2025-10-02 13:03:21.720 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 13:03:25 compute-0 nova_compute[192079]: 2025-10-02 13:03:25.110 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:03:25 compute-0 podman[262382]: 2025-10-02 13:03:25.128440808 +0000 UTC m=+0.043278077 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 13:03:25 compute-0 podman[262383]: 2025-10-02 13:03:25.178791608 +0000 UTC m=+0.087016406 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, config_id=iscsid, io.buildah.version=1.41.3)
Oct 02 13:03:25 compute-0 nova_compute[192079]: 2025-10-02 13:03:25.270 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:03:30 compute-0 nova_compute[192079]: 2025-10-02 13:03:30.120 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:03:30 compute-0 nova_compute[192079]: 2025-10-02 13:03:30.270 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:03:34 compute-0 podman[262430]: 2025-10-02 13:03:34.145840956 +0000 UTC m=+0.053718024 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 13:03:34 compute-0 podman[262428]: 2025-10-02 13:03:34.172844576 +0000 UTC m=+0.088063356 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_id=ovn_metadata_agent, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', 
'/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS)
Oct 02 13:03:34 compute-0 podman[262429]: 2025-10-02 13:03:34.173116474 +0000 UTC m=+0.084748365 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, managed_by=edpm_ansible, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=ovn_controller)
Oct 02 13:03:35 compute-0 nova_compute[192079]: 2025-10-02 13:03:35.121 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:03:35 compute-0 nova_compute[192079]: 2025-10-02 13:03:35.272 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:03:40 compute-0 nova_compute[192079]: 2025-10-02 13:03:40.121 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:03:40 compute-0 nova_compute[192079]: 2025-10-02 13:03:40.273 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:03:42 compute-0 podman[262496]: 2025-10-02 13:03:42.158785614 +0000 UTC m=+0.076837007 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=edpm, org.label-schema.name=CentOS Stream 9 Base Image, 
tcib_managed=true, container_name=ceilometer_agent_compute, org.label-schema.vendor=CentOS)
Oct 02 13:03:45 compute-0 nova_compute[192079]: 2025-10-02 13:03:45.122 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:03:45 compute-0 nova_compute[192079]: 2025-10-02 13:03:45.275 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:03:49 compute-0 podman[262517]: 2025-10-02 13:03:49.163031202 +0000 UTC m=+0.076567811 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.license=GPLv2, config_id=multipathd, container_name=multipathd, io.buildah.version=1.41.3, tcib_managed=true)
Oct 02 13:03:49 compute-0 podman[262516]: 2025-10-02 13:03:49.163031352 +0000 UTC m=+0.068802508 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., name=ubi9-minimal, vcs-type=git, release=1755695350, io.openshift.expose-services=, vendor=Red Hat, Inc., maintainer=Red Hat, Inc., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, config_id=edpm, container_name=openstack_network_exporter, io.buildah.version=1.33.7, managed_by=edpm_ansible, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, url=https://catalog.redhat.com/en/search?searchType=containers, 
com.redhat.component=ubi9-minimal-container, io.openshift.tags=minimal rhel9, version=9.6, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, build-date=2025-08-20T13:12:41, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, architecture=x86_64, distribution-scope=public)
Oct 02 13:03:50 compute-0 nova_compute[192079]: 2025-10-02 13:03:50.125 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:03:50 compute-0 nova_compute[192079]: 2025-10-02 13:03:50.276 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:03:55 compute-0 nova_compute[192079]: 2025-10-02 13:03:55.128 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:03:55 compute-0 nova_compute[192079]: 2025-10-02 13:03:55.277 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:03:56 compute-0 podman[262559]: 2025-10-02 13:03:56.145881849 +0000 UTC m=+0.061206128 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, config_id=iscsid)
Oct 02 13:03:56 compute-0 podman[262558]: 2025-10-02 13:03:56.166713341 +0000 UTC m=+0.085363212 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter)
Oct 02 13:04:00 compute-0 nova_compute[192079]: 2025-10-02 13:04:00.167 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:04:00 compute-0 nova_compute[192079]: 2025-10-02 13:04:00.279 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:04:01 compute-0 nova_compute[192079]: 2025-10-02 13:04:01.720 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:04:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 13:04:02.277 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 13:04:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 13:04:02.277 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 13:04:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 13:04:02.277 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 13:04:05 compute-0 podman[262599]: 2025-10-02 13:04:05.13597595 +0000 UTC m=+0.054313311 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, container_name=ovn_metadata_agent, org.label-schema.license=GPLv2, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_metadata_agent, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']})
Oct 02 13:04:05 compute-0 podman[262601]: 2025-10-02 13:04:05.137551592 +0000 UTC m=+0.050070034 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 13:04:05 compute-0 podman[262600]: 2025-10-02 13:04:05.168497531 +0000 UTC m=+0.082197965 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, container_name=ovn_controller, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_managed=true)
Oct 02 13:04:05 compute-0 nova_compute[192079]: 2025-10-02 13:04:05.168 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:04:05 compute-0 nova_compute[192079]: 2025-10-02 13:04:05.279 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:04:05 compute-0 nova_compute[192079]: 2025-10-02 13:04:05.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:04:07 compute-0 nova_compute[192079]: 2025-10-02 13:04:07.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:04:07 compute-0 nova_compute[192079]: 2025-10-02 13:04:07.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 13:04:08 compute-0 nova_compute[192079]: 2025-10-02 13:04:08.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:04:10 compute-0 nova_compute[192079]: 2025-10-02 13:04:10.170 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:04:10 compute-0 nova_compute[192079]: 2025-10-02 13:04:10.280 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:04:10 compute-0 nova_compute[192079]: 2025-10-02 13:04:10.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:04:11 compute-0 nova_compute[192079]: 2025-10-02 13:04:11.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:04:12 compute-0 nova_compute[192079]: 2025-10-02 13:04:12.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:04:12 compute-0 nova_compute[192079]: 2025-10-02 13:04:12.695 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 13:04:12 compute-0 nova_compute[192079]: 2025-10-02 13:04:12.695 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 13:04:12 compute-0 nova_compute[192079]: 2025-10-02 13:04:12.696 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 13:04:12 compute-0 nova_compute[192079]: 2025-10-02 13:04:12.696 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 13:04:12 compute-0 nova_compute[192079]: 2025-10-02 13:04:12.838 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 13:04:12 compute-0 nova_compute[192079]: 2025-10-02 13:04:12.839 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5699MB free_disk=73.27244567871094GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 13:04:12 compute-0 nova_compute[192079]: 2025-10-02 13:04:12.840 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 13:04:12 compute-0 nova_compute[192079]: 2025-10-02 13:04:12.840 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 13:04:12 compute-0 nova_compute[192079]: 2025-10-02 13:04:12.981 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 13:04:12 compute-0 nova_compute[192079]: 2025-10-02 13:04:12.981 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 13:04:13 compute-0 nova_compute[192079]: 2025-10-02 13:04:13.005 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 13:04:13 compute-0 nova_compute[192079]: 2025-10-02 13:04:13.022 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 13:04:13 compute-0 nova_compute[192079]: 2025-10-02 13:04:13.023 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 13:04:13 compute-0 nova_compute[192079]: 2025-10-02 13:04:13.023 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.183s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 13:04:13 compute-0 podman[262667]: 2025-10-02 13:04:13.12814738 +0000 UTC m=+0.047596966 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_managed=true, container_name=ceilometer_agent_compute, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', 
'/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']})
Oct 02 13:04:15 compute-0 nova_compute[192079]: 2025-10-02 13:04:15.024 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:04:15 compute-0 nova_compute[192079]: 2025-10-02 13:04:15.172 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:04:15 compute-0 nova_compute[192079]: 2025-10-02 13:04:15.282 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:04:20 compute-0 podman[262688]: 2025-10-02 13:04:20.139605173 +0000 UTC m=+0.054438823 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, config_id=multipathd, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, container_name=multipathd)
Oct 02 13:04:20 compute-0 podman[262687]: 2025-10-02 13:04:20.167907309 +0000 UTC m=+0.083521240 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.33.7, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., url=https://catalog.redhat.com/en/search?searchType=containers, config_id=edpm, version=9.6, architecture=x86_64, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.openshift.expose-services=, maintainer=Red Hat, Inc., io.openshift.tags=minimal rhel9, managed_by=edpm_ansible, build-date=2025-08-20T13:12:41, name=ubi9-minimal, vendor=Red 
Hat, Inc., distribution-scope=public, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, com.redhat.component=ubi9-minimal-container, container_name=openstack_network_exporter, release=1755695350, vcs-type=git, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b)
Oct 02 13:04:20 compute-0 nova_compute[192079]: 2025-10-02 13:04:20.176 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:04:20 compute-0 nova_compute[192079]: 2025-10-02 13:04:20.283 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:04:21 compute-0 nova_compute[192079]: 2025-10-02 13:04:21.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:04:21 compute-0 nova_compute[192079]: 2025-10-02 13:04:21.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 13:04:21 compute-0 nova_compute[192079]: 2025-10-02 13:04:21.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 13:04:21 compute-0 nova_compute[192079]: 2025-10-02 13:04:21.780 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 13:04:25 compute-0 nova_compute[192079]: 2025-10-02 13:04:25.177 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:04:25 compute-0 nova_compute[192079]: 2025-10-02 13:04:25.285 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:04:27 compute-0 podman[262728]: 2025-10-02 13:04:27.151035515 +0000 UTC m=+0.064024966 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 13:04:27 compute-0 podman[262729]: 2025-10-02 13:04:27.158589863 +0000 UTC m=+0.060104000 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, tcib_managed=true, config_id=iscsid, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001)
Oct 02 13:04:30 compute-0 nova_compute[192079]: 2025-10-02 13:04:30.179 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:04:30 compute-0 nova_compute[192079]: 2025-10-02 13:04:30.286 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:04:35 compute-0 nova_compute[192079]: 2025-10-02 13:04:35.180 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:04:35 compute-0 nova_compute[192079]: 2025-10-02 13:04:35.287 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:04:36 compute-0 podman[262774]: 2025-10-02 13:04:36.135161351 +0000 UTC m=+0.049963471 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, config_id=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, container_name=ovn_metadata_agent, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 13:04:36 compute-0 podman[262776]: 2025-10-02 13:04:36.147540381 +0000 UTC m=+0.050502425 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 13:04:36 compute-0 podman[262775]: 2025-10-02 13:04:36.184954687 +0000 UTC m=+0.090928355 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, tcib_managed=true, container_name=ovn_controller, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 13:04:40 compute-0 nova_compute[192079]: 2025-10-02 13:04:40.183 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:04:40 compute-0 nova_compute[192079]: 2025-10-02 13:04:40.289 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:04:44 compute-0 podman[262842]: 2025-10-02 13:04:44.143152727 +0000 UTC m=+0.056522581 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, org.label-schema.schema-version=1.0, tcib_managed=true, config_id=edpm, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes 
Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2)
Oct 02 13:04:45 compute-0 nova_compute[192079]: 2025-10-02 13:04:45.184 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:04:45 compute-0 nova_compute[192079]: 2025-10-02 13:04:45.290 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:04:50 compute-0 nova_compute[192079]: 2025-10-02 13:04:50.185 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:04:50 compute-0 podman[262862]: 2025-10-02 13:04:50.265856654 +0000 UTC m=+0.052391456 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, url=https://catalog.redhat.com/en/search?searchType=containers, architecture=x86_64, com.redhat.component=ubi9-minimal-container, io.buildah.version=1.33.7, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.expose-services=, vendor=Red Hat, Inc., build-date=2025-08-20T13:12:41, release=1755695350, distribution-scope=public, name=ubi9-minimal, managed_by=edpm_ansible, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', 
'/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, vcs-type=git, version=9.6, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., container_name=openstack_network_exporter, io.openshift.tags=minimal rhel9, maintainer=Red Hat, Inc.)
Oct 02 13:04:50 compute-0 podman[262863]: 2025-10-02 13:04:50.266108231 +0000 UTC m=+0.049228040 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, container_name=multipathd, managed_by=edpm_ansible, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, config_id=multipathd, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0)
Oct 02 13:04:50 compute-0 nova_compute[192079]: 2025-10-02 13:04:50.290 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:04:55 compute-0 nova_compute[192079]: 2025-10-02 13:04:55.187 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:04:55 compute-0 nova_compute[192079]: 2025-10-02 13:04:55.293 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:04:58 compute-0 podman[262903]: 2025-10-02 13:04:58.132049631 +0000 UTC m=+0.045829837 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 13:04:58 compute-0 podman[262904]: 2025-10-02 13:04:58.162948638 +0000 UTC m=+0.074323428 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid, container_name=iscsid, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, tcib_managed=true)
Oct 02 13:05:00 compute-0 nova_compute[192079]: 2025-10-02 13:05:00.190 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:05:00 compute-0 nova_compute[192079]: 2025-10-02 13:05:00.294 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:05:00 compute-0 nova_compute[192079]: 2025-10-02 13:05:00.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_incomplete_migrations run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:05:00 compute-0 nova_compute[192079]: 2025-10-02 13:05:00.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Cleaning up deleted instances with incomplete migration  _cleanup_incomplete_migrations /usr/lib/python3.9/site-packages/nova/compute/manager.py:11183
Oct 02 13:05:01 compute-0 nova_compute[192079]: 2025-10-02 13:05:01.794 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:05:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 13:05:02.278 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 13:05:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 13:05:02.278 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 13:05:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 13:05:02.278 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 13:05:05 compute-0 nova_compute[192079]: 2025-10-02 13:05:05.191 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:05:05 compute-0 nova_compute[192079]: 2025-10-02 13:05:05.296 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:05:06 compute-0 nova_compute[192079]: 2025-10-02 13:05:06.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:05:07 compute-0 podman[262948]: 2025-10-02 13:05:07.140056981 +0000 UTC m=+0.052126070 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, 
container_name=ovn_metadata_agent, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, managed_by=edpm_ansible, config_id=ovn_metadata_agent, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a)
Oct 02 13:05:07 compute-0 podman[262950]: 2025-10-02 13:05:07.141894692 +0000 UTC m=+0.047615587 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter)
Oct 02 13:05:07 compute-0 podman[262949]: 2025-10-02 13:05:07.172737727 +0000 UTC m=+0.080303773 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, container_name=ovn_controller, tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0)
Oct 02 13:05:08 compute-0 nova_compute[192079]: 2025-10-02 13:05:08.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:05:08 compute-0 nova_compute[192079]: 2025-10-02 13:05:08.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 13:05:09 compute-0 nova_compute[192079]: 2025-10-02 13:05:09.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:05:10 compute-0 nova_compute[192079]: 2025-10-02 13:05:10.192 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:05:10 compute-0 nova_compute[192079]: 2025-10-02 13:05:10.297 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:05:11 compute-0 nova_compute[192079]: 2025-10-02 13:05:11.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:05:12 compute-0 nova_compute[192079]: 2025-10-02 13:05:12.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:05:13 compute-0 nova_compute[192079]: 2025-10-02 13:05:13.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:05:13 compute-0 nova_compute[192079]: 2025-10-02 13:05:13.741 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 13:05:13 compute-0 nova_compute[192079]: 2025-10-02 13:05:13.741 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 13:05:13 compute-0 nova_compute[192079]: 2025-10-02 13:05:13.741 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 13:05:13 compute-0 nova_compute[192079]: 2025-10-02 13:05:13.741 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 13:05:13 compute-0 nova_compute[192079]: 2025-10-02 13:05:13.896 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 13:05:13 compute-0 nova_compute[192079]: 2025-10-02 13:05:13.897 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5706MB free_disk=73.27244567871094GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 13:05:13 compute-0 nova_compute[192079]: 2025-10-02 13:05:13.897 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 13:05:13 compute-0 nova_compute[192079]: 2025-10-02 13:05:13.898 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 13:05:14 compute-0 nova_compute[192079]: 2025-10-02 13:05:14.068 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 13:05:14 compute-0 nova_compute[192079]: 2025-10-02 13:05:14.069 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 13:05:14 compute-0 nova_compute[192079]: 2025-10-02 13:05:14.138 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 13:05:14 compute-0 nova_compute[192079]: 2025-10-02 13:05:14.182 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 13:05:14 compute-0 nova_compute[192079]: 2025-10-02 13:05:14.184 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 13:05:14 compute-0 nova_compute[192079]: 2025-10-02 13:05:14.184 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.286s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 13:05:15 compute-0 podman[263016]: 2025-10-02 13:05:15.132874819 +0000 UTC m=+0.052357857 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=edpm, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_managed=true, 
container_name=ceilometer_agent_compute, org.label-schema.license=GPLv2, maintainer=OpenStack Kubernetes Operator team)
Oct 02 13:05:15 compute-0 nova_compute[192079]: 2025-10-02 13:05:15.184 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:05:15 compute-0 nova_compute[192079]: 2025-10-02 13:05:15.237 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:05:15 compute-0 nova_compute[192079]: 2025-10-02 13:05:15.298 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:05:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:05:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.iops, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:05:17.117 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:05:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:05:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:05:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:05:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster memory.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:05:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.allocation, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:05:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.capacity, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:05:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:05:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:05:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:05:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:05:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster cpu, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:05:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:05:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:05:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:05:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:05:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:05:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:05:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:05:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:05:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:05:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:05:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:05:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:05:20 compute-0 nova_compute[192079]: 2025-10-02 13:05:20.239 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:05:20 compute-0 nova_compute[192079]: 2025-10-02 13:05:20.299 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:05:21 compute-0 podman[263036]: 2025-10-02 13:05:21.138020355 +0000 UTC m=+0.050179477 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., vcs-type=git, io.buildah.version=1.33.7, container_name=openstack_network_exporter, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., managed_by=edpm_ansible, name=ubi9-minimal, distribution-scope=public, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, com.redhat.component=ubi9-minimal-container, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, maintainer=Red Hat, Inc., build-date=2025-08-20T13:12:41, url=https://catalog.redhat.com/en/search?searchType=containers, version=9.6, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, release=1755695350, vendor=Red Hat, Inc., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, io.openshift.tags=minimal rhel9, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., io.openshift.expose-services=, architecture=x86_64)
Oct 02 13:05:21 compute-0 podman[263037]: 2025-10-02 13:05:21.142172358 +0000 UTC m=+0.049590111 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=multipathd, org.label-schema.vendor=CentOS, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible)
Oct 02 13:05:21 compute-0 nova_compute[192079]: 2025-10-02 13:05:21.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:05:21 compute-0 nova_compute[192079]: 2025-10-02 13:05:21.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 13:05:21 compute-0 nova_compute[192079]: 2025-10-02 13:05:21.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 13:05:21 compute-0 nova_compute[192079]: 2025-10-02 13:05:21.720 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 13:05:24 compute-0 nova_compute[192079]: 2025-10-02 13:05:24.715 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:05:25 compute-0 nova_compute[192079]: 2025-10-02 13:05:25.239 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:05:25 compute-0 nova_compute[192079]: 2025-10-02 13:05:25.301 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:05:29 compute-0 podman[263077]: 2025-10-02 13:05:29.139756357 +0000 UTC m=+0.058707521 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']})
Oct 02 13:05:29 compute-0 podman[263078]: 2025-10-02 13:05:29.158519252 +0000 UTC m=+0.073263900 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, io.buildah.version=1.41.3, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=iscsid, container_name=iscsid, maintainer=OpenStack Kubernetes Operator team)
Oct 02 13:05:30 compute-0 nova_compute[192079]: 2025-10-02 13:05:30.241 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:05:30 compute-0 nova_compute[192079]: 2025-10-02 13:05:30.303 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:05:35 compute-0 nova_compute[192079]: 2025-10-02 13:05:35.243 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:05:35 compute-0 nova_compute[192079]: 2025-10-02 13:05:35.303 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:05:38 compute-0 podman[263119]: 2025-10-02 13:05:38.128416827 +0000 UTC m=+0.046032163 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_metadata_agent, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, 
org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, config_id=ovn_metadata_agent, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20251001)
Oct 02 13:05:38 compute-0 podman[263121]: 2025-10-02 13:05:38.136951271 +0000 UTC m=+0.049494748 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>)
Oct 02 13:05:38 compute-0 podman[263120]: 2025-10-02 13:05:38.188215787 +0000 UTC m=+0.103358395 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, tcib_managed=true, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, config_id=ovn_controller, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, managed_by=edpm_ansible)
Oct 02 13:05:40 compute-0 nova_compute[192079]: 2025-10-02 13:05:40.243 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:05:40 compute-0 nova_compute[192079]: 2025-10-02 13:05:40.304 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:05:45 compute-0 nova_compute[192079]: 2025-10-02 13:05:45.245 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:05:45 compute-0 nova_compute[192079]: 2025-10-02 13:05:45.305 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:05:45 compute-0 nova_compute[192079]: 2025-10-02 13:05:45.473 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_power_states run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:05:46 compute-0 podman[263187]: 2025-10-02 13:05:46.162167977 +0000 UTC m=+0.076254472 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, container_name=ceilometer_agent_compute, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm, managed_by=edpm_ansible, 
org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image)
Oct 02 13:05:49 compute-0 nova_compute[192079]: 2025-10-02 13:05:49.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._run_pending_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:05:49 compute-0 nova_compute[192079]: 2025-10-02 13:05:49.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Cleaning up deleted instances _run_pending_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:11145
Oct 02 13:05:49 compute-0 nova_compute[192079]: 2025-10-02 13:05:49.712 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] There are 0 instances to clean _run_pending_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:11154
Oct 02 13:05:50 compute-0 nova_compute[192079]: 2025-10-02 13:05:50.247 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:05:50 compute-0 nova_compute[192079]: 2025-10-02 13:05:50.307 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:05:51 compute-0 nova_compute[192079]: 2025-10-02 13:05:51.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:05:52 compute-0 podman[263208]: 2025-10-02 13:05:52.132798476 +0000 UTC m=+0.047470372 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, managed_by=edpm_ansible, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, org.label-schema.build-date=20251001, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=multipathd, container_name=multipathd)
Oct 02 13:05:52 compute-0 podman[263207]: 2025-10-02 13:05:52.149976917 +0000 UTC m=+0.066739241 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, vendor=Red Hat, Inc., build-date=2025-08-20T13:12:41, container_name=openstack_network_exporter, vcs-type=git, name=ubi9-minimal, version=9.6, config_id=edpm, maintainer=Red Hat, Inc., vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, release=1755695350, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.buildah.version=1.33.7, architecture=x86_64, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. 
This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., managed_by=edpm_ansible, io.openshift.expose-services=, distribution-scope=public, com.redhat.component=ubi9-minimal-container, url=https://catalog.redhat.com/en/search?searchType=containers, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.tags=minimal rhel9)
Oct 02 13:05:55 compute-0 nova_compute[192079]: 2025-10-02 13:05:55.247 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:05:55 compute-0 nova_compute[192079]: 2025-10-02 13:05:55.307 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:06:00 compute-0 podman[263251]: 2025-10-02 13:06:00.138267931 +0000 UTC m=+0.053837847 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter)
Oct 02 13:06:00 compute-0 podman[263252]: 2025-10-02 13:06:00.141817709 +0000 UTC m=+0.055578495 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, container_name=iscsid, managed_by=edpm_ansible, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.schema-version=1.0, config_id=iscsid, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2)
Oct 02 13:06:00 compute-0 nova_compute[192079]: 2025-10-02 13:06:00.249 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:06:00 compute-0 nova_compute[192079]: 2025-10-02 13:06:00.308 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:06:01 compute-0 nova_compute[192079]: 2025-10-02 13:06:01.678 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:06:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 13:06:02.279 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 13:06:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 13:06:02.279 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 13:06:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 13:06:02.279 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 13:06:05 compute-0 nova_compute[192079]: 2025-10-02 13:06:05.252 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:06:05 compute-0 nova_compute[192079]: 2025-10-02 13:06:05.310 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:06:06 compute-0 nova_compute[192079]: 2025-10-02 13:06:06.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:06:09 compute-0 podman[263293]: 2025-10-02 13:06:09.135815013 +0000 UTC m=+0.050080274 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_metadata_agent, org.label-schema.vendor=CentOS, config_id=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_managed=true)
Oct 02 13:06:09 compute-0 podman[263295]: 2025-10-02 13:06:09.167944435 +0000 UTC m=+0.076187530 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 13:06:09 compute-0 podman[263294]: 2025-10-02 13:06:09.167964425 +0000 UTC m=+0.077652430 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller, container_name=ovn_controller, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 13:06:09 compute-0 nova_compute[192079]: 2025-10-02 13:06:09.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:06:09 compute-0 nova_compute[192079]: 2025-10-02 13:06:09.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 13:06:10 compute-0 nova_compute[192079]: 2025-10-02 13:06:10.253 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:06:10 compute-0 nova_compute[192079]: 2025-10-02 13:06:10.337 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:06:11 compute-0 nova_compute[192079]: 2025-10-02 13:06:11.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:06:12 compute-0 nova_compute[192079]: 2025-10-02 13:06:12.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:06:13 compute-0 nova_compute[192079]: 2025-10-02 13:06:13.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:06:14 compute-0 nova_compute[192079]: 2025-10-02 13:06:14.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:06:14 compute-0 nova_compute[192079]: 2025-10-02 13:06:14.690 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 13:06:14 compute-0 nova_compute[192079]: 2025-10-02 13:06:14.691 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 13:06:14 compute-0 nova_compute[192079]: 2025-10-02 13:06:14.691 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 13:06:14 compute-0 nova_compute[192079]: 2025-10-02 13:06:14.691 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 13:06:14 compute-0 nova_compute[192079]: 2025-10-02 13:06:14.828 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 13:06:14 compute-0 nova_compute[192079]: 2025-10-02 13:06:14.829 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5712MB free_disk=73.27244567871094GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 13:06:14 compute-0 nova_compute[192079]: 2025-10-02 13:06:14.829 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 13:06:14 compute-0 nova_compute[192079]: 2025-10-02 13:06:14.830 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 13:06:14 compute-0 nova_compute[192079]: 2025-10-02 13:06:14.891 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 13:06:14 compute-0 nova_compute[192079]: 2025-10-02 13:06:14.891 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 13:06:14 compute-0 nova_compute[192079]: 2025-10-02 13:06:14.907 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 13:06:14 compute-0 nova_compute[192079]: 2025-10-02 13:06:14.919 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 13:06:14 compute-0 nova_compute[192079]: 2025-10-02 13:06:14.921 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 13:06:14 compute-0 nova_compute[192079]: 2025-10-02 13:06:14.921 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.091s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 13:06:15 compute-0 nova_compute[192079]: 2025-10-02 13:06:15.255 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:06:15 compute-0 nova_compute[192079]: 2025-10-02 13:06:15.338 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:06:15 compute-0 nova_compute[192079]: 2025-10-02 13:06:15.921 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:06:17 compute-0 podman[263360]: 2025-10-02 13:06:17.14169214 +0000 UTC m=+0.051858672 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=ceilometer_agent_compute, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, config_id=edpm, managed_by=edpm_ansible, 
tcib_managed=true, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2)
Oct 02 13:06:20 compute-0 nova_compute[192079]: 2025-10-02 13:06:20.305 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:06:20 compute-0 nova_compute[192079]: 2025-10-02 13:06:20.339 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:06:22 compute-0 nova_compute[192079]: 2025-10-02 13:06:22.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:06:22 compute-0 nova_compute[192079]: 2025-10-02 13:06:22.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 13:06:22 compute-0 nova_compute[192079]: 2025-10-02 13:06:22.666 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 13:06:22 compute-0 nova_compute[192079]: 2025-10-02 13:06:22.687 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 13:06:23 compute-0 podman[263380]: 2025-10-02 13:06:23.139525595 +0000 UTC m=+0.056174931 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, vcs-type=git, vendor=Red Hat, Inc., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.buildah.version=1.33.7, maintainer=Red Hat, Inc., managed_by=edpm_ansible, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., distribution-scope=public, io.openshift.tags=minimal rhel9, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., url=https://catalog.redhat.com/en/search?searchType=containers, version=9.6, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, name=ubi9-minimal, architecture=x86_64, container_name=openstack_network_exporter, io.openshift.expose-services=, com.redhat.component=ubi9-minimal-container, release=1755695350, build-date=2025-08-20T13:12:41)
Oct 02 13:06:23 compute-0 podman[263381]: 2025-10-02 13:06:23.153864678 +0000 UTC m=+0.065973769 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.build-date=20251001, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, container_name=multipathd, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, config_id=multipathd, managed_by=edpm_ansible)
Oct 02 13:06:25 compute-0 nova_compute[192079]: 2025-10-02 13:06:25.307 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:06:25 compute-0 nova_compute[192079]: 2025-10-02 13:06:25.340 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:06:30 compute-0 nova_compute[192079]: 2025-10-02 13:06:30.342 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 4999-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 13:06:30 compute-0 nova_compute[192079]: 2025-10-02 13:06:30.344 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 13:06:30 compute-0 nova_compute[192079]: 2025-10-02 13:06:30.344 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] tcp:127.0.0.1:6640: idle 5003 ms, sending inactivity probe run /usr/lib64/python3.9/site-packages/ovs/reconnect.py:117
Oct 02 13:06:30 compute-0 nova_compute[192079]: 2025-10-02 13:06:30.344 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] tcp:127.0.0.1:6640: entering IDLE _transition /usr/lib64/python3.9/site-packages/ovs/reconnect.py:519
Oct 02 13:06:30 compute-0 nova_compute[192079]: 2025-10-02 13:06:30.349 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:06:30 compute-0 nova_compute[192079]: 2025-10-02 13:06:30.349 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] tcp:127.0.0.1:6640: entering ACTIVE _transition /usr/lib64/python3.9/site-packages/ovs/reconnect.py:519
Oct 02 13:06:31 compute-0 podman[263421]: 2025-10-02 13:06:31.129655739 +0000 UTC m=+0.046270629 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 13:06:31 compute-0 podman[263422]: 2025-10-02 13:06:31.134706687 +0000 UTC m=+0.048210082 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, container_name=iscsid, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team)
Oct 02 13:06:35 compute-0 nova_compute[192079]: 2025-10-02 13:06:35.350 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 4999-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 13:06:35 compute-0 nova_compute[192079]: 2025-10-02 13:06:35.352 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:06:35 compute-0 nova_compute[192079]: 2025-10-02 13:06:35.352 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] tcp:127.0.0.1:6640: idle 5002 ms, sending inactivity probe run /usr/lib64/python3.9/site-packages/ovs/reconnect.py:117
Oct 02 13:06:35 compute-0 nova_compute[192079]: 2025-10-02 13:06:35.352 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] tcp:127.0.0.1:6640: entering IDLE _transition /usr/lib64/python3.9/site-packages/ovs/reconnect.py:519
Oct 02 13:06:35 compute-0 nova_compute[192079]: 2025-10-02 13:06:35.352 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] tcp:127.0.0.1:6640: entering ACTIVE _transition /usr/lib64/python3.9/site-packages/ovs/reconnect.py:519
Oct 02 13:06:35 compute-0 nova_compute[192079]: 2025-10-02 13:06:35.353 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:06:40 compute-0 podman[263460]: 2025-10-02 13:06:40.15551278 +0000 UTC m=+0.069438005 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, org.label-schema.vendor=CentOS, tcib_managed=true, config_id=ovn_metadata_agent, 
container_name=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.build-date=20251001)
Oct 02 13:06:40 compute-0 podman[263461]: 2025-10-02 13:06:40.167829848 +0000 UTC m=+0.077579248 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=ovn_controller, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, container_name=ovn_controller, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Oct 02 13:06:40 compute-0 podman[263462]: 2025-10-02 13:06:40.16937291 +0000 UTC m=+0.074181945 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Oct 02 13:06:40 compute-0 nova_compute[192079]: 2025-10-02 13:06:40.353 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 4999-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 13:06:40 compute-0 nova_compute[192079]: 2025-10-02 13:06:40.355 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:06:40 compute-0 nova_compute[192079]: 2025-10-02 13:06:40.355 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] tcp:127.0.0.1:6640: idle 5001 ms, sending inactivity probe run /usr/lib64/python3.9/site-packages/ovs/reconnect.py:117
Oct 02 13:06:40 compute-0 nova_compute[192079]: 2025-10-02 13:06:40.355 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] tcp:127.0.0.1:6640: entering IDLE _transition /usr/lib64/python3.9/site-packages/ovs/reconnect.py:519
Oct 02 13:06:40 compute-0 nova_compute[192079]: 2025-10-02 13:06:40.355 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] tcp:127.0.0.1:6640: entering ACTIVE _transition /usr/lib64/python3.9/site-packages/ovs/reconnect.py:519
Oct 02 13:06:40 compute-0 nova_compute[192079]: 2025-10-02 13:06:40.356 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:06:45 compute-0 nova_compute[192079]: 2025-10-02 13:06:45.357 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 4999-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 13:06:45 compute-0 nova_compute[192079]: 2025-10-02 13:06:45.359 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 13:06:45 compute-0 nova_compute[192079]: 2025-10-02 13:06:45.359 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] tcp:127.0.0.1:6640: idle 5002 ms, sending inactivity probe run /usr/lib64/python3.9/site-packages/ovs/reconnect.py:117
Oct 02 13:06:45 compute-0 nova_compute[192079]: 2025-10-02 13:06:45.359 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] tcp:127.0.0.1:6640: entering IDLE _transition /usr/lib64/python3.9/site-packages/ovs/reconnect.py:519
Oct 02 13:06:45 compute-0 nova_compute[192079]: 2025-10-02 13:06:45.411 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:06:45 compute-0 nova_compute[192079]: 2025-10-02 13:06:45.411 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] tcp:127.0.0.1:6640: entering ACTIVE _transition /usr/lib64/python3.9/site-packages/ovs/reconnect.py:519
Oct 02 13:06:48 compute-0 podman[263527]: 2025-10-02 13:06:48.134517479 +0000 UTC m=+0.053455597 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, org.label-schema.build-date=20251001, io.buildah.version=1.41.3, managed_by=edpm_ansible, tcib_managed=true, config_id=edpm, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, 
tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0)
Oct 02 13:06:50 compute-0 nova_compute[192079]: 2025-10-02 13:06:50.413 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 4999-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 13:06:54 compute-0 podman[263547]: 2025-10-02 13:06:54.133202457 +0000 UTC m=+0.052067229 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, container_name=openstack_network_exporter, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., build-date=2025-08-20T13:12:41, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. 
This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., managed_by=edpm_ansible, io.buildah.version=1.33.7, url=https://catalog.redhat.com/en/search?searchType=containers, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, vcs-type=git, io.openshift.tags=minimal rhel9, com.redhat.component=ubi9-minimal-container, vendor=Red Hat, Inc., release=1755695350, version=9.6, architecture=x86_64, config_id=edpm, distribution-scope=public, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, io.openshift.expose-services=, maintainer=Red Hat, Inc., summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., name=ubi9-minimal)
Oct 02 13:06:54 compute-0 podman[263548]: 2025-10-02 13:06:54.16505444 +0000 UTC m=+0.080482908 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.name=CentOS Stream 9 Base Image, container_name=multipathd, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, config_id=multipathd, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2)
Oct 02 13:06:55 compute-0 nova_compute[192079]: 2025-10-02 13:06:55.414 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:07:00 compute-0 nova_compute[192079]: 2025-10-02 13:07:00.415 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 4999-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 13:07:00 compute-0 nova_compute[192079]: 2025-10-02 13:07:00.417 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 13:07:00 compute-0 nova_compute[192079]: 2025-10-02 13:07:00.417 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] tcp:127.0.0.1:6640: idle 5002 ms, sending inactivity probe run /usr/lib64/python3.9/site-packages/ovs/reconnect.py:117
Oct 02 13:07:00 compute-0 nova_compute[192079]: 2025-10-02 13:07:00.417 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] tcp:127.0.0.1:6640: entering IDLE _transition /usr/lib64/python3.9/site-packages/ovs/reconnect.py:519
Oct 02 13:07:00 compute-0 nova_compute[192079]: 2025-10-02 13:07:00.423 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:07:00 compute-0 nova_compute[192079]: 2025-10-02 13:07:00.424 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] tcp:127.0.0.1:6640: entering ACTIVE _transition /usr/lib64/python3.9/site-packages/ovs/reconnect.py:519
Oct 02 13:07:01 compute-0 nova_compute[192079]: 2025-10-02 13:07:01.684 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._check_instance_build_time run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:07:02 compute-0 podman[263590]: 2025-10-02 13:07:02.13789463 +0000 UTC m=+0.049933440 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=iscsid, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, managed_by=edpm_ansible, tcib_managed=true, container_name=iscsid)
Oct 02 13:07:02 compute-0 podman[263589]: 2025-10-02 13:07:02.157069816 +0000 UTC m=+0.072314944 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)
Oct 02 13:07:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 13:07:02.280 103294 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 13:07:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 13:07:02.281 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 13:07:02 compute-0 ovn_metadata_agent[103289]: 2025-10-02 13:07:02.281 103294 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 13:07:05 compute-0 nova_compute[192079]: 2025-10-02 13:07:05.425 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 4999-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 13:07:05 compute-0 nova_compute[192079]: 2025-10-02 13:07:05.426 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 13:07:05 compute-0 nova_compute[192079]: 2025-10-02 13:07:05.426 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] tcp:127.0.0.1:6640: idle 5002 ms, sending inactivity probe run /usr/lib64/python3.9/site-packages/ovs/reconnect.py:117
Oct 02 13:07:05 compute-0 nova_compute[192079]: 2025-10-02 13:07:05.426 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] tcp:127.0.0.1:6640: entering IDLE _transition /usr/lib64/python3.9/site-packages/ovs/reconnect.py:519
Oct 02 13:07:05 compute-0 nova_compute[192079]: 2025-10-02 13:07:05.457 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:07:05 compute-0 nova_compute[192079]: 2025-10-02 13:07:05.457 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] tcp:127.0.0.1:6640: entering ACTIVE _transition /usr/lib64/python3.9/site-packages/ovs/reconnect.py:519
Oct 02 13:07:08 compute-0 nova_compute[192079]: 2025-10-02 13:07:08.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:07:10 compute-0 nova_compute[192079]: 2025-10-02 13:07:10.457 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 4999-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 13:07:10 compute-0 nova_compute[192079]: 2025-10-02 13:07:10.458 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:07:10 compute-0 nova_compute[192079]: 2025-10-02 13:07:10.458 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] tcp:127.0.0.1:6640: idle 5001 ms, sending inactivity probe run /usr/lib64/python3.9/site-packages/ovs/reconnect.py:117
Oct 02 13:07:10 compute-0 nova_compute[192079]: 2025-10-02 13:07:10.458 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] tcp:127.0.0.1:6640: entering IDLE _transition /usr/lib64/python3.9/site-packages/ovs/reconnect.py:519
Oct 02 13:07:10 compute-0 nova_compute[192079]: 2025-10-02 13:07:10.459 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] tcp:127.0.0.1:6640: entering ACTIVE _transition /usr/lib64/python3.9/site-packages/ovs/reconnect.py:519
Oct 02 13:07:10 compute-0 nova_compute[192079]: 2025-10-02 13:07:10.460 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 0-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 13:07:11 compute-0 podman[263634]: 2025-10-02 13:07:11.131128375 +0000 UTC m=+0.043350429 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']})
Oct 02 13:07:11 compute-0 podman[263632]: 2025-10-02 13:07:11.150727932 +0000 UTC m=+0.068443447 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, org.label-schema.build-date=20251001, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, org.label-schema.vendor=CentOS, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, 
io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, config_id=ovn_metadata_agent, container_name=ovn_metadata_agent, maintainer=OpenStack Kubernetes Operator team)
Oct 02 13:07:11 compute-0 podman[263633]: 2025-10-02 13:07:11.160901782 +0000 UTC m=+0.076112928 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=ovn_controller, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, container_name=ovn_controller, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team)
Oct 02 13:07:11 compute-0 nova_compute[192079]: 2025-10-02 13:07:11.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._reclaim_queued_deletes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:07:11 compute-0 nova_compute[192079]: 2025-10-02 13:07:11.664 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] CONF.reclaim_instance_interval <= 0, skipping... _reclaim_queued_deletes /usr/lib/python3.9/site-packages/nova/compute/manager.py:10477
Oct 02 13:07:12 compute-0 nova_compute[192079]: 2025-10-02 13:07:12.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_volume_usage run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:07:13 compute-0 nova_compute[192079]: 2025-10-02 13:07:13.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_rescued_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:07:15 compute-0 nova_compute[192079]: 2025-10-02 13:07:15.461 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:07:15 compute-0 nova_compute[192079]: 2025-10-02 13:07:15.664 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:07:15 compute-0 nova_compute[192079]: 2025-10-02 13:07:15.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager.update_available_resource run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:07:15 compute-0 nova_compute[192079]: 2025-10-02 13:07:15.703 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 13:07:15 compute-0 nova_compute[192079]: 2025-10-02 13:07:15.703 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 13:07:15 compute-0 nova_compute[192079]: 2025-10-02 13:07:15.704 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 13:07:15 compute-0 nova_compute[192079]: 2025-10-02 13:07:15.704 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Auditing locally available compute resources for compute-0.ctlplane.example.com (node: compute-0.ctlplane.example.com) update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:861
Oct 02 13:07:15 compute-0 nova_compute[192079]: 2025-10-02 13:07:15.891 2 WARNING nova.virt.libvirt.driver [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] This host appears to have multiple sockets per NUMA node. The `socket` PCI NUMA affinity will not be supported.
Oct 02 13:07:15 compute-0 nova_compute[192079]: 2025-10-02 13:07:15.892 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Hypervisor/Node resource view: name=compute-0.ctlplane.example.com free_ram=5716MB free_disk=73.27244567871094GB free_vcpus=8 pci_devices=[{"dev_id": "pci_0000_00_06_0", "address": "0000:00:06.0", "product_id": "1005", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1005", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_05_0", "address": "0000:00:05.0", "product_id": "1002", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1002", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_02_0", "address": "0000:00:02.0", "product_id": "1050", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1050", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_2", "address": "0000:00:01.2", "product_id": "7020", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7020", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_03_0", "address": "0000:00:03.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_07_0", "address": "0000:00:07.0", "product_id": "1000", "vendor_id": "1af4", "numa_node": null, "label": "label_1af4_1000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_1", "address": "0000:00:01.1", "product_id": "7010", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7010", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_0", "address": "0000:00:01.0", "product_id": "7000", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7000", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_01_3", "address": "0000:00:01.3", "product_id": "7113", "vendor_id": "8086", "numa_node": null, "label": "label_8086_7113", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_04_0", "address": "0000:00:04.0", "product_id": "1001", "vendor_id": "1af4", "numa_node": null, "label": 
"label_1af4_1001", "dev_type": "type-PCI"}, {"dev_id": "pci_0000_00_00_0", "address": "0000:00:00.0", "product_id": "1237", "vendor_id": "8086", "numa_node": null, "label": "label_8086_1237", "dev_type": "type-PCI"}] _report_hypervisor_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1034
Oct 02 13:07:15 compute-0 nova_compute[192079]: 2025-10-02 13:07:15.892 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404
Oct 02 13:07:15 compute-0 nova_compute[192079]: 2025-10-02 13:07:15.892 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409
Oct 02 13:07:15 compute-0 nova_compute[192079]: 2025-10-02 13:07:15.981 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Total usable vcpus: 8, total allocated vcpus: 0 _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1057
Oct 02 13:07:15 compute-0 nova_compute[192079]: 2025-10-02 13:07:15.981 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Final resource view: name=compute-0.ctlplane.example.com phys_ram=7679MB used_ram=512MB phys_disk=79GB used_disk=0GB total_vcpus=8 used_vcpus=0 pci_stats=[] _report_final_resource_view /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:1066
Oct 02 13:07:16 compute-0 nova_compute[192079]: 2025-10-02 13:07:16.009 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing inventories for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708 _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:804
Oct 02 13:07:16 compute-0 nova_compute[192079]: 2025-10-02 13:07:16.032 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Updating ProviderTree inventory for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 from _refresh_and_get_inventory using data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} _refresh_and_get_inventory /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:768
Oct 02 13:07:16 compute-0 nova_compute[192079]: 2025-10-02 13:07:16.032 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Updating inventory in ProviderTree for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 with inventory: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:176
Oct 02 13:07:16 compute-0 nova_compute[192079]: 2025-10-02 13:07:16.046 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing aggregate associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, aggregates: None _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:813
Oct 02 13:07:16 compute-0 nova_compute[192079]: 2025-10-02 13:07:16.080 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Refreshing trait associations for resource provider 55f2ae21-42ea-47d7-8c73-c3134981d708, traits: COMPUTE_SECURITY_UEFI_SECURE_BOOT,COMPUTE_VIOMMU_MODEL_VIRTIO,COMPUTE_VIOMMU_MODEL_AUTO,COMPUTE_IMAGE_TYPE_AKI,COMPUTE_GRAPHICS_MODEL_VIRTIO,COMPUTE_NET_VIF_MODEL_PCNET,HW_CPU_X86_SSE42,COMPUTE_RESCUE_BFV,COMPUTE_VOLUME_EXTEND,COMPUTE_IMAGE_TYPE_QCOW2,COMPUTE_TRUSTED_CERTS,COMPUTE_SOCKET_PCI_NUMA_AFFINITY,COMPUTE_GRAPHICS_MODEL_CIRRUS,HW_CPU_X86_MMX,COMPUTE_STORAGE_BUS_VIRTIO,COMPUTE_NET_ATTACH_INTERFACE_WITH_TAG,COMPUTE_STORAGE_BUS_FDC,COMPUTE_STORAGE_BUS_USB,COMPUTE_NODE,HW_CPU_X86_SSSE3,HW_CPU_X86_SSE2,COMPUTE_GRAPHICS_MODEL_BOCHS,COMPUTE_NET_VIF_MODEL_E1000E,COMPUTE_IMAGE_TYPE_RAW,COMPUTE_NET_VIF_MODEL_NE2K_PCI,COMPUTE_IMAGE_TYPE_AMI,COMPUTE_VIOMMU_MODEL_INTEL,COMPUTE_SECURITY_TPM_2_0,COMPUTE_STORAGE_BUS_SCSI,COMPUTE_IMAGE_TYPE_ARI,COMPUTE_NET_VIF_MODEL_VMXNET3,COMPUTE_SECURITY_TPM_1_2,COMPUTE_NET_VIF_MODEL_E1000,HW_CPU_X86_SSE,COMPUTE_VOLUME_MULTI_ATTACH,COMPUTE_STORAGE_BUS_IDE,COMPUTE_GRAPHICS_MODEL_NONE,COMPUTE_VOLUME_ATTACH_WITH_TAG,COMPUTE_NET_VIF_MODEL_VIRTIO,HW_CPU_X86_SSE41,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_DEVICE_TAGGING,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_ACCELERATORS,COMPUTE_NET_VIF_MODEL_RTL8139,COMPUTE_GRAPHICS_MODEL_VGA,COMPUTE_STORAGE_BUS_SATA,COMPUTE_NET_VIF_MODEL_SPAPR_VLAN _refresh_associations /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:825
Oct 02 13:07:16 compute-0 nova_compute[192079]: 2025-10-02 13:07:16.103 2 DEBUG nova.compute.provider_tree [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed in ProviderTree for provider: 55f2ae21-42ea-47d7-8c73-c3134981d708 update_inventory /usr/lib/python3.9/site-packages/nova/compute/provider_tree.py:180
Oct 02 13:07:16 compute-0 nova_compute[192079]: 2025-10-02 13:07:16.156 2 DEBUG nova.scheduler.client.report [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Inventory has not changed for provider 55f2ae21-42ea-47d7-8c73-c3134981d708 based on inventory data: {'VCPU': {'total': 8, 'reserved': 0, 'min_unit': 1, 'max_unit': 8, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 7679, 'reserved': 512, 'min_unit': 1, 'max_unit': 7679, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 79, 'reserved': 1, 'min_unit': 1, 'max_unit': 79, 'step_size': 1, 'allocation_ratio': 0.9}} set_inventory_for_provider /usr/lib/python3.9/site-packages/nova/scheduler/client/report.py:940
Oct 02 13:07:16 compute-0 nova_compute[192079]: 2025-10-02 13:07:16.158 2 DEBUG nova.compute.resource_tracker [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Compute_service record updated for compute-0.ctlplane.example.com:compute-0.ctlplane.example.com _update_available_resource /usr/lib/python3.9/site-packages/nova/compute/resource_tracker.py:995
Oct 02 13:07:16 compute-0 nova_compute[192079]: 2025-10-02 13:07:16.158 2 DEBUG oslo_concurrency.lockutils [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.266s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423
Oct 02 13:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:07:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:07:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:07:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:07:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:07:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:07:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.allocation, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:07:17.118 12 DEBUG ceilometer.polling.manager [-] Skip pollster cpu, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:07:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:07:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:07:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.iops, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:07:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:07:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.rate, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:07:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.error, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:07:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:07:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:07:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:07:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster memory.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:07:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.packets.drop, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:07:17.119 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.outgoing.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:07:17.120 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.write.bytes, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:07:17.120 12 DEBUG ceilometer.polling.manager [-] Skip pollster network.incoming.bytes.delta, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:07:17.120 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.usage, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:07:17.120 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.latency, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:07:17.120 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.capacity, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:07:17 compute-0 ceilometer_agent_compute[202881]: 2025-10-02 13:07:17.120 12 DEBUG ceilometer.polling.manager [-] Skip pollster disk.device.read.requests, no  resources found this cycle poll_and_notify /usr/lib/python3.9/site-packages/ceilometer/polling/manager.py:193
Oct 02 13:07:17 compute-0 nova_compute[192079]: 2025-10-02 13:07:17.159 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._instance_usage_audit run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:07:19 compute-0 podman[263698]: 2025-10-02 13:07:19.132094227 +0000 UTC m=+0.051922915 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, config_id=edpm, org.label-schema.build-date=20251001, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 
Base Image, org.label-schema.vendor=CentOS, container_name=ceilometer_agent_compute)
Oct 02 13:07:20 compute-0 nova_compute[192079]: 2025-10-02 13:07:20.462 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 4999-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 13:07:20 compute-0 nova_compute[192079]: 2025-10-02 13:07:20.463 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:07:20 compute-0 nova_compute[192079]: 2025-10-02 13:07:20.463 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] tcp:127.0.0.1:6640: idle 5001 ms, sending inactivity probe run /usr/lib64/python3.9/site-packages/ovs/reconnect.py:117
Oct 02 13:07:20 compute-0 nova_compute[192079]: 2025-10-02 13:07:20.463 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] tcp:127.0.0.1:6640: entering IDLE _transition /usr/lib64/python3.9/site-packages/ovs/reconnect.py:519
Oct 02 13:07:20 compute-0 nova_compute[192079]: 2025-10-02 13:07:20.464 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] tcp:127.0.0.1:6640: entering ACTIVE _transition /usr/lib64/python3.9/site-packages/ovs/reconnect.py:519
Oct 02 13:07:20 compute-0 nova_compute[192079]: 2025-10-02 13:07:20.464 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:07:24 compute-0 nova_compute[192079]: 2025-10-02 13:07:24.665 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._heal_instance_info_cache run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:07:24 compute-0 nova_compute[192079]: 2025-10-02 13:07:24.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Starting heal instance info cache _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9858
Oct 02 13:07:24 compute-0 nova_compute[192079]: 2025-10-02 13:07:24.665 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Rebuilding the list of instances to heal _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9862
Oct 02 13:07:24 compute-0 nova_compute[192079]: 2025-10-02 13:07:24.786 2 DEBUG nova.compute.manager [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Didn't find any instances for network info cache update. _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9944
Oct 02 13:07:25 compute-0 podman[263718]: 2025-10-02 13:07:25.128011568 +0000 UTC m=+0.044167222 container health_status 468f049b7947eeb9c41c47c1e99fc71e80f739305cc32c226afc4f731fb33a00 (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, version=9.6, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.openshift.tags=minimal rhel9, vcs-ref=f4b088292653bbf5ca8188a5e59ffd06a8671d4b, url=https://catalog.redhat.com/en/search?searchType=containers, build-date=2025-08-20T13:12:41, release=1755695350, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, com.redhat.component=ubi9-minimal-container, config_id=edpm, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., io.buildah.version=1.33.7, vcs-type=git, container_name=openstack_network_exporter, name=ubi9-minimal, io.openshift.expose-services=, config_data={'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'restart': 'always', 'recreate': True, 'privileged': True, 'ports': ['9105:9105'], 'command': [], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml'}, 'healthcheck': {'test': '/openstack/healthcheck openstack-netwo', 'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, managed_by=edpm_ansible, distribution-scope=public, architecture=x86_64, maintainer=Red Hat, Inc., vendor=Red Hat, Inc.)
Oct 02 13:07:25 compute-0 podman[263719]: 2025-10-02 13:07:25.146047212 +0000 UTC m=+0.057626341 container health_status 8755b2db07f94a75531906f049bd47aa1e9f8c0d3ab139b22525f9888d703c26 (image=quay.io/podified-antelope-centos9/openstack-multipathd:current-podified, name=multipathd, health_status=healthy, health_failing_streak=0, health_log=, maintainer=OpenStack Kubernetes Operator team, container_name=multipathd, org.label-schema.build-date=20251001, tcib_managed=true, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/multipathd', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-multipathd:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/multipathd.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run/udev:/run/udev', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:ro', '/var/lib/iscsi:/var/lib/iscsi:z', '/etc/multipath:/etc/multipath:z', '/etc/multipath.conf:/etc/multipath.conf:ro', '/var/lib/openstack/healthchecks/multipathd:/openstack:ro,z']}, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_id=multipathd)
Oct 02 13:07:25 compute-0 nova_compute[192079]: 2025-10-02 13:07:25.465 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 4999-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 13:07:25 compute-0 nova_compute[192079]: 2025-10-02 13:07:25.467 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:07:25 compute-0 nova_compute[192079]: 2025-10-02 13:07:25.467 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] tcp:127.0.0.1:6640: idle 5002 ms, sending inactivity probe run /usr/lib64/python3.9/site-packages/ovs/reconnect.py:117
Oct 02 13:07:25 compute-0 nova_compute[192079]: 2025-10-02 13:07:25.467 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] tcp:127.0.0.1:6640: entering IDLE _transition /usr/lib64/python3.9/site-packages/ovs/reconnect.py:519
Oct 02 13:07:25 compute-0 nova_compute[192079]: 2025-10-02 13:07:25.468 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] tcp:127.0.0.1:6640: entering ACTIVE _transition /usr/lib64/python3.9/site-packages/ovs/reconnect.py:519
Oct 02 13:07:25 compute-0 nova_compute[192079]: 2025-10-02 13:07:25.469 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:07:26 compute-0 nova_compute[192079]: 2025-10-02 13:07:26.781 2 DEBUG oslo_service.periodic_task [None req-3b9ca021-ccdb-4ff2-bf41-b615fa67070f - - - - - -] Running periodic task ComputeManager._sync_scheduler_instance_info run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210
Oct 02 13:07:30 compute-0 nova_compute[192079]: 2025-10-02 13:07:30.468 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:07:33 compute-0 podman[263759]: 2025-10-02 13:07:33.143915699 +0000 UTC m=+0.060175410 container health_status cc86e55dc64190c2ff81abfe8be74e75d388ecfc4e53e0e3a7e0072fe346aca2 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9100:9100'], 'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'net': 'host', 'environment': {'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck node_exporter', 'mount': '/var/lib/openstack/healthchecks/node_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=edpm)
Oct 02 13:07:33 compute-0 podman[263760]: 2025-10-02 13:07:33.145212426 +0000 UTC m=+0.058140346 container health_status d85385fb4f9c5f3777ee6f115b388e992d983a9b9ad6badce8506d1283c99818 (image=quay.io/podified-antelope-centos9/openstack-iscsid:current-podified, name=iscsid, health_status=healthy, health_failing_streak=0, health_log=, tcib_managed=true, container_name=iscsid, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, config_data={'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/iscsid', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-iscsid:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'volumes': ['/etc/hosts:/etc/hosts:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/extracted:/etc/pki/ca-trust/extracted:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/etc/pki/tls/certs/ca-bundle.crt:/etc/pki/tls/certs/ca-bundle.crt:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/pki/tls/cert.pem:/etc/pki/tls/cert.pem:ro', '/dev/log:/dev/log', '/var/lib/kolla/config_files/iscsid.json:/var/lib/kolla/config_files/config.json:ro', '/dev:/dev', '/run:/run', '/sys:/sys', '/lib/modules:/lib/modules:ro', '/etc/iscsi:/etc/iscsi:z', '/etc/target:/etc/target:z', '/var/lib/iscsi:/var/lib/iscsi:z', '/var/lib/openstack/healthchecks/iscsid:/openstack:ro,z']}, config_id=iscsid, managed_by=edpm_ansible, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team)
Oct 02 13:07:34 compute-0 sshd-session[263804]: Accepted publickey for zuul from 192.168.122.10 port 56722 ssh2: ECDSA SHA256:tAEsFSop2MjuMQQ/UqKU2uCkxqWXGfsM5crdhd6tlI4
Oct 02 13:07:34 compute-0 systemd-logind[827]: New session 79 of user zuul.
Oct 02 13:07:34 compute-0 systemd[1]: Started Session 79 of User zuul.
Oct 02 13:07:34 compute-0 sshd-session[263804]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Oct 02 13:07:34 compute-0 sudo[263808]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/bash -c 'rm -rf /var/tmp/sos-osp && mkdir /var/tmp/sos-osp && sos report --batch --all-logs --tmp-dir=/var/tmp/sos-osp -p container,openstack_edpm,system,storage,virt'
Oct 02 13:07:34 compute-0 sudo[263808]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Oct 02 13:07:35 compute-0 nova_compute[192079]: 2025-10-02 13:07:35.470 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] 4999-ms timeout __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:248
Oct 02 13:07:38 compute-0 ovs-vsctl[263981]: ovs|00001|db_ctl_base|ERR|no key "dpdk-init" in Open_vSwitch record "." column other_config
Oct 02 13:07:39 compute-0 virtqemud[191807]: Failed to connect socket to '/var/run/libvirt/virtnetworkd-sock-ro': No such file or directory
Oct 02 13:07:39 compute-0 virtqemud[191807]: Failed to connect socket to '/var/run/libvirt/virtnwfilterd-sock-ro': No such file or directory
Oct 02 13:07:39 compute-0 virtqemud[191807]: Failed to connect socket to '/var/run/libvirt/virtstoraged-sock-ro': No such file or directory
Oct 02 13:07:40 compute-0 nova_compute[192079]: 2025-10-02 13:07:40.473 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:07:40 compute-0 crontab[264402]: (root) LIST (root)
Oct 02 13:07:42 compute-0 podman[264488]: 2025-10-02 13:07:42.157755881 +0000 UTC m=+0.061610660 container health_status 02db9c3a183159660161130e3270df4b28517dbad8980ecf16b657fce5659ff3 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=ovn_metadata_agent, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, managed_by=edpm_ansible, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': '509889af80292464c80ed237d31a2912cfbfae7a7fc6a98e3d5e94fcf1d4e04a'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/config-data/ansible-generated/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.license=GPLv2)
Oct 02 13:07:42 compute-0 podman[264492]: 2025-10-02 13:07:42.15774707 +0000 UTC m=+0.054531126 container health_status c2c435107ee78ae9d4558efc570119faaf85909dadce7b069493b44db36eff9e (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'restart': 'always', 'recreate': True, 'user': 'root', 'privileged': True, 'ports': ['9882:9882'], 'net': 'host', 'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'CONTAINER_HOST': 'unix:///run/podman/podman.sock'}, 'healthcheck': {'test': '/openstack/healthcheck podman_exporter', 'mount': '/var/lib/openstack/healthchecks/podman_exporter'}, 'volumes': ['/var/lib/openstack/config/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']})
Oct 02 13:07:42 compute-0 podman[264491]: 2025-10-02 13:07:42.193064229 +0000 UTC m=+0.097261298 container health_status 9787283d5bd91eb334d38794b96aabb471119bdedf824923ca0f7386b833ba7d (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_controller, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20251001, org.label-schema.vendor=CentOS, tcib_managed=true, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, io.buildah.version=1.41.3, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, config_id=ovn_controller)
Oct 02 13:07:42 compute-0 systemd[1]: Starting Hostname Service...
Oct 02 13:07:43 compute-0 systemd[1]: Started Hostname Service.
Oct 02 13:07:45 compute-0 nova_compute[192079]: 2025-10-02 13:07:45.475 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
Oct 02 13:07:49 compute-0 ovs-appctl[265489]: ovs|00001|daemon_unix|WARN|/var/run/openvswitch/ovs-monitor-ipsec.pid: open: No such file or directory
Oct 02 13:07:49 compute-0 ovs-appctl[265494]: ovs|00001|daemon_unix|WARN|/var/run/openvswitch/ovs-monitor-ipsec.pid: open: No such file or directory
Oct 02 13:07:49 compute-0 ovs-appctl[265498]: ovs|00001|daemon_unix|WARN|/var/run/openvswitch/ovs-monitor-ipsec.pid: open: No such file or directory
Oct 02 13:07:49 compute-0 podman[265555]: 2025-10-02 13:07:49.659188208 +0000 UTC m=+0.068541661 container health_status 48582d4aa8ca9a7aa097f7c1c6cb8cffc3ae43c806d68d19b4b441dd8eec85be (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, config_id=edpm, container_name=ceilometer_agent_compute, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, tcib_build_tag=a0eac564d779a7eaac46c9816bff261a, config_data={'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'user': 'ceilometer', 'restart': 'always', 'command': 'kolla_start', 'security_opt': 'label:type:ceilometer_polling_t', 'net': 'host', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal'}, 'healthcheck': {'test': '/openstack/healthcheck compute', 'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute'}, 'volumes': ['/var/lib/openstack/config/telemetry:/var/lib/openstack/config/:z', '/var/lib/openstack/config/telemetry/ceilometer-agent-compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/config/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, managed_by=edpm_ansible, 
io.buildah.version=1.41.3, org.label-schema.build-date=20251001, tcib_managed=true)
Oct 02 13:07:50 compute-0 nova_compute[192079]: 2025-10-02 13:07:50.477 2 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 29 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263
